oneacct-export 0.2.7 → 0.3.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (92) hide show
  1. checksums.yaml +4 -4
  2. data/.gitignore +2 -0
  3. data/.rspec +1 -0
  4. data/.travis.yml +1 -21
  5. data/bin/oneacct-export +7 -6
  6. data/config/conf.yml +25 -7
  7. data/lib/data_validators/apel_data_validator.rb +99 -0
  8. data/lib/data_validators/data_compute.rb +57 -0
  9. data/lib/data_validators/data_validator.rb +12 -0
  10. data/lib/data_validators/data_validator_helper.rb +15 -0
  11. data/lib/data_validators/logstash_data_validator.rb +82 -0
  12. data/lib/data_validators/pbs_data_validator.rb +86 -0
  13. data/lib/errors/not_implemented_error.rb +3 -0
  14. data/lib/errors/validation_error.rb +3 -0
  15. data/lib/errors.rb +2 -0
  16. data/lib/input_validator.rb +12 -2
  17. data/lib/one_data_accessor.rb +11 -10
  18. data/lib/one_worker.rb +109 -137
  19. data/lib/oneacct_exporter/version.rb +1 -1
  20. data/lib/oneacct_exporter.rb +9 -7
  21. data/lib/oneacct_opts.rb +36 -13
  22. data/lib/output_types.rb +5 -0
  23. data/lib/redis_conf.rb +2 -2
  24. data/lib/settings.rb +3 -3
  25. data/lib/templates/apel-0.2.erb +6 -6
  26. data/lib/templates/logstash-0.1.erb +3 -0
  27. data/lib/templates/pbs-0.1.erb +6 -0
  28. data/mock/{one_worker_vm8.xml → one_worker_vm_dn01.xml} +76 -74
  29. data/mock/one_worker_vm_dn02.xml +174 -0
  30. data/mock/{one_worker_DISK_missing.xml → one_worker_vm_empty_disk_records.xml} +10 -6
  31. data/mock/one_worker_vm_empty_history_records.xml +131 -0
  32. data/mock/one_worker_vm_image_name01.xml +175 -0
  33. data/mock/{one_worker_valid_machine.xml → one_worker_vm_image_name02.xml} +35 -7
  34. data/mock/one_worker_vm_image_name03.xml +167 -0
  35. data/mock/{one_worker_vm2.xml → one_worker_vm_image_name04.xml} +38 -9
  36. data/mock/{one_worker_vm1.xml → one_worker_vm_image_name05.xml} +36 -8
  37. data/mock/{one_worker_vm9.xml → one_worker_vm_image_name06.xml} +8 -5
  38. data/oneacct-export.gemspec +1 -0
  39. data/spec/data_validators/apel_data_validator_spec.rb +497 -0
  40. data/spec/data_validators/data_compute_spec.rb +193 -0
  41. data/spec/data_validators/data_validator_helper_spec.rb +66 -0
  42. data/spec/data_validators/data_validator_spec.rb +14 -0
  43. data/spec/data_validators/logstash_data_validator_spec.rb +469 -0
  44. data/spec/data_validators/pbs_data_validator_spec.rb +353 -0
  45. data/spec/one_worker_spec.rb +234 -542
  46. data/spec/oneacct_exporter_spec.rb +1 -41
  47. data/spec/oneacct_opts_spec.rb +135 -32
  48. data/spec/spec_helper.rb +18 -1
  49. metadata +51 -52
  50. data/mock/one_worker_DEPLOY_ID_missing.xml +0 -136
  51. data/mock/one_worker_DISK_SIZE_nan.xml +0 -147
  52. data/mock/one_worker_ETIME_0.xml +0 -137
  53. data/mock/one_worker_ETIME_missing.xml +0 -136
  54. data/mock/one_worker_ETIME_nan.xml +0 -137
  55. data/mock/one_worker_GID_missing.xml +0 -136
  56. data/mock/one_worker_GNAME_missing.xml +0 -136
  57. data/mock/one_worker_HISTORY_RECORDS_missing.xml +0 -91
  58. data/mock/one_worker_HISTORY_many.xml +0 -137
  59. data/mock/one_worker_HISTORY_missing.xml +0 -93
  60. data/mock/one_worker_HISTORY_one.xml +0 -115
  61. data/mock/one_worker_IMAGE_ID_missing.xml +0 -136
  62. data/mock/one_worker_MEMORY_0.xml +0 -137
  63. data/mock/one_worker_MEMORY_missing.xml +0 -135
  64. data/mock/one_worker_MEMORY_nan.xml +0 -137
  65. data/mock/one_worker_NET_RX_0.xml +0 -137
  66. data/mock/one_worker_NET_RX_missing.xml +0 -136
  67. data/mock/one_worker_NET_RX_nan.xml +0 -137
  68. data/mock/one_worker_NET_TX_0.xml +0 -137
  69. data/mock/one_worker_NET_TX_missing.xml +0 -136
  70. data/mock/one_worker_NET_TX_nan.xml +0 -137
  71. data/mock/one_worker_RETIME_0_RUNNING.xml +0 -115
  72. data/mock/one_worker_RETIME_0_STOPPED.xml +0 -115
  73. data/mock/one_worker_RETIME_missing.xml +0 -114
  74. data/mock/one_worker_RSTIME_0.xml +0 -115
  75. data/mock/one_worker_RSTIME_>_RETIME.xml +0 -115
  76. data/mock/one_worker_RSTIME_missing.xml +0 -114
  77. data/mock/one_worker_STATE_missing.xml +0 -136
  78. data/mock/one_worker_STATE_out_of_range.xml +0 -137
  79. data/mock/one_worker_STIME_>_ETIME.xml +0 -137
  80. data/mock/one_worker_STIME_missing.xml +0 -136
  81. data/mock/one_worker_STIME_nan.xml +0 -137
  82. data/mock/one_worker_TEMPLATE_missing.xml +0 -79
  83. data/mock/one_worker_UID_missing.xml +0 -136
  84. data/mock/one_worker_VCPU_0.xml +0 -137
  85. data/mock/one_worker_VCPU_missing.xml +0 -136
  86. data/mock/one_worker_VCPU_nan.xml +0 -137
  87. data/mock/one_worker_malformed_vm.xml +0 -136
  88. data/mock/one_worker_vm3.xml +0 -137
  89. data/mock/one_worker_vm4.xml +0 -106
  90. data/mock/one_worker_vm5.xml +0 -106
  91. data/mock/one_worker_vm6.xml +0 -107
  92. data/mock/one_worker_vm7.xml +0 -147
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 841473513aa37713ab102ac8b54d85d05fd52b75
4
- data.tar.gz: c3195af5db1345b1471fb28e90909e01200525fb
3
+ metadata.gz: e33fa6806cd2ea1a65ac02313c0a3aa3cf549095
4
+ data.tar.gz: 3691a9b9a6eea6c22987c3f78934c199dc7e6596
5
5
  SHA512:
6
- metadata.gz: d09c93b909f2c534a62c3c58d8cc5d70a4d82d9ceba4725b8a983d5d584bf15a3680fcdf63631c615c25e68145ae3c6da66eab46e890b3776857f291e883cda4
7
- data.tar.gz: 92039510265e1bd5decbd534e79e67c324ce069faedad79e9d6fbd2e466f1107aa01d63df970f2bc9fa3954a146d14e72b8fabdedd9a001af693f4af6ccd1640
6
+ metadata.gz: 0b049a0279a7cbdb37b002faa9c554596201aee6965a13789f0b234657421f4e32ce03add5708458540cfc37d80307a922837acf10028ecb60e311bdb98da60a
7
+ data.tar.gz: bf8ad5f4cce2931091cfcd5851780b2cbf6c42eb0f5d7e949135147387bb657653fec11cc7b8e0bfcb76fe5af69773aef608b18ece6ab873d438d9ae2978d685
data/.gitignore CHANGED
@@ -14,3 +14,5 @@ mkmf.log
14
14
  *.gem
15
15
  *.swp
16
16
  *.lock
17
+ /.idea/
18
+ /vendor/
data/.rspec ADDED
@@ -0,0 +1 @@
1
+ --require spec_helper --color --format Fuubar
data/.travis.yml CHANGED
@@ -3,32 +3,12 @@ language: ruby
3
3
  rvm:
4
4
  - 2.0.0
5
5
  - 2.1
6
+ - 2.2
6
7
  - ruby-head
7
- - jruby-19mode
8
- - jruby-head
9
-
10
- jdk:
11
- - openjdk7
12
- - oraclejdk7
13
- - openjdk6
14
8
 
15
9
  matrix:
16
10
  allow_failures:
17
11
  - rvm: ruby-head
18
- - rvm: jruby-head
19
- exclude:
20
- - rvm: 2.0.0
21
- jdk: openjdk7
22
- - rvm: 2.0.0
23
- jdk: oraclejdk7
24
- - rvm: 2.1
25
- jdk: openjdk7
26
- - rvm: 2.1
27
- jdk: oraclejdk7
28
- - rvm: ruby-head
29
- jdk: openjdk7
30
- - rvm: ruby-head
31
- jdk: oraclejdk7
32
12
  fast_finish: true
33
13
 
34
14
  branches:
data/bin/oneacct-export CHANGED
@@ -6,15 +6,16 @@ require 'oneacct_exporter'
6
6
  require 'oneacct_exporter/log'
7
7
  require 'settings'
8
8
  require 'fileutils'
9
+ require 'json'
9
10
  require 'oneacct_opts'
10
11
 
11
- #parse options from command line
12
+ # parse options from command line
12
13
  options = OneacctOpts.parse(ARGV)
13
14
 
14
- #initialize default logger
15
+ # initialize default logger
15
16
  log = Logger.new(STDOUT)
16
17
 
17
- #initialize specific logger according to the configuration
18
+ # initialize specific logger according to the configuration
18
19
  if Settings['logging'] && Settings['logging']['log_type'] == 'file'
19
20
  begin
20
21
  log_file = File.open(Settings['logging']['log_file'], File::WRONLY | File::CREAT | File::APPEND)
@@ -38,7 +39,7 @@ groups = {}
38
39
  groups[:include] = options.include_groups if options.include_groups
39
40
  groups[:exclude] = options.exclude_groups if options.exclude_groups
40
41
 
41
- #read groups restriction from file if chosen
42
+ # read groups restriction from file if chosen
42
43
  if options.groups_file
43
44
  log.debug('Reading groups from file...')
44
45
  if File.exist?(options.groups_file) && File.readable?(options.groups_file)
@@ -54,7 +55,7 @@ if options.groups_file
54
55
  end
55
56
  end
56
57
 
57
- #create output directory
58
+ # create output directory
58
59
  begin
59
60
  FileUtils.mkdir_p Settings.output['output_dir']
60
61
  rescue SystemCallError => e
@@ -73,6 +74,6 @@ opts[:compatibility] = options.compatibility
73
74
 
74
75
  log.debug(opts)
75
76
 
76
- #run the export
77
+ # run the export
77
78
  oneacct_exporter = OneacctExporter.new(opts, log)
78
79
  oneacct_exporter.export
data/config/conf.yml CHANGED
@@ -1,12 +1,21 @@
1
1
  ---
2
2
  defaults: &defaults
3
- site_name: Undefined # Usually a short provider name, e.g. CESNET
4
- cloud_type: OpenNebula # CMF type, only OpenNebula is supported
5
- endpoint: https://occi.localhost.com:11443/ # URL of your OCCI endpoint, e.g. https://fqdn.example.com:11443/
6
3
  output:
7
4
  output_dir: /var/spool/apel/outgoing/00000000 # Directory for outgoing messages
8
- output_type: apel-0.2 # Format of outgoing messages. apel-0.2 is currently the only option
5
+ output_type: apel-0.2 # Format of outgoing messages. Choices are: apel-0.2, pbs-0.1, logstash-0.1
9
6
  num_of_vms_per_file: 500 # Maximum number of virtual machine records per one output file
7
+ apel: # Options for apel output format
8
+ site_name: Undefined # Usually a short provider name, e.g. CESNET
9
+ cloud_type: OpenNebula # CMF type, only OpenNebula is supported
10
+ endpoint: https://occi.localhost.com:11443/ # URL of your OCCI endpoint, e.g. https://fqdn.example.com:11443/
11
+ pbs: # Options for pbs output format
12
+ realm: REALM # Owner's realm, e.g. META
13
+ queue: cloud # Queue name
14
+ scratch_type: local # Data store type
15
+ host_identifier: on_localhost # Identifier for host OpenNebula is running on
16
+ logstash: # Options for logstash output format
17
+ host: localhost # Host OpenNebula is running on
18
+ port: 11443 # Port OpenNebula's RPC is listening on
10
19
  logging:
11
20
  log_type: file # Two options: file, syslog. Defaults to stdout
12
21
  log_file: /var/log/oneacct-export/oneacct-export.log # Used when type file selected
@@ -33,13 +42,22 @@ development:
33
42
 
34
43
 
35
44
  test:
36
- site_name: <placeholder>
37
- cloud_type: <placeholder>
38
- endpoint: <placeholder>
39
45
  output:
40
46
  output_dir: <placeholder>
41
47
  output_type: <placeholder>
42
48
  num_of_vms_per_file: <placeholder>
49
+ apel:
50
+ site_name: <placeholder>
51
+ cloud_type: <placeholder>
52
+ endpoint: <placeholder>
53
+ pbs:
54
+ realm: <placeholder>
55
+ queue: <placeholder>
56
+ scratch_type: <placeholder>
57
+ host_identifier: <placeholder>
58
+ logstash:
59
+ host: <placeholder>
60
+ port: <placeholder>
43
61
  logging:
44
62
  log_type: <placeholder>
45
63
  log_file: <placeholder>
@@ -0,0 +1,99 @@
1
+ require 'data_validators/data_validator'
2
+ require 'data_validators/data_compute'
3
+ require 'data_validators/data_validator_helper'
4
+ require 'errors'
5
+
6
+ module DataValidators
7
+ # Data validator class for apel output type
8
+ class ApelDataValidator < DataValidator
9
+ include InputValidator
10
+ include Errors
11
+ include DataCompute
12
+ include DataValidatorHelper
13
+
14
+ B_IN_GB = 1_073_741_824
15
+ STATES = %w(started started suspended started suspended suspended completed completed suspended)
16
+ DEFAULT_VALUE = 'NULL'
17
+
18
+ attr_reader :log
19
+
20
+ def initialize(log = Logger.new(STDOUT))
21
+ @log = log
22
+ end
23
+
24
+ # All possible output fields and their default values:
25
+ #
26
+ # valid_data['endpoint'] - required
27
+ # valid_data['site_name'] - required
28
+ # valid_data['cloud_type'] - required
29
+ # valid_data['vm_uuid'] - required
30
+ # valid_data['start_time'] - required
31
+ # valid_data['end_time'] - defaults to NULL, has to be bigger than valid_data['start_time'] if number
32
+ # valid_data['machine_name'] - defaults to "one-#{valid_data['vm_uuid']}"
33
+ # valid_data['user_id'] - defaults to NULL
34
+ # valid_data['group_id'] - defaults to NULL
35
+ # valid_data['user_dn'] - defaults to NULL
36
+ # valid_data['group_name'] - defaults to nil
37
+ # valid_data['status'] - defaults to NULL
38
+ # valid_data['duration'] - required
39
+ # valid_data['suspend'] - defaults to NULL
40
+ # valid_data['cpu_count'] - defaults to 1
41
+ # valid_data['network_inbound'] - defaults to 0
42
+ # valid_data['network_outbound'] - defaults to 0
43
+ # valid_data['memory'] - defaults to 0
44
+ # valid_data['image_name'] - defaults to NULL
45
+ # valid_data['disk_size'] - defaults to NULL
46
+ def validate_data(data = nil)
47
+ unless data
48
+ fail Errors::ValidationError, 'Skipping a malformed record. '\
49
+ 'No data available to validate'
50
+ end
51
+
52
+ valid_data = data.clone
53
+
54
+ fail_validation 'Endpoint' unless string?(data['endpoint'])
55
+ fail_validation 'SiteName' unless string?(data['site_name'])
56
+ fail_validation 'CloudType' unless string?(data['cloud_type'])
57
+ fail_validation 'VMUUID' unless string?(data['vm_uuid'])
58
+
59
+ fail_validation 'StartTime' unless non_zero_number?(data['start_time'])
60
+ start_time = data['start_time'].to_i
61
+ valid_data['start_time'] = Time.at(start_time)
62
+ fail_validation 'EndTime' unless number?(data['end_time'])
63
+ end_time = data['end_time'].to_i
64
+ valid_data['end_time'] = end_time == 0 ? 'NULL' : Time.at(end_time)
65
+ fail_validation 'EndTime' if end_time != 0 && valid_data['start_time'] > valid_data['end_time']
66
+
67
+ valid_data['machine_name'] = default(data['machine_name'], :string, "one-#{valid_data['vm_uuid']}")
68
+ valid_data['user_id'] = default(data['user_id'], :string, DEFAULT_VALUE)
69
+ valid_data['group_id'] = default(data['group_id'], :string, DEFAULT_VALUE)
70
+ valid_data['user_dn'] = default(data['user_dn'], :string, DEFAULT_VALUE)
71
+ valid_data['user_name'] = default(data['user_name'], :string, DEFAULT_VALUE)
72
+ valid_data['group_name'] = default(data['group_name'], :string, nil)
73
+
74
+ status = default(data['status_code'], :number, nil)
75
+ if status
76
+ status = status.to_i
77
+ fail_validation 'Status' unless status.to_s == data['status_code'] && status < STATES.size && status >= 0
78
+ end
79
+ valid_data['status'] = status ? STATES[status] : 'NULL'
80
+
81
+ fail_validation 'HISTORY_RECORDS' if (!data['history']) || data['history'].empty?
82
+
83
+ duration = sum_rstime(data['history'], valid_data['status'] == 'completed', valid_data['vm_uuid'])
84
+ valid_data['duration'] = Time.at(duration)
85
+ valid_data['suspend'] = end_time == 0 ? 'NULL' : (end_time - start_time) - duration
86
+ valid_data['cpu_count'] = default(data['cpu_count'], :nzn, '1')
87
+
88
+ valid_data['network_inbound'] = (default(data['network_inbound'], :number, 0).to_i / B_IN_GB).round
89
+ valid_data['network_outbound'] = (default(data['network_outbound'], :number, 0).to_i / B_IN_GB).round
90
+
91
+ valid_data['memory'] = default(data['memory'], :number, '0')
92
+ valid_data['image_name'] = default(data['image_name'], :string, DEFAULT_VALUE)
93
+ disk_size_sum = sum_disk_size(data['disks'], valid_data['vm_uuid'])
94
+ valid_data['disk_size'] = disk_size_sum ? disk_size_sum : 'NULL'
95
+
96
+ valid_data
97
+ end
98
+ end
99
+ end
@@ -0,0 +1,57 @@
1
+ # This module expects module DataValidatorHelper to be included alongside it
2
+ module DataValidators
3
+ module DataCompute
4
+ # Sums RSTIME (time when virtual machine was actually running)
5
+ #
6
+ # @param [Array] history_records history records
7
+ # @param [Boolean] completed whether vm was completed or not
8
+ # @param [Fixnum] vm_id vm's id
9
+ #
10
+ # @return [Integer] sum of time when virtual machine was actually running
11
+ def sum_rstime(history_records, completed, vm_id)
12
+ return nil unless history_records
13
+
14
+ rstime = 0
15
+
16
+ history_records.each do |record|
17
+ next unless default(record['rstart_time'], :nzn, nil) && default(record['rend_time'], :number, nil)
18
+ rstart_time = record['rstart_time'].to_i
19
+ rend_time = record['rend_time'].to_i
20
+
21
+ if (rend_time > 0 && rstart_time > rend_time) || (rend_time == 0 && completed)
22
+ fail Errors::ValidationError, 'Skipping a malformed record. '\
23
+ "History records' times are invalid for vm with id #{vm_id}."
24
+ end
25
+
26
+ rend_time = rend_time == 0 ? Time.now.to_i : rend_time
27
+
28
+ rstime += rend_time - rstart_time
29
+ end
30
+
31
+ rstime
32
+ end
33
+
34
+ # Sums disk size of all disks within the virtual machine
35
+ #
36
+ # @param [Array] disk records
37
+ #
38
+ # @return [Integer] sum of disk sizes in GB rounded up
39
+ def sum_disk_size(disks, vm_id)
40
+ return nil unless disks
41
+
42
+ disk_size = 0
43
+
44
+ disks.each do |disk|
45
+ size = default(disk['size'], :number, nil)
46
+ unless size
47
+ log.warn("Disk size invalid for vm with id #{vm_id}")
48
+ return nil
49
+ end
50
+
51
+ disk_size += size.to_i
52
+ end
53
+
54
+ disk_size
55
+ end
56
+ end
57
+ end
@@ -0,0 +1,12 @@
1
+ module DataValidators
2
+ # Interface class for data validator implementations
3
+ class DataValidator
4
+ # Validates data for specific output format and sets default values if necessary.
5
+ #
6
+ # @param data [Hash] data to be validated
7
+ # @return [Hash] data with default values set if necessary
8
+ def validate_data(data = nil)
9
+ fail Errors::NotImplementedError, "#{__method__} is just a stub!"
10
+ end
11
+ end
12
+ end
@@ -0,0 +1,15 @@
1
+ # This module expects modules Errors and InputValidator to be included alongside it
2
+ module DataValidators
3
+ module DataValidatorHelper
4
+ def fail_validation(field)
5
+ fail Errors::ValidationError, 'Skipping a malformed record. '\
6
+ "Field '#{field}' is invalid."
7
+ end
8
+
9
+ def default(value, condition_method, default_value)
10
+ return string?(value) ? value : default_value if condition_method == :string
11
+ return number?(value) ? value : default_value if condition_method == :number
12
+ return non_zero_number?(value) ? value : default_value if condition_method == :nzn
13
+ end
14
+ end
15
+ end
@@ -0,0 +1,82 @@
1
+ require 'data_validators/data_validator'
2
+ require 'data_validators/data_compute'
3
+ require 'errors'
4
+
5
+ module DataValidators
6
+ class LogstashDataValidator
7
+ include InputValidator
8
+ include Errors
9
+ include DataCompute
10
+ include DataValidatorHelper
11
+
12
+ attr_reader :log
13
+
14
+ def initialize(log = Logger.new(STDOUT))
15
+ @log = log
16
+ end
17
+
18
+ def validate_data(data = nil)
19
+ unless data
20
+ fail Errors::ValidationError, 'Skipping a malformed record. '\
21
+ 'No data available to validate'
22
+ end
23
+
24
+ valid_data = data.clone
25
+
26
+ fail_validation 'start_time' unless non_zero_number?(data['start_time'])
27
+ valid_data['start_time'] = data['start_time'].to_i
28
+ fail_validation 'end_time' unless number?(data['end_time'])
29
+ valid_data['end_time'] = data['end_time'].to_i
30
+ fail_validation 'end_time' if valid_data['end_time'] != 0 && valid_data['start_time'] > valid_data['end_time']
31
+
32
+ fail_validation 'user_id' unless number?(data['user_id'])
33
+ valid_data['user_id'] = data['user_id'].to_i
34
+ fail_validation 'group_id' unless number?(data['group_id'])
35
+ valid_data['group_id'] = data['group_id'].to_i
36
+
37
+ fail_validation 'status_code' unless number?(data['status_code'])
38
+ valid_data['status_code'] = data['status_code'].to_i
39
+
40
+ fail_validation 'cpu_count' unless number?(data['cpu_count'])
41
+ valid_data['cpu_count'] = data['cpu_count'].to_i
42
+ fail_validation 'network_inbound' unless number?(data['network_inbound'])
43
+ valid_data['network_inbound'] = data['network_inbound'].to_i
44
+ fail_validation 'network_outbound' unless number?(data['network_outbound'])
45
+ valid_data['network_outbound'] = data['network_outbound'].to_i
46
+ fail_validation 'memory' unless number?(data['memory'])
47
+ valid_data['memory'] = data['memory'].to_i
48
+
49
+ fail_validation 'history' unless data['history']
50
+ history = []
51
+ data['history'].each do |h|
52
+ history_record = h.clone
53
+ fail_validation 'history record start_time' unless non_zero_number?(h['start_time'])
54
+ history_record['start_time'] = h['start_time'].to_i
55
+ fail_validation 'history record end_time' unless number?(h['end_time'])
56
+ history_record['end_time'] = h['end_time'].to_i
57
+ fail_validation 'history record rstart_time' unless non_zero_number?(h['rstart_time'])
58
+ history_record['rstart_time'] = h['rstart_time'].to_i
59
+ fail_validation 'history record rend_time' unless number?(h['rend_time'])
60
+ history_record['rend_time'] = h['rend_time'].to_i
61
+ fail_validation 'history record seq' unless number?(h['seq'])
62
+ history_record['seq'] = h['seq'].to_i
63
+
64
+ history << history_record
65
+ end
66
+ valid_data['history'] = history
67
+
68
+ fail_validation 'disks' unless data['disks']
69
+ disks = []
70
+ data['disks'].each do |d|
71
+ disk = d.clone
72
+ disk['size'] = d['size']
73
+ disk['size'] = d['size'].to_i if number?(d['size'])
74
+
75
+ disks << disk
76
+ end
77
+ valid_data['disks'] = disks
78
+
79
+ valid_data
80
+ end
81
+ end
82
+ end
@@ -0,0 +1,86 @@
1
+ require 'data_validators/data_validator'
2
+ require 'data_validators/data_compute'
3
+ require 'errors'
4
+
5
+ module DataValidators
6
+ # Data validator class for pbs output type
7
+ class PbsDataValidator < DataValidator
8
+ include InputValidator
9
+ include Errors
10
+ include DataCompute
11
+ include DataValidatorHelper
12
+
13
+ COMPLETED = '6'
14
+
15
+ attr_reader :log
16
+
17
+ def initialize(log = Logger.new(STDOUT))
18
+ @log = log
19
+ end
20
+
21
+ # All possible output fields and their default values:
22
+ #
23
+ # valid_data['host'] - required
24
+ # valid_data['pbs_queue'] - required
25
+ # valid_data['realm'] - required
26
+ # valid_data['scratch_type'] - optional, defaults to nil
27
+ # valid_data['vm_uuid'] - required
28
+ # valid_data['machine_name'] - required, defaults to "one-#{valid_data['vm_uuid']}"
29
+ # valid_data['user_name'] - required
30
+ # valid_data['group_name'] - required
31
+ # valid_data['duration'] - required, defaults to 00:00:00
32
+ # valid_data['cpu_count'] - required
33
+ # valid_data['memory'] - required
34
+ # valid_data['disk_size'] - optional, defaults to nil
35
+ # valid_data['history'] - set of history records
36
+ # history_record['start_time'] - required
37
+ # history_record['end_time'] - required
38
+ # history_record['state'] - required, either all history records 'U' or last history record with 'E' if vm finished
39
+ # history_record['seq'] - required
40
+ # history_record['hostname'] - required
41
+ def validate_data(data = nil)
42
+ unless data
43
+ fail Errors::ValidationError, 'Skipping a malformed record. '\
44
+ 'No data available to validate'
45
+ end
46
+
47
+ valid_data = data.clone
48
+
49
+ fail_validation 'host' unless string?(data['host'])
50
+ fail_validation 'queue' unless string?(data['pbs_queue'])
51
+ fail_validation 'owner' unless string?(data['realm'])
52
+ fail_validation 'VMUUID' unless string?(data['vm_uuid'])
53
+ fail_validation 'owner' unless string?(data['user_name'])
54
+ fail_validation 'group' unless string?(data['group_name'])
55
+ fail_validation 'ppn' unless number?(data['cpu_count'])
56
+ fail_validation 'mem' unless number?(data['memory'])
57
+ fail_validation 'HISTORY_RECORDS' if (!data['history']) || data['history'].empty?
58
+
59
+ history = []
60
+ data['history'].each do |h|
61
+ history_record = h.clone
62
+ fail_validation 'start' unless non_zero_number?(h['start_time'])
63
+ history_record['start_time'] = Time.at(h['start_time'].to_i)
64
+ fail_validation 'end' unless number?(h['end_time'])
65
+ history_record['end_time'] = Time.at(h['end_time'].to_i)
66
+ fail_validation 'seq' unless number?(h['seq'])
67
+ fail_validation 'hostname' unless string?(h['hostname'])
68
+
69
+ history_record['state'] = 'U'
70
+ history << history_record
71
+ end
72
+
73
+ history.last['state'] = 'E' if data['status_code'] == COMPLETED
74
+ valid_data['history'] = history
75
+
76
+ valid_data['machine_name'] = default(data['machine_name'], :string, "one-#{valid_data['vm_uuid']}")
77
+
78
+ duration = sum_rstime(data['history'], data['status_code'] == COMPLETED, valid_data['vm_uuid'])
79
+ valid_data['duration'] = Time.at(duration)
80
+
81
+ valid_data['disk_size'] = sum_disk_size(data['disks'], valid_data['vm_uuid'])
82
+
83
+ valid_data
84
+ end
85
+ end
86
+ end
@@ -0,0 +1,3 @@
1
+ module Errors
2
+ class NotImplementedError < ::NotImplementedError; end
3
+ end
@@ -0,0 +1,3 @@
1
+ module Errors
2
+ class ValidationError < ::StandardError; end
3
+ end
data/lib/errors.rb CHANGED
@@ -3,5 +3,7 @@ require 'errors/resource_not_found_error'
3
3
  require 'errors/resource_retrieval_error'
4
4
  require 'errors/resource_state_error'
5
5
  require 'errors/user_not_authorized_error'
6
+ require 'errors/validation_error'
7
+ require 'errors/not_implemented_error'
6
8
 
7
9
  module Errors; end
@@ -4,16 +4,26 @@ require 'uri'
4
4
  module InputValidator
5
5
  URI_RE = /\A#{URI.regexp}\z/
6
6
  NUMBER_RE = /\A[[:digit:]]+\z/
7
+ STRING_RE = /\A[[:print:]]+\z/
8
+ NON_ZERO_NUMBER_RE = /\A[1-9][[:digit:]]*\z/
7
9
 
8
10
  def is?(object, regexp)
9
11
  object.to_s =~ regexp
10
12
  end
11
13
 
12
- def is_number?(object)
14
+ def number?(object)
13
15
  is?(object, NUMBER_RE)
14
16
  end
15
17
 
16
- def is_uri?(object)
18
+ def uri?(object)
17
19
  is?(object, URI_RE)
18
20
  end
21
+
22
+ def string?(object)
23
+ is?(object, STRING_RE)
24
+ end
25
+
26
+ def non_zero_number?(object)
27
+ is?(object, NON_ZERO_NUMBER_RE)
28
+ end
19
29
  end
@@ -9,7 +9,8 @@ require 'input_validator'
9
9
  # @attr_reader [any logger] logger
10
10
  # @attr_reader [Integer] batch_size number of vm records to request
11
11
  # @attr_reader [OpenNebula::Client] client client for communicaton with OpenNebula
12
- # @attr_reader [TrueClass, FalseClass] compatibility whether or not communicate in compatibility mode (omit some newer API functions)
12
+ # @attr_reader [TrueClass, FalseClass] compatibility whether or not communicate in
13
+ # compatibility mode (omit some newer API functions)
13
14
  class OneDataAccessor
14
15
  include Errors
15
16
  include InputValidator
@@ -23,7 +24,7 @@ class OneDataAccessor
23
24
  @compatibility = compatibility
24
25
 
25
26
  @batch_size = Settings.output['num_of_vms_per_file'] ? Settings.output['num_of_vms_per_file'] : 500
26
- fail ArgumentError, 'Wrong number of vms per file.' unless is_number?(@batch_size)
27
+ fail ArgumentError, 'Wrong number of vms per file.' unless number?(@batch_size)
27
28
 
28
29
  @compatibility_vm_pool = nil
29
30
 
@@ -34,7 +35,7 @@ class OneDataAccessor
34
35
  def initialize_client
35
36
  secret = Settings['xml_rpc'] ? Settings.xml_rpc['secret'] : nil
36
37
  endpoint = Settings['xml_rpc'] ? Settings.xml_rpc['endpoint'] : nil
37
- fail ArgumentError, "#{endpoint} is not a valid URL." if endpoint && !is_uri?(endpoint)
38
+ fail ArgumentError, "#{endpoint} is not a valid URL." if endpoint && !uri?(endpoint)
38
39
 
39
40
  @client = OpenNebula::Client.new(secret, endpoint)
40
41
  end
@@ -48,7 +49,7 @@ class OneDataAccessor
48
49
  def mapping(pool_class, xpath)
49
50
  @log.debug("Generating mapping for class: #{pool_class} and xpath: '#{xpath}'.")
50
51
  pool = pool_class.new(@client)
51
- #call info_all method instead of info on pools that support it
52
+ # call info_all method instead of info on pools that support it
52
53
  if pool.respond_to? 'info_all'
53
54
  rc = pool.info_all
54
55
  check_retval(rc, Errors::ResourceRetrievalError)
@@ -57,7 +58,7 @@ class OneDataAccessor
57
58
  check_retval(rc, Errors::ResourceRetrievalError)
58
59
  end
59
60
 
60
- #generate mapping
61
+ # generate mapping
61
62
  map = {}
62
63
  pool.each do |item|
63
64
  unless item['ID']
@@ -76,7 +77,7 @@ class OneDataAccessor
76
77
  #
77
78
  # @return [OpenNebula::VirtualMachine] virtual machine
78
79
  def vm(vm_id)
79
- fail ArgumentError, "#{vm_id} is not a valid id." unless is_number?(vm_id)
80
+ fail ArgumentError, "#{vm_id} is not a valid id." unless number?(vm_id)
80
81
  @log.debug("Retrieving virtual machine with id: #{vm_id}.")
81
82
  vm = OpenNebula::VirtualMachine.new(OpenNebula::VirtualMachine.build_xml(vm_id), @client)
82
83
  rc = vm.info
@@ -93,7 +94,7 @@ class OneDataAccessor
93
94
  # @return [Array] array with virtual machines' IDs
94
95
  def vms(batch_number, range, groups)
95
96
  vms = []
96
- #load specific batch
97
+ # load specific batch
97
98
  vm_pool = load_vm_pool(batch_number)
98
99
  return nil if vm_pool.count == 0
99
100
 
@@ -104,7 +105,7 @@ class OneDataAccessor
104
105
  next
105
106
  end
106
107
 
107
- #skip unsuitable virtual machines
108
+ # skip unsuitable virtual machines
108
109
  next unless want?(vm, range, groups)
109
110
 
110
111
  vms << vm['ID'].to_i
@@ -145,12 +146,12 @@ class OneDataAccessor
145
146
  #
146
147
  # @param [Integer] batch_number
147
148
  def load_vm_pool(batch_number)
148
- fail ArgumentError, "#{batch_number} is not a valid number" unless is_number?(batch_number)
149
+ fail ArgumentError, "#{batch_number} is not a valid number" unless number?(batch_number)
149
150
  @log.debug("Loading vm pool with batch number: #{batch_number}.")
150
151
  from = batch_number * @batch_size
151
152
  to = (batch_number + 1) * @batch_size - 1
152
153
 
153
- #if in compatibility mode, whole virtual machine pool has to be loaded for the first time
154
+ # if in compatibility mode, whole virtual machine pool has to be loaded for the first time
154
155
  if @compatibility
155
156
  unless @compatibility_vm_pool
156
157
  vm_pool = OpenNebula::VirtualMachinePool.new(@client)