fog-aws 0.13.0 → 1.0.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: badc87e616be93da06befa9fd46e268c422729db
-  data.tar.gz: 35e964f02dbd63c4b6911fca95bf53e72fffa4b1
+  metadata.gz: b81ad3bfcfe0482650342c825be51289c0530ab5
+  data.tar.gz: 72f9f0dfc281ec1454f318a43d148bd049b68b21
 SHA512:
-  metadata.gz: a6ca15f2dcb849b4f9a1bdf89fe9c22b9b69c5e8f916223f75158cf70adab53bbe70d3bff1b782823d022173b56e5a4b94cf3dfadc1d1815b52625406ba9ea5b
-  data.tar.gz: 87078c05b1e205f2230cb37b60cc65c57db0b694969bb88bb757d5b5c0127f77f900e79cf035ce392632843d49b11dfaf81fae8a3ba1b93f068d09bc89f8a178
+  metadata.gz: 9a06e4f197b2cdc74e01d4e3b975fcfc696b44c09fd82e2ffef77e35b84697a6b9afcdfb271f68ecb7d7a3b0326d7f0898742a2ca0d58d6f120e371e5cc12266
+  data.tar.gz: eb2936873d29e923a8a9e9b66d0fa3aba653c185332c0ee733ccecf44d29db09911b22c7be59d4a8b9aa7dfa208bad09ea3d3418a5ef2103fa8c79dad0a5b1eb
data/CHANGELOG.md CHANGED
@@ -2,7 +2,17 @@
 
 ## [Unreleased](https://github.com/fog/fog-aws/tree/HEAD)
 
-[Full Changelog](https://github.com/fog/fog-aws/compare/v0.12.0...HEAD)
+[Full Changelog](https://github.com/fog/fog-aws/compare/v0.13.0...HEAD)
+
+**Merged pull requests:**
+
+- fix host header with another port on s3 [\#327](https://github.com/fog/fog-aws/pull/327) ([rodrigoapereira](https://github.com/rodrigoapereira))
+- Add new t2.xlarge, t2.2xlarge and r4 class instances. [\#326](https://github.com/fog/fog-aws/pull/326) ([rogersd](https://github.com/rogersd))
+- Fix the bug that can't create fifo queue in SQS. [\#323](https://github.com/fog/fog-aws/pull/323) ([ebihara99999](https://github.com/ebihara99999))
+- data pipeline mocks [\#318](https://github.com/fog/fog-aws/pull/318) ([ehowe](https://github.com/ehowe))
+
+## [v0.13.0](https://github.com/fog/fog-aws/tree/v0.13.0) (2016-11-29)
+[Full Changelog](https://github.com/fog/fog-aws/compare/v0.12.0...v0.13.0)
 
 **Closed issues:**
 
data/lib/fog/aws/data_pipeline.rb CHANGED
@@ -9,6 +9,7 @@ module Fog
       request_path 'fog/aws/requests/data_pipeline'
       request :activate_pipeline
       request :create_pipeline
+      request :deactivate_pipeline
       request :delete_pipeline
       request :describe_pipelines
       request :list_pipelines
@@ -22,8 +23,58 @@ module Fog
       collection :pipelines
 
       class Mock
+        include Fog::AWS::CredentialFetcher::ConnectionMethods
+
+        def self.data
+          @data ||= Hash.new do |hash, region|
+            hash[region] = Hash.new do |region_hash, key|
+              region_hash[key] = {
+                :pipelines => {},
+                :pipeline_definitions => {},
+              }
+            end
+          end
+        end
+
+        def self.reset
+          @data = nil
+        end
+
+        def data
+          self.class.data[@region][@aws_access_key_id]
+        end
+
+        def reset
+          self.class.reset
+        end
+
+        attr_accessor :region
+
         def initialize(options={})
-          Fog::Mock.not_implemented
+          @region = options[:region] || "us-east-1"
+          @aws_access_key_id = options[:aws_access_key_id]
+          @aws_secret_access_key = options[:aws_secret_access_key]
+        end
+
+        def stringify_keys(object)
+          case object
+          when Hash
+            object.inject({}) { |h,(k,v)| h[k.to_s] = stringify_keys(v); h }
+          when Array
+            object.map { |v| stringify_keys(v) }
+          else
+            object
+          end
+        end
+
+        def find_pipeline(id)
+          pipeline = self.data[:pipelines].values.detect { |p| p["pipelineId"] == id }
+
+          if pipeline.nil? || pipeline[:deleted]
+            raise Fog::AWS::DataPipeline::NotFound.new("Pipeline with id: #{id} does not exist")
+          end
+
+          pipeline
         end
       end
 
@@ -116,7 +167,19 @@ module Fog
         end
 
         def _request(params)
-          @connection.request(params)
+          response = @connection.request(params)
+
+          unless response.body.empty?
+            response.body = Fog::JSON.decode(response.body)
+          end
+
+          response
+        rescue Excon::Error::BadRequest => error
+          match = Fog::AWS::Errors.match_error(error)
+          raise if match.empty?
+          if %w(PipelineNotFoundException PipelineDeletedException).include?(match[:code])
+            raise Fog::AWS::DataPipeline::NotFound.slurp(error, match[:message])
+          end
         end
       end
     end
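
Note: with the new mocks, DataPipeline error handling can be exercised offline. A minimal sketch (credentials are placeholders); the mock's find_pipeline raises the same Fog::AWS::DataPipeline::NotFound that _request now derives from PipelineNotFoundException / PipelineDeletedException:

    require 'fog/aws'

    Fog.mock!

    dp = Fog::AWS::DataPipeline.new(
      :aws_access_key_id     => 'mock-key',    # placeholder
      :aws_secret_access_key => 'mock-secret'  # placeholder
    )

    begin
      dp.activate_pipeline('df-NoSuchPipeline0000')  # unknown ID
    rescue Fog::AWS::DataPipeline::NotFound => e
      puts e.message  # "Pipeline with id: df-NoSuchPipeline0000 does not exist"
    end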
data/lib/fog/aws/mock.rb CHANGED
@@ -170,6 +170,10 @@ module Fog
       "sir-#{Fog::Mock.random_letters_and_numbers(8)}"
     end
 
+    def self.data_pipeline_id
+      "df-#{Fog::Mock.random_letters_and_numbers(19).capitalize}"
+    end
+
     def self.spot_product_descriptions
       [
         'Linux/UNIX',
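
Note: a quick way to see the mocked ID shape the helper above produces (output varies per run):

    require 'fog/aws'

    puts Fog::AWS::Mock.data_pipeline_id  # "df-" followed by 19 random characters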
data/lib/fog/aws/models/compute/flavors.rb CHANGED
@@ -54,6 +54,26 @@ module Fog
         :ebs_optimized_available => false,
         :instance_store_volumes => 0
       },
+      {
+        :id => 't2.xlarge',
+        :name => 'Micro Instance',
+        :bits => 64,
+        :cores => 4,
+        :disk => 0,
+        :ram => 16384,
+        :ebs_optimized_available => false,
+        :instance_store_volumes => 0
+      },
+      {
+        :id => 't2.2xlarge',
+        :name => 'Micro Instance',
+        :bits => 64,
+        :cores => 8,
+        :disk => 0,
+        :ram => 32768,
+        :ebs_optimized_available => false,
+        :instance_store_volumes => 0
+      },
       {
         :id => 'm1.small',
         :name => 'Small Instance',
@@ -95,7 +115,7 @@ module Fog
         :instance_store_volumes => 4
       },
       {
-        :id => 'c1.medium',
+        :id => 'c1.medium',
         :bits => 32,
         :cores => 5,
         :disk => 350,
@@ -255,7 +275,7 @@ module Fog
         :instance_store_volumes => 1
       },
       {
-        :id => 'm2.2xlarge',
+        :id => 'm2.2xlarge',
         :name => 'High Memory Double Extra Large',
         :bits => 64,
         :cores => 13,
@@ -328,7 +348,7 @@ module Fog
         :id => "hi1.4xlarge",
         :name => "High I/O Quadruple Extra Large Instance",
         :bits => 64,
-        :cores => 35,
+        :cores => 35,
         :disk => 2048,
         :ram => 61952,
         :ebs_optimized_available => false,
@@ -411,7 +431,7 @@ module Fog
         :cores => 2,
         :ram => 15616,
         :disk => 32,
-        :ebs_optimized_available => true,
+        :ebs_optimized_available => false,
         :instance_store_volumes => 1
       },
       {
@@ -454,6 +474,66 @@ module Fog
         :ebs_optimized_available => false,
         :instance_store_volumes => 2
       },
+      {
+        :id => "r4.large",
+        :name => "R4 Large",
+        :bits => 64,
+        :cores => 2,
+        :ram => 15616,
+        :disk => 0,
+        :ebs_optimized_available => true,
+        :instance_store_volumes => 0
+      },
+      {
+        :id => "r4.xlarge",
+        :name => "R4 Extra Large",
+        :bits => 64,
+        :cores => 4,
+        :ram => 31232,
+        :disk => 0,
+        :ebs_optimized_available => true,
+        :instance_store_volumes => 0
+      },
+      {
+        :id => "r4.2xlarge",
+        :name => "R4 Double Extra Large",
+        :bits => 64,
+        :cores => 8,
+        :ram => 62464,
+        :disk => 0,
+        :ebs_optimized_available => true,
+        :instance_store_volumes => 0
+      },
+      {
+        :id => "r4.4xlarge",
+        :name => "R4 Quadruple Extra Large",
+        :bits => 64,
+        :cores => 16,
+        :ram => 124928,
+        :disk => 0,
+        :ebs_optimized_available => true,
+        :instance_store_volumes => 0
+      },
+      {
+        :id => "r4.8xlarge",
+        :name => "R4 Eight Extra Large",
+        :bits => 64,
+        :cores => 32,
+        :ram => 249856,
+        :disk => 0,
+        :ebs_optimized_available => true,
+        :instance_store_volumes => 0
+      },
+      {
+        :id => "r4.16xlarge",
+        :name => "R4 Sixteen Extra Large",
+        :bits => 64,
+        :cores => 32,
+        :ram => 499712,
+        :disk => 0,
+        :ebs_optimized_available => true,
+        :instance_store_volumes => 0
+      },
       {
         :id => "d2.xlarge",
         :name => "D2 Extra Large",
data/lib/fog/aws/requests/data_pipeline/activate_pipeline.rb CHANGED
@@ -14,16 +14,20 @@ module Fog
 
           response = request({
             :body => Fog::JSON.encode(params),
-            :headers => { 'X-Amz-Target' => 'DataPipeline.ActivatePipeline' },
+            :headers => { 'X-Amz-Target' => 'DataPipeline.ActivatePipeline' }
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
         def activate_pipeline(id)
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+
+          pipeline = find_pipeline(id)
+          pipeline[:active] = true
+
+          response.body = {}
+          response
         end
       end
     end
data/lib/fog/aws/requests/data_pipeline/create_pipeline.rb CHANGED
@@ -24,14 +24,35 @@ module Fog
             :body => Fog::JSON.encode(params),
             :headers => { 'X-Amz-Target' => 'DataPipeline.CreatePipeline' },
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
         def create_pipeline(unique_id, name, description=nil, tags=nil)
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+
+          if existing_pipeline = self.data[:pipelines][unique_id]
+            {"pipelineId" => existing_pipeline["pipelineId"]}
+          else
+            pipeline_id = Fog::AWS::Mock.data_pipeline_id
+            mapped_tags = if tags
+                            tags.map { |k,v| {"key" => k.to_s, "value" => v.to_s}}
+                          else
+                            []
+                          end
+
+            pipeline = {
+              "name" => name,
+              "description" => description,
+              "fields" => mapped_tags,
+              "pipelineId" => pipeline_id,
+            }
+
+            self.data[:pipelines][unique_id] = pipeline
+
+            response.body = {"pipelineId" => pipeline_id}
+          end
+          response
         end
       end
     end
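
Note: requests now return the full Excon::Response with a JSON-decoded body instead of the bare hash, which is why the Shindo tests below gain .body calls. A mock-mode sketch (credentials and names are placeholders):

    require 'fog/aws'

    Fog.mock!

    dp = Fog::AWS::DataPipeline.new(
      :aws_access_key_id     => 'mock-key',    # placeholder
      :aws_secret_access_key => 'mock-secret'  # placeholder
    )

    # The decoded JSON hash now lives in #body rather than being the
    # return value itself.
    response = dp.create_pipeline('fog-example-unique-id', 'fog-example')
    pipeline_id = response.body['pipelineId']  # => "df-..."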
data/lib/fog/aws/requests/data_pipeline/deactivate_pipeline.rb ADDED
@@ -0,0 +1,36 @@
+module Fog
+  module AWS
+    class DataPipeline
+      class Real
+        # Deactivate a pipeline
+        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_DeactivatePipeline.html
+        # ==== Parameters
+        # * PipelineId <~String> - The ID of the pipeline to deactivate
+        # * cancelActive <~Boolean> - Indicates whether to cancel any running objects. The default is true, which sets the state of any running objects to CANCELED. If this value is false, the pipeline is deactivated after all running objects finish.
+        # ==== Returns
+        # * response<~Excon::Response>:
+        #   * body<~Hash>:
+        def deactivate_pipeline(id, cancel_active=true)
+          params = { 'pipelineId' => id, 'cancelActive' => cancel_active }
+
+          response = request({
+            :body => Fog::JSON.encode(params),
+            :headers => { 'X-Amz-Target' => 'DataPipeline.DeactivatePipeline' }
+          })
+        end
+      end
+
+      class Mock
+        def deactivate_pipeline(id, cancel_active=true)
+          response = Excon::Response.new
+
+          pipeline = find_pipeline(id)
+          pipeline[:active] = false
+
+          response.body = {}
+          response
+        end
+      end
+    end
+  end
+end
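
Note: a mock-mode sketch of the new request (credentials and names are placeholders):

    require 'fog/aws'

    Fog.mock!

    dp = Fog::AWS::DataPipeline.new(
      :aws_access_key_id     => 'mock-key',    # placeholder
      :aws_secret_access_key => 'mock-secret'  # placeholder
    )

    id = dp.create_pipeline('fog-example-unique-id', 'fog-example').body['pipelineId']
    dp.activate_pipeline(id)

    # false = let running objects finish instead of marking them CANCELED
    dp.deactivate_pipeline(id, false)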
data/lib/fog/aws/requests/data_pipeline/delete_pipeline.rb CHANGED
@@ -22,7 +22,12 @@ module Fog
 
       class Mock
         def delete_pipeline(id)
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+
+          pipeline = find_pipeline(id)
+          pipeline[:deleted] = true
+
+          true
         end
       end
     end
data/lib/fog/aws/requests/data_pipeline/describe_objects.rb CHANGED
@@ -23,14 +23,28 @@ module Fog
             :body => Fog::JSON.encode(params),
             :headers => { 'X-Amz-Target' => 'DataPipeline.DescribeObjects' },
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
         def describe_objects(id, objects, options={})
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+
+          find_pipeline(id)
+
+          pipeline_objects = self.data[:pipeline_definitions][id]["pipelineObjects"].select { |o| objects.include?(o["id"]) }
+
+          response.body = {
+            "hasMoreResults" => false,
+            "marker" => options[:marker],
+            "pipelineObjects" => [
+              {
+                "fields" => pipeline_objects
+              }
+            ]
+          }
+
+          response
         end
       end
     end
data/lib/fog/aws/requests/data_pipeline/describe_pipelines.rb CHANGED
@@ -17,14 +17,14 @@ module Fog
             :body => Fog::JSON.encode(params),
             :headers => { 'X-Amz-Target' => 'DataPipeline.DescribePipelines' },
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
         def describe_pipelines(ids)
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+          response.body = {"pipelineDescriptionList" => self.data[:pipelines].values.select { |p| !p[:deleted] && ids.include?(p["pipelineId"]) } }
+          response
         end
       end
     end
data/lib/fog/aws/requests/data_pipeline/get_pipeline_definition.rb CHANGED
@@ -18,14 +18,17 @@ module Fog
             :body => Fog::JSON.encode(params),
             :headers => { 'X-Amz-Target' => 'DataPipeline.GetPipelineDefinition' },
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
-        def get_pipeline_definition(id, objects)
-          Fog::Mock.not_implemented
+        def get_pipeline_definition(id)
+          response = Excon::Response.new
+
+          pipeline = find_pipeline(id)
+
+          response.body = self.data[:pipeline_definitions][id] || {"pipelineObjects" => []}
+          response
         end
       end
     end
data/lib/fog/aws/requests/data_pipeline/list_pipelines.rb CHANGED
@@ -17,14 +17,14 @@ module Fog
             :body => Fog::JSON.encode(params),
             :headers => { 'X-Amz-Target' => 'DataPipeline.ListPipelines' },
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
         def list_pipelines(options={})
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+          response.body = {"pipelineIdList" => self.data[:pipelines].values.map { |p| {"id" => p["pipelineId"], "name" => p["name"]} } }
+          response
         end
       end
     end
data/lib/fog/aws/requests/data_pipeline/put_pipeline_definition.rb CHANGED
@@ -1,35 +1,7 @@
 module Fog
   module AWS
     class DataPipeline
-      class Real
-        # Put raw pipeline definition JSON
-        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_PutPipelineDefinition.html
-        # ==== Parameters
-        # * PipelineId <~String> - The ID of the pipeline
-        # * PipelineObjects <~String> - Objects in the pipeline
-        # ==== Returns
-        # * response<~Excon::Response>:
-        #   * body<~Hash>:
-        def put_pipeline_definition(id, objects)
-          params = {
-            'pipelineId' => id,
-            'pipelineObjects' => transform_objects(objects),
-          }
-
-          response = request({
-            :body => Fog::JSON.encode(params),
-            :headers => { 'X-Amz-Target' => 'DataPipeline.PutPipelineDefinition' },
-          })
-
-          Fog::JSON.decode(response.body)
-        end
-
-        # Take a list of pipeline object hashes as specified in the Data Pipeline JSON format
-        # and transform it into the format expected by the API
-        def transform_objects(objects)
-          objects.map { |object| JSONObject.new(object).to_api }
-        end
-
+      module Shared
         class JSONObject
           def initialize(object)
             @json_fields = object.clone
@@ -53,7 +25,7 @@ module Fog
 
           def field_for_kv(key, value)
             if value.is_a?(Hash)
-              { 'key' => key, 'refValue' => value['ref'] }
+              { 'key' => key, 'refValue' => value['ref'], 'stringValue' => value['stringValue'] }
 
             elsif value.is_a?(Array)
               value.map { |subvalue| field_for_kv(key, subvalue) }
@@ -64,11 +36,60 @@ module Fog
             end
           end
         end
+
+        # Take a list of pipeline object hashes as specified in the Data Pipeline JSON format
+        # and transform it into the format expected by the API
+        def transform_objects(objects)
+          objects.map { |object| JSONObject.new(object).to_api }
+        end
+      end
+
+      class Real
+        include Shared
+        # Put raw pipeline definition JSON
+        # http://docs.aws.amazon.com/datapipeline/latest/APIReference/API_PutPipelineDefinition.html
+        # ==== Parameters
+        # * PipelineId <~String> - The ID of the pipeline
+        # * PipelineObjects <~String> - Objects in the pipeline
+        # ==== Returns
+        # * response<~Excon::Response>:
+        #   * body<~Hash>:
+        def put_pipeline_definition(id, pipeline_objects, options={})
+          params = {
+            'pipelineId' => id,
+            'pipelineObjects' => transform_objects(pipeline_objects),
+          }.merge(options)
+
+          response = request({
+            :body => Fog::JSON.encode(params),
+            :headers => { 'X-Amz-Target' => 'DataPipeline.PutPipelineDefinition' },
+          })
+        end
       end
 
       class Mock
-        def put_pipeline_definition(id, objects)
-          Fog::Mock.not_implemented
+        include Shared
+
+        def put_pipeline_definition(id, pipeline_objects, _options={})
+          response = Excon::Response.new
+          options = _options.dup
+
+          pipeline = find_pipeline(id)
+
+          stringified_objects = if pipeline_objects.any?
+                                  transform_objects(stringify_keys(pipeline_objects))
+                                else
+                                  options.each { |k,v| options[k] = transform_objects(stringify_keys(v)) }
+                                end
+
+          if stringified_objects.is_a?(Array)
+            stringified_objects = {"pipelineObjects" => stringified_objects}
+          end
+
+          self.data[:pipeline_definitions][id] = stringified_objects
+
+          response.body = {"errored" => false, "validationErrors" => [], "validationWarnings" => []}
+          response
         end
       end
     end
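
Note: a mock-mode round trip through the relocated transform_objects (credentials and the one-object definition are illustrative):

    require 'fog/aws'

    Fog.mock!

    dp = Fog::AWS::DataPipeline.new(
      :aws_access_key_id     => 'mock-key',    # placeholder
      :aws_secret_access_key => 'mock-secret'  # placeholder
    )

    id = dp.create_pipeline('fog-example-unique-id', 'fog-example').body['pipelineId']

    objects = [
      { 'id' => 'Default', 'name' => 'Default', 'scheduleType' => 'ondemand' }
    ]

    dp.put_pipeline_definition(id, objects)
    dp.get_pipeline_definition(id).body  # => {"pipelineObjects" => [...]}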
data/lib/fog/aws/requests/data_pipeline/query_objects.rb CHANGED
@@ -23,14 +23,17 @@ module Fog
             :body => Fog::JSON.encode(params),
             :headers => { 'X-Amz-Target' => 'DataPipeline.QueryObjects' },
           })
-
-          Fog::JSON.decode(response.body)
         end
       end
 
       class Mock
         def query_objects(id, sphere, options={})
-          Fog::Mock.not_implemented
+          response = Excon::Response.new
+
+          find_pipeline(id)
+
+          response.body = {"hasMoreResults" => false, "ids" => ["Default"]}
+          response
         end
       end
     end
data/lib/fog/aws/sqs.rb CHANGED
@@ -128,7 +128,7 @@ module Fog
           :host => @host,
           :path => path || @path,
           :port => @port,
-          :version => '2009-02-01'
+          :version => '2012-11-05'
         }
       )
 
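Note: this bump to the 2012-11-05 SQS API is what unlocks PR #323's FIFO fix. A sketch against real credentials (all values are placeholders; the flattened Attribute.N.Name/Value pairs follow the SQS Query API convention):

    require 'fog/aws'

    sqs = Fog::AWS::SQS.new(
      :aws_access_key_id     => 'YOUR_KEY',     # placeholder
      :aws_secret_access_key => 'YOUR_SECRET',  # placeholder
      :region                => 'us-east-1'
    )

    # FIFO queue names must end in ".fifo"; the old 2009-02-01 API
    # version rejected this attribute.
    sqs.create_queue('orders.fifo',
      'Attribute.1.Name'  => 'FifoQueue',
      'Attribute.1.Value' => 'true'
    )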
data/lib/fog/aws/storage.rb CHANGED
@@ -190,6 +190,7 @@
 
       params = request_params(params)
       params[:headers][:host] = params[:host]
+      params[:headers][:host] += ":#{params[:port]}" if params.fetch(:port, nil)
 
       signature_query_params = @signer.signature_parameters(params, now, "UNSIGNED-PAYLOAD")
       params[:query] = (params[:query] || {}).merge(signature_query_params)
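
Note: with this change the v4 signature covers host:port, matching the Host header the client actually sends (PR #327). A sketch with a hypothetical S3-compatible endpoint on a non-standard port (all values are placeholders):

    require 'fog/aws'

    storage = Fog::Storage.new(
      :provider              => 'AWS',
      :aws_access_key_id     => 'YOUR_KEY',     # placeholder
      :aws_secret_access_key => 'YOUR_SECRET',  # placeholder
      :host                  => 's3.example.test',
      :port                  => 8443,
      :scheme                => 'https',
      :path_style            => true
    )

    # The presigned URL's signature now includes "s3.example.test:8443".
    puts storage.get_object_url('a-bucket', 'a-key', Time.now + 300)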
data/lib/fog/aws/version.rb CHANGED
@@ -1,5 +1,5 @@
 module Fog
   module AWS
-    VERSION = "0.13.0"
+    VERSION = "1.0.0"
   end
 end
data/tests/requests/data_pipeline/helper.rb CHANGED
@@ -35,9 +35,13 @@ class AWS
       "marker" => Fog::Nullable::String,
       "pipelineObjects" => [
         {
-          'id' => String,
-          'name' => String,
-          'fields' => FIELDS,
+          "fields" => [
+            {
+              'id' => String,
+              'name' => String,
+              'fields' => FIELDS,
+            }
+          ]
         }
       ]
     }
@@ -65,7 +69,9 @@ class AWS
           "name" => String,
           "fields" => FIELDS,
         }
-      ]
+      ],
+      "parameterObjects" => Fog::Nullable::Array,
+      "parameterValues" => Fog::Nullable::Array,
     }
   end
 end
data/tests/requests/data_pipeline/pipeline_tests.rb CHANGED
@@ -1,6 +1,4 @@
 Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
-  pending if Fog.mocking?
-
   @pipeline_id = nil
 
   tests('success') do
@@ -9,18 +7,18 @@ Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
       name = 'fog-test-pipeline-name'
       description = 'Fog test pipeline'
 
-      result = Fog::AWS[:data_pipeline].create_pipeline(unique_id, name, description, {})
+      result = Fog::AWS[:data_pipeline].create_pipeline(unique_id, name, description, {}).body
       @pipeline_id = result['pipelineId']
       result
     end
 
     tests("#list_pipelines").formats(AWS::DataPipeline::Formats::LIST_PIPELINES) do
-      Fog::AWS[:data_pipeline].list_pipelines()
+      Fog::AWS[:data_pipeline].list_pipelines.body
     end
 
     tests("#describe_pipelines").formats(AWS::DataPipeline::Formats::DESCRIBE_PIPELINES) do
       ids = [@pipeline_id]
-      Fog::AWS[:data_pipeline].describe_pipelines(ids)
+      Fog::AWS[:data_pipeline].describe_pipelines(ids).body
     end
 
     tests("#put_pipeline_definition").formats(AWS::DataPipeline::Formats::PUT_PIPELINE_DEFINITION) do
@@ -39,35 +37,39 @@ Shindo.tests('AWS::DataPipeline | pipeline_tests', ['aws', 'data_pipeline']) do
       },
     ]
 
-      Fog::AWS[:data_pipeline].put_pipeline_definition(@pipeline_id, objects)
+      Fog::AWS[:data_pipeline].put_pipeline_definition(@pipeline_id, objects).body
     end
 
     tests("#activate_pipeline") do
       Fog::AWS[:data_pipeline].activate_pipeline(@pipeline_id)
     end
 
+    tests("#deactivate_pipeline") do
+      Fog::AWS[:data_pipeline].deactivate_pipeline(@pipeline_id)
+    end
+
     tests("#get_pipeline_definition").formats(AWS::DataPipeline::Formats::GET_PIPELINE_DEFINITION) do
-      Fog::AWS[:data_pipeline].get_pipeline_definition(@pipeline_id)
+      Fog::AWS[:data_pipeline].get_pipeline_definition(@pipeline_id).body
     end
 
     tests("#query_objects") do
       tests("for COMPONENTs").formats(AWS::DataPipeline::Formats::QUERY_OBJECTS) do
-        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'COMPONENT')
+        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'COMPONENT').body
       end
 
       tests("for INSTANCEs").formats(AWS::DataPipeline::Formats::QUERY_OBJECTS) do
-        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'INSTANCE')
+        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'INSTANCE').body
       end
 
       tests("for ATTEMPTs").formats(AWS::DataPipeline::Formats::QUERY_OBJECTS) do
-        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT')
+        Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT').body
      end
     end
 
     tests('#describe_objects').formats(AWS::DataPipeline::Formats::DESCRIBE_OBJECTS) do
-      attempts = Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT')
+      attempts = Fog::AWS[:data_pipeline].query_objects(@pipeline_id, 'ATTEMPT').body
       object_ids = attempts['ids'][0..5]
-      Fog::AWS[:data_pipeline].describe_objects(@pipeline_id, object_ids)
+      Fog::AWS[:data_pipeline].describe_objects(@pipeline_id, object_ids).body
     end
 
     tests("#delete_pipeline").returns(true) do
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fog-aws
 version: !ruby/object:Gem::Version
-  version: 0.13.0
+  version: 1.0.0
 platform: ruby
 authors:
 - Josh Lane
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-11-29 00:00:00.000000000 Z
+date: 2016-12-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -960,6 +960,7 @@ files:
 - lib/fog/aws/requests/compute/unmonitor_instances.rb
 - lib/fog/aws/requests/data_pipeline/activate_pipeline.rb
 - lib/fog/aws/requests/data_pipeline/create_pipeline.rb
+- lib/fog/aws/requests/data_pipeline/deactivate_pipeline.rb
 - lib/fog/aws/requests/data_pipeline/delete_pipeline.rb
 - lib/fog/aws/requests/data_pipeline/describe_objects.rb
 - lib/fog/aws/requests/data_pipeline/describe_pipelines.rb
@@ -1678,9 +1679,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.5.1
+rubygems_version: 2.5.2
 signing_key:
 specification_version: 4
 summary: Module for the 'fog' gem to support Amazon Web Services.
 test_files: []
-has_rdoc: