pipely 0.13.0 → 0.14.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: b731960bf5c0239cac639066f033a49a4e5e6c0f
-  data.tar.gz: 1612176e9731abbe4d00ad0e9d900a92fb0a3f37
+  metadata.gz: c7b441e79e14a90c03f6db511d4e8504ac5f8360
+  data.tar.gz: 692e36969e5b3100caa338d474127498ee870df6
 SHA512:
-  metadata.gz: 2fd1252c31ad76533f47cd2e5262e8e99710354af17308e50490773ca4670720dfeecab393938936db58a9f0a1b79656e88a8b143a9cd48b3223accd23958c1f
-  data.tar.gz: a4426e893afb1ea016504ef4853a452c99332b13b347810d66b2a9b0816fffe18eb60da85a6d83b7a1e771066bf3245dd8ed42d84e0b7d6e0d7b5a5b2d14b554
+  metadata.gz: 153c54d53fcbdc2f123616d5284e303df8f4b5b557520eb65d774d7873bbd206a90fbfb9e5e68235f37747eef030979b8bdd3dcaf745774702bfae6ea997b75c
+  data.tar.gz: 3f3b17eb81bc8e5b712cc5ddc5e23785e52c3cb7ba9f60882aec6069851e4ee4c97ff80c5b53c6b739c790c71228f788ddf7c025afeb214b417fd184f6f5226f
data/lib/pipely.rb CHANGED
@@ -11,6 +11,8 @@ require 'pipely/s3_writer'
 #
 module Pipely
 
+  ENV['AWS_REGION'] ||= 'us-east-1'
+
   def self.draw(definition_json, filename, component_attributes=nil)
     definition = Definition.parse(definition_json)
 
@@ -14,31 +14,30 @@ module Pipely
       $stdout.puts pipeline_ids.to_json
     else
       $stdout.puts pipeline_ids.map { |pipeline|
-        [ pipeline['name'], pipeline['id'] ].join("\t")
+        [ pipeline.name, pipeline.id ].join("\t")
       }
     end
   end
 
-    private
+  private
 
   def pipeline_ids
     ids = []
 
+    data_pipeline = Aws::DataPipeline::Client.new
+
+    marker = nil
     begin
-      result = data_pipeline.list_pipelines
-      ids += result['pipelineIdList']
-    end while (result['hasMoreResults'] && result['marker'])
+      result = data_pipeline.list_pipelines(
+        marker: marker,
+      )
+      ids += result.pipeline_id_list
+      marker = result.marker
+    end while (result.has_more_results && marker)
 
     ids
   end
-
-  def data_pipeline
-    Fog::AWS::DataPipeline.new
-  rescue ArgumentError
-    $stderr.puts "#{self.class.name}: Falling back to IAM profile"
-    Fog::AWS::DataPipeline.new(use_iam_profile: true)
-  end
-
 end
 
 end
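
The rewritten loop pages through ListPipelines by hand via the response's marker and has_more_results members. aws-sdk v2 responses are also pageable, so the same traversal can be written without explicit marker bookkeeping — a sketch, not what pipely ships:

require 'aws-sdk'

data_pipeline = Aws::DataPipeline::Client.new
ids = []
data_pipeline.list_pipelines.each_page do |page|
  ids += page.pipeline_id_list  # structs responding to #id and #name
end
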
data/lib/pipely/deploy/client.rb CHANGED
@@ -1,16 +1,3 @@
-# Note: We are in the process of migrating from Fog to aws-sdk for communicating
-# with the Data Pipeline API. aws-sdk offers several benefits, such as:
-#
-# * Built-in automated exponential back-off, to avoid hitting rate limits.
-# * Working pagination of ListPipelines responses.
-# * Authentication using IAM resource roles.
-# * Faster installation.
-#
-# On the downside, aws-sdk does not yet support tagging of pipelines. We can
-# not yet port pipely entirely away from Fog until this is resolved, so we will
-# temporarily require both libraries.
-
-require 'fog'
 require 'aws-sdk'
 require 'logger'
 require 'tempfile'
@@ -24,23 +11,26 @@ module Pipely
     #
     class Client
 
+      attr_reader :base_tags
+
       # Generic error representing failure to deploy a rendered definition.
       class PipelineDeployerError < RuntimeError; end
 
       def initialize(log=nil)
         @log = log || Logger.new(STDOUT)
-        @data_pipelines = Fog::AWS::DataPipeline.new
-        @aws = AWS::DataPipeline.new.client
+        @aws = Aws::DataPipeline::Client.new
+        @base_tags = {
+          "environment" => ENV['env'],
+          "creator" => ENV['USER']
+        }
       end
 
-      def deploy_pipeline(pipeline_basename, definition=nil, &block)
-        pipeline_name = [
-          ('P' if ENV['env'] == 'production'),
-          ENV['USER'],
-          pipeline_basename
-        ].compact.join(':')
+      def deploy_pipeline(pipeline_basename, definition = nil, &block)
+        pipeline_name = pipeline_name(pipeline_basename)
 
-        tags = { "basename" => pipeline_basename }
+        tags = base_tags.merge(
+          "basename" => pipeline_basename,
+          "deploy_id" => SecureRandom.uuid )
 
         # Get a list of all existing pipelines
         pipeline_ids = existing_pipelines(pipeline_name)
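
With tagging now supported natively by aws-sdk v2, every deploy sends the instance-wide base_tags plus per-deploy tags. A sketch of the resulting hash (values depend on the caller's environment; the UUID is illustrative):

require 'securerandom'

base_tags = { "environment" => ENV['env'], "creator" => ENV['USER'] }
tags = base_tags.merge(
  "basename"  => "my_pipeline",
  "deploy_id" => SecureRandom.uuid
)
# => { "environment" => "production", "creator" => "alice",
#      "basename"    => "my_pipeline", "deploy_id" => "4f2c0e0e-..." }
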
@@ -87,23 +77,19 @@ module Pipely
       end
 
       def create_pipeline(pipeline_name, definition, tags={})
-        # Use Fog gem, instead of aws-sdk gem, to create pipeline with tags.
-        #
-        # TODO: Consolidate on aws-sdk when tagging support is added.
-        #
-        created_pipeline = @data_pipelines.pipelines.create(
-          unique_id: SecureRandom.uuid,
+        created_pipeline = @aws.create_pipeline(
           name: pipeline_name,
-          tags: default_tags.merge(tags)
+          unique_id: tags['deploy_id'] || SecureRandom.uuid,
+          description: "Pipely Deployed Data Pipeline",
+          tags: base_tags.merge(tags).map do |k,v|
+            { key: k, value: v } unless v.nil?
+          end.compact,
         )
 
-        definition ||= yield(created_pipeline.id) if block_given?
+        definition ||= yield(created_pipeline.pipeline_id) if block_given?
 
-        # Use aws-sdk gem, instead of Fog, to put definition and activate
-        # pipeline, for improved reporting of validation errors.
-        #
         response = @aws.put_pipeline_definition(
-          pipeline_id: created_pipeline.id,
+          pipeline_id: created_pipeline.pipeline_id,
           pipeline_objects: JSONDefinition.parse(definition)
         )
 
@@ -116,25 +102,24 @@ module Pipely
           @log.error(response[:validation_errors].inspect)
           false
         else
-          @aws.activate_pipeline(pipeline_id: pipeline.id)
-          pipeline.id
+          @aws.activate_pipeline(pipeline_id: pipeline.pipeline_id)
+          pipeline.pipeline_id
         end
       end
 
       def delete_pipeline(pipeline_id)
-        @data_pipelines.pipelines.get(pipeline_id).destroy
+        @aws.delete_pipeline(pipeline_id: pipeline_id)
       end
 
-        private
+      private
 
-      def default_tags
-        {
-          "environment" => ENV['env'],
-          "creator" => ENV['USER'],
-          "deploy_id" => SecureRandom.uuid
-        }
+      def pipeline_name(basename)
+        [
+          ('P' if ENV['env'] == 'production'),
+          ENV['USER'],
+          basename
+        ].compact.join(':')
       end
-
     end
   end
 end
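
Taken together, the renamed helpers make the naming and tagging scheme explicit. A small sketch of both behaviors (values are illustrative):

# pipeline_name prefixes production pipelines with "P" and always includes the user:
#   ENV['env'] == 'production' => "P:alice:my_pipeline"
#   otherwise                  => "alice:my_pipeline"

# create_pipeline converts the tag hash into the aws-sdk key/value format,
# dropping nil values (e.g. when ENV['env'] is unset):
tags = { "environment" => nil, "creator" => "alice", "basename" => "my_pipeline" }
tags.map { |k, v| { key: k, value: v } unless v.nil? }.compact
# => [{ key: "creator", value: "alice" }, { key: "basename", value: "my_pipeline" }]
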
data/lib/pipely/deploy/json_definition.rb CHANGED
@@ -9,21 +9,22 @@ module Pipely
     #
     class JSONDefinition
       def self.parse(definition)
-        definition_objects = JSON.parse(definition)['objects']
+        definition_objects =
+          JSON.parse(definition, symbolize_names: true)[:objects]
         definition_objects.map { |object| new(object).to_api }
       end
 
       def initialize(object)
         @json_fields = object.clone
-        @id = @json_fields.delete('id')
-        @name = @json_fields.delete('name') || @id
+        @id = @json_fields.delete(:id)
+        @name = @json_fields.delete(:name) || @id
       end
 
       def to_api
         {
-          'id' => @id,
-          'name' => @name,
-          'fields' => fields
+          id: @id,
+          name: @name,
+          fields: fields
         }
       end
 
@@ -35,13 +36,13 @@ module Pipely
 
       def field_for_kv(key, value)
         if value.is_a?(Hash)
-          { 'key' => key, 'refValue' => value['ref'] }
+          { key: key, ref_value: value[:ref] }
 
         elsif value.is_a?(Array)
           value.map { |subvalue| field_for_kv(key, subvalue) }
 
         else
-          { 'key' => key, 'stringValue' => value }
+          { key: key, string_value: value }
 
         end
       end
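
Keys now stay symbols end to end, and the emitted field hashes use the snake_case member names (ref_value, string_value) that aws-sdk v2 expects. A sketch of the round trip, assuming the private fields helper flattens each pair through field_for_kv as the hunks above suggest:

require 'json'

definition = {
  objects: [
    { id: "Default", name: "Default", scheduleType: "cron",
      schedule: { ref: "DefaultSchedule" } }
  ]
}.to_json

Pipely::Deploy::JSONDefinition.parse(definition)
# => [{ id: "Default", name: "Default",
#       fields: [{ key: :scheduleType, string_value: "cron" },
#                { key: :schedule, ref_value: "DefaultSchedule" }] }]
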
data/lib/pipely/deploy/s3_uploader.rb CHANGED
@@ -39,7 +39,7 @@ module Pipely
       #
       def upload_file(file)
         target_path = s3_file_path(file)
-        s3_object = @s3_bucket.objects[target_path]
+        s3_object = @s3_bucket.object(target_path)
 
         content = File.read(file)
         digest = Digest::MD5.hexdigest(content)
@@ -48,7 +48,7 @@ module Pipely
           puts "skipping #{file} to #{target_path} (ETAG matches)"
         else
           puts "uploading #{file} to #{target_path}"
-          s3_object.write(content)
+          s3_object.put(body: content)
         end
       end
 
data/lib/pipely/live_pipeline.rb CHANGED
@@ -1,4 +1,3 @@
-require 'pipely/fog_client'
 require 'pipely/runs_report'
 
 module Pipely
@@ -9,9 +8,8 @@ module Pipely
     def initialize(pipeline_id)
       @pipeline_id = pipeline_id
 
-      client = FogClient.new(pipeline_id)
-      @definition_json = client.definition
-      @task_states_by_scheduled_start = client.task_states_by_scheduled_start
+      @definition_json = definition(pipeline_id)
+      @task_states_by_scheduled_start = task_states_by_scheduled_start
 
       unless @definition_json
         raise "No definition found for #{client.pipeline_id}"
@@ -38,7 +36,11 @@ module Pipely
       end
     end
 
-      private
+    private
+
+    def data_pipeline
+      @data_pipeline ||= Aws::DataPipeline::Client.new
+    end
 
     def render_graph(start, task_states, output_path)
       utc_time = Time.now.to_i
@@ -50,5 +52,81 @@ module Pipely
       Pipely.draw(@definition_json, filename, task_states)
     end
 
+    def definition(pipeline_id)
+      objects = data_pipeline.get_pipeline_definition(pipeline_id: pipeline_id)
+      { objects: flatten_pipeline_objects(objects.pipeline_objects) }.to_json
+    end
+
+    def task_states_by_scheduled_start
+      task_states_by_scheduled_start = {}
+
+      all_instances.each do |pipeline_object|
+        component_id = status = scheduled_start = nil
+
+        pipeline_object.fields.each do |field|
+          case field.key
+          when '@componentParent'
+            component_id = field.ref_value
+          when '@status'
+            status = field.string_value
+          when '@scheduledStartTime'
+            scheduled_start = field.string_value
+          end
+        end
+
+        task_states_by_scheduled_start[scheduled_start] ||= {}
+        task_states_by_scheduled_start[scheduled_start][component_id] = {
+          execution_state: status
+        }
+      end
+
+      task_states_by_scheduled_start
+    end
+
+    def all_instances
+      marker = nil
+      pipeline_objects = []
+
+      begin
+        result = data_pipeline.query_objects(
+          pipeline_id: @pipeline_id,
+          sphere: "INSTANCE",
+          marker: marker,
+        )
+
+        instance_details = data_pipeline.describe_objects(
+          pipeline_id: @pipeline_id,
+          object_ids: result.ids
+        )
+
+        pipeline_objects += instance_details.pipeline_objects
+        marker = result.marker
+      end while (result.has_more_results && marker)
+
+      pipeline_objects
+    end
+
+    def flatten_pipeline_objects(objects)
+      objects.each_with_object([]) do |object, result|
+        h = {
+          id: object.id,
+          name: object.name,
+        }
+
+        object.fields.each do |field|
+          k = field.key
+          if field.ref_value
+            h[k] ||= []
+            h[k] << { ref: field.ref_value }
+          else
+            h[k] = field.string_value
+          end
+        end
+
+        result << h
+      end
+    end
   end
 end
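
These helpers reimplement, on aws-sdk v2, what the deleted fog_client.rb (at the end of this diff) did with string hash keys. For reference, a sketch of the shapes the new private methods return (values illustrative):

# definition(pipeline_id) serializes the flattened objects, e.g.
#   '{"objects":[{"id":"Default","name":"Default","type":"Schedule"}]}'
#
# task_states_by_scheduled_start groups instance states for render_graph:
{
  "2015-06-24T00:00:00" => {
    "MyCopyActivity" => { execution_state: "FINISHED" }
  }
}
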
data/lib/pipely/s3_writer.rb CHANGED
@@ -1,4 +1,4 @@
-require 'fog/storage'
+require 'aws-sdk'
 
 module Pipely
 
@@ -11,31 +11,11 @@ module Pipely
       @host, @path = uri.host, uri.path.gsub(/^\//,'')
     end
 
-    def directory
-      directory = storage.directories.detect{ |d| d.key == @host }
-
-      directory or raise("Couldn't find S3 bucket '#{@host}'")
-    end
-
     def write(content)
-      remote_file = directory.files.create({
-        :key => @path,
-        :body => content,
-        :public => true,
-      })
-
-      remote_file.public_url
+      s3_bucket = Aws::S3::Bucket.new(@host)
+      s3_object = s3_bucket.object(@path)
+      s3_object.put(body: content, acl: 'public-read')
+      s3_object.public_url
    end
-
-    private
-
-    def storage
-      Fog::Storage.new({ provider: 'AWS' })
-    rescue ArgumentError
-      $stderr.puts "#{self.class.name}: Falling back to IAM profile"
-      Fog::Storage.new({ provider: 'AWS', use_iam_profile: true })
-    end
-
   end
-
 end
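
S3Writer now talks to S3 through the aws-sdk v2 resource interface. A usage sketch — assuming, per the @host/@path parsing above, that the constructor takes an s3:// URL (the bucket, key, and png_data payload here are made up; credentials and region come from Aws.config or the environment):

require 'pipely/s3_writer'

url = Pipely::S3Writer.new('s3://my-bucket/graphs/pipeline.png').write(png_data)
# The object is uploaded with a public-read ACL and its public URL returned,
# e.g. "https://my-bucket.s3.amazonaws.com/graphs/pipeline.png"
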
@@ -1,6 +1,6 @@
 require 'rake'
 require 'rake/tasklib'
-require 'aws'
+require 'aws-sdk'
 require 'erubis'
 require 'pipely/deploy/bootstrap'
 
@@ -59,7 +59,7 @@ module Pipely
 
     private
     def s3_bucket
-      @s3_bucket ||= AWS::S3.new.buckets[@bucket_name]
+      @s3_bucket ||= Aws::S3::Bucket.new(@bucket_name)
     end
 
     def upload_gems
@@ -65,8 +65,8 @@ module Pipely
     private
 
     def with_bucket
-      s3 = AWS::S3.new
-      bucket = s3.buckets[s3_bucket_name]
+      s3 = Aws::S3::Resource.new
+      bucket = s3.bucket(s3_bucket_name)
 
       if bucket.exists?
         yield(bucket)
data/lib/pipely/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Pipely
-  VERSION = '0.13.0' unless defined?(::Pipely::VERSION)
+  VERSION = '0.14.0' unless defined?(::Pipely::VERSION)
 end
@@ -31,18 +31,18 @@ describe Pipely::Deploy::Client do
   describe '#create_pipeline' do
     let(:pipeline_name) { 'NewPipeline' }
     let(:pipeline_id) { 123 }
-    let(:created_pipeline) { double(:created_pipeline, id: pipeline_id) }
+    let(:created_pipeline) do
+      double(:created_pipeline, pipeline_id: pipeline_id)
+    end
     let(:definition) { "Pipeline ID: 123" }
 
-    let(:data_pipelines) { subject.instance_variable_get(:@data_pipelines) }
     let(:aws) { subject.instance_variable_get(:@aws) }
 
     it 'gets the definition from the block' do
-      data_pipelines.stub_chain(:pipelines, :create)
-        .and_return(created_pipeline)
 
       Pipely::Deploy::JSONDefinition.should_receive(:parse).with(definition)
 
+      aws.should_receive(:create_pipeline).and_return(created_pipeline)
       aws.should_receive(:put_pipeline_definition).and_return({})
       aws.should_receive(:activate_pipeline)
       subject.create_pipeline(pipeline_name, nil) do |pipeline_id|
@@ -8,10 +8,9 @@ describe Pipely::Deploy::S3Uploader do
 
   subject { described_class.new(s3_bucket, 'test_path/gems') }
 
-  let(:s3_bucket) do
-    s3 = AWS::S3.new
-    s3.buckets['a-test-bucket']
-  end
+  let(:s3_object) { double(:s3_object) }
+  let(:bucket_name) { 'a-test-bucket' }
+  let(:s3_bucket) { double(:bucket, object: s3_object, name: bucket_name) }
 
   it "should have bucket name" do
     expect(subject.bucket_name).to eq('a-test-bucket')
@@ -22,12 +21,6 @@ describe Pipely::Deploy::S3Uploader do
   end
 
   describe "#upload(files)" do
-    let(:objects) { double(:objects) }
-
-    let(:s3_object) do
-      double('s3_object', write: nil, exists?: true, etag: 'mismatch')
-    end
-
     let(:files) do
       [
         Tempfile.new('packaged-gem1.gem').path,
@@ -35,17 +28,14 @@ describe Pipely::Deploy::S3Uploader do
       ]
     end
 
-    before do
-      allow(objects).to receive(:[]) { s3_object }
-      allow(s3_bucket).to receive(:objects) { objects }
-    end
-
     it 'uploads each file' do
+      allow(s3_object).to receive(:exists?).and_return(true)
+      allow(s3_object).to receive(:etag).and_return('mismatch')
       files.each do |file|
-        expect(objects).to receive(:[]).with(subject.s3_file_path(file))
+        expect(s3_bucket).to receive(:object).with(subject.s3_file_path(file))
       end
 
-      expect(s3_bucket).to receive(:objects).exactly(files.size).times
+      expect(s3_object).to receive(:put).exactly(files.size).times
 
       subject.upload(files)
     end
data/spec/spec_helper.rb CHANGED
@@ -1,21 +1,15 @@
 require 'timecop'
 require 'aws-sdk'
-require 'fog'
 require 'rspec'
 require 'vcr'
 require 'pry'
 
 $LOAD_PATH << File.join(File.dirname(__FILE__), '..', 'lib')
 
-AWS.config(
-  access_key_id: "xxx",
-  secret_access_key: "xxx"
-)
-
-Fog.credentials = {
-  aws_access_key_id: "xxx",
-  aws_secret_access_key: "xxx"
-}
+Aws.config.update({
+  region: 'us-east-1',
+  credentials: Aws::Credentials.new('xxx', 'xxx'),
+})
 
 VCR.configure do |c|
   c.allow_http_connections_when_no_cassette = true
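
Dummy keys plus VCR preserve the old test setup; for new specs, aws-sdk v2 can also stub at the client level, which avoids real credentials and HTTP entirely. A sketch using the SDK's built-in stubbing (operation and member names are from the DataPipeline API):

client = Aws::DataPipeline::Client.new(stub_responses: true)
client.stub_responses(:list_pipelines, pipeline_id_list: [
  { id: 'df-123', name: 'test-pipeline' }
])
client.list_pipelines.pipeline_id_list.first.name  # => "test-pipeline"
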
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: pipely
 version: !ruby/object:Gem::Version
-  version: 0.13.0
+  version: 0.14.0
 platform: ruby
 authors:
 - Matt Gillooly
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-05-27 00:00:00.000000000 Z
+date: 2015-06-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-graphviz
@@ -52,34 +52,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 1.0.0
-- !ruby/object:Gem::Dependency
-  name: fog
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.23'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.23'
 - !ruby/object:Gem::Dependency
   name: aws-sdk
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.48'
+        version: '2.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.48'
+        version: '2.0'
 - !ruby/object:Gem::Dependency
   name: unf
   requirement: !ruby/object:Gem::Requirement
@@ -136,6 +122,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 0.1.0
+- !ruby/object:Gem::Dependency
+  name: safe_yaml
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 1.0.4
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 1.0.4
 - !ruby/object:Gem::Dependency
   name: rspec
   requirement: !ruby/object:Gem::Requirement
@@ -259,7 +259,6 @@ files:
 - lib/pipely/deploy/client.rb
 - lib/pipely/deploy/json_definition.rb
 - lib/pipely/deploy/s3_uploader.rb
-- lib/pipely/fog_client.rb
 - lib/pipely/graph_builder.rb
 - lib/pipely/live_pipeline.rb
 - lib/pipely/options.rb
@@ -332,7 +331,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.2.2
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
 summary: Generate dependency graphs from pipeline definitions.
data/lib/pipely/fog_client.rb DELETED
@@ -1,96 +0,0 @@
-require 'fog'
-
-module Pipely
-
-  # Uses Fog to communicate with the AWS Data Pipeline service
-  class FogClient < Struct.new(:pipeline_id)
-
-    def definition
-      objects = data_pipeline.get_pipeline_definition(pipeline_id)
-
-      flattened_objects = []
-
-      objects['pipelineObjects'].each do |object|
-        h = {
-          id: object['id'],
-          name: object['name'],
-        }
-
-        object['fields'].each do |field|
-          k = field['key']
-          if field['refValue']
-            h[k] ||= []
-            h[k] << { ref: field['refValue'] }
-          else
-            h[k] = field['stringValue']
-          end
-        end
-
-        flattened_objects << h
-      end
-
-      { objects: flattened_objects }.to_json
-    end
-
-    def task_states_by_scheduled_start
-      task_states_by_scheduled_start = {}
-
-      all_instances.each do |pipeline_object|
-        component_id = status = scheduled_start = nil
-
-        pipeline_object['fields'].each do |field|
-          case field['key']
-          when '@componentParent'
-            component_id = field['refValue']
-          when '@status'
-            status = field['stringValue']
-          when '@scheduledStartTime'
-            scheduled_start = field['stringValue']
-          end
-        end
-
-        task_states_by_scheduled_start[scheduled_start] ||= {}
-        task_states_by_scheduled_start[scheduled_start][component_id] = {
-          execution_state: status
-        }
-      end
-
-      task_states_by_scheduled_start
-    end
-
-    private
-
-    def all_instances
-      c = data_pipeline
-
-      result = {}
-      pipeline_objects = []
-
-      begin
-        if result['marker']
-          marker = JSON.parse(result['marker'])['primary']
-        end
-
-        result = c.query_objects(
-          pipeline_id,
-          'INSTANCE',
-          marker: result['marker']
-        )
-
-        instance_details = c.describe_objects(pipeline_id, result['ids'])
-        pipeline_objects += instance_details['pipelineObjects']
-
-      end while (result['hasMoreResults'] && result['marker'])
-
-      pipeline_objects
-    end
-
-    def data_pipeline
-      Fog::AWS::DataPipeline.new
-    rescue ArgumentError
-      $stderr.puts "#{self.class.name}: Falling back to IAM profile"
-      Fog::AWS::DataPipeline.new(use_iam_profile: true)
-    end
-
-  end
-end