nf-conductor 0.0.1 → 0.0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
-   metadata.gz: ba7313ec020d08caef6fc2f2197023fb5654e7af
-   data.tar.gz: 57b6d1d827a77b4194d409a283ee65b6b8b209a0
+   metadata.gz: f08424a1df3af50e9fafb6b266a91adb9e030c70
+   data.tar.gz: f9f926426fec2d8d96f33d22c45b8269de0ec611
  SHA512:
-   metadata.gz: 5147f3b0c84f105a76e7725ba6859b9a8475d138fa8259ffe4207e17ee70b147ed2fa76ef384e42e6addb5f48da621f1e1699d5a35cefda8d8e5ccc045154fea
-   data.tar.gz: f0d3596b6c67e980b169e1e3f87575810acbf881dc38e757de33d64fea20dd53eed788da8d1cabb3b6435797ece3165b381b688dc9f38012f69c7ce191afac7e
+   metadata.gz: bbe0708f9ff458d5763d3132c6b063b109f29a7380047c080d1bd119b531538aea390358e2a3d2e5c513ec5915f019baec467948fe35812184bdd14a37385c15
+   data.tar.gz: f2cc8bac16811da8db285b87b58c5bb6968d8ad0829985719bec4ff6ea944f0b5b5df2a55a88d3ae168586b4fc8eb2eb013164c8483d60582e7459d1423a7566
lib/nf-conductor.rb CHANGED
@@ -1,5 +1,51 @@
- class Conductor
-   def self.conduct
-     puts 'Hey look at that Conductor is done'
+ require 'faraday'
+
+ require 'nf-conductor/http/connection'
+ require 'nf-conductor/http/model'
+ require 'nf-conductor/http/metadata'
+ require 'nf-conductor/http/tasks'
+ require 'nf-conductor/http/workflow'
+
+ require 'nf-conductor/worker/worker'
+
+ require 'nf-conductor/coordinator/coordinator'
+
+ module Conductor
+   SERVICE_URI_DEVELOPMENT = 'http://cpeworkflowdevint.dyntest.netflix.net:7001/'
+   SERVICE_URI_TESTING = 'http://cpeworkflowtestintg.dyntest.netflix.net:7001/'
+   SERVICE_URI_PRODUCTION = 'http://cpeworkflow.dynprod.netflix.net:7001/'
+
+   class << self
+     attr_accessor :config
+
+     def configure
+       self.config ||= Configuration.new
+       yield(config) if block_given?
+     end
+
+     def initialize(service_env)
+       configure if self.config.nil?
+       self.config.service_env ||= service_env
+
+       # Ensure service_uri is set in configuration
+       if self.config.service_env.nil? && self.config.service_uri.nil?
+         raise "Service information is required"
+       elsif self.config.service_uri
+         # No action required
+       elsif self.config.service_env
+         self.config.service_uri = case self.config.service_env
+                                    when 'development'
+                                      SERVICE_URI_DEVELOPMENT
+                                    when 'testing'
+                                      SERVICE_URI_TESTING
+                                    when 'production'
+                                      SERVICE_URI_PRODUCTION
+                                    end
+       end
+     end
+   end
+
+   class Configuration
+     attr_accessor :service_env, :service_uri
    end
  end
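The new top-level module adds a Configuration object that the rest of the gem reads from (Connection builds its Faraday client around Conductor.config.service_uri). A minimal usage sketch, not taken from the gem's own documentation; the localhost URI is a placeholder:

require 'nf-conductor'

# Point the client at a Conductor server. The URI here is a stand-in;
# in practice it could be one of the SERVICE_URI_* environments above
# or any reachable Conductor API root.
Conductor.configure do |config|
  config.service_env = 'development'
  config.service_uri = 'http://localhost:8080/'   # hypothetical server
end

Conductor.config.service_uri  # => "http://localhost:8080/"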
lib/nf-conductor/coordinator/coordinator.rb ADDED
@@ -0,0 +1,87 @@
+ require 'pry'
+ require 'concurrent'
+
+ module Conductor
+   class Coordinator
+     attr_accessor :workers, :polling_timers, :max_thread_count
+
+     # Create a new Coordinator for a certain set of Workers.
+     # A Worker is an implementation of the Worker Interface for a specific task.
+     # Conductor::Coordinator.new([Conductor::Worker.new('matt-1'), Conductor::Worker.new('matt-2')])
+     def initialize(workers, max_thread_count: 5)
+       self.workers = workers
+       self.polling_timers = []
+       self.max_thread_count = max_thread_count
+     end
+
+     # Creates and executes a TimerTask for each Worker that the Coordinator has been instantiated with.
+     # http://ruby-concurrency.github.io/concurrent-ruby/Concurrent/TimerTask.html
+     def run(execution_interval=15)
+       self.workers.each do |worker|
+         polling_timer = Concurrent::TimerTask.new(execution_interval: execution_interval) do
+           puts "Conductor::Coordinator : Worker (#{worker.task_type}) polling..."
+           poll_for_task(worker)
+         end
+
+         self.polling_timers << polling_timer
+         polling_timer.execute
+       end
+     end
+
+     # Shuts down all polling_timers for the Coordinator. Workers will no longer poll for new Tasks.
+     def stop
+       self.polling_timers.each do |polling_timer|
+         polling_timer.shutdown
+       end
+     end
+
+     # Executed once every x seconds based on the parent polling_timer.
+     # Batch polls the Conductor task queue for the given worker and task type,
+     # and executes as many tasks concurrently as possible, using a CachedThreadPool
+     # http://ruby-concurrency.github.io/concurrent-ruby/file.thread_pools.html
+     def poll_for_task(worker)
+       # bulk poll for task, concurrently, up to size of queue
+       tasks = [Conductor::Tasks.poll_task(worker.task_type)]
+       tasks.each do |task|
+         next if task[:status] != 200
+         process_task(worker, task[:body])
+       end
+     rescue => e
+       puts "Conductor::Coordinator : Failed to poll worker (#{worker.task_type}) with error #{e.message}"
+     end
+
+     # Acknowledges the Task in Conductor, then passes the Task to the Worker to execute.
+     # Update the Task in Conductor with status and output data.
+     def process_task(worker, task)
+       puts "Conductor::Coordinator : Processing task #{task}"
+
+       task_identifiers = {
+         taskId: task['taskId'],
+         workflowInstanceId: task['workflowInstanceId']
+       }
+
+       # Acknowledge the task, so other pollers will not be able to see the task in Conductor's queues
+       Conductor::Tasks.acknowledge_task(*(task_identifiers.values))
+
+       # Execute the task with the implementing application's worker
+       result = worker.execute(task)
+       task_body = result.merge!(task_identifiers)
+
+       # Update Conductor about the result of the task
+       update_task_with_retry(task_body, 0)
+     rescue => e
+       puts "Conductor::Coordinator : Failed to process task (#{task}) with error #{e.message}"
+       update_task_with_retry({ status: 'FAILED' }.merge(task_identifiers), 0)
+     end
+
+     def update_task_with_retry(task_body, count)
+       # Put this in a retryable block instead
+       begin
+         return if count >= 3
+         Conductor::Tasks.update_task(task_body)
+       rescue
+         update_task_with_retry(task_body, count+1)
+       end
+     end
+   end
+ end
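The Coordinator runs one TimerTask per worker, hands each 200-status task body to the worker's execute method, and pushes the merged result back through update_task. A rough sketch of how an application might plug in; EchoWorker and 'echo_task' are made-up names, and the status/outputData keys follow the Conductor task-update payload, which is an assumption here rather than something this diff defines:

# Hypothetical worker: echoes the task input back as output.
class EchoWorker < Conductor::Worker
  def execute(task)
    # The returned hash is merged with taskId/workflowInstanceId by the
    # Coordinator and posted back to Conductor.
    { status: 'COMPLETED', outputData: { echoed: task['inputData'] } }
  end
end

coordinator = Conductor::Coordinator.new([EchoWorker.new('echo_task')])
coordinator.run(30)   # poll every 30 seconds
# ...
coordinator.stop      # shut down all polling timers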
lib/nf-conductor/data/task.rb ADDED
@@ -0,0 +1,9 @@
+ module Conductor
+   class Data::Task
+     def initialize()
+
+     end
+
+     # def to_json
+   end
+ end
lib/nf-conductor/data/workflow.rb ADDED
File without changes
lib/nf-conductor/http/connection.rb ADDED
@@ -0,0 +1,55 @@
+ require 'faraday_middleware'
+
+ module Conductor
+   class Connection
+     attr_reader :connection, :args
+
+     def initialize(args = {})
+       @connection ||= Faraday.new(url: Conductor.config.service_uri) do |c|
+         c.request :json
+         c.response :json, :content_type => /\bjson$/
+         c.adapter Faraday.default_adapter
+       end
+
+       args.each do |k,v|
+         @connection.headers[k] = v
+       end
+     end
+
+     def get(url, args={})
+       puts "Conductor::Connection : GET #{url} with args #{args}"
+       connection.get do |req|
+         req.url url
+         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
+         req.body = args[:body] if args[:body]
+       end
+     end
+
+     def post(url, args={})
+       puts "Conductor::Connection : POST #{url} with args #{args}"
+       connection.post do |req|
+         req.url url
+         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
+         req.body = args[:body] if args[:body]
+       end
+     end
+
+     def put(url, args={})
+       puts "Conductor::Connection : PUT #{url} with args #{args}"
+       connection.put do |req|
+         req.url url
+         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
+         req.body = args[:body] if args[:body]
+       end
+     end
+
+     def delete(url, args={})
+       puts "Conductor::Connection : DELETE #{url} with args #{args}"
+       connection.delete do |req|
+         req.url url
+         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
+         req.body = args[:body] if args[:body]
+       end
+     end
+   end
+ end
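Connection wraps a single Faraday client rooted at the configured service_uri, with JSON request/response middleware; any constructor arguments become default headers. A sketch of calling it directly, assuming Conductor.configure has already been run; the Authorization header is purely illustrative, the gem does not require one:

conn = Conductor::Connection.new('Authorization' => 'Bearer my-token')  # header only for illustration
response = conn.get('/metadata/taskdefs')   # returns a Faraday::Response
puts response.status
puts response.body                          # parsed JSON when the Content-Type matches /\bjson$/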
lib/nf-conductor/http/metadata.rb ADDED
@@ -0,0 +1,89 @@
+ module Conductor
+   class Metadata < Model
+     def initialize(response)
+       super(response)
+     end
+
+     class << self
+       # GET /metadata/taskdefs
+       def get_all_tasks
+         response = Connection.new.get(
+           "/metadata/taskdefs"
+         )
+         Metadata.build(response)
+       end
+
+       # GET /metadata/taskdefs/{taskType}
+       def get_task(task_type)
+         response = Connection.new.get(
+           "/metadata/taskdefs/#{task_type}"
+         )
+         Metadata.build(response)
+       end
+
+       # POST /metadata/taskdefs
+       # 204 success
+       def create_tasks(task_list)
+         response = Connection.new.post(
+           "/metadata/taskdefs",
+           { body: (task_list.is_a?(Array) ? task_list : [task_list]).to_json }
+         )
+         Metadata.build(response)
+       end
+
+       # PUT /metadata/taskdefs
+       def update_task(task_definition)
+         response = Connection.new.put(
+           "/metadata/taskdefs",
+           { body: task_definition.to_json }
+         )
+         Metadata.build(response)
+       end
+
+       # DELETE /metadata/taskdefs/{taskType}
+       def delete_task(task_type)
+         response = Connection.new.delete(
+           "/metadata/taskdefs/#{task_type}"
+         )
+         Metadata.build(response)
+       end
+
+       # GET /metadata/workflow
+       def get_all_workflows
+         response = Connection.new.get(
+           "/metadata/workflow"
+         )
+         Metadata.build(response)
+       end
+
+       # GET /metadata/workflow/{name}?version=
+       # Workflow name required, version optional
+       def get_workflow(workflow_name, version=nil)
+         response = Connection.new.get(
+           "/metadata/workflow/#{workflow_name}?version=#{version}"
+         )
+         Metadata.build(response)
+       end
+
+       # POST /metadata/workflow
+       # Only one workflow
+       def create_workflow(workflow)
+         response = Connection.new.post(
+           "/metadata/workflow",
+           { body: workflow.to_json }
+         )
+         Metadata.build(response)
+       end
+
+       # PUT /metadata/workflow
+       # Multiple workflows
+       def create_or_update_workflows(workflow_list)
+         response = Connection.new.put(
+           "/metadata/taskdefs",
+           { body: (workflow_list.is_a?(Array) ? workflow_list : [workflow_list]).to_json }
+         )
+         Metadata.build(response)
+       end
+     end
+   end
+ end
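Metadata mirrors Conductor's /metadata endpoints and funnels every response through Model.build, so callers get a { status:, body: } hash back. A sketch of registering a task definition; the field names in the definition follow the Conductor metadata API and are assumptions on my part, not something this gem validates, and 'echo_task' is a placeholder:

task_def = {
  name: 'echo_task',        # hypothetical task type
  retryCount: 3,
  timeoutSeconds: 300
}

result = Conductor::Metadata.create_tasks(task_def)  # a single definition is wrapped in an array
result[:status]   # e.g. 204 on success, per the comment above
result[:body]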
lib/nf-conductor/http/model.rb ADDED
@@ -0,0 +1,18 @@
+ module Conductor
+   class Model
+     attr_reader :response
+     attr_accessor :status
+
+     def initialize(response)
+       @response = response
+     end
+
+     def self.build(response)
+       # Add error handling
+       {
+         status: response.status,
+         body: response.body
+       }
+     end
+   end
+ end
lib/nf-conductor/http/tasks.rb ADDED
@@ -0,0 +1,176 @@
+ module Conductor
+   class Tasks < Model
+     def initialize(response)
+       super(response)
+     end
+
+     class << self
+       # GET /tasks/poll/batch/{tasktype}
+       # Batch poll for a task of a certain type
+       def batch_poll_for_tasks(task_type, worker_id, domain, count, timeout)
+         response = Connection.new.get(
+           "/tasks/poll/batch/#{task_type}?workerid=#{worker_id}&domain=#{domain}&count=#{count}&timeout=#{timeout}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/in_progress/{tasktype}
+       # Get in progress tasks. The results are paginated.
+       def get_in_progress_tasks(task_type, start_key, count)
+         response = Connection.new.get(
+           "/tasks/in_progress/#{task_type}?startKey=#{start_key}&count=#{count}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/in_progress/{workflowId}/{taskRefName}
+       # Get in progress task for a given workflow id.
+       def get_in_progress_task_in_workflow(workflow_id, task_name)
+         response = Connection.new.get(
+           "/tasks/in_progress/#{workflow_id}/#{task_name}"
+         )
+         Tasks.build(response)
+       end
+
+       # POST /tasks
+       # Update a task
+       def update_task(task_body)
+         response = Connection.new.post(
+           "/tasks",
+           { body: task_body.to_json }
+         )
+         Tasks.build(response)
+       end
+
+       # POST /tasks/{taskId}/ack
+       # Ack that the Task was received
+       def acknowledge_task(task_id, worker_id=nil)
+         response = Connection.new.post(
+           "/tasks/#{task_id}/ack?workerid=#{worker_id}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/{taskId}/log
+       # Get Task Execution Logs
+       def get_task_logs(task_id)
+         response = Connection.new.get(
+           "/tasks/#{task_id}/log"
+         )
+         Tasks.build(response)
+       end
+
+       # POST /tasks/{taskId}/log
+       # Log Task Execution Details
+       def add_task_log(task_id, task_log)
+         response = Connection.new.post(
+           "/tasks/#{task_id}/log",
+           { body: task_log.to_json }
+         )
+         Tasks.build(response)
+       end
+
+       # DELETE /tasks/queue/{taskType}/{taskId}
+       # Remove Task from a Task type queue
+       def remove_task(task_type, task_id)
+         response = Connection.new.delete(
+           "/tasks/queue/#{task_type}/#{task_id}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/queue/all/verbose
+       # Get the details about each queue
+       def get_all_tasks_verbose
+         response = Connection.new.get(
+           "/tasks/queue/all/verbose"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/queue/polldata
+       # Get the last poll data for a given task type
+       def get_poll_data(task_type)
+         response = Connection.new.get(
+           "/tasks/queue/polldata?taskType=#{task_type}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/queue/polldata/all
+       # Get the last poll data for all task types
+       def get_all_poll_data
+         response = Connection.new.get(
+           "/tasks/queue/polldata/all"
+         )
+         Tasks.build(response)
+       end
+
+       # POST /tasks/queue/requeue/{taskType}
+       # Requeue pending tasks
+       def requeue_tasks(task_type)
+         response = Connection.new.post(
+           "/tasks/queue/requeue/#{task_type}"
+         )
+         Tasks.build(response)
+       end
+
+       # POST /tasks/queue/requeue
+       # Requeue pending tasks for all the running workflows
+       def requeue_all_tasks
+         response = Connection.new.post(
+           "/tasks/queue/requeue"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/queue/sizes
+       # Get Task type queue sizes
+       def get_queue_sizes(task_types)
+         task_types_query = task_types.is_a?(Array) ? task_types.to_query('taskType') : "taskType=#{task_types}"
+         response = Connection.new.get(
+           "/tasks/queue/sizes?#{task_types_query}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/poll/{tasktype}
+       # Poll for a task of a certain type
+       def poll_task(task_type, worker_id=nil, domain=nil)
+         query_string = "/tasks/poll/#{task_type}"
+         query_string += "?workerid=#{worker_id}" if worker_id
+         query_string += "&domain=#{domain}" if domain
+
+         response = Connection.new.get(query_string)
+         Tasks.build(response)
+       end
+
+       # GET /tasks/search
+       # Search for tasks based on payload and other parameters
+       def search_task(start, size, sort, free_text, query)
+         response = Connection.new.get(
+           "/tasks/search?start=#{start}&size=#{size}&sort=#{sort}&freeText=#{free_text}&query=#{query}"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/queue/all
+       # Get the details about each queue
+       def get_all_tasks
+         response = Connection.new.get(
+           "/tasks/queue/all"
+         )
+         Tasks.build(response)
+       end
+
+       # GET /tasks/{taskId}
+       # Get task by Id
+       def get_task(task_id)
+         response = Connection.new.get(
+           "/tasks/#{task_id}"
+         )
+         Tasks.build(response)
+       end
+     end
+   end
+ end
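These class methods are thin wrappers over the /tasks endpoints and are what the Coordinator uses under the hood. A sketch of the manual poll → ack → update cycle; 'echo_task', 'worker-1', and the COMPLETED payload are illustrative, and the shape of the polled body comes from the Conductor API rather than this diff:

polled = Conductor::Tasks.poll_task('echo_task', 'worker-1')
if polled[:status] == 200
  task = polled[:body]
  Conductor::Tasks.acknowledge_task(task['taskId'], 'worker-1')

  Conductor::Tasks.update_task(
    taskId: task['taskId'],
    workflowInstanceId: task['workflowInstanceId'],
    status: 'COMPLETED',
    outputData: { echoed: task['inputData'] }
  )
end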
lib/nf-conductor/http/workflow.rb ADDED
@@ -0,0 +1,141 @@
+ module Conductor
+   class Workflow < Model
+     def initialize(response)
+       super(response)
+     end
+
+     class << self
+       # POST /workflow/{name}
+       def start_workflow(name, version=nil, correlation_id=nil, body={})
+         response = Connection.new.post(
+           "/workflow/#{name}?version=#{version}&correlationId=#{correlation_id}",
+           { body: body.to_json }
+         )
+         Workflow.build(response)
+       end
+
+       # POST /workflow
+       def start_workflow_with_domains(workflow)
+         response = Connection.new.post(
+           "/workflow",
+           { body: workflow.to_json }
+         )
+         Workflow.build(response)
+       end
+
+       # GET /workflow/{name}/correlated/{correlationId}
+       def get_correlated_workflows(workflow_name, correlation_id, include_closed=false, include_tasks=false)
+         response = Connection.new.get(
+           "/workflow/#{workflow_name}/correlated/#{correlation_id}?includeClosed=#{include_closed}&includeTasks=#{include_tasks}"
+         )
+         Workflow.build(response)
+       end
+
+       # DELETE /workflow/{workflowId}
+       def terminate_workflow(workflow_id, reason)
+         response = Connection.new.delete(
+           "/workflow/#{workflow_id}?reason=#{reason}"
+         )
+         Workflow.build(response)
+       end
+
+       # GET /workflow/{workflowId}
+       def get_workflow(workflow_id, include_tasks=true)
+         response = Connection.new.get(
+           "/workflow/#{workflow_id}?includeTasks=#{include_tasks}"
+         )
+         Workflow.build(response)
+       end
+
+       # GET /workflow/running/{name}
+       def get_running_workflow(workflow_name, version, start_time, end_time)
+         response = Connection.new.get(
+           "/workflow/running/#{workflow_name}?version=#{version}&startTime=#{start_time}&endTime=#{end_time}"
+         )
+         Workflow.build(response)
+       end
+
+       # PUT /workflow/decide/{workflowId}
+       def decide_workflow(workflow_id)
+         response = Connection.new.put(
+           "/workflow/decide/#{workflow_id}"
+         )
+         Workflow.build(response)
+       end
+
+       # PUT /workflow/{workflowId}/pause
+       def pause_workflow(workflow_id)
+         response = Connection.new.put(
+           "/workflow/#{workflow_id}/pause"
+         )
+         Workflow.build(response)
+       end
+
+       # PUT /workflow/{workflowId}/resume
+       def resume_workflow(workflow_id)
+         response = Connection.new.put(
+           "/workflow/#{workflow_id}/resume"
+         )
+         Workflow.build(response)
+       end
+
+       # PUT /workflow/{workflowId}/skiptask/{taskReferenceName}
+       def skip_task_for_workflow(workflow_id, task_name, task_body)
+         response = Connection.new.put(
+           "/workflow/#{workflow_id}/skiptask/#{task_name}",
+           { body: task_body.to_json }
+         )
+         Workflow.build(response)
+       end
+
+       # POST /workflow/{workflowId}/rerun
+       def rerun_workflow(workflow_id, rerun_body)
+         response = Connection.new.post(
+           "/workflow/#{workflow_id}/rerun",
+           { body: rerun_body.to_json }
+         )
+         Workflow.build(response)
+       end
+
+       # POST /workflow/{workflowId}/restart
+       def restart_workflow(workflow_id)
+         response = Connection.new.post(
+           "/workflow/#{workflow_id}/restart"
+         )
+         Workflow.build(response)
+       end
+
+       # POST /workflow/{workflowId}/retry
+       def retry_workflow(workflow_id)
+         response = Connection.new.post(
+           "/workflow/#{workflow_id}/retry"
+         )
+         Workflow.build(response)
+       end
+
+       # DELETE /workflow/{workflowId}/remove
+       def delete_workflow(workflow_id)
+         response = Connection.new.delete(
+           "/workflow/#{workflow_id}/remove"
+         )
+         Workflow.build(response)
+       end
+
+       # POST /workflow/{workflowId}/resetcallbacks
+       def reset_callbacks_for_workflow(workflow_id)
+         response = Connection.new.post(
+           "/workflow/#{workflow_id}/resetcallbacks"
+         )
+         Workflow.build(response)
+       end
+
+       # GET /workflow/search
+       def search_workflows(start, size, sort, free_text, query)
+         response = Connection.new.get(
+           "/workflow/search?start=#{start}&size=#{size}&sort=#{sort}&freeText=#{free_text}&query=#{query}"
+         )
+         Workflow.build(response)
+       end
+     end
+   end
+ end
lib/nf-conductor/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Conductor
+   VERSION = '0.0.2'
+ end
lib/nf-conductor/worker/worker.rb ADDED
@@ -0,0 +1,17 @@
+ module Conductor
+   class Worker
+     attr_accessor :task_type
+
+     def method_not_implemented
+       raise "Conductor::Worker: Interface method must be implemented by worker subclass"
+     end
+
+     def initialize(task_type)
+       self.task_type = task_type
+     end
+
+     def execute(task)
+       method_not_implemented
+     end
+   end
+ end
metadata CHANGED
@@ -1,15 +1,85 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: nf-conductor
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.1
4
+ version: 0.0.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Matthew Rials
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2017-10-17 00:00:00.000000000 Z
12
- dependencies: []
11
+ date: 2017-11-04 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: faraday
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: 0.13.1
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: 0.13.1
27
+ - !ruby/object:Gem::Dependency
28
+ name: faraday_middleware
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: 0.12.2
34
+ type: :runtime
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: 0.12.2
41
+ - !ruby/object:Gem::Dependency
42
+ name: json
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: '1.8'
48
+ type: :runtime
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: '1.8'
55
+ - !ruby/object:Gem::Dependency
56
+ name: pry
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - "~>"
60
+ - !ruby/object:Gem::Version
61
+ version: '0.11'
62
+ type: :runtime
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - "~>"
67
+ - !ruby/object:Gem::Version
68
+ version: '0.11'
69
+ - !ruby/object:Gem::Dependency
70
+ name: concurrent-ruby
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - "~>"
74
+ - !ruby/object:Gem::Version
75
+ version: '1.0'
76
+ type: :runtime
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - "~>"
81
+ - !ruby/object:Gem::Version
82
+ version: '1.0'
13
83
  description: Ruby client containing a task worker interface, worker coordinator, and
14
84
  HTTP client for Netflix's Conductor API
15
85
  email: mrials@netflix.com
@@ -18,6 +88,16 @@ extensions: []
18
88
  extra_rdoc_files: []
19
89
  files:
20
90
  - lib/nf-conductor.rb
91
+ - lib/nf-conductor/coordinator/coordinator.rb
92
+ - lib/nf-conductor/data/task.rb
93
+ - lib/nf-conductor/data/workflow.rb
94
+ - lib/nf-conductor/http/connection.rb
95
+ - lib/nf-conductor/http/metadata.rb
96
+ - lib/nf-conductor/http/model.rb
97
+ - lib/nf-conductor/http/tasks.rb
98
+ - lib/nf-conductor/http/workflow.rb
99
+ - lib/nf-conductor/version.rb
100
+ - lib/nf-conductor/worker/worker.rb
21
101
  homepage: http://rubygems.org/gems/nf-conductor
22
102
  licenses:
23
103
  - MIT