nf-conductor 0.0.2 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/nf-conductor.rb +3 -2
- data/lib/nf-conductor/coordinator/coordinator.rb +8 -8
- data/lib/nf-conductor/http/connection.rb +13 -5
- data/lib/nf-conductor/http/metadata.rb +18 -19
- data/lib/nf-conductor/http/model.rb +0 -1
- data/lib/nf-conductor/http/tasks.rb +43 -52
- data/lib/nf-conductor/http/workflow.rb +53 -39
- data/lib/nf-conductor/version.rb +1 -1
- metadata +2 -4
- data/lib/nf-conductor/data/task.rb +0 -9
- data/lib/nf-conductor/data/workflow.rb +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b038c08555e695b14aaf16be7fae2124eb942580
+  data.tar.gz: 503f51d731b93c051b4f7538536cddab3dac3540
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5c46a8f25124fd8a7afc0a41d31b6e59c4d83ce9983becce3d5f3f17fb55314adc1ee3c9d5c994d39ef5f262d8a023479a0a338e45a24324bd556d8d5a7c689f
+  data.tar.gz: ce23b04bcea5c2b0900f4dd81e0662c23df9c75db336cd8f9ec2d056921aaafc3dc74812e51b6cae1a1d713c528d927b1dc44252f5a2efd7467c86065c8b77f2
data/lib/nf-conductor.rb
CHANGED
@@ -23,9 +23,10 @@ module Conductor
     yield(config) if block_given?
   end
 
-  def initialize(service_env)
+  def initialize(service_env, verbose: false)
     configure if self.config.nil?
     self.config.service_env ||= service_env
+    self.config.verbose ||= verbose
 
     # Ensure service_uri is set in configuration
     if self.config.service_env.nil? && self.config.service_uri.nil?
@@ -46,6 +47,6 @@ module Conductor
   end
 
   class Configuration
-    attr_accessor :service_env, :service_uri
+    attr_accessor :service_env, :service_uri, :verbose
   end
 end
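The change above threads a new verbose flag through the gem's configuration. A minimal usage sketch, assuming the configure-with-block pattern that the `yield(config) if block_given?` context line implies; the service URI value is a placeholder:

Conductor.configure do |config|
  config.service_uri = 'http://localhost:8080/api'   # placeholder endpoint
  config.verbose     = true                          # new attr_accessor on Configuration
end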
data/lib/nf-conductor/coordinator/coordinator.rb
CHANGED
@@ -19,7 +19,7 @@ module Conductor
     def run(execution_interval=15)
       self.workers.each do |worker|
         polling_timer = Concurrent::TimerTask.new(execution_interval: execution_interval) do
-
+          Rails.logger.info("Conductor::Coordinator : Worker (#{worker.task_type}) polling...") if Conductor.config.verbose
           poll_for_task(worker)
         end
 
@@ -40,28 +40,28 @@ module Conductor
     # and executes as many tasks concurrently as possible, using a CachedThreadPool
     # http://ruby-concurrency.github.io/concurrent-ruby/file.thread_pools.html
     def poll_for_task(worker)
-      # bulk poll for task, concurrently, up to size of queue
+      # TODO bulk poll for task, concurrently, up to size of queue
       tasks = [Conductor::Tasks.poll_task(worker.task_type)]
       tasks.each do |task|
        next if task[:status] != 200
        process_task(worker, task[:body])
      end
     rescue => e
-
+      Rails.logger.debug("Conductor::Coordinator : Failed to poll worker (#{worker.task_type}) with error #{e.message}")
     end
 
     # Acknowledges the Task in Conductor, then passes the Task to the Worker to execute.
     # Update the Task in Conductor with status and output data.
     def process_task(worker, task)
-
+      Rails.logger.info("Conductor::Coordinator : Processing task #{task}") if Conductor.config.verbose
 
       task_identifiers = {
-        taskId: task[
-        workflowInstanceId: task[
+        taskId: task[:taskId],
+        workflowInstanceId: task[:workflowInstanceId]
       }
 
       # Acknowledge the task, so other pollers will not be able to see the task in Conductor's queues
-      Conductor::Tasks.acknowledge_task(
+      Conductor::Tasks.acknowledge_task(task[:taskId])
 
       # Execute the task with the implementing application's worker
       result = worker.execute(task)
@@ -70,7 +70,7 @@ module Conductor
       # Update Conductor about the result of the task
       update_task_with_retry(task_body, 0)
     rescue => e
-
+      Rails.logger.debug("Conductor::Coordinator : Failed to process task (#{task}) with error #{e.message} at location #{e.backtrace}")
       update_task_with_retry({ status: 'FAILED' }.merge(task_identifiers), 0)
     end
 
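To make the coordinator changes concrete, here is a hedged sketch of the worker contract they rely on. Only task_type, execute(task), run, and the symbol-keyed task hash appear in the diff itself; the Coordinator constructor and the return shape of execute are assumptions for illustration:

# Hypothetical worker consumed by the coordinator's polling loop.
class EmailWorker
  def task_type
    'send_email'   # queue name polled via Conductor::Tasks.poll_task
  end

  def execute(task)
    # task arrives as a symbol-keyed hash (see the symbolize_names change in connection.rb below)
    { status: 'COMPLETED', outputData: { to: task[:inputData][:to] } }
  end
end

coordinator = Conductor::Coordinator.new([EmailWorker.new])   # constructor shape assumed
coordinator.run(15)                                           # poll every 15 seconds (the default)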
data/lib/nf-conductor/http/connection.rb
CHANGED
@@ -7,7 +7,7 @@ module Conductor
     def initialize(args = {})
       @connection ||= Faraday.new(url: Conductor.config.service_uri) do |c|
         c.request :json
-        c.response :json, :
+        c.response :json, content_type: /\bjson$/, parser_options: { symbolize_names: true }
         c.adapter Faraday.default_adapter
       end
 
@@ -17,39 +17,47 @@ module Conductor
     end
 
     def get(url, args={})
-
+      Rails.logger.info("Conductor::Connection : GET #{url} with args #{args}") if Conductor.config.verbose
       connection.get do |req|
         req.url url
         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
         req.body = args[:body] if args[:body]
       end
+    rescue Faraday::ParsingError
+      Struct.new(:status, :body).new(500, 'Conductor::Connection : Faraday failed to properly parse response.')
     end
 
     def post(url, args={})
-
+      Rails.logger.info("Conductor::Connection : POST #{url} with args #{args}") if Conductor.config.verbose
       connection.post do |req|
         req.url url
         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
         req.body = args[:body] if args[:body]
       end
+    rescue Faraday::ParsingError
+      Struct.new(:status, :body).new(500, 'Conductor::Connection : Faraday failed to properly parse response.')
     end
 
     def put(url, args={})
-
+      Rails.logger.info("Conductor::Connection : PUT #{url} with args #{args}") if Conductor.config.verbose
       connection.put do |req|
         req.url url
         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
         req.body = args[:body] if args[:body]
       end
+    rescue Faraday::ParsingError
+      Struct.new(:status, :body).new(500, 'Conductor::Connection : Faraday failed to properly parse response.')
     end
 
     def delete(url, args={})
-
+      Rails.logger.info("Conductor::Connection : DELETE #{url} with args #{args}") if Conductor.config.verbose
       connection.delete do |req|
         req.url url
         req.headers['Content-Type'] = ( args[:headers] && args[:headers]['Content-Type'] || 'application/json' )
         req.body = args[:body] if args[:body]
       end
+    rescue Faraday::ParsingError
+      Struct.new(:status, :body).new(500, 'Conductor::Connection : Faraday failed to properly parse response.')
     end
   end
 end
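With the rescue clauses above, a response body that Faraday cannot parse as JSON no longer raises out of the HTTP helpers; callers get back an object that responds to status and body like a normal response. A hedged sketch of treating both cases uniformly (the endpoint is just one of the paths from this diff):

response = Conductor::Connection.new.get('/tasks/queue/all')
if response.status == 200
  puts response.body    # symbol-keyed data, thanks to symbolize_names
else
  # either an HTTP error or the Struct(:status, :body) fallback built on Faraday::ParsingError
  warn "Conductor request failed: #{response.status} #{response.body}"
end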
data/lib/nf-conductor/http/metadata.rb
CHANGED
@@ -6,22 +6,21 @@ module Conductor
 
   class << self
     # GET /metadata/taskdefs
+    # Gets all task definition
     def get_all_tasks
-      response = Connection.new.get(
-        "/metadata/taskdefs"
-      )
+      response = Connection.new.get("/metadata/taskdefs")
       Metadata.build(response)
     end
 
     # GET /metadata/taskdefs/{taskType}
+    # Gets the task definition
     def get_task(task_type)
-      response = Connection.new.get(
-        "/metadata/taskdefs/#{task_type}"
-      )
+      response = Connection.new.get("/metadata/taskdefs/#{task_type}")
       Metadata.build(response)
     end
 
     # POST /metadata/taskdefs
+    # Create new task definition(s)
     # 204 success
     def create_tasks(task_list)
       response = Connection.new.post(
@@ -32,6 +31,7 @@ module Conductor
     end
 
     # PUT /metadata/taskdefs
+    # Update an existing task
     def update_task(task_definition)
       response = Connection.new.put(
         "/metadata/taskdefs",
@@ -41,32 +41,31 @@ module Conductor
     end
 
     # DELETE /metadata/taskdefs/{taskType}
+    # Remove a task definition
     def delete_task(task_type)
-      response = Connection.new.delete(
-        "/metadata/taskdefs/#{task_type}"
-      )
+      response = Connection.new.delete("/metadata/taskdefs/#{task_type}")
       Metadata.build(response)
     end
 
     # GET /metadata/workflow
+    # Retrieves all workflow definition along with blueprint
     def get_all_workflows
-      response = Connection.new.get(
-        "/metadata/workflow"
-      )
+      response = Connection.new.get("/metadata/workflow")
       Metadata.build(response)
     end
 
     # GET /metadata/workflow/{name}?version=
-    #
-    def get_workflow(workflow_name, version
-
-
-
+    # Retrieves workflow definition along with blueprint
+    def get_workflow(workflow_name, version: nil)
+      query_string = "/metadata/workflow/#{workflow_name}?"
+      query_string += "version=#{version}" if version
+
+      response = Connection.new.get(query_string)
       Metadata.build(response)
     end
 
     # POST /metadata/workflow
-    #
+    # Create a new workflow definition
     def create_workflow(workflow)
       response = Connection.new.post(
         "/metadata/workflow",
@@ -76,7 +75,7 @@ module Conductor
     end
 
     # PUT /metadata/workflow
-    #
+    # Create or update workflow definition
     def create_or_update_workflows(workflow_list)
       response = Connection.new.put(
         "/metadata/taskdefs",
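The reworked get_workflow now builds its query string from an optional keyword argument instead of a positional version. A hedged usage sketch; the workflow name is a placeholder:

Conductor::Metadata.get_workflow('payment_flow')              # GET /metadata/workflow/payment_flow?
Conductor::Metadata.get_workflow('payment_flow', version: 2)  # GET /metadata/workflow/payment_flow?version=2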
data/lib/nf-conductor/http/tasks.rb
CHANGED
@@ -7,28 +7,32 @@ module Conductor
   class << self
     # GET /tasks/poll/batch/{tasktype}
     # batch Poll for a task of a certain type
-    def batch_poll_for_tasks(task_type, worker_id, domain, count, timeout)
-
-
-
+    def batch_poll_for_tasks(task_type, worker_id: nil, domain: nil, count: nil, timeout: nil)
+      query_string = "/tasks/poll/batch/#{task_type}?"
+      query_string += "workerid=#{worker_id}" if worker_id
+      query_string += "&domain=#{domain}" if domain
+      query_string += "&count=#{count}" if count
+      query_string += "&timeout=#{timeout}" if timeout
+
+      response = Connection.new.get(query_string)
       Tasks.build(response)
     end
 
     # GET /tasks/in_progress/{tasktype}
     # Get in progress tasks. The results are paginated.
-    def get_in_progress_tasks(task_type, start_key, count)
-
-
-
+    def get_in_progress_tasks(task_type, start_key: nil, count: nil)
+      query_string = "/tasks/in_progress/#{task_type}?"
+      query_string += "startKey=#{start_key}" if start_key
+      query_string += "&count=#{count}" if count
+
+      response = Connection.new.get(query_string)
       Tasks.build(response)
     end
 
     # GET /tasks/in_progress/{workflowId}/{taskRefName}
     # Get in progress task for a given workflow id.
     def get_in_progress_task_in_workflow(workflow_id, task_name)
-      response = Connection.new.get(
-        "/tasks/in_progress/#{workflow_id}/#{task_name}"
-      )
+      response = Connection.new.get("/tasks/in_progress/#{workflow_id}/#{task_name}")
       Tasks.build(response)
     end
 
@@ -44,19 +48,18 @@ module Conductor
 
     # POST /tasks/{taskId}/ack
     # Ack Task is recieved
-    def acknowledge_task(task_id, worker_id
-
-
-
+    def acknowledge_task(task_id, worker_id: nil)
+      query_string = "/tasks/#{task_id}/ack?"
+      query_string += "workerid=#{worker_id}" if worker_id
+
+      response = Connection.new.post(query_string)
       Tasks.build(response)
     end
 
     # GET /tasks/{taskId}/log
     # Get Task Execution Logs
     def get_task_logs(task_id)
-      response = Connection.new.get(
-        "/tasks/#{task_id}/log"
-      )
+      response = Connection.new.get("/tasks/#{task_id}/log")
       Tasks.build(response)
     end
 
@@ -73,54 +76,42 @@ module Conductor
     # DELETE /tasks/queue/{taskType}/{taskId}
     # Remove Task from a Task type queue
     def remove_task(task_type, task_id)
-      response = Connection.new.delete(
-        "/tasks/queue/#{task_type}/#{task_id}"
-      )
+      response = Connection.new.delete("/tasks/queue/#{task_type}/#{task_id}")
       Tasks.build(response)
     end
 
     # GET /tasks/queue/all/verbose
     # Get the details about each queue
     def get_all_tasks_verbose
-      response = Connection.new.get(
-        "/tasks/queue/all/verbose"
-      )
+      response = Connection.new.get("/tasks/queue/all/verbose")
       Tasks.build(response)
     end
 
     # GET /tasks/queue/polldata
     # Get the last poll data for a given task type
     def get_poll_data(task_type)
-      response = Connection.new.get(
-        "/tasks/queue/polldata?taskType=#{task_type}"
-      )
+      response = Connection.new.get("/tasks/queue/polldata?taskType=#{task_type}")
       Tasks.build(response)
     end
 
     # GET /tasks/queue/polldata/all
     # Get the last poll data for a given task type
     def get_all_poll_data
-      response = Connection.new.get(
-        "/tasks/queue/polldata/all"
-      )
+      response = Connection.new.get("/tasks/queue/polldata/all")
       Tasks.build(response)
     end
 
     # POST /tasks/queue/requeue/{taskType}
     # Requeue pending tasks
     def requeue_tasks(task_type)
-      response = Connection.new.post(
-        "/tasks/queue/requeue/#{task_type}"
-      )
+      response = Connection.new.post("/tasks/queue/requeue/#{task_type}")
       Tasks.build(response)
     end
 
     # POST /tasks/queue/requeue
     # Requeue pending tasks for all the running workflows
     def requeue_all_tasks
-      response = Connection.new.post(
-        "/tasks/queue/requeue"
-      )
+      response = Connection.new.post("/tasks/queue/requeue")
       Tasks.build(response)
     end
 
@@ -128,17 +119,16 @@ module Conductor
     # Get Task type queue sizes
     def get_queue_sizes(task_types)
       task_types_query = task_types.is_a?(Array) ? task_types.to_query('taskType') : "taskType=#{taskType}"
-
-
-      )
+
+      response = Connection.new.get("/tasks/queue/sizes?#{task_types_query}")
       Tasks.build(response)
     end
 
     # GET /tasks/poll/{tasktype}
     # Poll for a task of a certain type
-    def poll_task(task_type, worker_id
-      query_string = "/tasks/poll/#{task_type}"
-      query_string += "
+    def poll_task(task_type, worker_id: nil, domain: nil)
+      query_string = "/tasks/poll/#{task_type}?"
+      query_string += "workerid=#{worker_id}" if worker_id
       query_string += "&domain=#{domain}" if domain
 
       response = Connection.new.get(query_string)
@@ -147,28 +137,29 @@ module Conductor
 
     # GET /tasks/search
     # Search for tasks based in payload and other parameters
-    def search_task(start, size, sort, free_text, query)
-
-
-
+    def search_task(start: nil, size: nil, sort: nil, free_text: nil, query: nil)
+      query_string = "/tasks/search?"
+      query_string += "start=#{start}" if start
+      query_string += "&size=#{size}" if size
+      query_string += "&sort=#{sort}" if sort
+      query_string += "&freeText=#{free_text}" if free_text
+      query_string += "&query=#{query}" if query
+
+      response = Connection.new.get(query_string)
       Tasks.build(response)
     end
 
     # GET /tasks/queue/all
     # Get the details about each queue
     def get_all_tasks
-      response = Connection.new.get(
-        "/tasks/queue/all"
-      )
+      response = Connection.new.get("/tasks/queue/all")
       Tasks.build(response)
     end
 
     # GET /tasks/{taskId}
     # Get task by Id
     def get_task(task_id)
-      response = Connection.new.get(
-        "/tasks/#{task_id}"
-      )
+      response = Connection.new.get("/tasks/#{task_id}")
       Tasks.build(response)
     end
   end
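The Tasks client now takes optional query parameters as keyword arguments and appends them only when present. A hedged sketch of the polling round-trip, using the status/body hash shape the coordinator relies on; worker id, domain, and queue name are placeholders:

task = Conductor::Tasks.poll_task('send_email', worker_id: 'worker-1', domain: 'billing')
if task[:status] == 200
  Conductor::Tasks.acknowledge_task(task[:body][:taskId], worker_id: 'worker-1')
end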
data/lib/nf-conductor/http/workflow.rb
CHANGED
@@ -6,15 +6,21 @@ module Conductor
 
   class << self
     # POST /workflow/{name}
-
+    # Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking
+    def start_workflow(name, version: nil, correlation_id: nil, body: {})
+      query_string = "/workflow/#{name}?"
+      query_string += "version=#{version}" if version
+      query_string += "&correlationId=#{correlation_id}" if correlation_id
+
       response = Connection.new.post(
-
+        query_string,
         { body: body.to_json }
       )
       Workflow.build(response)
     end
 
     # POST /workflow
+    # Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain
     def start_workflow_with_domains(workflow)
       response = Connection.new.post(
         "/workflow",
@@ -24,7 +30,8 @@ module Conductor
     end
 
     # GET /workflow/{name}/correlated/{correlationId}
-
+    # Lists workflows for the given correlation id
+    def get_correlated_workflows(workflow_name, correlation_id, include_closed: false, include_tasks: false)
       response = Connection.new.get(
         "/workflow/#{workflow_name}/correlated/#{correlation_id}?includeClosed=#{include_closed}&includeTasks=#{include_tasks}"
       )
@@ -32,15 +39,18 @@ module Conductor
     end
 
     # DELETE /workflow/{workflowId}
-
-
-
-
+    # Terminate workflow execution
+    def terminate_workflow(workflow_id, reason: nil)
+      query_string = "/workflow/#{workflow_id}?"
+      query_string += "reason=#{reason}" if reason
+
+      response = Connection.new.delete(query_string)
       Workflow.build(response)
     end
 
     # GET /workflow/{workflowId}
-
+    # Gets the workflow by workflow id
+    def get_workflow(workflow_id, include_tasks: true)
       response = Connection.new.get(
         "/workflow/#{workflow_id}?includeTasks=#{include_tasks}"
       )
@@ -48,39 +58,41 @@ module Conductor
     end
 
     # GET /workflow/running/{name}
-
-
-
-
+    # Retrieve all the running workflows
+    def get_running_workflow(workflow_name, version: nil, start_time: nil, end_time: nil)
+      query_string = "/workflow/running/#{workflow_name}?"
+      query_string += "version=#{version}" if version
+      query_string += "&startTime=#{start_time}" if start_time
+      query_string += "&endTime=#{end_time}" if end_time
+
+      response = Connection.new.get(query_string)
       Workflow.build(response)
     end
 
     # PUT /workflow/decide/{workflowId}
+    # Starts the decision task for a workflow
     def decide_workflow(workflow_id)
-      response = Connection.new.put(
-        "/workflow/decide/#{workflow_id}"
-      )
+      response = Connection.new.put("/workflow/decide/#{workflow_id}")
       Workflow.build(response)
     end
 
     # PUT /workflow/{workflowId}/pause
+    # Pauses the workflow
     def pause_workflow(workflow_id)
-      response = Connection.new.put(
-        "/workflow/#{workflow_id}/pause"
-      )
+      response = Connection.new.put("/workflow/#{workflow_id}/pause")
       Workflow.build(response)
     end
 
     # PUT /workflow/{workflowId}/resume
+    # Resumes the workflow
     def resume_workflow(workflow_id)
-      response = Connection.new.put(
-        "/workflow/#{workflow_id}/resume"
-      )
+      response = Connection.new.put("/workflow/#{workflow_id}/resume")
       Workflow.build(response)
     end
 
     # PUT /workflow/{workflowId}/skiptask/{taskReferenceName}
-
+    # Skips a given task from a current running workflow
+    def skip_task_for_workflow(workflow_id, task_name, task_body: {})
       response = Connection.new.put(
         "/workflow/#{workflow_id}/skiptask/#{task_name}",
         { body: task_body.to_json }
@@ -89,7 +101,8 @@ module Conductor
     end
 
     # POST /workflow/{workflowId}/rerun
-
+    # Reruns the workflow from a specific task
+    def rerun_workflow(workflow_id, rerun_body: {})
       response = Connection.new.post(
         "/workflow/#{workflow_id}/rerun",
         { body: rerun_body.to_json }
@@ -98,42 +111,43 @@ module Conductor
     end
 
     # POST /workflow/{workflowId}/restart
+    # Restarts a completed workflow
     def restart_workflow(workflow_id)
-      response = Connection.new.post(
-        "/workflow/#{workflow_id}/restart"
-      )
+      response = Connection.new.post("/workflow/#{workflow_id}/restart")
       Workflow.build(response)
     end
 
     # POST /workflow/{workflowId}/retry
+    # Retries the last failed task
     def retry_workflow(workflow_id)
-      response = Connection.new.post(
-        "/workflow/#{workflow_id}/retry"
-      )
+      response = Connection.new.post("/workflow/#{workflow_id}/retry")
       Workflow.build(response)
     end
 
     # DELETE /workflow/{workflowId}/remove
+    # Removes the workflow from the system
     def delete_workflow(workflow_id)
-      response = Connection.new.delete(
-        "/workflow/#{workflow_id}/remove"
-      )
+      response = Connection.new.delete("/workflow/#{workflow_id}/remove")
       Workflow.build(response)
     end
 
     # POST /workflow/{workflowId}/resetcallbacks
+    # Resets callback times of all in_progress tasks to 0
     def reset_callbacks_for_workflow(workflow_id)
-      response = Connection.new.post(
-        "/workflow/#{workflow_id}/resetcallbacks"
-      )
+      response = Connection.new.post("/workflow/#{workflow_id}/resetcallbacks")
       Workflow.build(response)
     end
 
     # GET /workflow/search
-    def search_workflows(start, size, sort, free_text, query)
-
-
-
+    def search_workflows(start: nil, size: nil, sort: nil, free_text: nil, query: nil)
+      query_string = "/workflow/search?"
+      query_string += "start=#{start}" if start
+      query_string += "&size=#{size}" if size
+      query_string += "&sort=#{sort}" if sort
+      query_string += "&freeText=#{free_text}" if free_text
+      query_string += "&query=#{query}" if query
+
+      response = Connection.new.get(query_string)
       Workflow.build(response)
     end
   end
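A hedged sketch of the new start/terminate helpers, assuming Workflow.build returns the same status/body hash shape used elsewhere in the gem; workflow name, version, and IDs are placeholders:

started = Conductor::Workflow.start_workflow('payment_flow',
                                             version: 2,
                                             correlation_id: 'order-42',
                                             body: { orderId: 42 })
workflow_id = started[:body]   # POST /workflow/{name} returns the new workflow instance id

Conductor::Workflow.terminate_workflow(workflow_id, reason: 'duplicate order')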
data/lib/nf-conductor/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: nf-conductor
 version: !ruby/object:Gem::Version
-  version: 0.0.2
+  version: 0.0.6
 platform: ruby
 authors:
 - Matthew Rials
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-11-
+date: 2017-11-10 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: faraday
@@ -89,8 +89,6 @@ extra_rdoc_files: []
 files:
 - lib/nf-conductor.rb
 - lib/nf-conductor/coordinator/coordinator.rb
-- lib/nf-conductor/data/task.rb
-- lib/nf-conductor/data/workflow.rb
 - lib/nf-conductor/http/connection.rb
 - lib/nf-conductor/http/metadata.rb
 - lib/nf-conductor/http/model.rb
data/lib/nf-conductor/data/workflow.rb
File without changes