rbbt-util 6.0.3 → 6.0.5
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/LICENSE +1 -1
- data/bin/rbbt +1 -1
- data/bin/rbbt_exec.rb +2 -2
- data/lib/rbbt/hpc/batch.rb +1 -1
- data/lib/rbbt/knowledge_base/enrichment.rb +9 -9
- data/lib/rbbt/knowledge_base/entity.rb +128 -128
- data/lib/rbbt/knowledge_base/query.rb +94 -94
- data/lib/rbbt/knowledge_base/registry.rb +189 -189
- data/lib/rbbt/knowledge_base/syndicate.rb +26 -26
- data/lib/rbbt/knowledge_base/traverse.rb +315 -315
- data/lib/rbbt/knowledge_base.rb +37 -34
- data/lib/rbbt/tsv/excel.rb +1 -1
- data/lib/rbbt/util/filecache.rb +1 -1
- data/lib/rbbt/util/migrate.rb +4 -4
- data/lib/rbbt/util/misc/system.rb +92 -90
- data/lib/rbbt/workflow/refactor/export.rb +66 -66
- data/lib/rbbt/workflow/refactor/recursive.rb +64 -64
- data/lib/rbbt/workflow/refactor/task_info.rb +66 -65
- data/lib/rbbt/workflow/refactor.rb +0 -3
- data/lib/rbbt/workflow/remote_workflow/driver/rest.rb +1 -2
- data/lib/rbbt/workflow/remote_workflow/driver/ssh.rb +11 -11
- data/lib/rbbt/workflow/remote_workflow/remote_step.rb +1 -1
- data/lib/rbbt/workflow/remote_workflow.rb +2 -1
- data/lib/rbbt-util.rb +2 -2
- data/lib/rbbt.rb +1 -1
- data/python/rbbt/'/Users/miki/config/tmp/undodir'/%Users%miki%git%rbbt-util%python%rbbt%__init__.py +0 -0
- data/python/rbbt/__init__.py +78 -4
- data/python/rbbt/workflow/remote.py +104 -0
- data/python/rbbt/workflow.py +64 -0
- data/python/test.py +10 -0
- data/share/rbbt_commands/hpc/list +1 -1
- data/share/rbbt_commands/lsf/list +1 -1
- data/share/rbbt_commands/pbs/list +1 -1
- data/share/rbbt_commands/resource/find +1 -1
- data/share/rbbt_commands/slurm/list +1 -1
- data/share/rbbt_commands/system/status +2 -2
- data/share/rbbt_commands/workflow/info +1 -1
- data/share/rbbt_commands/workflow/retry +43 -0
- data/share/rbbt_commands/workflow/server +1 -1
- data/share/rbbt_commands/workflow/task +4 -2
- metadata +8 -6
data/lib/rbbt/workflow/refactor/task_info.rb
CHANGED
@@ -1,65 +1,66 @@
-require_relative 'export'
-require_relative 'recursive'
-module Workflow
-  def task_info(name)
-    name = name.to_sym
-    task = tasks[name]
-    raise "No '#{name}' task in '#{self.to_s}' Workflow" if task.nil?
-    id = File.join(self.to_s, name.to_s)
-    @task_info ||= {}
-    @task_info[id] ||= begin
-      description = task.description
-      result_description = task.result_description
-      returns = task.returns
-
-      inputs = rec_inputs(name).uniq
-      input_types = rec_input_types(name)
-      input_descriptions = rec_input_descriptions(name)
-      input_use = rec_input_use(name)
-      input_defaults = rec_input_defaults(name)
-      input_options = rec_input_options(name)
-      extension = task.extension
-      export = case
-               when (synchronous_exports.include?(name.to_sym) or synchronous_exports.include?(name.to_s))
-                 :synchronous
-               when (asynchronous_exports.include?(name.to_sym) or asynchronous_exports.include?(name.to_s))
-                 :asynchronous
-               when (exec_exports.include?(name.to_sym) or exec_exports.include?(name.to_s))
-                 :exec
-               when (stream_exports.include?(name.to_sym) or stream_exports.include?(name.to_s))
-                 :stream
-               else
-                 :none
-               end
-
-      dependencies = tasks[name].deps
-      { :id => id,
-        :description => description,
-        :export => export,
-        :inputs => inputs,
-        :input_types => input_types,
-        :input_descriptions => input_descriptions,
-        :input_defaults => input_defaults,
-        :input_options => input_options,
-        :input_use => input_use,
-        :returns => returns,
-        #:result_type => result_type,
-        #:result_description => result_description,
-        :dependencies => dependencies,
-        :extension => extension
-      }
-    end
-  end
-end
-
-module Task
-  def result_description
-    ""
-  end
-
-  def result_type
-    @returns
-  end
-
-end
-
+#require_relative 'export'
+#require_relative 'recursive'
+#module Workflow
+#  def task_info(name)
+#    name = name.to_sym
+#    task = tasks[name]
+#    raise "No '#{name}' task in '#{self.to_s}' Workflow" if task.nil?
+#    id = File.join(self.to_s, name.to_s)
+#    @task_info ||= {}
+#    @task_info[id] ||= begin
+#      description = task.description
+#      result_description = task.result_description
+#      returns = task.returns
+#
+#      inputs = rec_inputs(name).uniq
+#      input_types = rec_input_types(name)
+#      input_descriptions = rec_input_descriptions(name)
+#      input_use = rec_input_use(name)
+#      input_defaults = rec_input_defaults(name)
+#      input_options = rec_input_options(name)
+#      extension = task.extension
+#      export = case
+#               when (synchronous_exports.include?(name.to_sym) or synchronous_exports.include?(name.to_s))
+#                 :synchronous
+#               when (asynchronous_exports.include?(name.to_sym) or asynchronous_exports.include?(name.to_s))
+#                 :asynchronous
+#               when (exec_exports.include?(name.to_sym) or exec_exports.include?(name.to_s))
+#                 :exec
+#               when (stream_exports.include?(name.to_sym) or stream_exports.include?(name.to_s))
+#                 :stream
+#               else
+#                 :none
+#               end
+#
+#      dependencies = tasks[name].deps
+#      { :id => id,
+#        :description => description,
+#        :export => export,
+#        :inputs => inputs,
+#        :input_types => input_types,
+#        :input_descriptions => input_descriptions,
+#        :input_defaults => input_defaults,
+#        :input_options => input_options,
+#        :input_use => input_use,
+#        :returns => returns,
+#        #:result_type => result_type,
+#        #:result_description => result_description,
+#        :dependencies => dependencies,
+#        :extension => extension
+#      }
+#    end
+#  end
+#end
+#
+#module Task
+#  def result_description
+#    ""
+#  end
+#
+#  def result_type
+#    @returns
+#  end
+#
+#end
+#
+#
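Note that 6.0.5 comments the whole file out, but the dead body still documents the task_info payload: the hash now carries :returns where clients previously read :result_type (matching the removal in rest.rb below). A minimal Python sketch of fetching that payload from a workflow server over REST; the localhost URL and the Baking workflow are only the examples used elsewhere in this release, not fixed names:

    import requests, json

    # Hypothetical local server started with `rbbt workflow server`
    url = 'http://localhost:1900/Baking/bake_muffin_tray/info'
    info = json.loads(requests.get(url, params={'_format': 'json'}).content)
    print(info.get('returns'))   # 6.0.5 field; older clients read 'result_type'
    print(info['inputs'], info['export'])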
data/lib/rbbt/workflow/remote_workflow/driver/rest.rb
CHANGED
@@ -146,7 +146,6 @@ class RemoteWorkflow
       task_info = RemoteWorkflow::REST.get_json(File.join(url, task.to_s, 'info'))
       task_info = RemoteWorkflow.fix_hash(task_info)

-      task_info[:result_type] = task_info[:result_type].to_sym
       task_info[:export] = task_info[:export].to_sym
       task_info[:input_types] = RemoteWorkflow.fix_hash(task_info[:input_types], true)
       task_info[:inputs] = task_info[:inputs].collect{|input| input.to_sym }
@@ -248,7 +247,6 @@ class RemoteWorkflow
     end
   end

-
   def task_info(task)
     RemoteWorkflow::REST.task_info(url, task)
   end
@@ -260,6 +258,7 @@ class RemoteWorkflow
     @exec_exports = (task_exports["exec"] || []).collect{|task| task.to_sym }
     @stream_exports = (task_exports["stream"] || []).collect{|task| task.to_sym }
     @can_stream = task_exports["can_stream"]
+    (@asynchronous_exports + @synchronous_exports + @exec_exports).uniq.each do |e| tasks[e] end
   end
 end
 end
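The last hunk above eagerly instantiates every exported task as soon as the export lists are parsed. The export document the driver consumes is plain JSON; a sketch of reading it directly, under the same local-server assumption as before:

    import requests, json

    exports = json.loads(requests.get('http://localhost:1900/Baking',
                                      params={'_format': 'json'}).content)
    for kind in ('asynchronous', 'synchronous', 'exec', 'stream'):
        print(kind, exports.get(kind, []))   # mirrors the @*_exports ivars above
    print('can_stream', exports.get('can_stream'))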
data/lib/rbbt/workflow/remote_workflow/driver/ssh.rb
CHANGED
@@ -94,7 +94,7 @@ job = wf.job(task, jobname, job_inputs)
 STDOUT.write res.to_json
 EOF

-      json = 
+      json = SSHLine.ruby(server, script)
       Log.debug "JSON (#{ url }): #{json}" if RBBT_DEBUG_REMOTE_JSON
       JSON.parse(json)
     end
@@ -107,7 +107,7 @@ STDOUT.write res
 STDOUT.write res
 EOF

-
+      SSHLine.ruby(server, script)
     end

     def self.post_job(url, inputs_id, jobname = nil)
@@ -119,7 +119,7 @@ STDOUT.write res
 job.init_info
 STDOUT.write job.path
 EOF
-
+      SSHLine.ruby(server, script)
     end

     def self.run_job(url, input_id, jobname = nil)
@@ -133,7 +133,7 @@ job.clean if job.error? and job.recoverable_error?
 job.run unless job.done? || job.error?
 STDOUT.write job.path
 EOF
-
+      SSHLine.ruby(server, script)
     end

     def self.run_batch_job(url, input_id, jobname = nil, batch_options = {})
@@ -149,7 +149,7 @@ job.clean if job.error? and job.recoverable_error?
 HPC::BATCH_MODULE.run_job(job, batch_options) unless job.done? || job.error?
 STDOUT.write job.path
 EOF
-
+      SSHLine.ruby(server, script)
     end

     def self.orchestrate_batch_job(url, input_id, jobname = nil, batch_options = {})
@@ -165,7 +165,7 @@ job.clean if job.error? and job.recoverable_error?
 HPC::BATCH_MODULE.orchestrate_job(job, batch_options) unless job.done? || job.error?
 STDOUT.write job.path
 EOF
-
+      SSHLine.ruby(server, script)
     end

     def self.clean(url, input_id, jobname = nil)
@@ -175,7 +175,7 @@ STDOUT.write job.path
       script +=<<-EOF
 job.clean
 EOF
-
+      SSHLine.ruby(server, script)
     end

     def self.upload_inputs(server, inputs, input_types, input_id)
@@ -234,7 +234,7 @@ job.clean
       all_deps.each do |dep,jobs|
         next if dep.done?
         next if job_list.include?(dep)
-        Log.medium "Producing #{dep.workflow}:#{dep.short_path} dependency for #{
+        Log.medium "Producing #{dep.workflow}:#{dep.short_path} dependency for #{Log.fingerprint jobs}"
         dep.produce
         missing_deps << dep
       end if produce_dependencies
@@ -243,7 +243,7 @@ job.clean

       #migrate_dependencies = all_deps.keys.collect{|d| [d] + d.rec_dependencies + d.input_dependencies }.flatten.select{|d| d.done? }.collect{|d| d.path }
       migrate_dependencies = all_deps.keys.collect{|d| [d] + d.input_dependencies }.flatten.select{|d| d.done? }.collect{|d| d.path }
-      Log.low "Migrating #{migrate_dependencies.length} dependencies from #{
+      Log.low "Migrating #{migrate_dependencies.length} dependencies from #{Log.fingerprint job_list} to #{ server }"
       Step.migrate(migrate_dependencies, search_path, :target => server) if migrate_dependencies.any?
     end
@@ -287,7 +287,7 @@ job.clean

       if options[:migrate]
         rjob.produce
-        Step.migrate(
+        Step.migrate(Path.identify(job.path), 'user', :source => server)
       end

       rjob
@@ -337,7 +337,7 @@ job.clean
       if options[:migrate]
         rjobs_job.each do |rjob,job|
           rjob.produce
-          Step.migrate(
+          Step.migrate(Path.identify(job.path), 'user', :source => server)
         end
       end

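Every hunk in this file completes the same pattern: the generated Ruby script is now executed through SSHLine.ruby and its stdout consumed (the removed lines are truncated in this rendering). A rough Python illustration of that build-script/run-remotely/read-stdout shape; this is not rbbt's implementation, just a sketch of the pattern:

    import subprocess

    def run_ruby_over_ssh(server, script):
        # Pipe a Ruby script to a remote interpreter and return its stdout,
        # raising on a nonzero exit; roughly what SSHLine.ruby provides.
        proc = subprocess.run(['ssh', server, 'ruby'],
                              input=script, capture_output=True, text=True)
        if proc.returncode != 0:
            raise RuntimeError(proc.stderr.strip())
        return proc.stdout

    # e.g. job_path = run_ruby_over_ssh('example-server', "STDOUT.write 'done'")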
data/lib/rbbt/workflow/remote_workflow.rb
CHANGED
@@ -5,7 +5,8 @@ class RemoteWorkflow

   attr_accessor :url, :name, :exec_exports, :synchronous_exports, :asynchronous_exports, :stream_exports

-  def initialize(url, name)
+  def initialize(url, name = nil)
+    name = File.basename(url) if name.nil?
     Log.debug{ "Loading remote workflow #{ name }: #{ url }" }
     @url, @name = url, name

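With name now optional, RemoteWorkflow.new('http://localhost:1900/Baking') names itself Baking via File.basename on the URL. The same derivation in Python, for reference:

    from os.path import basename
    print(basename('http://localhost:1900/Baking'))  # -> 'Baking'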
data/lib/rbbt-util.rb
CHANGED
@@ -20,7 +20,7 @@ require_relative 'rbbt/tsv'
 require_relative 'rbbt/workflow'

 Persist.cache_dir = Rbbt.var.cache.persistence
-FileCache.cachedir = Rbbt.var.cache.filecache.find
-TmpFile.tmpdir = Rbbt.tmp.find
+FileCache.cachedir = Rbbt.var.cache.filecache.find
+TmpFile.tmpdir = Rbbt.tmp.find
 Resource.default_resource = Rbbt

data/lib/rbbt.rb
CHANGED
@@ -3,7 +3,7 @@ $LOAD_PATH.unshift File.join(__dir__, '../../lib')

 require 'scout/path'
 require 'scout/resource'
-Path.add_path :
+Path.add_path :rbbt_util_lib, File.join(Path.caller_lib_dir(__FILE__), "{TOPLEVEL}/{SUBPATH}")
 module Rbbt
   extend Resource

data/python/rbbt/'/Users/miki/config/tmp/undodir'/%Users%miki%git%rbbt-util%python%rbbt%__init__.py
ADDED
Binary file
data/python/rbbt/__init__.py
CHANGED
@@ -1,22 +1,29 @@
-import warnings
 import sys
 import os
 import subprocess
+import tempfile
+import shutil
+import pandas
+import numpy

-
+
+def cmd(cmd=None):
     if cmd is None:
         print("Rbbt")
     else:
         return subprocess.run('rbbt_exec.rb', input=cmd.encode('utf-8'), capture_output=True).stdout.decode()

+
 def libdir():
-    return 
+    return cmd('puts Rbbt.find(:lib)').rstrip()
+

 def add_libdir():
     pythondir = os.path.join(libdir(), 'python')
     sys.path.insert(0, pythondir)

-
+
+def path(subdir=None, base_dir=None):
     from pathlib import Path
     import os

@@ -144,4 +151,71 @@ def save_tsv(filename, df, key=None):
         key = "#" + key
     df.to_csv(filename, sep="\t", index_label=key)

+def save_job_inputs(data):
+    temp_dir = tempfile.mkdtemp()  # Create a temporary directory
+
+    for name, value in data.items():
+        file_path = os.path.join(temp_dir, name)
+
+        if isinstance(value, str):
+            file_path += ".txt"
+            with open(file_path, "w") as f:
+                f.write(value)
+
+        elif isinstance(value, (bool)):
+            with open(file_path, "w") as f:
+                if value:
+                    f.write('true')
+                else:
+                    f.write('false')
+
+        elif isinstance(value, (int, float)):
+            with open(file_path, "w") as f:
+                f.write(str(value))
+
+        elif isinstance(value, pandas.DataFrame):
+            file_path += ".tsv"
+            save_tsv(file_path, value)

+        elif isinstance(value, numpy.ndarray) or isinstance(value, list):
+            file_path += ".list"
+            with open(file_path, "w") as f:
+                f.write("\n".join(value))
+
+        else:
+            raise TypeError(f"Unsupported data type for argument '{name}': {type(value)}")
+
+    return temp_dir
+
+
+def run_job(workflow, task, name='Default', fork=False, clean=False, **kwargs):
+    inputs_dir = save_job_inputs(kwargs)
+    cmd = ['rbbt', 'workflow', 'task', workflow, task, '--jobname', name, '--load_inputs', inputs_dir, '--nocolor']
+
+    if fork:
+        cmd.append('--fork')
+        cmd.append('--detach')
+
+    if clean:
+        if clean == 'recursive':
+            cmd.append('--recursive_clean')
+        else:
+            cmd.append('--clean')
+
+    proc = subprocess.run(
+        cmd,
+        capture_output=True,  # Capture both stdout and stderr
+        text=True  # Automatically decode outputs to strings
+    )
+    shutil.rmtree(inputs_dir)
+    if proc.returncode != 0:
+        output = proc.stderr.strip()
+        if output == '':
+            output = proc.stdout.strip()
+        raise RuntimeError(output)  # Raise error with cleaned stderr content
+    return proc.stdout.strip()
+
+if __name__ == "__main__":
+    import json
+    res = run_job('Baking', 'bake_muffin_tray', 'test', add_blueberries=True, fork=True)
+    print(res)
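run_job shells out to `rbbt workflow task`, staging every keyword argument as a file via save_job_inputs and handing the directory over with --load_inputs. A usage sketch mirroring the __main__ block above (Baking is the demo workflow used throughout these files):

    from rbbt import run_job

    # Blocking call: returns the job path printed by the CLI, or raises
    # RuntimeError with the captured stderr/stdout on failure.
    path = run_job('Baking', 'bake_muffin_tray', name='test', add_blueberries=True)

    # fork=True adds --fork --detach and returns as soon as the job is queued.
    path = run_job('Baking', 'bake_muffin_tray', name='test',
                   add_blueberries=True, fork=True)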
data/python/rbbt/workflow/remote.py
ADDED
@@ -0,0 +1,104 @@
+import requests
+import logging
+import json
+from urllib.parse import urlencode, urljoin
+from time import sleep
+import itertools
+
+def request_post(url, params):
+    response = requests.post(url, params)
+    return response
+
+def request_get(url, params):
+    query = urlencode(params)
+    full_url = f"{url}?{query}"
+    response = requests.get(full_url)
+    return response
+
+def get_json(url, params={}):
+    params['_format'] = 'json'
+    response = request_get(url, params)
+    if response.status_code == 200:
+        return json.loads(response.content)  # parse the JSON content from the response
+    else:
+        logging.error("Failed to initialize remote tasks")
+
+def get_raw(url, params={}):
+    params['_format'] = 'raw'
+    response = request_get(url, params)
+    if response.status_code == 200:
+        return response.content  # parse the JSON content from the response
+    else:
+        logging.error("Failed to initialize remote tasks")
+
+def join(url, *subpaths):
+    return url + "/" + "/".join(subpaths)
+
+class RemoteStep:
+    def __init__(self, url):
+        self.url = url
+
+    def info(self):
+        return get_json(join(self.url, 'info'))
+    def status(self):
+        return self.info()['status']
+
+    def done(self):
+        return self.status() == 'done'
+
+    def error(self):
+        return self.status() == 'error' or self.status() == 'aborted'
+
+    def running(self):
+        return not (self.done() or self.error())
+
+    def wait(self, time=1):
+        while self.running():
+            sleep(time)
+
+
+    def raw(self):
+        return get_raw(self.url)
+
+    def json(self):
+        return get_json(self.url)
+
+class RemoteWorkflow:
+    def __init__(self, url):
+        self.url = url
+        self.task_exports = {}
+        self.init_remote_tasks()
+
+    def init_remote_tasks(self):
+        self.task_exports = get_json(self.url)
+        self.tasks = []
+        self.tasks += self.task_exports['asynchronous']
+        self.tasks += self.task_exports['synchronous']
+        self.tasks += self.task_exports['exec']
+
+    def task_info(self, name):
+        return get_json(join(self.url, name, '/info'))
+
+    def job(self, task, **kwargs):
+        kwargs['_format'] = 'jobname'
+        response = request_post(join(self.url, task), kwargs)
+        if response.status_code == 200:
+            jobname = response.content.decode('utf-8')
+            step_url = join(self.url, task, jobname)
+            print(step_url)
+            return RemoteStep(step_url)
+        else:
+            logging.error("Failed to initialize remote tasks")
+
+
+if __name__ == "__main__":
+    wf = RemoteWorkflow('http://localhost:1900/Baking')
+    print(wf.tasks)
+    print(wf.task_info('bake_muffin_tray'))
+
+    step = wf.job('bake_muffin_tray', add_blueberries=True)
+    step.wait()
+    print(step.json())
+
+
+
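The module's __main__ block doubles as a usage example; condensed, and assuming a server is listening on the URL used there:

    from rbbt.workflow.remote import RemoteWorkflow

    wf = RemoteWorkflow('http://localhost:1900/Baking')
    step = wf.job('bake_muffin_tray', add_blueberries=True)  # POST with _format=jobname
    step.wait()            # polls info()['status'] until done/error/aborted
    print(step.json())     # fetches the result with _format=json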
data/python/rbbt/workflow.py
ADDED
@@ -0,0 +1,64 @@
+from . import cmd, run_job
+import subprocess
+import json
+import time
+
+def save_inputs(directory, inputs, types):
+    return
+
+class Workflow:
+    def __init__(self, name):
+        self.name = name
+
+    def tasks(self):
+        ruby=f'Workflow.require_workflow("{self.name}").tasks.keys * "\n"'
+        return cmd(ruby).strip().split("\n")
+
+    def task_info(self, name):
+        ruby=f'Workflow.require_workflow("{self.name}").task_info("{name}").to_json'
+        return cmd(ruby)
+
+    def run(self, task, **kwargs):
+        return run_job(self.name, task, **kwargs)
+
+    def fork(self, task, **kwargs):
+        path = run_job(self.name, task, fork=True, **kwargs)
+        return Step(path)
+
+class Step:
+    def __init__(self, path):
+        self.path = path
+        self.info_content = None
+
+    def info(self):
+        if self.info_content:
+            return self.info_content
+        ruby=f'puts Step.load("{self.path}").info.to_json'
+        txt = cmd(ruby)
+        info_content = json.loads(txt)
+        status = info_content["status"]
+        if status == "done" or status == "error" or status == "aborted":
+            self.info_content = info_content
+        return info_content
+
+    def status(self):
+        return self.info()["status"]
+
+    def done(self):
+        return self.status() == 'done'
+
+    def error(self):
+        return self.status() == 'error'
+
+    def aborted(self):
+        return self.status() == 'aborted'
+
+    def join(self):
+        while not (self.done() or self.error() or self.aborted()):
+            time.sleep(1)
+
+    def load(self):
+        ruby=f'puts Step.load("{self.path}").load.to_json'
+        txt = cmd(ruby)
+        return json.loads(txt)
+
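Workflow drives local jobs through the run_job helper above, while Step polls job state by round-tripping Ruby one-liners through rbbt_exec.rb. A short sketch, assuming workflow.py resolves as rbbt.workflow (the release also ships an rbbt/workflow/ package directory, which may shadow it):

    from rbbt.workflow import Workflow

    wf = Workflow('Baking')      # assumes the workflow is installed locally
    print(wf.tasks())            # task names via Workflow.require_workflow
    step = wf.fork('bake_muffin_tray', add_blueberries=True)
    step.join()                  # sleeps until done/error/aborted
    print(step.load())           # result parsed from the to_json output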
data/python/test.py
ADDED
data/share/rbbt_commands/hpc/list
CHANGED
@@ -338,7 +338,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   step_path = step_line.split(": ").last.strip
   step = Step.new step_path
   has_bar = false
-  (step.rec_dependencies + [step]).reverse.each do |j|
+  (step.rec_dependencies + [step]).to_a.reverse.each do |j|
     next if j.done?
     if j.file(:progress).exists?
       bar = Log::ProgressBar.new
data/share/rbbt_commands/lsf/list
CHANGED
@@ -338,7 +338,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   step_path = step_line.split(": ").last.strip
   step = Step.new step_path
   has_bar = false
-  (step.rec_dependencies + [step]).reverse.each do |j|
+  (step.rec_dependencies + [step]).to_a.reverse.each do |j|
     next if j.done?
     if j.file(:progress).exists?
       bar = Log::ProgressBar.new
data/share/rbbt_commands/pbs/list
CHANGED
@@ -338,7 +338,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   step_path = step_line.split(": ").last.strip
   step = Step.new step_path
   has_bar = false
-  (step.rec_dependencies + [step]).reverse.each do |j|
+  (step.rec_dependencies + [step]).to_a.reverse.each do |j|
     next if j.done?
     if j.file(:progress).exists?
       bar = Log::ProgressBar.new
data/share/rbbt_commands/resource/find
CHANGED
@@ -74,7 +74,7 @@ end if resource

 path = (resource || Rbbt)[path]

-if where.nil? || where == 'all' || path.
+if where.nil? || where == 'all' || path.map_maps.include?(where.to_sym)
   location = path.find(where)

   if Array === location
data/share/rbbt_commands/slurm/list
CHANGED
@@ -338,7 +338,7 @@ workdir.glob("**/command.batch").sort_by{|f| File.mtime(f)}.each do |fcmd|
   step_path = step_line.split(": ").last.strip
   step = Step.new step_path
   has_bar = false
-  (step.rec_dependencies + [step]).reverse.each do |j|
+  (step.rec_dependencies + [step]).to_a.reverse.each do |j|
     next if j.done?
     if j.file(:progress).exists?
       bar = Log::ProgressBar.new
data/share/rbbt_commands/system/status
CHANGED
@@ -36,7 +36,7 @@ inputs = (options[:inputs] || "").split(",")
 info_fields = (options[:info_fields] || "").split(",")

 def pid_msg(pid)
-  color = if pid and Misc.
+  color = if pid and Misc.pid_alive? pid
             :green
           else
             :red
@@ -211,7 +211,7 @@ workflows.sort.each do |workflow,tasks|
   status = :missing if status == :done and not (Open.exist?(file) && ! Open.broken_link?(file))
   status = :broken if Open.broken_link?(file)
   status = status.to_s
-  if status != "done" and pid and not Misc.
+  if status != "done" and pid and not Misc.pid_alive?(pid)
     if File.exist? file
       status << Log.color(:red, " (out of sync)")
     else
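Both system/status hunks complete a call to Misc.pid_alive? (the removed lines are truncated in this rendering). The conventional liveness check behind such helpers is the signal-0 probe; a rough Python equivalent, for reference only:

    import os

    def pid_alive(pid):
        # Signal 0 delivers nothing but still performs the existence check.
        try:
            os.kill(pid, 0)
            return True
        except ProcessLookupError:
            return False
        except PermissionError:
            return True  # process exists but belongs to another user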
|