rbbt-util 6.0.3 → 6.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e256d45aa4a4f9262d4d9a918f0336bc8b9af78de07af1b43cd9f4f9963c180b
- data.tar.gz: 93edc17e9ef31a721d303ca9282b185f4035839795ff730b3d2592eb16657001
+ metadata.gz: b6312e564dc119b942bb279017a12bd3571f27b1caa57db746c62996e8c73407
+ data.tar.gz: f08802c255d5967bf211bf801fbeb7f91e9d2a82804f51393061ce422e7fc26e
  SHA512:
- metadata.gz: b174c5a4f056d11b6ea62550a997cd6f55265bd036c310d4dab02e09a970f552a45073cc099326548004e0fc155eb305d3f5248acd3c7c4b4dfc9b879fc17c5b
- data.tar.gz: c9df520b857e38361df51aa0c4867e98abe3a52421a1cca8861f175b498c65172ddee62d60b3d174c56d5b614109263954810dace7877c867217ef98c7716cf2
+ metadata.gz: 2f74e126c73d121853002d2918ef5cb8c9b181c7c2264d7eb7c9f4a4d78441fc6b026f1b27eca445613717fff6a8f0169f9aeb86d33afeafe2eebed696685cc3
+ data.tar.gz: 93ddc50c5c0f5b45e79449f81788405fada4ad9aabaeac1f6fd908765184a79220e2960472a460a973afedd31ae2ad389c60d67a80e4b82ae63ac70748bf8a25
data/LICENSE CHANGED
@@ -1,4 +1,4 @@
- Copyright (c) 2010-2022 Miguel Vázquez García
+ Copyright (c) 2010-2024 Miguel Vázquez García
 
  Permission is hereby granted, free of charge, to any person obtaining
  a copy of this software and associated documentation files (the
data/bin/rbbt_exec.rb CHANGED
@@ -30,7 +30,7 @@ data = data * "\n" if Array === data
 
  case
  when (output.nil? or output == '-')
- puts data
+ STDOUT.write data
  when output == "file"
  if Misc.filename? data
  tmpfile = data
@@ -39,7 +39,7 @@ when output == "file"
  Open.write(tmpfile, data.to_s)
  end
 
- puts tmpfile
+ STDOUT.puts tmpfile
  else
  Open.write(output, data.to_s)
  end
@@ -1,5 +1,5 @@
- require_relative '../refactor'
  Rbbt.require_instead 'scout/tsv'
+ require_relative '../refactor'
  require 'spreadsheet'
  require 'rubyXL'
 
@@ -12,7 +12,7 @@ module FileCache
 
  def self.cachedir=(cachedir)
  CACHEDIR.replace cachedir
- FileUtils.mkdir_p CACHEDIR unless File.exist? CACHEDIR
+ Open.mkdir CACHEDIR unless Open.exist? CACHEDIR
  end
 
  def self.cachedir
@@ -6,7 +6,7 @@ module Rbbt
  require 'rbbt-util'
  path = "#{path}"
  if Open.exists?(path)
- path = #{resource.to_s}.identify(path)
+ path = Path.setup(#{resource.to_s}.identify(path))
  else
  path = Path.setup(path)
  end
@@ -1,90 +1,92 @@
- module Misc
-
- def self.hostname
- @hostanem ||= `hostname`.strip
- end
-
- def self.pid_exists?(pid)
- return false if pid.nil?
- begin
- Process.getpgid(pid.to_i)
- true
- rescue Errno::ESRCH
- false
- end
- end
-
- def self.env_add(var, value, sep = ":", prepend = true)
- ENV[var] ||= ""
- return if ENV[var] =~ /(#{sep}|^)#{Regexp.quote value}(#{sep}|$)/
- if prepend
- ENV[var] = value + sep + ENV[var]
- else
- ENV[var] += sep + ENV[var]
- end
- end
-
- def self.with_env(var, value, &block)
- var = var.to_s
- value = value.to_s
- current = ENV[var]
- begin
- ENV[var] = value
- yield
- ensure
- ENV[var] = current
- end
- end
-
- def self.common_path(dir, file)
- file = File.expand_path file
- dir = File.expand_path dir
-
- return true if file == dir
- while File.dirname(file) != file
- file = File.dirname(file)
- return true if file == dir
- end
-
- return false
- end
-
-
- def self.relative_link(source, target_dir)
- path = "."
- current = target_dir
- while ! Misc.common_path current, source
- current = File.dirname(current)
- path = File.join(path, '..')
- return nil if current == "/"
- end
-
- File.join(path, Misc.path_relative_to(current, source))
- end
-
- # WARN: probably not thread safe...
- def self.in_dir(dir)
- old_pwd = FileUtils.pwd
- res = nil
- begin
- FileUtils.mkdir_p dir unless File.exist?(dir)
- FileUtils.cd dir
- res = yield
- ensure
- FileUtils.cd old_pwd
- end
- res
- end
-
- def self.is_filename?(string, need_to_exists = true)
- return false if string.nil?
- return true if defined? Path and Path === string
- return true if string.respond_to? :exists
- return true if String === string and ! string.include?("\n") and string.split("/").select{|p| p.length > 265}.empty? and (! need_to_exists || File.exist?(string))
- return false
- end
-
- class << self
- alias filename? is_filename?
- end
- end
+ require_relative 'refactor'
+ Rbbt.require_instead 'scout/misc/format'
+ #module Misc
+ #
+ # def self.hostname
+ # @hostanem ||= `hostname`.strip
+ # end
+ #
+ # def self.pid_exists?(pid)
+ # return false if pid.nil?
+ # begin
+ # Process.getpgid(pid.to_i)
+ # true
+ # rescue Errno::ESRCH
+ # false
+ # end
+ # end
+ #
+ # def self.env_add(var, value, sep = ":", prepend = true)
+ # ENV[var] ||= ""
+ # return if ENV[var] =~ /(#{sep}|^)#{Regexp.quote value}(#{sep}|$)/
+ # if prepend
+ # ENV[var] = value + sep + ENV[var]
+ # else
+ # ENV[var] += sep + ENV[var]
+ # end
+ # end
+ #
+ # def self.with_env(var, value, &block)
+ # var = var.to_s
+ # value = value.to_s
+ # current = ENV[var]
+ # begin
+ # ENV[var] = value
+ # yield
+ # ensure
+ # ENV[var] = current
+ # end
+ # end
+ #
+ # def self.common_path(dir, file)
+ # file = File.expand_path file
+ # dir = File.expand_path dir
+ #
+ # return true if file == dir
+ # while File.dirname(file) != file
+ # file = File.dirname(file)
+ # return true if file == dir
+ # end
+ #
+ # return false
+ # end
+ #
+ #
+ # def self.relative_link(source, target_dir)
+ # path = "."
+ # current = target_dir
+ # while ! Misc.common_path current, source
+ # current = File.dirname(current)
+ # path = File.join(path, '..')
+ # return nil if current == "/"
+ # end
+ #
+ # File.join(path, Misc.path_relative_to(current, source))
+ # end
+ #
+ # # WARN: probably not thread safe...
+ # def self.in_dir(dir)
+ # old_pwd = FileUtils.pwd
+ # res = nil
+ # begin
+ # FileUtils.mkdir_p dir unless File.exist?(dir)
+ # FileUtils.cd dir
+ # res = yield
+ # ensure
+ # FileUtils.cd old_pwd
+ # end
+ # res
+ # end
+ #
+ # def self.is_filename?(string, need_to_exists = true)
+ # return false if string.nil?
+ # return true if defined? Path and Path === string
+ # return true if string.respond_to? :exists
+ # return true if String === string and ! string.include?("\n") and string.split("/").select{|p| p.length > 265}.empty? and (! need_to_exists || File.exist?(string))
+ # return false
+ # end
+ #
+ # class << self
+ # alias filename? is_filename?
+ # end
+ #end
@@ -1,66 +1,66 @@
- module Workflow
-
- annotation :asynchronous_exports, :synchronous_exports, :exec_exports, :stream_exports
-
- def asynchronous_exports
- @asynchronous_exports ||= []
- end
-
- def synchronous_exports
- @synchronous_exports ||= []
- end
-
- def exec_exports
- @exec_exports ||= []
- end
-
- def stream_exports
- @exec_exports ||= []
- end
-
-
- def all_exports
- asynchronous_exports + synchronous_exports + exec_exports + stream_exports
- end
-
- alias task_exports all_exports
-
- def unexport(*names)
- names = names.collect{|n| n.to_s} + names.collect{|n| n.to_sym}
- names.uniq!
- exec_exports.replace exec_exports - names if exec_exports
- synchronous_exports.replace synchronous_exports - names if synchronous_exports
- asynchronous_exports.replace asynchronous_exports - names if asynchronous_exports
- stream_exports.replace stream_exports - names if stream_exports
- end
-
- def export_exec(*names)
- unexport *names
- exec_exports.concat names
- exec_exports.uniq!
- exec_exports
- end
-
- def export_synchronous(*names)
- unexport *names
- synchronous_exports.concat names
- synchronous_exports.uniq!
- synchronous_exports
- end
-
- def export_asynchronous(*names)
- unexport *names
- asynchronous_exports.concat names
- asynchronous_exports.uniq!
- asynchronous_exports
- end
-
- def export_stream(*names)
- unexport *names
- stream_exports.concat names
- stream_exports.uniq!
- stream_exports
- end
-
- alias export export_asynchronous
- end
+ #module Workflow
+ #
+ # annotation :asynchronous_exports, :synchronous_exports, :exec_exports, :stream_exports
+ #
+ # def asynchronous_exports
+ # @asynchronous_exports ||= []
+ # end
+ #
+ # def synchronous_exports
+ # @synchronous_exports ||= []
+ # end
+ #
+ # def exec_exports
+ # @exec_exports ||= []
+ # end
+ #
+ # def stream_exports
+ # @exec_exports ||= []
+ # end
+ #
+ #
+ # def all_exports
+ # asynchronous_exports + synchronous_exports + exec_exports + stream_exports
+ # end
+ #
+ # alias task_exports all_exports
+ #
+ # def unexport(*names)
+ # names = names.collect{|n| n.to_s} + names.collect{|n| n.to_sym}
+ # names.uniq!
+ # exec_exports.replace exec_exports - names if exec_exports
+ # synchronous_exports.replace synchronous_exports - names if synchronous_exports
+ # asynchronous_exports.replace asynchronous_exports - names if asynchronous_exports
+ # stream_exports.replace stream_exports - names if stream_exports
+ # end
+ #
+ # def export_exec(*names)
+ # unexport *names
+ # exec_exports.concat names
+ # exec_exports.uniq!
+ # exec_exports
+ # end
+ #
+ # def export_synchronous(*names)
+ # unexport *names
+ # synchronous_exports.concat names
+ # synchronous_exports.uniq!
+ # synchronous_exports
+ # end
+ #
+ # def export_asynchronous(*names)
+ # unexport *names
+ # asynchronous_exports.concat names
+ # asynchronous_exports.uniq!
+ # asynchronous_exports
+ # end
+ #
+ # def export_stream(*names)
+ # unexport *names
+ # stream_exports.concat names
+ # stream_exports.uniq!
+ # stream_exports
+ # end
+ #
+ # alias export export_asynchronous
+ #end
@@ -1,64 +1,64 @@
- module Workflow
- def rec_inputs(task_name)
- tasks[task_name].recursive_inputs.collect{|name, _| name }
- end
-
- def rec_input_types(task_name)
- tasks[task_name].recursive_inputs.inject({}) do |acc,l|
- name, type, desc, default, options = l
- acc.merge!(name => type) unless acc.include?(name)
- acc
- end
- end
-
-
- def rec_input_descriptions(task_name)
- tasks[task_name].recursive_inputs.inject({}) do |acc,l|
- name, type, desc, default, options = l
- acc.merge!(name => desc) unless desc.nil? || acc.include?(name)
- acc
- end
- end
-
- def rec_input_defaults(task_name)
- tasks[task_name].recursive_inputs.inject({}) do |acc,l|
- name, type, desc, default, options = l
- acc.merge!(name => default) unless default.nil? || acc.include?(name)
- acc
- end
- end
-
- def rec_input_options(task_name)
- tasks[task_name].recursive_inputs.inject({}) do |acc,l|
- name, type, desc, default, options = l
- acc.merge!(name => options) unless options.nil? unless acc.include?(name)
- acc
- end
- end
-
-
- def rec_input_use(task_name)
- input_use = {}
- task = self.tasks[task_name]
- task.inputs.each do |name,_|
- input_use[name] ||= {}
- input_use[name][self] ||= []
- input_use[name][self] << task_name
- end
-
- task.deps.inject(input_use) do |acc,p|
- workflow, task_name = p
- next if task_name.nil?
- workflow.rec_input_use(task_name).each do |name,uses|
- acc[name] ||= {}
- uses.each do |workflow, task_names|
- acc[name][workflow] ||= []
- acc[name][workflow].concat(task_names)
- end
- end
- acc
- end if task.deps
-
- input_use
- end
- end
+ #module Workflow
+ # def rec_inputs(task_name)
+ # tasks[task_name].recursive_inputs.collect{|name, _| name }
+ # end
+ #
+ # def rec_input_types(task_name)
+ # tasks[task_name].recursive_inputs.inject({}) do |acc,l|
+ # name, type, desc, default, options = l
+ # acc.merge!(name => type) unless acc.include?(name)
+ # acc
+ # end
+ # end
+ #
+ #
+ # def rec_input_descriptions(task_name)
+ # tasks[task_name].recursive_inputs.inject({}) do |acc,l|
+ # name, type, desc, default, options = l
+ # acc.merge!(name => desc) unless desc.nil? || acc.include?(name)
+ # acc
+ # end
+ # end
+ #
+ # def rec_input_defaults(task_name)
+ # tasks[task_name].recursive_inputs.inject({}) do |acc,l|
+ # name, type, desc, default, options = l
+ # acc.merge!(name => default) unless default.nil? || acc.include?(name)
+ # acc
+ # end
+ # end
+ #
+ # def rec_input_options(task_name)
+ # tasks[task_name].recursive_inputs.inject({}) do |acc,l|
+ # name, type, desc, default, options = l
+ # acc.merge!(name => options) unless options.nil? unless acc.include?(name)
+ # acc
+ # end
+ # end
+ #
+ #
+ # def rec_input_use(task_name)
+ # input_use = {}
+ # task = self.tasks[task_name]
+ # task.inputs.each do |name,_|
+ # input_use[name] ||= {}
+ # input_use[name][self] ||= []
+ # input_use[name][self] << task_name
+ # end
+ #
+ # task.deps.inject(input_use) do |acc,p|
+ # workflow, task_name = p
+ # next if task_name.nil?
+ # workflow.rec_input_use(task_name).each do |name,uses|
+ # acc[name] ||= {}
+ # uses.each do |workflow, task_names|
+ # acc[name][workflow] ||= []
+ # acc[name][workflow].concat(task_names)
+ # end
+ # end
+ # acc
+ # end if task.deps
+ #
+ # input_use
+ # end
+ #end
@@ -1,65 +1,66 @@
- require_relative 'export'
- require_relative 'recursive'
- module Workflow
- def task_info(name)
- name = name.to_sym
- task = tasks[name]
- raise "No '#{name}' task in '#{self.to_s}' Workflow" if task.nil?
- id = File.join(self.to_s, name.to_s)
- @task_info ||= {}
- @task_info[id] ||= begin
- description = task.description
- result_description = task.result_description
- result_type = task.result_type
-
- inputs = rec_inputs(name).uniq
- input_types = rec_input_types(name)
- input_descriptions = rec_input_descriptions(name)
- input_use = rec_input_use(name)
- input_defaults = rec_input_defaults(name)
- input_options = rec_input_options(name)
- extension = task.extension
- export = case
- when (synchronous_exports.include?(name.to_sym) or synchronous_exports.include?(name.to_s))
- :synchronous
- when (asynchronous_exports.include?(name.to_sym) or asynchronous_exports.include?(name.to_s))
- :asynchronous
- when (exec_exports.include?(name.to_sym) or exec_exports.include?(name.to_s))
- :exec
- when (stream_exports.include?(name.to_sym) or stream_exports.include?(name.to_s))
- :stream
- else
- :none
- end
-
- dependencies = tasks[name].deps
- { :id => id,
- :description => description,
- :export => export,
- :inputs => inputs,
- :input_types => input_types,
- :input_descriptions => input_descriptions,
- :input_defaults => input_defaults,
- :input_options => input_options,
- :input_use => input_use,
- :result_type => result_type,
- :result_description => result_description,
- :dependencies => dependencies,
- :extension => extension
- }
- end
- end
- end
-
- module Task
- def result_description
- ""
- end
-
- def result_type
- @returns
- end
-
- end
-
-
+ #require_relative 'export'
+ #require_relative 'recursive'
+ #module Workflow
+ # def task_info(name)
+ # name = name.to_sym
+ # task = tasks[name]
+ # raise "No '#{name}' task in '#{self.to_s}' Workflow" if task.nil?
+ # id = File.join(self.to_s, name.to_s)
+ # @task_info ||= {}
+ # @task_info[id] ||= begin
+ # description = task.description
+ # result_description = task.result_description
+ # returns = task.returns
+ #
+ # inputs = rec_inputs(name).uniq
+ # input_types = rec_input_types(name)
+ # input_descriptions = rec_input_descriptions(name)
+ # input_use = rec_input_use(name)
+ # input_defaults = rec_input_defaults(name)
+ # input_options = rec_input_options(name)
+ # extension = task.extension
+ # export = case
+ # when (synchronous_exports.include?(name.to_sym) or synchronous_exports.include?(name.to_s))
+ # :synchronous
+ # when (asynchronous_exports.include?(name.to_sym) or asynchronous_exports.include?(name.to_s))
+ # :asynchronous
+ # when (exec_exports.include?(name.to_sym) or exec_exports.include?(name.to_s))
+ # :exec
+ # when (stream_exports.include?(name.to_sym) or stream_exports.include?(name.to_s))
+ # :stream
+ # else
+ # :none
+ # end
+ #
+ # dependencies = tasks[name].deps
+ # { :id => id,
+ # :description => description,
+ # :export => export,
+ # :inputs => inputs,
+ # :input_types => input_types,
+ # :input_descriptions => input_descriptions,
+ # :input_defaults => input_defaults,
+ # :input_options => input_options,
+ # :input_use => input_use,
+ # :returns => returns,
+ # #:result_type => result_type,
+ # #:result_description => result_description,
+ # :dependencies => dependencies,
+ # :extension => extension
+ # }
+ # end
+ # end
+ #end
+ #
+ #module Task
+ # def result_description
+ # ""
+ # end
+ #
+ # def result_type
+ # @returns
+ # end
+ #
+ #end
+ #
+ #
@@ -1,6 +1,3 @@
- require_relative 'refactor/export'
- require_relative 'refactor/recursive'
- require_relative 'refactor/task_info'
  require_relative 'refactor/inputs'
  require_relative 'refactor/entity'
 
@@ -146,7 +146,6 @@ class RemoteWorkflow
  task_info = RemoteWorkflow::REST.get_json(File.join(url, task.to_s, 'info'))
  task_info = RemoteWorkflow.fix_hash(task_info)
 
- task_info[:result_type] = task_info[:result_type].to_sym
  task_info[:export] = task_info[:export].to_sym
  task_info[:input_types] = RemoteWorkflow.fix_hash(task_info[:input_types], true)
  task_info[:inputs] = task_info[:inputs].collect{|input| input.to_sym }
@@ -248,7 +247,6 @@ class RemoteWorkflow
  end
  end
 
-
  def task_info(task)
  RemoteWorkflow::REST.task_info(url, task)
  end
@@ -260,6 +258,7 @@ class RemoteWorkflow
  @exec_exports = (task_exports["exec"] || []).collect{|task| task.to_sym }
  @stream_exports = (task_exports["stream"] || []).collect{|task| task.to_sym }
  @can_stream = task_exports["can_stream"]
+ (@asynchronous_exports + @synchronous_exports + @exec_exports).uniq.each do |e| tasks[e] end
  end
  end
  end
data/lib/rbbt-util.rb CHANGED
@@ -20,7 +20,7 @@ require_relative 'rbbt/tsv'
  require_relative 'rbbt/workflow'
 
  Persist.cache_dir = Rbbt.var.cache.persistence
- FileCache.cachedir = Rbbt.var.cache.filecache.find :user
- TmpFile.tmpdir = Rbbt.tmp.find :user
+ FileCache.cachedir = Rbbt.var.cache.filecache.find
+ TmpFile.tmpdir = Rbbt.tmp.find
  Resource.default_resource = Rbbt
 
@@ -1,22 +1,29 @@
- import warnings
  import sys
  import os
  import subprocess
+ import tempfile
+ import shutil
+ import pandas
+ import numpy
 
- def cmd(cmd = None):
+
+ def cmd(cmd=None):
      if cmd is None:
          print("Rbbt")
      else:
          return subprocess.run('rbbt_exec.rb', input=cmd.encode('utf-8'), capture_output=True).stdout.decode()
 
+
  def libdir():
-     return rbbt('puts Rbbt.find(:lib)').rstrip()
+     return cmd('puts Rbbt.find(:lib)').rstrip()
+
 
  def add_libdir():
      pythondir = os.path.join(libdir(), 'python')
      sys.path.insert(0, pythondir)
 
- def path(subdir = None, base_dir = None):
+
+ def path(subdir=None, base_dir=None):
      from pathlib import Path
      import os
 
@@ -144,4 +151,71 @@ def save_tsv(filename, df, key=None):
          key = "#" + key
      df.to_csv(filename, sep="\t", index_label=key)
 
+ def save_job_inputs(data):
+     temp_dir = tempfile.mkdtemp() # Create a temporary directory
+
+     for name, value in data.items():
+         file_path = os.path.join(temp_dir, name)
+
+         if isinstance(value, str):
+             file_path += ".txt"
+             with open(file_path, "w") as f:
+                 f.write(value)
+
+         elif isinstance(value, (bool)):
+             with open(file_path, "w") as f:
+                 if value:
+                     f.write('true')
+                 else:
+                     f.write('false')
+
+         elif isinstance(value, (int, float)):
+             with open(file_path, "w") as f:
+                 f.write(str(value))
+
+         elif isinstance(value, pandas.DataFrame):
+             file_path += ".tsv"
+             save_tsv(file_path, value)
 
+         elif isinstance(value, numpy.ndarray) or isinstance(value, list):
+             file_path += ".list"
+             with open(file_path, "w") as f:
+                 f.write("\n".join(value))
+
+         else:
+             raise TypeError(f"Unsupported data type for argument '{name}': {type(value)}")
+
+     return temp_dir
+
+
+ def run_job(workflow, task, name='Default', fork=False, clean=False, **kwargs):
+     inputs_dir = save_job_inputs(kwargs)
+     cmd = ['rbbt', 'workflow', 'task', workflow, task, '--jobname', name, '--load_inputs', inputs_dir, '--nocolor']
+
+     if fork:
+         cmd.append('--fork')
+         cmd.append('--detach')
+
+     if clean:
+         if clean == 'recursive':
+             cmd.append('--recursive_clean')
+         else:
+             cmd.append('--clean')
+
+     proc = subprocess.run(
+         cmd,
+         capture_output=True, # Capture both stdout and stderr
+         text=True # Automatically decode outputs to strings
+     )
+     shutil.rmtree(inputs_dir)
+     if proc.returncode != 0:
+         output = proc.stderr.strip()
+         if output == '' :
+             output = proc.stdout.strip()
+         raise RuntimeError(output) # Raise error with cleaned stderr content
+     return proc.stdout.strip()
+
+ if __name__ == "__main__":
+     import json
+     res = run_job('Baking', 'bake_muffin_tray', 'test', add_blueberries=True, fork=True)
+     print(res)
@@ -0,0 +1,104 @@
+ import requests
+ import logging
+ import json
+ from urllib.parse import urlencode, urljoin
+ from time import sleep
+ import itertools
+
+ def request_post(url, params):
+     response = requests.post(url, params)
+     return response
+
+ def request_get(url, params):
+     query = urlencode(params)
+     full_url = f"{url}?{query}"
+     response = requests.get(full_url)
+     return response
+
+ def get_json(url, params={}):
+     params['_format'] = 'json'
+     response = request_get(url, params)
+     if response.status_code == 200:
+         return json.loads(response.content) # parse the JSON content from the response
+     else:
+         logging.error("Failed to initialize remote tasks")
+
+ def get_raw(url, params={}):
+     params['_format'] = 'raw'
+     response = request_get(url, params)
+     if response.status_code == 200:
+         return response.content # parse the JSON content from the response
+     else:
+         logging.error("Failed to initialize remote tasks")
+
+ def join(url, *subpaths):
+     return url + "/" + "/".join(subpaths)
+
+ class RemoteStep:
+     def __init__(self, url):
+         self.url = url
+
+     def info(self):
+         return get_json(join(self.url, 'info'))
+     def status(self):
+         return self.info()['status']
+
+     def done(self):
+         return self.status() == 'done'
+
+     def error(self):
+         return self.status() == 'error' or self.status() == 'aborted'
+
+     def running(self):
+         return not (self.done() or self.error())
+
+     def wait(self, time=1):
+         while self.running():
+             sleep(time)
+
+
+     def raw(self):
+         return get_raw(self.url)
+
+     def json(self):
+         return get_json(self.url)
+
+ class RemoteWorkflow:
+     def __init__(self, url):
+         self.url = url
+         self.task_exports = {}
+         self.init_remote_tasks()
+
+     def init_remote_tasks(self):
+         self.task_exports = get_json(self.url)
+         self.tasks = []
+         self.tasks += self.task_exports['asynchronous']
+         self.tasks += self.task_exports['synchronous']
+         self.tasks += self.task_exports['exec']
+
+     def task_info(self, name):
+         return get_json(join(self.url, name, '/info'))
+
+     def job(self, task, **kwargs):
+         kwargs['_format'] = 'jobname'
+         response = request_post(join(self.url, task), kwargs)
+         if response.status_code == 200:
+             jobname = response.content.decode('utf-8')
+             step_url = join(self.url, task, jobname)
+             print(step_url)
+             return RemoteStep(step_url)
+         else:
+             logging.error("Failed to initialize remote tasks")
+
+
+ if __name__ == "__main__":
+     wf = RemoteWorkflow('http://localhost:1900/Baking')
+     print(wf.tasks)
+     print(wf.task_info('bake_muffin_tray'))
+
+     step = wf.job('bake_muffin_tray', add_blueberries=True)
+     step.wait()
+     print(step.json())
+
+
+
@@ -0,0 +1,64 @@
+ from . import cmd, run_job
+ import subprocess
+ import json
+ import time
+
+ def save_inputs(directory, inputs, types):
+     return
+
+ class Workflow:
+     def __init__(self, name):
+         self.name = name
+
+     def tasks(self):
+         ruby=f'Workflow.require_workflow("{self.name}").tasks.keys * "\n"'
+         return cmd(ruby).strip().split("\n")
+
+     def task_info(self, name):
+         ruby=f'Workflow.require_workflow("{self.name}").task_info("{name}").to_json'
+         return cmd(ruby)
+
+     def run(self, task, **kwargs):
+         return run_job(self.name, task, **kwargs)
+
+     def fork(self, task, **kwargs):
+         path = run_job(self.name, task, fork=True, **kwargs)
+         return Step(path)
+
+ class Step:
+     def __init__(self, path):
+         self.path = path
+         self.info_content = None
+
+     def info(self):
+         if self.info_content:
+             return self.info_content
+         ruby=f'puts Step.load("{self.path}").info.to_json'
+         txt = cmd(ruby)
+         info_content = json.loads(txt)
+         status = info_content["status"]
+         if status == "done" or status == "error" or status == "aborted":
+             self.info_content = info_content
+         return info_content
+
+     def status(self):
+         return self.info()["status"]
+
+     def done(self):
+         return self.status() == 'done'
+
+     def error(self):
+         return self.status() == 'error'
+
+     def aborted(self):
+         return self.status() == 'aborted'
+
+     def join(self):
+         while not (self.done() or self.error() or self.aborted()):
+             time.sleep(1)
+
+     def load(self):
+         ruby=f'puts Step.load("{self.path}").load.to_json'
+         txt = cmd(ruby)
+         return json.loads(txt)
+
data/python/test.py ADDED
@@ -0,0 +1,10 @@
+ if __name__ == "__main__":
+     import rbbt
+     import rbbt.workflow
+     wf = rbbt.workflow.Workflow('Baking')
+     step = wf.fork('bake_muffin_tray', add_blueberries=True, clean='recursive')
+     step.join()
+     print(step.load())
+
+
+
@@ -0,0 +1,43 @@
+ #!/usr/bin/env ruby
+
+ require 'rbbt/workflow'
+
+ require 'rbbt-util'
+ require 'rbbt-util'
+ require 'rbbt/util/simpleopt'
+
+ $0 = "rbbt #{$previous_commands*""} #{ File.basename(__FILE__) }" if $previous_commands
+
+ options = SOPT.setup <<EOF
+ Retry a failed job
+
+ $ rbbt workflow retry <job-path>
+
+ Does not retry if job is done unless clean is specified
+
+ -h--help Help
+ -cl--clean Clean the job
+ -rcl--recursive_clean Recursively clean the job
+ EOF
+
+ SOPT.usage if options[:help]
+
+ file = ARGV.shift
+ def get_step(file)
+ file = file.sub(/\.(info|files)/,'')
+ step = Workflow.load_step file
+ step
+ end
+
+ step = get_step(file)
+ inputs = step.recursive_inputs
+
+ step.clean if options[:clean]
+ step.recursive_clean if options[:recursive_clean]
+
+ if ! step.done?
+ wf = Workflow.require_workflow step.workflow
+ job = wf.job(step.task_name, step.clean_name, inputs.to_hash)
+ job.run
+ end
+
@@ -140,7 +140,7 @@ TmpFile.with_file do |app_dir|
 
  begin
  Rack::Server.start(options)
- rescue LoadError
+ rescue LoadError, Exception
  require 'rackup'
 
  begin
@@ -535,12 +535,14 @@ begin
  end
  end
 
- if do_fork
+ if do_fork || detach
  ENV["SCOUT_NO_PROGRESS"] = "true"
  if detach
  job.fork
  Process.detach job.pid if job.pid
- puts Log.color(:magenta, "Issued: ") + Log.color(:magenta, job.pid ? job.pid.to_s : 'no pid') + ' -- ' + job.path
+ Log.info(Log.color(:magenta, "Issued: ") + Log.color(:magenta, job.pid ? job.pid.to_s : 'no pid') + ' -- ' + job.path)
+ puts job.path
+
  exit 0
  end
 
metadata CHANGED
@@ -1,14 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: rbbt-util
  version: !ruby/object:Gem::Version
- version: 6.0.3
+ version: 6.0.4
  platform: ruby
  authors:
  - Miguel Vazquez
- autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-02-13 00:00:00.000000000 Z
+ date: 2025-03-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: scout-gear
@@ -376,6 +375,9 @@ files:
  - lib/rbbt/workflow/util/provenance.rb
  - lib/rbbt/workflow/util/trace.rb
  - python/rbbt/__init__.py
+ - python/rbbt/workflow.py
+ - python/rbbt/workflow/remote.py
+ - python/test.py
  - share/Rlib/plot.R
  - share/Rlib/svg.R
  - share/Rlib/util.R
@@ -488,6 +490,7 @@ files:
  - share/rbbt_commands/workflow/remote/add
  - share/rbbt_commands/workflow/remote/list
  - share/rbbt_commands/workflow/remote/remove
+ - share/rbbt_commands/workflow/retry
  - share/rbbt_commands/workflow/server
  - share/rbbt_commands/workflow/task
  - share/rbbt_commands/workflow/trace
@@ -500,7 +503,6 @@ homepage: http://github.com/mikisvaz/rbbt-util
  licenses:
  - MIT
  metadata: {}
- post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -515,8 +517,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.5.23
- signing_key:
+ rubygems_version: 3.6.5
  specification_version: 4
  summary: Utilities for the Ruby Bioinformatics Toolkit (rbbt)
  test_files: []