scout-rig 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.document +5 -0
- data/.vimproject +30 -0
- data/LICENSE.txt +20 -0
- data/README.rdoc +18 -0
- data/Rakefile +50 -0
- data/VERSION +1 -0
- data/lib/scout/python/paths.rb +27 -0
- data/lib/scout/python/run.rb +122 -0
- data/lib/scout/python/script.rb +110 -0
- data/lib/scout/python/util.rb +52 -0
- data/lib/scout/python.rb +140 -0
- data/lib/scout-rig.rb +5 -0
- data/python/scout/__init__.py +221 -0
- data/python/scout/__pycache__/__init__.cpython-310.pyc +0 -0
- data/python/scout/__pycache__/workflow.cpython-310.pyc +0 -0
- data/python/scout/workflow/remote.py +103 -0
- data/python/scout/workflow.py +64 -0
- data/python/test.py +12 -0
- data/scout-rig.gemspec +56 -0
- data/test/scout/python/test_script.rb +61 -0
- data/test/scout/python/test_util.rb +25 -0
- data/test/scout/test_python.rb +158 -0
- data/test/test_helper.rb +5 -0
- metadata +92 -0
@@ -0,0 +1,221 @@
|
|
1
|
+
import sys
|
2
|
+
import os
|
3
|
+
import subprocess
|
4
|
+
import tempfile
|
5
|
+
import shutil
|
6
|
+
import pandas
|
7
|
+
import numpy
|
8
|
+
|
9
|
+
|
10
|
+
def cmd(cmd=None):
    """Run a snippet of Ruby code through the `rbbt_exec.rb` helper.

    cmd: Ruby source to execute. When None, just print a greeting and
    return None.
    Returns the helper's stdout decoded as UTF-8.
    """
    if cmd is None:
        print("Rbbt")
        return None
    completed = subprocess.run('rbbt_exec.rb',
                               input=cmd.encode('utf-8'),
                               capture_output=True)
    return completed.stdout.decode()
|
15
|
+
|
16
|
+
|
17
|
+
def libdir():
    """Return the Rbbt `lib` directory, as reported by the Ruby side."""
    out = cmd('puts Rbbt.find(:lib)')
    return out.rstrip()
|
19
|
+
|
20
|
+
|
21
|
+
def add_libdir():
    """Prepend the Rbbt `python` directory to sys.path so `scout` imports resolve."""
    sys.path.insert(0, os.path.join(libdir(), 'python'))
|
24
|
+
|
25
|
+
|
26
|
+
def path(subdir=None, base_dir=None):
    """Resolve a path under the Rbbt directories.

    base_dir may be 'base' (~/.rbbt), 'lib' (the Rbbt lib directory), or
    anything else (normally None), in which case 'lib' then 'base' are
    searched and the first existing match is returned, falling back to
    the 'base' location.
    """
    from pathlib import Path
    import os

    if base_dir == 'base':
        root = os.path.join(Path.home(), ".rbbt")
    elif base_dir == 'lib':
        root = libdir()
    else:
        # No explicit root: probe both roots, preferring 'lib'.
        for candidate in ('lib', 'base'):
            found = path(subdir, candidate)
            if os.path.exists(found):
                return found
        return path(subdir, 'base')

    if subdir is None:
        return root
    return os.path.join(root, subdir)
|
45
|
+
|
46
|
+
def read(subdir, base_dir=None, encoding='utf-8'):
    """Return the text contents of the file resolved by `path`."""
    with open(path(subdir, base_dir), encoding=encoding) as handle:
        return handle.read()
|
50
|
+
|
51
|
+
def inspect(obj):
    """Print the attribute names of `obj`.

    NOTE(review): this name shadows the stdlib `inspect` module within
    this package's namespace.
    """
    names = dir(obj)
    print(names)
|
53
|
+
|
54
|
+
def rich(obj):
    """Pretty-inspect `obj` with the third-party `rich` library (imported lazily)."""
    import rich as rich_mod
    rich_mod.inspect(obj)
|
57
|
+
|
58
|
+
def log_tsv(tsv):
    """Dump a TSV/DataFrame-like object and its keys to stdout for debugging."""
    for piece in (tsv, tsv.keys()):
        print(piece)
|
61
|
+
|
62
|
+
def benchmark():
    """Return a context manager that prints the wall-clock time of the
    enclosed block in milliseconds.

    Usage: `with benchmark(): ...`

    Fix: the original was a bare generator function (it used `yield`
    without `@contextlib.contextmanager`), so using it in a `with`
    statement raised TypeError; wrap the generator so the try/finally
    timing actually runs around the guarded block.
    """
    import contextlib
    import time

    @contextlib.contextmanager
    def _timer():
        tic: float = time.perf_counter()
        try:
            yield
        finally:
            toc: float = time.perf_counter()
            print(f"Computation time = {1000*(toc - tic):.3f}ms")

    return _timer()
|
70
|
+
|
71
|
+
def tsv_preamble(line, comment_char="#"):
    """Parse an Rbbt TSV preamble line ("#: :key=:value#...") into a dict.

    The leading `comment_char:` is stripped, entries are split on '#',
    and a leading ':' is removed from each key and value.
    Fix: removed a dead duplicate assignment to `entries` that was
    immediately overwritten.
    """
    import re
    header = dict()
    entries = re.sub(f"^{comment_char}:", '', line).split("#")
    for entry in entries:
        entry = entry.strip()
        key, value = entry.split("=")
        key = re.sub("^:", "", key)
        value = re.sub("^:", "", value)
        header[key] = value

    return header
|
84
|
+
|
85
|
+
|
86
|
+
def tsv_header(filename, sep="\t", comment_char="#", encoding='utf8'):
    """Read the Rbbt TSV header of `filename`.

    Returns a dict with "start" (index of the first data row) and, when a
    comment_char header line is present, "all_fields", "key_field" and
    "fields"; a "#:..." preamble line is parsed into "preamble" (which may
    also set "type"). Files without any header line get
    {"fields": None, "type": "list", "start": 0}.

    Fix: the file handle leaked when an exception occurred between open()
    and close(); use a `with` block instead.
    """
    import re

    with open(filename, encoding=encoding) as f:
        line = f.readline().strip()

        if not line.startswith(comment_char):
            return {"fields": None, "type": "list", "start": 0}

        header = dict()
        start = 0
        if line.startswith(f"{comment_char}:"):
            header["preamble"] = tsv_preamble(line, comment_char)
            if "type" in header["preamble"]:
                header["type"] = header["preamble"]["type"]
            line = f.readline().strip()
            start = 1

        if line.startswith(comment_char):
            header["all_fields"] = re.sub(f"^{comment_char}", "", line).split(sep)
            header["key_field"] = header["all_fields"][0]
            header["fields"] = header["all_fields"][1:]

        header["start"] = start
        return header
|
113
|
+
|
114
|
+
|
115
|
+
def tsv_pandas(filename, sep="\t", comment_char="#", index_col=0, **kwargs):
    """Load an Rbbt TSV file into a pandas DataFrame.

    comment_char="" skips header parsing and delegates directly to
    pandas.read_table; otherwise the Rbbt header determines the
    separator, field names and rows to skip. Extra kwargs are forwarded
    to pandas.read_table.

    Fix: `comment_char` was hard-coded to "#" when parsing the header,
    silently ignoring the caller's argument.
    """
    import pandas

    if comment_char == "":
        tsv = pandas.read_table(filename, sep=sep, index_col=index_col, **kwargs)
    else:
        header = tsv_header(filename, sep=sep, comment_char=comment_char)

        if "type" in header and header["type"] == "flat":
            if "sep" in header:
                sep = header["sep"]

            # Flat files have no per-column header: skip preamble rows.
            tsv = pandas.read_table(filename, sep=sep, index_col=index_col,
                                    header=None, skiprows=[0, 1], **kwargs)

            if "key_field" in header:
                tsv.index.name = header["key_field"]
        else:
            if "sep" in header:
                sep = header["sep"]

            tsv = pandas.read_table(filename, sep=sep, index_col=index_col,
                                    header=header["start"], **kwargs)

            # NOTE(review): when the file has no '#' header line,
            # header["fields"] is None and "key_field" is absent, so this
            # branch would fail — presumably such files always use
            # comment_char=""; confirm against callers.
            if "fields" in header:
                tsv.columns = header["fields"]
                tsv.index.name = header["key_field"]

    return tsv
|
142
|
+
|
143
|
+
def tsv(*args, **kwargs):
    """Convenience alias for `tsv_pandas`; accepts the same arguments."""
    return tsv_pandas(*args, **kwargs)
|
145
|
+
|
146
|
+
def save_tsv(filename, df, key=None):
    """Write DataFrame `df` to `filename` as an Rbbt TSV (tab-separated).

    The index column is labeled "#<key>"; `key` defaults to the index
    name, or "Key" when the index is unnamed.
    Fix: compare against None with `is`, per PEP 8.
    """
    if key is None:
        key = df.index.name
    if key is None:
        key = "Key"
    df.to_csv(filename, sep="\t", index_label="#" + key)
|
153
|
+
|
154
|
+
def save_job_inputs(data):
    """Serialize job inputs to files in a fresh temporary directory.

    Each key of `data` becomes one file; the extension encodes the type:
    ".txt" for str, ".tsv" for pandas DataFrames, ".list" for
    lists/ndarrays (one element per line), and no extension for bool
    ("true"/"false") and numbers.
    Returns the temporary directory path; the caller must remove it.
    Raises TypeError for unsupported value types.

    Fix: `"\\n".join(value)` crashed on lists/arrays of non-string
    elements; stringify each element before joining.
    """
    temp_dir = tempfile.mkdtemp()

    for name, value in data.items():
        file_path = os.path.join(temp_dir, name)

        if isinstance(value, str):
            file_path += ".txt"
            with open(file_path, "w") as f:
                f.write(value)

        # bool must be tested before int: isinstance(True, int) is True.
        elif isinstance(value, bool):
            with open(file_path, "w") as f:
                f.write('true' if value else 'false')

        elif isinstance(value, (int, float)):
            with open(file_path, "w") as f:
                f.write(str(value))

        elif isinstance(value, pandas.DataFrame):
            file_path += ".tsv"
            save_tsv(file_path, value)

        elif isinstance(value, (numpy.ndarray, list)):
            file_path += ".list"
            with open(file_path, "w") as f:
                f.write("\n".join(str(v) for v in value))

        else:
            raise TypeError(f"Unsupported data type for argument '{name}': {type(value)}")

    return temp_dir
|
189
|
+
|
190
|
+
|
191
|
+
def run_job(workflow, task, name='Default', fork=False, clean=False, **kwargs):
    """Submit an rbbt workflow task through the CLI and return its stdout.

    kwargs are serialized with `save_job_inputs` and passed to the CLI
    via --load_inputs. fork=True runs the job detached; clean (or
    'recursive') forces recomputation.
    Raises RuntimeError with the process output on a non-zero exit.
    """
    inputs_dir = save_job_inputs(kwargs)
    args = ['rbbt', 'workflow', 'task', workflow, task,
            '--jobname', name, '--load_inputs', inputs_dir, '--nocolor']

    if fork:
        args += ['--fork', '--detach']

    if clean:
        args.append('--recursive_clean' if clean == 'recursive' else '--clean')

    proc = subprocess.run(args, capture_output=True, text=True)
    shutil.rmtree(inputs_dir)  # inputs are consumed; always clean up

    if proc.returncode != 0:
        # Prefer stderr; fall back to stdout when stderr is empty.
        message = proc.stderr.strip() or proc.stdout.strip()
        raise RuntimeError(message)

    return proc.stdout.strip()
|
217
|
+
|
218
|
+
if __name__ == "__main__":
    # Manual smoke test: fork a Baking job and print the resulting job path.
    # Fix: removed the unused `import json`.
    res = run_job('Baking', 'bake_muffin_tray', 'test', add_blueberries=True, fork=True)
    print(res)
|
Binary file
|
Binary file
|
@@ -0,0 +1,103 @@
|
|
1
|
+
import requests
|
2
|
+
import logging
|
3
|
+
import json
|
4
|
+
from urllib.parse import urlencode, urljoin
|
5
|
+
from time import sleep
|
6
|
+
import itertools
|
7
|
+
|
8
|
+
def request_post(url, params):
    """POST `params` (form-encoded by requests) to `url`; return the Response."""
    return requests.post(url, params)
|
11
|
+
|
12
|
+
def request_get(url, params):
    """GET `url` with `params` encoded into the query string; return the Response."""
    full_url = f"{url}?{urlencode(params)}"
    return requests.get(full_url)
|
17
|
+
|
18
|
+
def get_json(url, params=None):
    """GET `url` with `_format=json` and return the parsed JSON body.

    Returns None (after logging an error) on a non-200 status.
    Fix: the mutable default `params={}` was mutated on every call,
    leaking `_format` between calls; default to None and copy the
    caller's dict instead.
    """
    params = dict(params) if params else {}
    params['_format'] = 'json'
    response = request_get(url, params)
    if response.status_code == 200:
        return json.loads(response.content)
    logging.error("Failed to initialize remote tasks")
|
25
|
+
|
26
|
+
def get_raw(url, params=None):
    """GET `url` with `_format=raw` and return the raw response body (bytes).

    Returns None (after logging an error) on a non-200 status.
    Fix: mutable default argument `params={}` was mutated on every call;
    default to None and copy the caller's dict instead.
    """
    params = dict(params) if params else {}
    params['_format'] = 'raw'
    response = request_get(url, params)
    if response.status_code == 200:
        return response.content
    logging.error("Failed to initialize remote tasks")
|
33
|
+
|
34
|
+
def join(url, *subpaths):
    """Join `url` and `subpaths` with '/' separators.

    No escaping or normalization is performed; with no subpaths the
    result has a trailing '/'.
    """
    tail = "/".join(subpaths)
    return f"{url}/{tail}"
|
36
|
+
|
37
|
+
class RemoteStep:
    """Client-side handle for a single remote workflow job."""

    def __init__(self, url):
        self.url = url

    def info(self):
        """Fetch the job's info dict from the server."""
        return get_json(join(self.url, 'info'))

    def status(self):
        """Current status string from the job's info."""
        return self.info()['status']

    def done(self):
        return self.status() == 'done'

    def error(self):
        """True when the job errored or was aborted."""
        return self.status() == 'error' or self.status() == 'aborted'

    def running(self):
        """True while the job is neither finished nor failed."""
        return not (self.done() or self.error())

    def wait(self, time=1):
        """Poll every `time` seconds until the job stops running."""
        while self.running():
            sleep(time)

    def raw(self):
        """The job result as raw bytes."""
        return get_raw(self.url)

    def json(self):
        """The job result parsed from JSON."""
        return get_json(self.url)
|
65
|
+
|
66
|
+
class RemoteWorkflow:
    """Client for a remote scout/rbbt workflow HTTP server."""

    def __init__(self, url):
        self.url = url
        self.task_exports = {}
        self.init_remote_tasks()

    def init_remote_tasks(self):
        """Load the exported task lists (asynchronous/synchronous/exec)."""
        self.task_exports = get_json(self.url)
        self.tasks = []
        self.tasks += self.task_exports['asynchronous']
        self.tasks += self.task_exports['synchronous']
        self.tasks += self.task_exports['exec']

    def task_info(self, name):
        """Fetch the info dict for task `name`.

        Fix: the path segment was '/info', which produced a double slash
        in the request URL ("…/<name>//info").
        """
        return get_json(join(self.url, name, 'info'))

    def job(self, task, **kwargs):
        """Create a job for `task` with the given inputs.

        Returns a RemoteStep on success, or None (after logging) on a
        non-200 response.
        """
        kwargs['_format'] = 'jobname'
        response = request_post(join(self.url, task), kwargs)
        if response.status_code == 200:
            jobname = response.content.decode('utf-8')
            step_url = join(self.url, task, jobname)
            return RemoteStep(step_url)
        logging.error("Failed to initialize remote tasks")
|
91
|
+
|
92
|
+
|
93
|
+
if __name__ == "__main__":
    # Manual smoke test against a locally running Baking workflow server.
    wf = RemoteWorkflow('http://localhost:1900/Baking')
    print(wf.tasks)
    print(wf.task_info('bake_muffin_tray'))

    step = wf.job('bake_muffin_tray', add_blueberries=True)
    step.wait()
    print(step.json())
|
101
|
+
|
102
|
+
|
103
|
+
|
@@ -0,0 +1,64 @@
|
|
1
|
+
from . import cmd, run_job
|
2
|
+
import subprocess
|
3
|
+
import json
|
4
|
+
import time
|
5
|
+
|
6
|
+
def save_inputs(directory, inputs, types):
    """Placeholder: serializing `inputs` into `directory` is not implemented yet."""
    return None
|
8
|
+
|
9
|
+
class Workflow:
    """Local handle on a named rbbt workflow, driven through the Ruby bridge."""

    def __init__(self, name):
        self.name = name

    def tasks(self):
        """Return the workflow's task names as a list of strings."""
        script = f'Workflow.require_workflow("{self.name}").tasks.keys * "\n"'
        return cmd(script).strip().split("\n")

    def task_info(self, name):
        """Return the task's info as a JSON string (not parsed)."""
        script = f'Workflow.require_workflow("{self.name}").task_info("{name}").to_json'
        return cmd(script)

    def run(self, task, **kwargs):
        """Run `task` synchronously; returns the job's stdout."""
        return run_job(self.name, task, **kwargs)

    def fork(self, task, **kwargs):
        """Start `task` detached and return a Step handle on its job path."""
        job_path = run_job(self.name, task, fork=True, **kwargs)
        return Step(job_path)
|
27
|
+
|
28
|
+
class Step:
    """Handle on a forked rbbt job identified by its on-disk path."""

    def __init__(self, path):
        self.path = path
        self.info_content = None  # cached info once the job reaches a final state

    def info(self):
        """Return the job's info dict, caching it once the job is final."""
        if self.info_content:
            return self.info_content
        script = f'puts Step.load("{self.path}").info.to_json'
        parsed = json.loads(cmd(script))
        if parsed["status"] in ("done", "error", "aborted"):
            self.info_content = parsed
        return parsed

    def status(self):
        return self.info()["status"]

    def done(self):
        return self.status() == 'done'

    def error(self):
        return self.status() == 'error'

    def aborted(self):
        return self.status() == 'aborted'

    def join(self):
        """Block, polling once per second, until the job reaches a final state."""
        while not (self.done() or self.error() or self.aborted()):
            time.sleep(1)

    def load(self):
        """Return the job's result parsed from JSON."""
        script = f'puts Step.load("{self.path}").load.to_json'
        return json.loads(cmd(script))
|
64
|
+
|
data/python/test.py
ADDED
@@ -0,0 +1,12 @@
|
|
1
|
+
if __name__ == "__main__":
    # Manual smoke test: run the Baking workflow from a source checkout.
    import sys
    sys.path.append('python')
    import scout
    import scout.workflow

    wf = scout.workflow.Workflow('Baking')
    step = wf.fork('bake_muffin_tray', add_blueberries=True, clean='recursive')
    step.join()
    print(step.load())
|
10
|
+
|
11
|
+
|
12
|
+
|
data/scout-rig.gemspec
ADDED
@@ -0,0 +1,56 @@
|
|
1
|
+
# Generated by juwelier
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Juwelier::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# stub: scout-rig 0.1.0 ruby lib

Gem::Specification.new do |s|
  s.name = "scout-rig".freeze
  s.version = "0.1.0".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Miguel Vazquez".freeze]
  s.date = "1980-01-02"
  s.description = "Use other coding languages in your scout applications".freeze
  s.email = "mikisvaz@gmail.com".freeze
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".vimproject",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/scout-rig.rb",
    "lib/scout/python.rb",
    "lib/scout/python/paths.rb",
    "lib/scout/python/run.rb",
    "lib/scout/python/script.rb",
    "lib/scout/python/util.rb",
    "python/scout/__init__.py",
    "python/scout/__pycache__/__init__.cpython-310.pyc",
    "python/scout/__pycache__/workflow.cpython-310.pyc",
    "python/scout/workflow.py",
    "python/scout/workflow/remote.py",
    "python/test.py",
    "scout-rig.gemspec",
    "test/scout/python/test_script.rb",
    "test/scout/python/test_util.rb",
    "test/scout/test_python.rb",
    "test/test_helper.rb"
  ]
  s.homepage = "http://github.com/mikisvaz/scout-rig".freeze
  s.licenses = ["MIT".freeze]
  s.rubygems_version = "3.6.8".freeze
  s.summary = "Scouts rigging things together".freeze

  s.specification_version = 4

  s.add_development_dependency(%q<juwelier>.freeze, ["~> 2.1.0".freeze])
  s.add_runtime_dependency(%q<pycall>.freeze, ["> 0".freeze])
end
|
56
|
+
|
@@ -0,0 +1,61 @@
|
|
1
|
+
require File.expand_path(__FILE__).sub(%r(/test/.*), '/test/test_helper.rb')
|
2
|
+
require File.expand_path(__FILE__).sub(%r(.*/test/), '').sub(/test_(.*)\.rb/,'\1')
|
3
|
+
|
4
|
+
require 'scout/tsv'
|
5
|
+
require 'scout/python/paths'
|
6
|
+
class TestPythonScript < Test::Unit::TestCase
  # Keyword bindings are visible inside the script; the python variable
  # `result` is read back on the Ruby side.
  def test_script
    result = ScoutPython.script <<-EOF, value: 2
result = value * 3
    EOF
    assert_equal 6, result
  end

  # Load a TSV file in python via scout.tsv and save it back with save_tsv.
  def test_script_tsv
    tsv = TSV.setup({}, "Key~ValueA,ValueB#:type=:list")
    tsv["k1"] = ["a1", "b1"]
    tsv["k2"] = ["a2", "b2"]

    TmpFile.with_file(tsv.to_s) do |tsv_file|
      TmpFile.with_file do |target|
        result = ScoutPython.script <<-EOF, file: tsv_file, target: target
import scout
df = scout.tsv(file)
result = df.loc["k2", "ValueB"]
scout.save_tsv(target, df)
        EOF
        assert_equal "b2", result

        assert_equal "b2", TSV.open(target, type: :list)["k2"]["ValueB"]
      end

    end
  end

  # A TSV object passed directly as a binding arrives as a DataFrame.
  def test_script_tsv_save
    tsv = TSV.setup({}, "Key~ValueA,ValueB#:type=:list")
    tsv["k1"] = ["a1", "b1"]
    tsv["k2"] = ["a2", "b2"]

    TmpFile.with_file do |target|
      result = ScoutPython.script <<-EOF, df: tsv, target: target
result = df.loc["k2", "ValueB"]
scout.save_tsv(target, df)
      EOF
      assert_equal "b2", result

      assert_equal "b2", TSV.open(target, type: :list)["k2"]["ValueB"]
    end
  end

  # A python error in the script surfaces as ConcurrentStreamProcessFailed.
  def test_script_exception
    assert_raises ConcurrentStreamProcessFailed do
      result = ScoutPython.script <<-EOF
afsdfasdf
      EOF
    end
  end
end
|
61
|
+
|
@@ -0,0 +1,25 @@
|
|
1
|
+
require File.expand_path(__FILE__).sub(%r(/test/.*), '/test/test_helper.rb')
|
2
|
+
require File.expand_path(__FILE__).sub(%r(.*/test/), '').sub(/test_(.*)\.rb/,'\1')
|
3
|
+
|
4
|
+
require 'scout/python'
|
5
|
+
class TestPythonUtil < Test::Unit::TestCase

  # Round-trip a TSV through a pandas DataFrame and back.
  def test_tuple
    tsv = TSV.setup([], :key_field => "Key", :fields => %w(Value1 Value2), :type => :list)
    tsv["k1"] = %w(V1_1 V2_1)
    tsv["k2"] = %w(V1_2 V2_2)
    df = ScoutPython.tsv2df(tsv)
    new_tsv = ScoutPython.df2tsv(df)
    assert_equal tsv, new_tsv
  end

  # Convert a numpy array into nested Ruby arrays.
  def test_numpy
    ra = ScoutPython.run :numpy, :as => :np do
      na = np.array([[[1,2,3], [4,5,6]]])
      ScoutPython.numpy2ruby na
    end
    assert_equal 6, ra[0][1][2]
  end

end
|
25
|
+
|
@@ -0,0 +1,158 @@
|
|
1
|
+
require File.expand_path(__FILE__).sub(%r(/test/.*), '/test/test_helper.rb')
|
2
|
+
require File.expand_path(__FILE__).sub(%r(.*/test/), '').sub(/test_(.*)\.rb/,'\1')
|
3
|
+
|
4
|
+
class TestPython < Test::Unit::TestCase

  # NOTE(review): methods prefixed with `_` are disabled test cases; they
  # require a working PyCall/python environment (and, for some, torch or
  # tensorflow) and are kept for manual runs.

  def _test_python
    TmpFile.with_file do |tmpdir|
      code =<<-EOF
def python_test(a, b):
    c = a + b
    return c
      EOF
      Open.write(File.join(tmpdir, 'file1.py'), code)
      Open.write(File.join(tmpdir, 'file2.py'), code)
      Open.write(File.join(tmpdir, 'file3.py'), code)
      Open.write(File.join(tmpdir, 'file4.py'), code)
      ScoutPython.add_path tmpdir

      res = nil

      # Import a single function by name.
      ScoutPython.run 'file2', :python_test do
        res = python_test(1, 3)
      end
      assert_equal 4, res

      # pyfrom inside the block.
      ScoutPython.run do
        pyfrom :file3, :import => :python_test
        res = python_test(1, 4)
      end
      assert_equal 5, res

      # pyimport inside the block; call through the module object.
      ScoutPython.run do
        pyimport :file4
        res = file4.python_test(1, 4)
      end
      assert_equal 5, res

      # Module given as argument to run.
      ScoutPython.run 'file1' do
        res = file1.python_test(1, 2)
      end
      assert_equal 3, res
    end
  end

  def _test_run_log
    TmpFile.with_file do |tmpdir|
      code =<<-EOF
import sys
def python_print():
    print("Test STDERR", file=sys.stderr)
    print("Test STDOUT")
      EOF
      Open.write(File.join(tmpdir, 'file_print.py'), code)
      ScoutPython.add_path tmpdir

      ScoutPython.run_log 'file_print' do
        file_print.python_print
      end
      ScoutPython.run_log_stderr 'file_print' do
        file_print.python_print
      end

      ScoutPython.run_log 'file_print' do
        file_print.python_print
      end
    end
  end

  def _test_keras
    keyword_test :tensorflow do
      defined = ScoutPython.run do
        pyimport "tensorflow.keras.models", as: :km
        defined?(km.Sequential)
      end
      assert defined
    end
  end

  def _test_keras_import
    keyword_test :tensorflow do
      defined = ScoutPython.run do
        pyfrom "tensorflow.keras.models", import: :Sequential
        defined?(self::Sequential)
      end
      assert defined
    end
  end

  # Iterate python sequences (ndarray and tuple) from Ruby.
  def _test_iterate
    a2, b2 = nil, nil
    ScoutPython.run :numpy, as: :np do
      a = np.array([1,2])
      a2 = ScoutPython.collect a do |e|
        e * 2
      end
      b = PyCall.tuple([1,2])
      b2 = ScoutPython.collect b do |e|
        e * 2
      end
    end
    assert_equal [2,4], a2
    assert_equal [2,4], b2
  end

  def _test_lambda
    l = PyCall.eval "lambda e: e + 2"
    assert_equal 5, l.(3)
  end

  # binding_run makes pyimport-ed names visible only inside its block.
  def test_binding
    raised = false
    ScoutPython.binding_run do
      pyimport :torch
      pyfrom :torch, import: ["nn"]
      begin
        torch
      rescue
        raised = true
      end
    end
    assert ! raised

    raised = false
    ScoutPython.binding_run do
      begin
        torch
      rescue
        raised = true
      end
    end
    assert raised
  end

  def _test_import_method
    random = ScoutPython.import_method :torch, :rand, :random
    assert random.call(1).numpy.to_f > 0
  end

  def _test_class_new_obj
    obj = ScoutPython.class_new_obj("torch.nn", "Module")
    assert_equal "Module()", obj.to_s
  end

  def _test_single
    a = ScoutPython.run_direct :numpy do
      numpy.array([1,2])
    end
    assert a.methods.include? :__pyptr__
  end

  def _test_threaded
    a = ScoutPython.run_threaded :numpy do
      numpy.array([1,2])
    end
    assert a.methods.include? :__pyptr__
  end
end
|
158
|
+
|