ChessAnalysisPipeline 0.0.4-py3-none-any.whl → 0.0.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of ChessAnalysisPipeline might be problematic.
- CHAP/TaskManager.py +214 -0
- CHAP/common/models/__init__.py +0 -2
- CHAP/common/models/integration.py +392 -249
- CHAP/common/models/map.py +350 -198
- CHAP/common/processor.py +229 -191
- CHAP/common/reader.py +52 -39
- CHAP/common/utils/__init__.py +0 -37
- CHAP/common/utils/fit.py +1197 -991
- CHAP/common/utils/general.py +629 -372
- CHAP/common/utils/material.py +158 -121
- CHAP/common/utils/scanparsers.py +735 -339
- CHAP/common/writer.py +31 -25
- CHAP/edd/models.py +65 -51
- CHAP/edd/processor.py +136 -113
- CHAP/edd/reader.py +1 -1
- CHAP/edd/writer.py +1 -1
- CHAP/inference/processor.py +35 -28
- CHAP/inference/reader.py +1 -1
- CHAP/inference/writer.py +1 -1
- CHAP/pipeline.py +14 -28
- CHAP/processor.py +44 -75
- CHAP/reader.py +49 -40
- CHAP/runner.py +73 -32
- CHAP/saxswaxs/processor.py +1 -1
- CHAP/saxswaxs/reader.py +1 -1
- CHAP/saxswaxs/writer.py +1 -1
- CHAP/server.py +130 -0
- CHAP/sin2psi/processor.py +1 -1
- CHAP/sin2psi/reader.py +1 -1
- CHAP/sin2psi/writer.py +1 -1
- CHAP/tomo/__init__.py +1 -4
- CHAP/tomo/models.py +53 -31
- CHAP/tomo/processor.py +1326 -902
- CHAP/tomo/reader.py +4 -2
- CHAP/tomo/writer.py +4 -2
- CHAP/writer.py +47 -41
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/METADATA +1 -1
- ChessAnalysisPipeline-0.0.6.dist-info/RECORD +52 -0
- ChessAnalysisPipeline-0.0.4.dist-info/RECORD +0 -50
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/LICENSE +0 -0
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/WHEEL +0 -0
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/entry_points.txt +0 -0
- {ChessAnalysisPipeline-0.0.4.dist-info → ChessAnalysisPipeline-0.0.6.dist-info}/top_level.txt +0 -0
CHAP/processor.py
CHANGED
@@ -9,101 +9,67 @@ Description: Processor module
 
 # system modules
 import argparse
-import …
+from inspect import getfullargspec
 import logging
-import …
+from sys import modules
 from time import time
 
-# local modules
-# from pipeline import PipelineObject
 
 class Processor():
-    """
-    Processor represent generic processor
-    """
+    """Processor represent generic processor"""
    def __init__(self):
-        """
-        Processor constructor
-        """
+        """Processor constructor"""
        self.__name__ = self.__class__.__name__
        self.logger = logging.getLogger(self.__name__)
        self.logger.propagate = False
 
-    def process(self, data):
-        """
-        process data API
+    def process(self, data, **_process_kwargs):
+        """process data API
+
+        :param _process_kwargs: keyword arguments to pass to
+            `self._process`, defaults to `{}`
+        :type _process_kwargs: dict, optional
        """
 
        t0 = time()
        self.logger.info(f'Executing "process" with type(data)={type(data)}')
 
-        data = self._process(data)
+        _valid_process_args = {}
+        allowed_args = getfullargspec(self._process).args \
+            + getfullargspec(self._process).kwonlyargs
+        for k, v in _process_kwargs.items():
+            if k in allowed_args:
+                _valid_process_args[k] = v
+            else:
+                self.logger.warning(f'Ignoring invalid arg to _process: {k}')
+
+        data = self._process(data, **_valid_process_args)
 
        self.logger.info(f'Finished "process" in {time()-t0:.3f} seconds\n')
 
-        return(data)
+        return data
 
    def _process(self, data):
+        """Private method to carry out the mechanics of the specific
+        Processor.
+
+        :param data: input data
+        :return: processed data
+        """
        # If needed, extract data from a returned value of Reader.read
        if isinstance(data, list):
-            if all(…
+            if all(isinstance(d, dict) for d in data):
                data = data[0]['data']
+            if data is None:
+                return []
        # process operation is a simple print function
        data += "process part\n"
        # and we return data back to pipeline
        return data
 
 
-class TFaaSImageProcessor(Processor):
-    '''
-    A Processor to get predictions from TFaaS inference server.
-    '''
-    def process(self, data, url, model, verbose=False):
-        """
-        process data API
-        """
-
-        t0 = time()
-        self.logger.info(f'Executing "process" with url {url} model {model}')
-
-        data = self._process(data, url, model, verbose)
-
-        self.logger.info(f'Finished "process" in {time()-t0:.3f} seconds\n')
-
-        return(data)
-
-    def _process(self, data, url, model, verbose):
-        '''Print and return the input data.
-
-        :param data: Input image data, either file name or actual image data
-        :type data: object
-        :return: `data`
-        :rtype: object
-        '''
-        from MLaaS.tfaas_client import predictImage
-        from pathlib import Path
-        self.logger.info(f"input data {type(data)}")
-        if isinstance(data, str) and Path(data).is_file():
-            imgFile = data
-            data = predictImage(url, imgFile, model, verbose)
-        else:
-            rdict = data[0]
-            import requests
-            img = rdict['data']
-            session = requests.Session()
-            rurl = url + '/predict/image'
-            payload = dict(model=model)
-            files = dict(image=img)
-            self.logger.info(f"HTTP request {rurl} with image file and {payload} payload")
-            req = session.post(rurl, files=files, data=payload )
-            data = req.content
-            data = data.decode("utf-8").replace('\n', '')
-            self.logger.info(f"HTTP response {data}")
-
-        return(data)
-
 class OptionParser():
-    …
+    """User based option parser"""
    def __init__(self):
        self.parser = argparse.ArgumentParser(prog='PROG')
        self.parser.add_argument(
@@ -116,26 +82,29 @@ class OptionParser():
            '--log-level', choices=logging._nameToLevel.keys(),
            dest='log_level', default='INFO', help='logging level')
 
+
 def main(opt_parser=OptionParser):
-    …
+    """Main function"""
 
-    optmgr …
+    optmgr = opt_parser()
    opts = optmgr.parser.parse_args()
-    …
+    cls_name = opts.processor
    try:
-        …
-    except:
-        print(f'Unsupported processor {…
-        …
+        processor_cls = getattr(modules[__name__], cls_name)
+    except AttributeError:
+        print(f'Unsupported processor {cls_name}')
+        raise
 
-    processor = …
+    processor = processor_cls()
    processor.logger.setLevel(getattr(logging, opts.log_level))
    log_handler = logging.StreamHandler()
-    log_handler.setFormatter(logging.Formatter(…
+    log_handler.setFormatter(logging.Formatter(
+        '{name:20}: {message}', style='{'))
    processor.logger.addHandler(log_handler)
    data = processor.process(opts.data)
 
-    print(f…
+    print(f'Processor {processor} operates on data {data}')
+
 
 if __name__ == '__main__':
    main()
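In 0.0.6, `Processor.process` accepts arbitrary keyword arguments and uses `inspect.getfullargspec` to filter them against the signature of `_process` before dispatching, so a pipeline config can pass per-processor options without every subclass overriding `process`. A minimal standalone sketch of the same pattern; the `ScaleProcessor` class and its `factor` argument are hypothetical, for illustration only:

    from inspect import getfullargspec

    class ScaleProcessor:
        # Hypothetical Processor subclass: multiplies numeric input
        def _process(self, data, factor=2):
            return data * factor

        def process(self, data, **kwargs):
            # Keep only the kwargs that _process actually accepts,
            # mirroring the filtering added in CHAP 0.0.6
            spec = getfullargspec(self._process)
            valid = {k: v for k, v in kwargs.items()
                     if k in spec.args + spec.kwonlyargs}
            return self._process(data, **valid)

    print(ScaleProcessor().process(10, factor=3, bogus=1))  # 30; 'bogus' is dropped

Invalid keywords are logged and dropped rather than raising a TypeError, which keeps loosely specified pipeline configs running.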
CHAP/reader.py
CHANGED
@@ -1,82 +1,88 @@
 #!/usr/bin/env python
-…
+"""
 File : reader.py
 Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
 Description: generic Reader module
-…
+"""
 
 # system modules
 import argparse
-import …
+from inspect import getfullargspec
 import logging
-import …
+from sys import modules
 from time import time
 
-# local modules
-# from pipeline import PipelineObject
 
 class Reader():
-    '''
-    Reader represent generic file writer
-    '''
+    """Reader represent generic file writer"""
 
    def __init__(self):
-        '''
-        Constructor of Reader class
-        '''
+        """Constructor of Reader class"""
        self.__name__ = self.__class__.__name__
        self.logger = logging.getLogger(self.__name__)
        self.logger.propagate = False
 
    def read(self, type_=None, schema=None, encoding=None, **_read_kwargs):
-        '''…
+        """Read API
 
        Wrapper to read, format, and return the data requested.
 
-        :param type_: the expected type of data read from `filename`, …
-            to `None`
+        :param type_: the expected type of data read from `filename`,
+            defualts to `None`
        :type type_: type, optional
-        :param schema: the expected schema of the data read from …
-            defaults to `None`
+        :param schema: the expected schema of the data read from
+            `filename`, defaults to `None`
        :type schema: str, otional
-        :param _read_kwargs: keyword arguments to pass to …
-            to `{}`
+        :param _read_kwargs: keyword arguments to pass to
+            `self._read`, defaults to `{}`
        :type _read_kwargs: dict, optional
-        :return: list with one item: a dictionary containing the data …
-            `filename`, the name of this `Reader`, and the …
-            `schema`.
+        :return: list with one item: a dictionary containing the data
+            read from `filename`, the name of this `Reader`, and the
+            values of `type_` and `schema`.
        :rtype: list[dict[str,object]]
-        '''
+        """
 
        t0 = time()
-        self.logger.info(f'Executing "read" with type={type_}, …
+        self.logger.info(f'Executing "read" with type={type_}, '
+                         f'schema={schema}, kwargs={_read_kwargs}')
+
+        _valid_read_args = {}
+        allowed_args = getfullargspec(self._read).args \
+            + getfullargspec(self._read).kwonlyargs
+        for k, v in _read_kwargs.items():
+            if k in allowed_args:
+                _valid_read_args[k] = v
+            else:
+                self.logger.warning(f'Ignoring invalid arg to _read: {k}')
 
        data = [{'name': self.__name__,
-                 'data': self._read(**…
+                 'data': self._read(**_valid_read_args),
                 'type': type_,
                 'schema': schema,
                 'encoding': encoding}]
 
        self.logger.info(f'Finished "read" in {time()-t0:.3f} seconds\n')
-        return(data)
+        return data
 
    def _read(self, filename):
-        '''…
+        """Read and return the data from requested from `filename`
 
        :param filename: Name of file to read from
        :return: specific number of bytes from a file
-        '''
+        """
 
        if not filename:
-            self.logger.warning(…
+            self.logger.warning(
+                'No file name is given, will skip read operation')
            return None
 
        with open(filename) as file:
            data = file.read()
-        return(data)
+        return data
+
 
 class OptionParser():
-    …
+    """User based option parser"""
    def __init__(self):
        self.parser = argparse.ArgumentParser(prog='PROG')
        self.parser.add_argument(
@@ -89,26 +95,29 @@ class OptionParser():
            '--log-level', choices=logging._nameToLevel.keys(),
            dest='log_level', default='INFO', help='logging level')
 
+
 def main(opt_parser=OptionParser):
-    …
+    """Main function"""
 
-    optmgr …
+    optmgr = opt_parser()
    opts = optmgr.parser.parse_args()
-    …
+    cls_name = opts.reader
    try:
-        …
-    except:
-        print(f'Unsupported reader {…
-        …
+        reader_cls = getattr(modules[__name__], cls_name)
+    except AttributeError:
+        print(f'Unsupported reader {cls_name}')
+        raise
 
-    reader = …
+    reader = reader_cls()
    reader.logger.setLevel(getattr(logging, opts.log_level))
    log_handler = logging.StreamHandler()
-    log_handler.setFormatter(logging.Formatter(…
+    log_handler.setFormatter(logging.Formatter(
+        '{name:20}: {message}', style='{'))
    reader.logger.addHandler(log_handler)
    data = reader.read(filename=opts.filename)
 
    print(f'Reader {reader} reads from {opts.filename}, data {data}')
 
+
 if __name__ == '__main__':
    main()
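`Reader.read` applies the same signature-based filtering before calling `_read` and returns a single-item list of metadata dictionaries. A usage sketch against the API above; the file path is illustrative and must exist for `_read` to succeed:

    from CHAP.reader import Reader

    reader = Reader()
    # Only kwargs named in _read's signature survive the filter; here
    # 'filename' is forwarded to Reader._read.
    result = reader.read(type_=str, schema='txt', filename='/tmp/example.txt')
    # result resembles:
    # [{'name': 'Reader', 'data': '<file contents>', 'type': <class 'str'>,
    #   'schema': 'txt', 'encoding': None}]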
CHAP/runner.py
CHANGED
@@ -1,80 +1,121 @@
-#!/usr/bin/env python
-#-*- coding: utf-8 -*-
-#pylint: disable=
 """
 File : runner.py
 Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
-Description: …
+Description:
 """
 
 # system modules
 import argparse
 import logging
-import …
-import sys
-import yaml
+from yaml import safe_load
 
 # local modules
 from CHAP.pipeline import Pipeline
 
 
 class OptionParser():
+    """User based option parser"""
    def __init__(self):
-        "…
+        """OptionParser class constructor"""
        self.parser = argparse.ArgumentParser(prog='PROG')
-        self.parser.add_argument(…
-            dest=…
-            …
+        self.parser.add_argument(
+            '--config', action='store', dest='config', default='',
+            help='Input configuration file')
+        self.parser.add_argument(
+            '--interactive', action='store_true', dest='interactive',
+            help='Allow interactive processes')
+        self.parser.add_argument(
+            '--log-level', choices=logging._nameToLevel.keys(),
            dest='log_level', default='INFO', help='logging level')
+        self.parser.add_argument(
+            '--profile', action='store_true', dest='profile',
+            help='profile output')
 
 
 def main():
-    "Main function"
-    optmgr …
+    """Main function"""
+    optmgr = OptionParser()
    opts = optmgr.parser.parse_args()
-    …
+    if opts.profile:
+        from cProfile import runctx # python profiler
+        from pstats import Stats # profiler statistics
+        cmd = 'runner(opts)'
+        runctx(cmd, globals(), locals(), 'profile.dat')
+        info = Stats('profile.dat')
+        info.sort_stats('cumulative')
+        info.print_stats()
+    else:
+        runner(opts)
+
 
 def runner(opts):
+    """Main runner function
+
+    :param opts: object containing input parameters
+    :type opts: OptionParser
    """
-    Main runner function
 
-    …
+    log_level = opts.log_level.upper()
+    logger, log_handler = setLogger(log_level)
+    config = {}
+    with open(opts.config) as file:
+        config = safe_load(file)
+    logger.info(f'Input configuration: {config}\n')
+    pipeline_config = config.get('pipeline', [])
+    run(pipeline_config, opts.interactive, logger, log_level, log_handler)
+
+def setLogger(log_level="INFO"):
    """
+    Helper function to set CHAP logger
 
+    :param log_level: logger level, default INFO
+    """
    logger = logging.getLogger(__name__)
-    log_level = getattr(logging, …
+    log_level = getattr(logging, log_level.upper())
    logger.setLevel(log_level)
    log_handler = logging.StreamHandler()
-    log_handler.setFormatter(logging.Formatter(…
+    log_handler.setFormatter(logging.Formatter(
+        '{name:20}: {message}', style='{'))
    logger.addHandler(log_handler)
+    return logger, log_handler
 
-…
-…
-…
-…
-… pipeline_config …
+def run(pipeline_config, interactive=False, logger=None, log_level=None, log_handler=None):
+    """
+    Run given pipeline_config
+
+    :param pipeline_config: CHAP pipeline config
+    """
    objects = []
    kwds = []
    for item in pipeline_config:
        # load individual object with given name from its module
+        kwargs = {'interactive': interactive}
        if isinstance(item, dict):
            name = list(item.keys())[0]
-            …
+            # Combine the "interactive" command line argument with the object's keywords
+            # giving precedence of "interactive" in the latter
+            kwargs = {**kwargs, **item[name]}
        else:
            name = item
-            kwargs = {}
        modName, clsName = name.split('.')
        module = __import__(f'CHAP.{modName}', fromlist=[clsName])
        obj = getattr(module, clsName)()
-        …
-        …
-        …
+        if log_level:
+            obj.logger.setLevel(log_level)
+        if log_handler:
+            obj.logger.addHandler(log_handler)
+        if logger:
+            logger.info(f'Loaded {obj}')
        objects.append(obj)
        kwds.append(kwargs)
    pipeline = Pipeline(objects, kwds)
-    …
-    …
-    …
-    …
+    if log_level:
+        pipeline.logger.setLevel(log_level)
+    if log_handler:
+        pipeline.logger.addHandler(log_handler)
+    if logger:
+        logger.info(f'Loaded {pipeline} with {len(objects)} items\n')
+        logger.info(f'Calling "execute" on {pipeline}')
    pipeline.execute()
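The reworked runner splits into `main` (argument parsing plus optional cProfile wrapping), `runner` (YAML config loading), a `setLogger` helper, and a reusable `run` that instantiates each `module.Class` entry under `CHAP.<module>` and threads the new `--interactive` flag into every object's keyword arguments. A sketch of driving `run` directly, assuming a config equivalent to what `--config` would load; `common.PrintProcessor` is the class used in the server example below:

    from yaml import safe_load
    from CHAP.runner import run, setLogger

    # Inline stand-in for a --config YAML file
    config = safe_load('''
    pipeline:
      - common.PrintProcessor: {}
    ''')

    logger, log_handler = setLogger('INFO')
    run(config.get('pipeline', []), interactive=False,
        logger=logger, log_level='INFO', log_handler=log_handler)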
CHAP/saxswaxs/processor.py
CHANGED
CHAP/saxswaxs/reader.py
CHANGED
CHAP/saxswaxs/writer.py
CHANGED
CHAP/server.py
ADDED
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#pylint: disable=
+"""
+File : server.py
+Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
+Description: Python server with thread pool and CHAP pipeline
+
+### Client side:
+cat /tmp/chap.json
+{
+    "pipeline": [{"common.PrintProcessor": {}}],
+    "input": 1
+}
+
+### curl call to the server with our CHAP pipeline
+curl -X POST -H "Content-type: application/json" -d@/tmp/chap.json http://localhost:5000/pipeline
+{"pipeline":[{"common.PrintProcessor":{}}],"status":"ok"}
+
+### Server side:
+flask --app server run
+ * Serving Flask app 'server'
+ * Debug mode: off
+WARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.
+ * Running on http://127.0.0.1:5000
+Press CTRL+C to quit
+...
+
+CHAP.server         : call pipeline args=() kwds={'pipeline': [{'common.PrintProcessor': {}}]}
+CHAP.server         : pipeline
+[{'common.PrintProcessor': {}}]
+CHAP.server         : Loaded <CHAP.common.processor.PrintProcessor object at 0x10e0f1ed0>
+CHAP.server         : Loaded <CHAP.pipeline.Pipeline object at 0x10e0f1f10> with 1 items
+
+CHAP.server         : Calling "execute" on <CHAP.pipeline.Pipeline object at 0x10e0f1f10>
+Pipeline            : Executing "execute"
+
+Pipeline            : Calling "process" on <CHAP.common.processor.PrintProcessor object at 0x10e0f1ed0>
+PrintProcessor      : Executing "process" with type(data)=<class 'NoneType'>
+PrintProcessor data :
+None
+PrintProcessor      : Finished "process" in 0.000 seconds
+
+Pipeline            : Executed "execute" in 0.000 seconds
+127.0.0.1 - - [07/Apr/2023 09:11:22] "POST /pipeline HTTP/1.1" 200 -
+"""
+
+# system modules
+import time
+import logging
+from queue import Queue
+
+# thrid-party modules
+
+# Flask modules
+from flask import Flask, request, jsonify
+
+# CHAP modules
+from CHAP.TaskManager import TaskManager, start_new_thread
+from CHAP.runner import run, setLogger
+
+
+# Task manager to execute our tasks
+taskManager = TaskManager()
+
+# Flask Server
+app = Flask(__name__)
+
+# daemon task queue
+task_queue = Queue()
+
+@app.route("/")
+def index_route():
+    """
+    Server main end-point
+    """
+    return "CHAP daemon"
+
+@app.route("/run")
+def run_route():
+    """
+    Server main end-point
+    """
+    task = request.args.get('task')
+    task_queue.put(task)
+    return f"Execute {task}"
+
+@app.route("/pipeline", methods=["POST"])
+def pipeline_route():
+    """
+    Server /pipeline end-point
+    """
+    content = request.json
+    if 'pipeline' in content:
+        # spawn new pipeline task
+        jobs = []
+        jobs.append(taskManager.spawn(task, pipeline=content['pipeline']))
+        taskManager.joinall(jobs)
+        return {"status": "ok", "pipeline": content['pipeline']}
+    else:
+        return {"status": "fail", "reason": "no pipeline in incoming request"}
+
+def task(*args, **kwds):
+    """
+    Helper function to execute CHAP pipeline
+    """
+    log_level = "INFO"
+    logger, log_handler = setLogger(log_level)
+    logger.info(f"call pipeline args={args} kwds={kwds}")
+    pipeline = kwds['pipeline']
+    logger.info(f"pipeline\n{pipeline}")
+    run(pipeline, logger, log_level, log_handler)
+
+def daemon(name, queue, interval):
+    """
+    Daemon example based on Queue
+    """
+    print(f"Daemon {name}")
+    while True:
+        if queue.qsize() == 0:
+            print("Default action")
+            time.sleep(interval)
+        else:
+            task = queue.get()
+            if task == "exit":
+                return
+            print(f"daemon run {task}")
+
+# start daemon thread in addition to Flask server
+start_new_thread("daemon", daemon, ("daemon", task_queue, 3))
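The new `/pipeline` endpoint hands the posted pipeline config to `TaskManager.spawn` and blocks until it finishes. Equivalent to the curl example in the module docstring, posted from Python; the server is assumed to be running locally on port 5000:

    import requests

    payload = {'pipeline': [{'common.PrintProcessor': {}}]}
    resp = requests.post('http://localhost:5000/pipeline', json=payload)
    print(resp.json())
    # {'pipeline': [{'common.PrintProcessor': {}}], 'status': 'ok'}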
CHAP/sin2psi/processor.py
CHANGED
CHAP/sin2psi/reader.py
CHANGED
CHAP/sin2psi/writer.py
CHANGED