ChessAnalysisPipeline 0.0.17.dev3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. CHAP/TaskManager.py +216 -0
  2. CHAP/__init__.py +27 -0
  3. CHAP/common/__init__.py +57 -0
  4. CHAP/common/models/__init__.py +8 -0
  5. CHAP/common/models/common.py +124 -0
  6. CHAP/common/models/integration.py +659 -0
  7. CHAP/common/models/map.py +1291 -0
  8. CHAP/common/processor.py +2869 -0
  9. CHAP/common/reader.py +658 -0
  10. CHAP/common/utils.py +110 -0
  11. CHAP/common/writer.py +730 -0
  12. CHAP/edd/__init__.py +23 -0
  13. CHAP/edd/models.py +876 -0
  14. CHAP/edd/processor.py +3069 -0
  15. CHAP/edd/reader.py +1023 -0
  16. CHAP/edd/select_material_params_gui.py +348 -0
  17. CHAP/edd/utils.py +1572 -0
  18. CHAP/edd/writer.py +26 -0
  19. CHAP/foxden/__init__.py +19 -0
  20. CHAP/foxden/models.py +71 -0
  21. CHAP/foxden/processor.py +124 -0
  22. CHAP/foxden/reader.py +224 -0
  23. CHAP/foxden/utils.py +80 -0
  24. CHAP/foxden/writer.py +168 -0
  25. CHAP/giwaxs/__init__.py +11 -0
  26. CHAP/giwaxs/models.py +491 -0
  27. CHAP/giwaxs/processor.py +776 -0
  28. CHAP/giwaxs/reader.py +8 -0
  29. CHAP/giwaxs/writer.py +8 -0
  30. CHAP/inference/__init__.py +7 -0
  31. CHAP/inference/processor.py +69 -0
  32. CHAP/inference/reader.py +8 -0
  33. CHAP/inference/writer.py +8 -0
  34. CHAP/models.py +227 -0
  35. CHAP/pipeline.py +479 -0
  36. CHAP/processor.py +125 -0
  37. CHAP/reader.py +124 -0
  38. CHAP/runner.py +277 -0
  39. CHAP/saxswaxs/__init__.py +7 -0
  40. CHAP/saxswaxs/processor.py +8 -0
  41. CHAP/saxswaxs/reader.py +8 -0
  42. CHAP/saxswaxs/writer.py +8 -0
  43. CHAP/server.py +125 -0
  44. CHAP/sin2psi/__init__.py +7 -0
  45. CHAP/sin2psi/processor.py +8 -0
  46. CHAP/sin2psi/reader.py +8 -0
  47. CHAP/sin2psi/writer.py +8 -0
  48. CHAP/tomo/__init__.py +15 -0
  49. CHAP/tomo/models.py +210 -0
  50. CHAP/tomo/processor.py +3862 -0
  51. CHAP/tomo/reader.py +9 -0
  52. CHAP/tomo/writer.py +59 -0
  53. CHAP/utils/__init__.py +6 -0
  54. CHAP/utils/converters.py +188 -0
  55. CHAP/utils/fit.py +2947 -0
  56. CHAP/utils/general.py +2655 -0
  57. CHAP/utils/material.py +274 -0
  58. CHAP/utils/models.py +595 -0
  59. CHAP/utils/parfile.py +224 -0
  60. CHAP/writer.py +122 -0
  61. MLaaS/__init__.py +0 -0
  62. MLaaS/ktrain.py +205 -0
  63. MLaaS/mnist_img.py +83 -0
  64. MLaaS/tfaas_client.py +371 -0
  65. chessanalysispipeline-0.0.17.dev3.dist-info/LICENSE +60 -0
  66. chessanalysispipeline-0.0.17.dev3.dist-info/METADATA +29 -0
  67. chessanalysispipeline-0.0.17.dev3.dist-info/RECORD +70 -0
  68. chessanalysispipeline-0.0.17.dev3.dist-info/WHEEL +5 -0
  69. chessanalysispipeline-0.0.17.dev3.dist-info/entry_points.txt +2 -0
  70. chessanalysispipeline-0.0.17.dev3.dist-info/top_level.txt +2 -0
CHAP/reader.py ADDED
@@ -0,0 +1,124 @@
+ #!/usr/bin/env python
+ """
+ File       : reader.py
+ Author     : Valentin Kuznetsov <vkuznet AT gmail dot com>
+ Description: Generic Reader module
+
+ Define a generic `Reader` object.
+ """
+
+ # System modules
+ import argparse
+ import logging
+ import os
+ from sys import modules
+
+ # Third party modules
+ from pydantic import (
+     PrivateAttr,
+     constr,
+     model_validator,
+ )
+
+ # Local modules
+ from CHAP.pipeline import PipelineItem
+
+
+ def validate_reader_model(reader):
+     reader._mapping_filename = reader.filename
+     filename = os.path.normpath(os.path.realpath(
+         os.path.join(reader.inputdir, reader.filename)))
+     if (not os.path.isfile(filename)
+             and not os.path.dirname(reader.filename)):
+         reader.logger.warning(
+             f'Unable to find {reader.filename} in '
+             f'{reader.inputdir}, looking in '
+             f'{reader.outputdir}')
+         filename = os.path.normpath(os.path.realpath(
+             os.path.join(reader.outputdir, reader.filename)))
+     # Note that reader.filename has str type instead of FilePath
+     # since its existence is not yet guaranteed (it can be written
+     # over the course of the pipeline's execution). So postpone
+     # validation until the entire pipeline gets validated.
+     if not os.path.isfile(filename):
+         reader.logger.warning(
+             f'Unable to find {reader.filename} during validation')
+     reader.filename = filename
+     return reader
+
+
+ class Reader(PipelineItem):
+     """Generic file reader.
+
+     The job of any `Reader` in a `Pipeline` is to provide data stored
+     in a file to the next `PipelineItem`. Note that a `Reader` used on
+     its own disrupts the flow of data in a `Pipeline` -- it does not
+     receive or pass along any data returned by the previous
+     `PipelineItem`.
+
+     :ivar filename: Name of the file to read from.
+     :type filename: str
+     """
+     filename: constr(strip_whitespace=True, min_length=1)
+
+     _mapping_filename: str = PrivateAttr(default=None)
+
+     _validate_filename = model_validator(mode="after")(
+         validate_reader_model)
+
+     def read(self):
+         """Read and return the contents of `filename` as text.
+
+         :return: The file content, or `None` if the file could not
+             be read.
+         :rtype: str
+         """
+         if not self.filename:
+             self.logger.warning(
+                 'No file name is given, skipping read operation')
+             return None
+         try:
+             with open(self.filename) as f:
+                 data = f.read()
+         except Exception:
+             self.logger.warning(f'Unable to read {self.filename}')
+             return None
+         return data
+
+
+ class OptionParser():
+     """User based option parser."""
+     def __init__(self):
+         self.parser = argparse.ArgumentParser(prog='PROG')
+         self.parser.add_argument(
+             '--filename', action='store',
+             dest='filename', default='', help='Input file')
+         self.parser.add_argument(
+             '--reader', action='store',
+             dest='reader', default='Reader', help='Reader class name')
+         self.parser.add_argument(
+             '--log-level', choices=logging._nameToLevel.keys(),
+             dest='log_level', default='INFO', help='logging level')
+
+
+ def main(opt_parser=OptionParser):
+     """Main function."""
+     optmgr = opt_parser()
+     opts = optmgr.parser.parse_args()
+     cls_name = opts.reader
+     try:
+         reader_cls = getattr(modules[__name__], cls_name)
+     except AttributeError:
+         print(f'Unsupported reader {cls_name}')
+         raise
+
+     # Pass the file name at construction: `filename` is a required
+     # field, and `read` takes no arguments (the original called
+     # read(filename=...), which does not match its signature)
+     reader = reader_cls(filename=opts.filename)
+     reader.logger.setLevel(getattr(logging, opts.log_level))
+     log_handler = logging.StreamHandler()
+     log_handler.setFormatter(logging.Formatter(
+         '{name:20}: {message}', style='{'))
+     reader.logger.addHandler(log_handler)
+     data = reader.read()
+     print(f'Reader {reader} reads from {opts.filename}, data {data}')
+
+
+ if __name__ == '__main__':
+     main()
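A usage sketch (not part of the package diff): since `filename` is a required field, a `Reader` can also be driven directly. This assumes `PipelineItem` supplies the `inputdir`, `outputdir`, and `logger` attributes touched by the validator, and `data.txt` is a hypothetical input file:

    from CHAP.reader import Reader

    # 'data.txt' is a hypothetical file in the current directory
    reader = Reader(filename='data.txt')
    print(reader.read())  # the file's text content, or None on failure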
CHAP/runner.py ADDED
@@ -0,0 +1,277 @@
+ """
+ File       : runner.py
+ Author     : Valentin Kuznetsov <vkuznet AT gmail dot com>
+ Description: CHAP pipeline runner module
+ """
+
+ def parser():
+     """Return an argument parser for the `CHAP` CLI. This parser has
+     one required argument: the input CHAP configuration file.
+     """
+     # System modules
+     from argparse import ArgumentParser
+
+     pparser = ArgumentParser(prog='PROG')
+     pparser.add_argument(
+         'config', action='store', default='', help='Input configuration file')
+     pparser.add_argument(
+         '-p', '--pipeline', nargs='*', help='Pipeline name(s)')
+     return pparser
+
+ def main():
+     """Main function."""
+     # System modules
+     from yaml import safe_load
+
+     # Local modules
+     from CHAP.models import RunConfig
+
+     try:
+         # Third party modules
+         # pylint: disable=c-extension-no-member
+         from mpi4py import MPI
+
+         have_mpi = True
+         comm = MPI.COMM_WORLD
+     except ImportError:
+         have_mpi = False
+         comm = None
+
+     args = parser().parse_args()
+
+     # Read the input config file
+     configfile = args.config
+     with open(configfile) as file:
+         config = safe_load(file)
+     #RV Add to input_files in provenance data writer
+
+     # Check if executed as a worker spawned by another Processor
+     run_config = RunConfig(**config.pop('config'), comm=comm)
+     if have_mpi and run_config.spawn:
+         # pylint: disable=c-extension-no-member
+         sub_comm = MPI.Comm.Get_parent()
+         common_comm = sub_comm.Merge(True)
+         # Read the worker-specific input config file
+         if run_config.spawn > 0:
+             with open(f'{configfile}_{common_comm.Get_rank()}') as file:
+                 config = safe_load(file)
+             run_config = RunConfig(
+                 **config.pop('config'), comm=common_comm)
+         else:
+             with open(f'{configfile}_{sub_comm.Get_rank()}') as file:
+                 config = safe_load(file)
+             run_config = RunConfig(**config.pop('config'), comm=comm)
+     else:
+         common_comm = comm
+
+     # Get the pipeline configurations
+     sub_pipelines = args.pipeline
+     pipeline_config = []
+     if sub_pipelines is None:
+         for sub_pipeline in config.values():
+             pipeline_config += sub_pipeline
+     else:
+         for sub_pipeline in sub_pipelines:
+             if sub_pipeline in config:
+                 pipeline_config += config.get(sub_pipeline)
+             else:
+                 raise ValueError(
+                     f'Invalid pipeline option: \'{sub_pipeline}\' missing in '
+                     f'the pipeline configuration ({list(config.keys())})')
+
+     # Run the pipeline with or without profiling
+     if run_config.profile:
+         # System modules
+         from cProfile import runctx  # python profiler
+         from pstats import Stats  # profiler statistics
+
+         cmd = 'runner(run_config, pipeline_config, common_comm)'
+         runctx(cmd, globals(), locals(), 'profile.dat')
+         info = Stats('profile.dat')
+         info.sort_stats('cumulative')
+         info.print_stats()
+     else:
+         runner(run_config, pipeline_config, common_comm)
+
+     # Disconnect the spawned worker
+     if have_mpi and run_config.spawn:
+         common_comm.barrier()
+         sub_comm.Disconnect()
+
+ def runner(run_config, pipeline_config, comm=None):
+     """Main runner function.
+
+     :param run_config: CHAP run configuration.
+     :type run_config: CHAP.models.RunConfig
+     :param pipeline_config: CHAP Pipeline configuration.
+     :type pipeline_config: list
+     :param comm: MPI communicator.
+     :type comm: mpi4py.MPI.Comm, optional
+     :return: The pipeline's returned data field.
+     """
+     # System modules
+     from time import time
+
+     # Logging setup
+     logger, log_handler = set_logger(run_config.log_level)
+     logger.info(f'Input pipeline configuration: {pipeline_config}\n')
+
+     # Run the pipeline
+     t0 = time()
+     data = run(run_config, pipeline_config, logger, log_handler, comm)
+     logger.info(f'Executed "run" in {time()-t0:.3f} seconds')
+
+     return data
+
+ def set_logger(log_level='INFO'):
+     """Helper function to set the CHAP logger.
+
+     :param log_level: Logger level, defaults to `"INFO"`.
+     :type log_level: str
+     :return: The CHAP logger and logging handler.
+     :rtype: logging.Logger, logging.StreamHandler
+     """
+     # System modules
+     import logging
+
+     logger = logging.getLogger(__name__)
+     log_level = getattr(logging, log_level.upper())
+     logger.setLevel(log_level)
+     log_handler = logging.StreamHandler()
+     log_handler.setFormatter(logging.Formatter(
+         '{asctime}: {name:20}: {levelname}: {message}',
+         datefmt='%Y-%m-%d %H:%M:%S', style='{'))
+     logger.addHandler(log_handler)
+     return logger, log_handler
+
+ def run(
+         run_config, pipeline_config, logger=None, log_handler=None, comm=None):
+     """Run a given pipeline_config.
+
+     :param run_config: CHAP run configuration.
+     :type run_config: CHAP.models.RunConfig
+     :param pipeline_config: CHAP Pipeline configuration.
+     :type pipeline_config: list
+     :param logger: CHAP logger.
+     :type logger: logging.Logger, optional
+     :param log_handler: Logging handler.
+     :type log_handler: logging.StreamHandler, optional
+     :param comm: MPI communicator.
+     :type comm: mpi4py.MPI.Comm, optional
+     :return: The `data` field of the first item in the returned
+         list of pipeline items.
+     """
+     # System modules
+     from logging import getLogger
+     import os
+     from tempfile import NamedTemporaryFile
+
+     # Local modules
+     from CHAP.pipeline import Pipeline
+
+     # Make sure os.makedirs is only called from the root node
+     if comm is None:
+         rank = 0
+     else:
+         rank = comm.Get_rank()
+
+     pipeline_args = []
+     pipeline_mmcs = []
+     for item in pipeline_config:
+
+         # Load the individual object with the given name from its module
+         config = run_config.model_dump()
+         if isinstance(item, dict):
+             name = list(item.keys())[0]
+             item_args = item.get(name) or {}
+             # Pick "inputdir" and "outputdir" from the item or from
+             # the default run configuration, giving precedence to
+             # the former
+             if 'inputdir' in item_args:
+                 inputdir = item_args.pop('inputdir')
+                 if not os.path.isabs(inputdir):
+                     inputdir = os.path.normpath(os.path.realpath(
+                         os.path.join(run_config.inputdir, inputdir)))
+                 if not os.path.isdir(inputdir):
+                     raise OSError(
+                         f'input directory does not exist ({inputdir})')
+                 if not os.access(inputdir, os.R_OK):
+                     raise OSError('input directory is not accessible for '
+                                   f'reading ({inputdir})')
+                 config['inputdir'] = inputdir
+             if 'outputdir' in item_args:
+                 outputdir = item_args.pop('outputdir')
+                 if not os.path.isabs(outputdir):
+                     outputdir = os.path.normpath(os.path.realpath(
+                         os.path.join(run_config.outputdir, outputdir)))
+                 if not rank:
+                     if not os.path.isdir(outputdir):
+                         os.makedirs(outputdir)
+                     try:
+                         NamedTemporaryFile(dir=outputdir)
+                     except Exception as exc:
+                         raise OSError(
+                             'output directory is not accessible for '
+                             f'writing ({outputdir})') from exc
+                 config['outputdir'] = outputdir
+         else:
+             name = item
+             item_args = {}  # a bare string item carries no arguments
+
+         # Initialize the object's identifiers
+         if 'users' in name:
+             # Load the users module. This is required in CHAPaaS,
+             # which can have a common area for the users module.
+             # Otherwise, individual users' PYTHONPATHs would be
+             # required to load user processors.
+             try:
+                 # Third party modules
+                 # pylint: disable=unused-import
+                 import users
+             except ImportError:
+                 if logger is not None:
+                     logger.error(f'Unable to load {name}')
+                 continue
+             cls_name = name.split('.')[-1]
+             mod_name = '.'.join(name.split('.')[:-1])
+             module = __import__(mod_name, fromlist=[cls_name])
+         else:
+             mod_name, cls_name = name.split('.')
+             module = __import__(f'CHAP.{mod_name}', fromlist=[cls_name])
+
+         pipeline_mmcs.append(getattr(module, cls_name))
+
+         # Initialize the object's runtime arguments
+         item_args['comm'] = comm  #FIX make comm a field in RunConfig?
+         if 'name' not in item_args:
+             item_args['name'] = cls_name
+         item_args.update(config)
+         item_logger = getLogger(name)
+         if log_handler is not None:
+             item_logger.addHandler(log_handler)
+         item_args['logger'] = item_logger
+         if logger is not None:
+             logger.info(
+                 f'Initialized input fields for an instance of {cls_name}')
+         pipeline_args.append(item_args)
+     pipeline = Pipeline(mmcs=pipeline_mmcs, args=pipeline_args)
+     pipeline.logger.setLevel(run_config.log_level)
+     if log_handler is not None:
+         pipeline.logger.addHandler(log_handler)
+     if logger is not None:
+         logger.info(f'Loaded {pipeline} with {len(pipeline_mmcs)} items\n')
+
+     # Make sure os.makedirs completes on the root node before any
+     # node continues
+     if comm is not None:
+         comm.barrier()
+
+     # Execute the pipeline
+     if logger is not None:
+         logger.info(f'Calling "execute" on {pipeline}')
+     result = pipeline.execute()
+     if result:
+         return result[0]['data']
+     return result
+
+
+ if __name__ == '__main__':
+     main()
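A configuration sketch (not part of the package diff): `main()` expects a YAML file whose top-level `config` block feeds `RunConfig` and whose remaining keys name sub-pipelines, each a list of `module.Class` items selectable with `-p`. The snippet below parses a hypothetical configuration the same way `main()` does; the `log_level` and `outputdir` fields are assumed from the `RunConfig` attributes referenced above:

    from yaml import safe_load

    # Hypothetical CHAP configuration mirroring what main() parses
    config = safe_load('''
    config:
      log_level: INFO
      outputdir: out
    pipeline:
      - common.PrintProcessor: {}
    ''')
    run_settings = config.pop('config')   # consumed by RunConfig
    pipeline_config = []
    for sub_pipeline in config.values():  # remaining keys are sub-pipelines
        pipeline_config += sub_pipeline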
CHAP/saxswaxs/__init__.py ADDED
@@ -0,0 +1,7 @@
+ """This subpackage contains `PipelineItems` unique to SAXSWAXS data
+ processing workflows.
+ """
+
+ # from CHAP.saxswaxs.processor import
+ # from CHAP.saxswaxs.reader import
+ # from CHAP.saxswaxs.writer import
CHAP/saxswaxs/processor.py ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python
+ """Processors used only by SAXSWAXS experiments."""
+
+ if __name__ == '__main__':
+     # Local modules
+     from CHAP.processor import main
+
+     main()
CHAP/saxswaxs/reader.py ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python
+ """SAXSWAXS command line reader."""
+
+ if __name__ == '__main__':
+     # Local modules
+     from CHAP.reader import main
+
+     main()
CHAP/saxswaxs/writer.py ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python
+ """SAXSWAXS command line writer."""
+
+ if __name__ == '__main__':
+     # Local modules
+     from CHAP.writer import main
+
+     main()
CHAP/server.py ADDED
@@ -0,0 +1,125 @@
+ #!/usr/bin/env python
+ #-*- coding: utf-8 -*-
+ """
+ File       : server.py
+ Author     : Valentin Kuznetsov <vkuznet AT gmail dot com>
+ Description: Python server with a thread pool and the CHAP pipeline
+
+ ### Client side:
+ cat /tmp/chap.json
+ {"pipeline": [{"common.PrintProcessor": {}}], "input": 1}
+
+ ### curl call to the server with our CHAP pipeline
+ curl -X POST -H "Content-type: application/json" -d@/tmp/chap.json http://localhost:5000/pipeline
+ {"pipeline": [{"common.PrintProcessor":{}}], "status":"ok"}
+
+ ### Server side:
+ flask --app server run
+  * Serving Flask app 'server'
+  * Debug mode: off
+ WARNING: This is a development server. Do not use it in a production
+ deployment. Use a production WSGI server instead.
+  * Running on http://127.0.0.1:5000
+ Press CTRL+C to quit
+ ...
+
+ CHAP.server : Call pipeline args=()
+     kwds={'pipeline': [{'common.PrintProcessor': {}}]}
+ CHAP.server : pipeline [{'common.PrintProcessor': {}}]
+ CHAP.server : Loaded
+     <CHAP.common.processor.PrintProcessor object at 0x10e0f1ed0>
+ CHAP.server : Loaded
+     <CHAP.pipeline.Pipeline object at 0x10e0f1f10> with 1 items
+
+ CHAP.server : Calling "execute" on <CHAP.pipeline.Pipeline
+     object at 0x10e0f1f10>
+ Pipeline : Executing "execute"
+
+ Pipeline : Calling "process" on
+     <CHAP.common.processor.PrintProcessor object at 0x10e0f1ed0>
+ PrintProcessor : Executing "process" with
+     type(data)=<class 'NoneType'>
+ PrintProcessor data :
+ None
+ PrintProcessor : Finished "process" in 0.000 seconds
+
+ Pipeline : Executed "execute" in 0.000 seconds
+ 127.0.0.1 - - [07/Apr/2023 09:11:22] "POST /pipeline HTTP/1.1" 200 -
+ """
+
+ # System modules
+ import time
+ from queue import Queue
+
+ # Third party modules
+ from flask import Flask, request
+
+ # Local modules
+ from CHAP.TaskManager import TaskManager, start_new_thread
+ from CHAP.runner import run, set_logger
+
+
+ # Task manager to execute our tasks
+ taskManager = TaskManager()
+
+ # Flask server
+ app = Flask(__name__)
+
+ # Daemon task queue
+ task_queue = Queue()
+
+ @app.route("/")
+ def index_route():
+     """Server main end-point."""
+     return "CHAP daemon"
+
+ @app.route("/run")
+ def run_route():
+     """Server /run end-point."""
+     ttask = request.args.get('task')
+     task_queue.put(ttask)
+     return f'Execute {ttask}'
+
+ @app.route("/pipeline", methods=["POST"])
+ def pipeline_route():
+     """Server /pipeline end-point."""
+     content = request.json
+     if 'pipeline' in content:
+         # Spawn a new pipeline task
+         jobs = []
+         jobs.append(taskManager.spawn(task, pipeline=content['pipeline']))
+         taskManager.joinall(jobs)
+         return {'status': 'ok', 'pipeline': content['pipeline']}
+     return {'status': 'fail', 'reason': 'no pipeline in incoming request'}
+
+ def task(*args, **kwds):
+     """Helper function to execute a CHAP pipeline."""
+     # Local modules
+     from CHAP.models import RunConfig
+
+     log_level = 'INFO'
+     logger, log_handler = set_logger(log_level)
+     logger.info(f'Call pipeline args={args} kwds={kwds}')
+     pipeline = kwds['pipeline']
+     logger.info(f'pipeline\n{pipeline}')
+     # Match run()'s signature, run(run_config, pipeline_config,
+     # logger, log_handler); the original call passed the arguments
+     # in the wrong order. Assumes RunConfig's other fields all
+     # carry defaults.
+     run(RunConfig(log_level=log_level), pipeline, logger, log_handler)
+
+ def daemon(name, queue, interval):
+     """Daemon example based on a Queue."""
+     print(f'Daemon {name}')
+     while True:
+         if queue.qsize() == 0:
+             print('Default action')
+             time.sleep(interval)
+         else:
+             ttask = queue.get()
+             if ttask == 'exit':
+                 return
+             print(f'daemon run {ttask}')
+
+ # Start the daemon thread in addition to the Flask server
+ start_new_thread('daemon', daemon, ('daemon', task_queue, 3))
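A client sketch (not part of the package diff): the Python equivalent of the curl call in the module docstring, assuming the Flask development server is running on localhost:5000 and the third-party `requests` package is installed:

    import requests

    # Same payload as /tmp/chap.json in the docstring above
    payload = {'pipeline': [{'common.PrintProcessor': {}}], 'input': 1}
    resp = requests.post('http://localhost:5000/pipeline', json=payload)
    print(resp.json())  # expected: {'pipeline': [...], 'status': 'ok'}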
CHAP/sin2psi/__init__.py ADDED
@@ -0,0 +1,7 @@
+ """This subpackage contains `PipelineItems` unique to sin2psi data
+ processing workflows.
+ """
+
+ # from CHAP.sin2psi.processor import
+ # from CHAP.sin2psi.reader import
+ # from CHAP.sin2psi.writer import
CHAP/sin2psi/processor.py ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python
+ """Processors used only by sin2psi experiments."""
+
+ if __name__ == '__main__':
+     # Local modules
+     from CHAP.processor import main
+
+     main()
CHAP/sin2psi/reader.py ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python
+ """sin2psi command line reader."""
+
+ if __name__ == '__main__':
+     # Local modules
+     from CHAP.reader import main
+
+     main()
CHAP/sin2psi/writer.py ADDED
@@ -0,0 +1,8 @@
+ #!/usr/bin/env python
+ """sin2psi command line writer."""
+
+ if __name__ == '__main__':
+     # Local modules
+     from CHAP.writer import main
+
+     main()
CHAP/tomo/__init__.py ADDED
@@ -0,0 +1,15 @@
+ """This subpackage contains `PipelineItems` unique to tomography data
+ processing workflows.
+ """
+
+ from CHAP.tomo.processor import (
+     TomoMetadataProcessor,
+     TomoCHESSMapConverter,
+     TomoDataProcessor,
+     TomoSimFieldProcessor,
+     TomoDarkFieldProcessor,
+     TomoBrightFieldProcessor,
+     TomoSpecProcessor,
+ )
+ # from CHAP.tomo.reader import
+ from CHAP.tomo.writer import TomoWriter