ChessAnalysisPipeline 0.0.13-py3-none-any.whl → 0.0.15-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of ChessAnalysisPipeline has been flagged as potentially problematic by the registry's analysis.

CHAP/foxden/__init__.py ADDED
@@ -0,0 +1,6 @@
+"""This subpackage contains pieces for communication with FOXDEN services.
+"""
+
+from CHAP.foxden.processor import (
+    FoxdenProvenanceProcessor,
+)
CHAP/foxden/processor.py ADDED
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#pylint: disable=
+"""
+File : processor.py
+Author : Valentin Kuznetsov <vkuznet AT gmail dot com>
+Description: Processor module for FOXDEN services
+"""
+
+# system modules
+from time import time
+
+# local modules
+from CHAP import Processor
+from CHAP.foxden.writer import FoxdenWriter
+
+
+class FoxdenProvenanceProcessor(Processor):
+    """A Processor to communicate with FOXDEN provenance server."""
+    # def __init__(self):
+    #     self.writer = FoxdenWriter()
+
+    def process(self, data, url, dryRun=False, verbose=False):
+        """process data API"""
+
+        t0 = time()
+        self.logger.info(f'Executing "process" with url {url} data {data} dryrun {dryRun}')
+        writer = FoxdenWriter()
+
+        # data = self.writer.write(data, url, dryRun)
+        data = writer.write(data, url, dryRun=dryRun)
+
+        self.logger.info(f'Finished "process" in {time()-t0:.3f} seconds\n')
+
+        return data
+
+
+if __name__ == '__main__':
+    # local modules
+    from CHAP.processor import main
+
+    main()
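For orientation (not part of the diff): FoxdenProvenanceProcessor simply delegates to the new FoxdenWriter, so it can be exercised without a FOXDEN service by passing dryRun=True. A minimal sketch, assuming the CHAP base Processor supplies the self.logger attribute used above; the endpoint and payload below are placeholders, not values from this release.

# Illustrative sketch only -- not shipped in this release.
from CHAP.foxden.processor import FoxdenProvenanceProcessor

processor = FoxdenProvenanceProcessor()
# With dryRun=True the underlying FoxdenWriter prints the request it would
# have sent and returns an empty list, so no token or network access is needed.
result = processor.process(
    data='{"did": "/example/dataset"}',           # placeholder provenance record
    url='https://foxden.example.org/provenance',  # placeholder FOXDEN endpoint
    dryRun=True)
print(result)  # []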
CHAP/foxden/writer.py ADDED
@@ -0,0 +1,65 @@
+"""FOXDE command line writer."""
+
+# system modules
+import os
+
+# Local modules
+from CHAP.writer import main
+
+class FoxdenWriter():
+    """FOXDEN writer writes data to specific FOXDEN service
+    """
+
+    def write(self, data, url, method="POST", headers={}, timeout=10, dryRun=False):
+        """Write the input data as text to a file.
+
+        :param data: input data
+        :type data: list[PipelineData]
+        :param url: url of service
+        :type url: str
+        :param method: HTTP method to use, POST for creation and PUT for update
+        :type method: str
+        :param headers: HTTP headers to use
+        :type headers: dictionary
+        :param timeout: timeout of HTTP request
+        :type timeout: str
+        :param dryRun: dryRun option to verify HTTP workflow
+        :type dryRun: boolean
+        :return: contents of the input data
+        :rtype: object
+        """
+        import requests
+        if 'Content-Type' not in headers:
+            headers['Content-type'] = 'application/json'
+        if 'Accept' not in headers:
+            headers['Accept'] = 'application/json'
+        if dryRun:
+            print("### HTTP writer call", url, headers, data)
+            return []
+        token = ""
+        fname = os.getenv("CHESS_WRITE_TOKEN")
+        if not fname:
+            msg = f'CHESS_WRITE_TOKEN env variable is not set'
+            raise Exception(msg)
+        with open(fname, 'r') as istream:
+            token = istream.read()
+        if token:
+            headers["Authorization"] = f"Bearer {token}"
+        else:
+            msg = f'No valid write token found in CHESS_WRITE_TOKEN env variable'
+            raise Exception(msg)
+
+        # make actual HTTP request to FOXDEN service
+        if method.lower() == 'post':
+            resp = requests.post(url, headers=headers, timeout=timeout, data=data)
+        elif method.lower() == 'put':
+            resp = requests.put(url, headers=headers, timeout=timeout, data=data)
+        else:
+            msg = f"unsupporteed method {method}"
+            raise Exception(msg)
+        data = resp.content
+        return data
+
+
+if __name__ == '__main__':
+    main()
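The writer's authentication flow is worth spelling out: CHESS_WRITE_TOKEN must name a file whose contents become the Bearer token, and method selects POST (create) or PUT (update). A hedged sketch of the dry-run and real call paths; the URL and token path are placeholders, not values from this release.

# Illustrative sketch only -- not shipped in this release.
from CHAP.foxden.writer import FoxdenWriter

writer = FoxdenWriter()
payload = '{"did": "/example/dataset"}'          # placeholder record
url = 'https://foxden.example.org/provenance'    # placeholder endpoint

# Dry run: prints the would-be request and returns [] before any token
# lookup or HTTP call is made.
print(writer.write(payload, url, dryRun=True))

# A real write additionally requires CHESS_WRITE_TOKEN to point at a readable
# token file; its contents are sent as "Authorization: Bearer <token>".
# os.environ['CHESS_WRITE_TOKEN'] = '/path/to/token_file'   # placeholder path
# response = writer.write(payload, url, method='POST', timeout=10)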
CHAP/pipeline.py CHANGED
@@ -113,7 +113,10 @@ class PipelineItem():
 
         mod_name, cls_name = schema.rsplit('.', 1)
         module = __import__(f'CHAP.{mod_name}', fromlist=cls_name)
-        model_config = getattr(module, cls_name)(**matching_config, **kwargs)
+        model_kwargs = {k: v for k, v in kwargs.items() \
+            if k not in matching_config}
+        model_config = getattr(module, cls_name)(**matching_config,
+                                                 **model_kwargs)
 
         self.logger.debug(
             f'Got {schema} configuration in {time()-t0:.3f} seconds')
@@ -184,6 +187,8 @@ class MultiplePipelineItem(PipelineItem):
         :type items: list
         :rtype: list[PipelineData]
         """
+        # System modules
+        from tempfile import NamedTemporaryFile
 
         t0 = time()
         self.logger.info(f'Executing {len(items)} PipelineItems')
@@ -203,10 +208,37 @@ class MultiplePipelineItem(PipelineItem):
             mod_name, cls_name = item_name.rsplit('.', 1)
             module = __import__(f'CHAP.{mod_name}', fromlist=cls_name)
             item = getattr(module, cls_name)()
+            # Combine the command line arguments "inputdir",
+            # "outputdir" and "interactive" with the item's arguments
+            # joining "inputdir" and "outputdir" and giving precedence
+            # for "interactive" in the latter
+            args = {**kwargs}
+            if 'inputdir' in item_args:
+                inputdir = os.path.normpath(os.path.join(
+                    args['inputdir'], item_args.pop('inputdir')))
+                if not os.path.isdir(inputdir):
+                    raise OSError(
+                        f'input directory does not exist ({inputdir})')
+                if not os.access(inputdir, os.R_OK):
+                    raise OSError('input directory is not accessible for '
+                                  f'reading ({inputdir})')
+                args['inputdir'] = inputdir
+            if 'outputdir' in item_args:
+                outputdir = os.path.normpath(os.path.join(
+                    args['outputdir'], item_args.pop('outputdir')))
+                if not os.path.isdir(outputdir):
+                    os.makedirs(outputdir)
+                try:
+                    tmpfile = NamedTemporaryFile(dir=outputdir)
+                except:
+                    raise OSError('output directory is not accessible for '
+                                  f'writing ({outputdir})')
+                args['outputdir'] = outputdir
+            args = {**args, **item_args}
             if hasattr(item, 'write'):
-                item.execute(**item_args, **kwargs)[0]
+                item.execute(**args)[0]
             else:
-                data.append(item.execute(**item_args, **kwargs)[0])
+                data.append(item.execute(**args)[0])
 
         self.logger.info(
             f'Finished executing {len(items)} PipelineItems in {time()-t0:.0f}'
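The MultiplePipelineItem change above resolves each item's inputdir/outputdir relative to the pipeline-level directories, verifies read access, creates missing output directories, and probes write access by opening a temporary file there. A standalone sketch of that pattern follows; the helper name is hypothetical and this is not CHAP code.

# Standalone sketch of the directory-resolution pattern the diff adds.
import os
from tempfile import NamedTemporaryFile

def resolve_dirs(base_inputdir, base_outputdir, item_args):
    args = {'inputdir': base_inputdir, 'outputdir': base_outputdir}
    if 'inputdir' in item_args:
        # Join the per-item relative path onto the pipeline-level directory.
        inputdir = os.path.normpath(
            os.path.join(base_inputdir, item_args.pop('inputdir')))
        if not os.path.isdir(inputdir) or not os.access(inputdir, os.R_OK):
            raise OSError(f'input directory is not readable ({inputdir})')
        args['inputdir'] = inputdir
    if 'outputdir' in item_args:
        outputdir = os.path.normpath(
            os.path.join(base_outputdir, item_args.pop('outputdir')))
        os.makedirs(outputdir, exist_ok=True)
        try:
            # Opening (and discarding) a temporary file is how the diff
            # probes for write access.
            with NamedTemporaryFile(dir=outputdir):
                pass
        except OSError:
            raise OSError(f'output directory is not writable ({outputdir})')
        args['outputdir'] = outputdir
    # Remaining item arguments take precedence over the pipeline-level ones.
    return {**args, **item_args}

# Note: this call creates ./results if it does not already exist.
print(resolve_dirs('.', '.', {'outputdir': 'results', 'interactive': False}))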
CHAP/runner.py CHANGED
@@ -17,11 +17,11 @@ from CHAP.pipeline import Pipeline
 class RunConfig():
     """Representation of Pipeline run configuration."""
     opts = {'root': os.getcwd(),
-            'profile': False,
+            'inputdir': '.',
+            'outputdir': '.',
             'interactive': False,
             'log_level': 'INFO',
-            'inputdir': '.',
-            'outputdir': '.'}
+            'profile': False}
 
     def __init__(self, config={}):
         """RunConfig constructor
@@ -37,12 +37,12 @@ class RunConfig():
 
         # Check if root exists (create it if not) and is readable
        if not os.path.isdir(self.root):
-            os.mkdir(self.root)
+            os.makedirs(self.root)
        if not os.access(self.root, os.R_OK):
            raise OSError('root directory is not accessible for reading '
                          f'({self.root})')
 
-        # Check if input exists and is readable
+        # Check if inputdir exists and is readable
        if not os.path.isabs(self.inputdir):
            self.inputdir = os.path.realpath(
                os.path.join(self.root, self.inputdir))
@@ -52,12 +52,12 @@ class RunConfig():
            raise OSError('input directory is not accessible for reading '
                          f'({self.inputdir})')
 
-        # Check if output exists (create it if not) and is writable
+        # Check if outputdir exists (create it if not) and is writable
        if not os.path.isabs(self.outputdir):
            self.outputdir = os.path.realpath(
                os.path.join(self.root, self.outputdir))
        if not os.path.isdir(self.outputdir):
-            os.mkdir(self.outputdir)
+            os.makedirs(self.outputdir)
        try:
            tmpfile = NamedTemporaryFile(dir=self.outputdir)
        except:
@@ -127,30 +127,57 @@ def setLogger(log_level="INFO"):
     logger.setLevel(log_level)
     log_handler = logging.StreamHandler()
     log_handler.setFormatter(logging.Formatter(
-        '{name:20}: {message}', style='{'))
+        '{name:20}: {levelname}: {message}', style='{'))
     logger.addHandler(log_handler)
     return logger, log_handler
 
-def run(pipeline_config,
-        inputdir=None, outputdir=None, interactive=False,
+def run(
+        pipeline_config, inputdir=None, outputdir=None, interactive=False,
         logger=None, log_level=None, log_handler=None):
     """
     Run given pipeline_config
 
     :param pipeline_config: CHAP pipeline config
     """
+    # System modules
+    from tempfile import NamedTemporaryFile
+
     objects = []
     kwds = []
     for item in pipeline_config:
         # load individual object with given name from its module
-        kwargs = {'interactive': interactive,
-                  'inputdir': inputdir,
-                  'outputdir': outputdir}
+        kwargs = {'inputdir': inputdir,
+                  'outputdir': outputdir,
+                  'interactive': interactive}
         if isinstance(item, dict):
             name = list(item.keys())[0]
-            # Combine the "interactive" command line argument with the object's keywords
-            # giving precedence of "interactive" in the latter
-            kwargs = {**kwargs, **item[name]}
+            item_args = item[name]
+            # Combine the function's input arguments "inputdir",
+            # "outputdir" and "interactive" with the item's arguments
+            # joining "inputdir" and "outputdir" and giving precedence
+            # for "interactive" in the latter
+            if 'inputdir' in item_args:
+                newinputdir = os.path.normpath(os.path.join(
+                    kwargs['inputdir'], item_args.pop('inputdir')))
+                if not os.path.isdir(newinputdir):
+                    raise OSError(
+                        f'input directory does not exist ({newinputdir})')
+                if not os.access(newinputdir, os.R_OK):
+                    raise OSError('input directory is not accessible for '
+                                  f'reading ({newinputdir})')
+                kwargs['inputdir'] = newinputdir
+            if 'outputdir' in item_args:
+                newoutputdir = os.path.normpath(os.path.join(
+                    kwargs['outputdir'], item_args.pop('outputdir')))
+                if not os.path.isdir(newoutputdir):
+                    os.makedirs(newoutputdir)
+                try:
+                    tmpfile = NamedTemporaryFile(dir=newoutputdir)
+                except:
+                    raise OSError('output directory is not accessible for '
+                                  f'writing ({newoutputdir})')
+                kwargs['outputdir'] = newoutputdir
+            kwargs = {**kwargs, **item_args}
        else:
            name = item
            if "users" in name:
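The run() rewrite merges the command-line defaults with each item's own arguments; because the item's arguments are unpacked last, they win for keys such as interactive, while inputdir and outputdir have already been joined and validated by that point. The precedence itself is plain dict unpacking, illustrated below with a hypothetical item.

# Illustration of the merge precedence used in run(); the item is hypothetical.
defaults = {'inputdir': '.', 'outputdir': '.', 'interactive': False}
item_args = {'interactive': True, 'config': 'map.yaml'}
merged = {**defaults, **item_args}
print(merged)
# {'inputdir': '.', 'outputdir': '.', 'interactive': True, 'config': 'map.yaml'}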
CHAP/tomo/models.py CHANGED
@@ -7,7 +7,6 @@ from typing import (
 )
 from pydantic import (
     BaseModel,
-    StrictBool,
     conint,
     conlist,
     confloat,
@@ -93,6 +92,9 @@ class TomoFindCenterConfig(BaseModel):
         min_items=2, max_items=2)]
     center_offset_min: Optional[confloat(allow_inf_nan=False)]
     center_offset_max: Optional[confloat(allow_inf_nan=False)]
+    center_search_range: Optional[conlist(
+        item_type=confloat(allow_inf_nan=False),
+        min_items=1, max_items=3)]
     gaussian_sigma: Optional[confloat(ge=0, allow_inf_nan=False)]
     ring_width: Optional[confloat(ge=0, allow_inf_nan=False)]
 
@@ -111,6 +113,10 @@ class TomoReconstructConfig(BaseModel):
     :ivar secondary_iters: Number of secondary iterations in the tomopy
         image reconstruction algorithm, defaults to 0.
     :type secondary_iters: int, optional
+    :ivar gaussian_sigma: Standard deviation for the Gaussian filter
+        applied to image reconstruction visualizations, defaults to no
+        filtering performed.
+    :type gaussian_sigma: float, optional
     :ivar remove_stripe_sigma: Damping parameter in Fourier space in
         tomopy's horizontal stripe removal tool, defaults to no
         correction performed.
@@ -126,6 +132,7 @@ class TomoReconstructConfig(BaseModel):
     z_bounds: Optional[
         conlist(item_type=conint(ge=-1), min_items=2, max_items=2)]
     secondary_iters: conint(ge=0) = 0
+    gaussian_sigma: Optional[confloat(ge=0, allow_inf_nan=False)]
     remove_stripe_sigma: Optional[confloat(ge=0, allow_inf_nan=False)]
     ring_width: Optional[confloat(ge=0, allow_inf_nan=False)]
 
@@ -160,9 +167,11 @@ class TomoSimConfig(BaseModel):
     :type detector: Detector
     :ivar sample_type: Sample type for the tomography simulator.
     :type sample_type: Literal['square_rod', 'square_pipe',
-        'hollow_cube', 'hollow_brick']
+        'hollow_cube', 'hollow_brick', 'hollow_pyramid']
     :ivar sample_size: Size of each sample dimension in mm (internally
-        converted to an integer number of pixels).
+        converted to an integer number of pixels). Enter three values
+        for sample_type == `'hollow_pyramid'`, the height and the side
+        at the respective bottom and the top of the pyramid.
     :type sample_size: list[float]
     :ivar wall_thickness: Wall thickness for pipe, cube, and brick in
         mm (internally converted to an integer number of pixels).
@@ -184,10 +193,11 @@ class TomoSimConfig(BaseModel):
     station: Literal['id1a3', 'id3a', 'id3b']
     detector: Detector.construct()
     sample_type: Literal[
-        'square_rod', 'square_pipe', 'hollow_cube', 'hollow_brick']
+        'square_rod', 'square_pipe', 'hollow_cube', 'hollow_brick',
+        'hollow_pyramid']
     sample_size: conlist(
         item_type=confloat(gt=0, allow_inf_nan=False),
-        min_items=1, max_items=2)
+        min_items=1, max_items=3)
     wall_thickness: Optional[confloat(ge=0, allow_inf_nan=False)]
     mu: Optional[confloat(gt=0, allow_inf_nan=False)] = 0.05
     theta_step: confloat(gt=0, allow_inf_nan=False)
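The model changes add an optional center_search_range of one to three finite floats, a gaussian_sigma field for reconstruction visualizations, and a 'hollow_pyramid' sample type whose sample_size takes three values. A minimal pydantic v1 style sketch of the same constrained-field pattern, using a standalone model rather than the actual CHAP classes:

# Illustrative sketch only, mirroring the conlist/confloat constraints above;
# assumes pydantic v1, the API style used throughout CHAP.tomo.models.
from typing import Optional
from pydantic import BaseModel, ValidationError, confloat, conlist

class CenterSearchSketch(BaseModel):
    # Like center_search_range: an optional list of one to three finite floats.
    center_search_range: Optional[conlist(
        item_type=confloat(allow_inf_nan=False),
        min_items=1, max_items=3)]
    # Like the new gaussian_sigma field: an optional non-negative finite float.
    gaussian_sigma: Optional[confloat(ge=0, allow_inf_nan=False)]

print(CenterSearchSketch(center_search_range=[-10, 10, 0.5], gaussian_sigma=1.0))

try:
    CenterSearchSketch(center_search_range=[1, 2, 3, 4])  # too many items
except ValidationError as exc:
    print(exc)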