mccode-plumber 0.15.1__tar.gz → 0.17.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. {mccode_plumber-0.15.1/src/mccode_plumber.egg-info → mccode_plumber-0.17.0}/PKG-INFO +3 -3
  2. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/pyproject.toml +2 -2
  3. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/forwarder.py +14 -4
  4. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/manager.py +80 -3
  5. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/orchestrate.py +8 -1
  6. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/splitrun.py +16 -4
  7. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/writer.py +8 -0
  8. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0/src/mccode_plumber.egg-info}/PKG-INFO +3 -3
  9. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber.egg-info/requires.txt +2 -2
  10. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/test_splitrun.py +55 -0
  11. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/test_writer.py +5 -2
  12. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/.github/dependabot.yml +0 -0
  13. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/.github/workflows/pip.yml +0 -0
  14. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/.github/workflows/wheels.yml +0 -0
  15. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/.gitignore +0 -0
  16. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/README.md +0 -0
  17. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/mypy.ini +0 -0
  18. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/setup.cfg +0 -0
  19. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/__init__.py +0 -0
  20. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/conductor.py +0 -0
  21. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/epics.py +0 -0
  22. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/epics_watcher.py +0 -0
  23. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/CommandChannel.py +0 -0
  24. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/CommandHandler.py +0 -0
  25. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/CommandStatus.py +0 -0
  26. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/InThreadStatusTracker.py +0 -0
  27. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/JobHandler.py +0 -0
  28. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/JobStatus.py +0 -0
  29. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/KafkaTopicUrl.py +0 -0
  30. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/StateExtractor.py +0 -0
  31. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/WorkerFinder.py +0 -0
  32. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/WorkerJobPool.py +0 -0
  33. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/WorkerStatus.py +0 -0
  34. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/WriteJob.py +0 -0
  35. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/file_writer_control/__init__.py +0 -0
  36. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/kafka.py +0 -0
  37. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/__init__.py +0 -0
  38. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/efu.py +0 -0
  39. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/ensure.py +0 -0
  40. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/epics.py +0 -0
  41. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/forwarder.py +0 -0
  42. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/manage/writer.py +0 -0
  43. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/mccode.py +0 -0
  44. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber/utils.py +0 -0
  45. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber.egg-info/SOURCES.txt +0 -0
  46. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber.egg-info/dependency_links.txt +0 -0
  47. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber.egg-info/entry_points.txt +0 -0
  48. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/src/mccode_plumber.egg-info/top_level.txt +0 -0
  49. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/fake_efu.py +0 -0
  50. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/fake_manager.py +0 -0
  51. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/test_epics.py +0 -0
  52. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/test_management.py +0 -0
  53. {mccode_plumber-0.15.1 → mccode_plumber-0.17.0}/tests/test_orchestration_utils.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mccode-plumber
-Version: 0.15.1
+Version: 0.17.0
 Author-email: Gregory Tucker <gregory.tucker@ess.eu>
 Classifier: License :: OSI Approved :: BSD License
 Classifier: Programming Language :: Python :: 3
@@ -13,8 +13,8 @@ Requires-Dist: p4p
 Requires-Dist: kafka-python>=2.2.11
 Requires-Dist: ess-streaming-data-types>=0.14.0
 Requires-Dist: restage>=0.10.1
-Requires-Dist: mccode-to-kafka>=0.3.1
-Requires-Dist: moreniius>=0.6.3
+Requires-Dist: mccode-to-kafka>=0.5.0
+Requires-Dist: moreniius>=0.7.0
 Requires-Dist: icecream
 Requires-Dist: ephemeral-port-reserve
 Provides-Extra: test
pyproject.toml
@@ -9,8 +9,8 @@ dependencies = [
     'kafka-python>=2.2.11',
     'ess-streaming-data-types>=0.14.0',
     'restage>=0.10.1',
-    'mccode-to-kafka>=0.3.1',
-    'moreniius>=0.6.3',
+    'mccode-to-kafka>=0.5.0',
+    'moreniius>=0.7.0',
     'icecream',
     'ephemeral-port-reserve',
 ]
src/mccode_plumber/forwarder.py
@@ -7,6 +7,7 @@ list of EPICS PVs to monitor.
 Alternatively, the same functionality can be accessed from Python using the configure_forwarder and reset_forwarder
 functions. Which take PV information and Forwarder/Kafka configuration as arguments.
 """
+from mccode_antlr.common import InstrumentParameter


 def normalise_pvs(pvs: list[dict], config=None, prefix=None, topic=None):
@@ -59,10 +60,19 @@ def reset_forwarder(pvs: list[dict], config=None, prefix=None, topic=None):
     return pvs


-def forwarder_partial_streams(prefix, topic, parameters):
-    names = [p.name for p in parameters]
-    if 'mcpl_filename' not in names:
-        names.append("mcpl_filename")
+def forwarder_partial_streams(prefix: str, topic: str, parameters: list[InstrumentParameter]):
+    from mccode_antlr.common import DataType
+    # The streaming-data-type f144 only supports numeric data, so we need to
+    # filter out string-valued data types to avoid annoying error messages in the
+    # forwarder's log output.
+    names = [p.name for p in parameters if p.value.data_type is not DataType.str]
+
+    # splitrun adds an instrument parameter named 'mcpl_filename', but we also
+    # can not forward this since it is a string-valued parameter.
+    #
+    # if 'mcpl_filename' not in names:
+    #     names.append("mcpl_filename")
+
     # Minimal information used by the forwarder for stream setup:
     partial = [dict(source=f'{prefix}{n}', module='f144', topic=topic) for n in names]
     return partial
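For context, a minimal sketch of the selection rule introduced above, using stand-in parameter objects rather than real mccode_antlr InstrumentParameter instances (all names below are illustrative, not part of the package):

from dataclasses import dataclass
from enum import Enum


class DataType(Enum):
    # Stand-in for mccode_antlr.common.DataType; only the members used here.
    float = 1
    str = 2


@dataclass
class Value:
    # Stand-in for the value object carried by an instrument parameter.
    data_type: DataType


@dataclass
class Param:
    # Stand-in for mccode_antlr.common.InstrumentParameter.
    name: str
    value: Value


params = [Param('chopper_speed', Value(DataType.float)),
          Param('mcpl_filename', Value(DataType.str))]

# Same rule as forwarder_partial_streams: keep only numeric parameters,
# then emit one f144 stream entry per surviving name.
names = [p.name for p in params if p.value.data_type is not DataType.str]
streams = [dict(source=f'mcstas:{n}', module='f144', topic='parameters') for n in names]
print(streams)  # only 'chopper_speed' survives; the string-valued parameter is dropped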
src/mccode_plumber/manage/manager.py
@@ -1,5 +1,5 @@
 from __future__ import annotations
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from subprocess import Popen, PIPE
 from threading import Thread
 from enum import Enum
@@ -12,6 +12,54 @@ class IOType(Enum):
     stderr = 2


+@dataclass
+class Triage:
+    level: str = field(default=lambda: 'info')
+    ignore: list[str] = field(default_factory=list)
+    patterns: dict[str, list[str]] = field(default_factory=lambda: {
+        'critical': [r'\bcritical\b', r'^cri'],
+        'error': [r'\berror\b', r'exception', r'traceback', r'^err'],
+        'warning': [r'\bwarn(ing)?\b', r'deprecated', r'^war'],
+        'notice': [r'\bnotice\b', r'^not'],
+        'info': [r'\binfo\b', r'starting', r'done'],
+        'hint': [r'\bhint\b'],
+        'debug': [r'\bdebug\b', r'^deb', r'^dbg'],
+    })
+    styles: dict[str, str] = field(default_factory=lambda: {
+        'critical': Fore.MAGENTA + Style.BRIGHT,
+        'error': Fore.RED + Style.BRIGHT,
+        'warning': Fore.YELLOW + Style.BRIGHT,
+        'notice': Fore.CYAN + Style.BRIGHT,
+        'info': Fore.GREEN,
+        'hint': Fore.BLUE,
+        'debug': Fore.WHITE + Style.BRIGHT,
+        'default': Fore.WHITE,
+    })
+
+    def _filtered_level(self, level: str) -> bool:
+        def _level_value(v: str):
+            for i, lvl in enumerate(self.patterns.keys()):
+                if v == lvl:
+                    return i
+            return -1
+        return _level_value(level) > _level_value(self.level)
+
+    def _style_line(self, level: str, line: str):
+        return self.styles.get(level, '') + line + Style.RESET_ALL
+
+    def __call__(self, line: str) -> tuple[bool, str | None]:
+        import re
+        # If the line contains an ignored keyword, ignore it.
+        if any(kw in line for kw in self.ignore):
+            return True, None
+        # Check if we can identify the status level of this message
+        for level, patterns in self.patterns.items():
+            for pattern in patterns:
+                if re.search(pattern, line, re.IGNORECASE):
+                    return self._filtered_level(level), self._style_line(level, line)
+        return self._filtered_level('default'), self._style_line('default', line)
+
+
 @dataclass
 class Manager:
     """
@@ -19,13 +67,24 @@ class Manager:

     Properties
     ----------
+    name: str
+        The name of the process, used as a prefix for all printed status messages
+    style: AnsiStyle
+        Format string to style the printed process name
+    _triage: Triage
+        An object to filter status messages and identify severity levels
+        applying its own message styling based on the identified level
     _process: a subprocess.Popen instance
+    _stdout_thread: Thread
+    _stderr_thread: Thread
     """
     name: str
     style: AnsiStyle
+    triage: Triage
     _process: Popen | None
     _stdout_thread: Thread | None
     _stderr_thread: Thread | None
+    _name_padding: int

     def __run_command__(self) -> list[str]:
         return []
@@ -38,6 +97,18 @@ class Manager:
         from dataclasses import fields
         return [field.name for field in fields(cls)]

+    @property
+    def name_padding(self):
+        return self._name_padding
+
+    @name_padding.setter
+    def name_padding(self, value: int):
+        self._name_padding = value
+
+    def _pretty_name(self):
+        padding = ' ' * self.name_padding
+        return f'{self.style}{self.name}:{Style.RESET_ALL}{padding}'
+
     def _read_stream(self, stream, io_type: IOType):
         """Read lines from stream and print them until EOF.

@@ -51,8 +122,10 @@ class Manager:
         for line in iter(stream.readline, ''):
             if not line:
                 break
-            # format and print the line, preserving original behaviour
-            formatted = f'{self.style}{self.name}:{Style.RESET_ALL} {line}'
+            ignored, line = self.triage(line)
+            if ignored:
+                continue
+            formatted = f'{self._pretty_name()} {line}'
             if io_type == IOType.stdout:
                 print(formatted, end='')
             else:
@@ -79,6 +152,10 @@ class Manager:
             kwargs['name'] = 'Managed process'
         if 'style' not in kwargs:
             kwargs['style'] = Fore.WHITE + Back.BLACK
+        if 'triage' not in kwargs:
+            kwargs['triage'] = Triage()
+        if '_name_padding' not in kwargs:
+            kwargs['_name_padding'] = 0

         manager = cls(**kwargs)

src/mccode_plumber/manage/orchestrate.py
@@ -261,6 +261,7 @@ def load_in_wait_load_out(
     )
     from mccode_plumber.manage.forwarder import forwarder_verbosity
     from mccode_plumber.manage.writer import writer_verbosity
+    from mccode_plumber.manage.manager import Triage

     # Start up services if they should be managed locally
     if manage:
@@ -283,7 +284,10 @@ def load_in_wait_load_out(
             efu = [EventFormationUnitConfig.from_dict(data)]
         things = tuple(
             EventFormationUnit.start(
-                style=Fore.BLUE, broker=broker, **x.to_dict()
+                style=Fore.BLUE,
+                broker=broker,
+                triage=Triage(ignore=["graphite", ":2003 failed"]),
+                **x.to_dict()
             ) for x in efu) + (
             Forwarder.start(
                 name='FWD',
@@ -309,6 +313,9 @@ def load_in_wait_load_out(
                 verbosity=writer_verbosity(verbosity_writer),
             ),
         )
+        longest_name = max(len(thing.name) for thing in things)
+        for thing in things:
+            thing.name_padding = longest_name - len(thing.name)
     else:
         things = ()

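The padding assignment above only serves to align the printed prefixes across managed processes; the same arithmetic in isolation (the process names are invented):

names = ['EFU', 'FWD', 'WRITER']

# Pad each name so every prefix occupies the width of the longest one,
# mirroring the name_padding assignment above.
longest_name = max(len(name) for name in names)
for name in names:
    padding = ' ' * (longest_name - len(name))
    print(f'{name}:{padding} status message')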
src/mccode_plumber/splitrun.py
@@ -17,11 +17,15 @@ def make_parser():


 def monitors_to_kafka_callback_with_arguments(
-        broker: str, topic: str | None, source: str | None, names: list[str] | None
+        broker: str, topic: str | None, source: str | None, names: list[str] | None,
+        delete_after_sending: bool = True,
 ):
     from mccode_to_kafka.sender import send_histograms

-    partial_kwargs: dict[str, Union[str,list[str]]] = {'broker': broker}
+    partial_kwargs: dict[str, Union[str,list[str]]] = {
+        'broker': broker,
+        'delete': delete_after_sending,
+    }
     if topic is not None and source is not None and names is not None and len(names) > 1:
         raise ValueError("Cannot specify both topic/source and multiple names simultaneously.")

@@ -41,7 +45,15 @@ def monitors_to_kafka_callback_with_arguments(
 def main():
     from .mccode import get_mcstas_instr
     from restage.splitrun import splitrun_args, parse_splitrun
-    args, parameters, precision = parse_splitrun(make_parser())
+    parser = make_parser()
+    parser.add_argument('--keep-after-send', action='store_true', help='Keep after sending histograms', default=False)
+    args, parameters, precision = parse_splitrun(parser)
     instr = get_mcstas_instr(args.instrument)
-    callback, callback_args = monitors_to_kafka_callback_with_arguments(args.broker, args.topic, args.source, args.names)
+    callback, callback_args = monitors_to_kafka_callback_with_arguments(
+        broker=args.broker,
+        topic=args.topic,
+        source=args.source,
+        names=args.names,
+        delete_after_sending=not args.keep_after_send
+    )
     return splitrun_args(instr, parameters, precision, args, callback=callback, callback_arguments=callback_args)
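In use, passing --keep-after-send on the splitrun command line is inverted into delete_after_sending=False, which becomes the 'delete' entry carried in partial_kwargs for send_histograms. A small argparse sketch of that inversion (the parser here is a stand-in, not the real make_parser()):

import argparse

# Stand-in parser; only the flag relevant to this change is declared.
parser = argparse.ArgumentParser()
parser.add_argument('--keep-after-send', action='store_true', default=False,
                    help='Keep after sending histograms')

args = parser.parse_args(['--keep-after-send'])
delete_after_sending = not args.keep_after_send

# This is the value that ends up as partial_kwargs['delete'] above.
print(delete_after_sending)  # False: histogram files are kept after sending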
src/mccode_plumber/writer.py
@@ -311,8 +311,16 @@ def construct_writer_pv_dicts(instr: Path | str, prefix: str, topic: str):


 def construct_writer_pv_dicts_from_parameters(parameters, prefix: str, topic: str):
+    from mccode_antlr.common import DataType
+
     def strip_quotes(s):
         return s[1:-1] if s is not None and len(s) > 2 and (s[0] == s[-1] == '"' or s[0] == s[-1] == "'") else s
+
+    # Remove string-valued parameters from the provided list since they are not
+    # supported by streaming-data-type f144. In the future we could specify a different
+    # module for them instead.
+    parameters = [p for p in parameters if p.value.data_type is not DataType.str]
+
     return [dict(name=p.name, dtype=p.value.data_type.name, source=f'{prefix}{p.name}', topic=topic,
                  description=parameter_description(p), module='f144', unit=strip_quotes(p.unit)) for p in parameters]

src/mccode_plumber.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mccode-plumber
-Version: 0.15.1
+Version: 0.17.0
 Author-email: Gregory Tucker <gregory.tucker@ess.eu>
 Classifier: License :: OSI Approved :: BSD License
 Classifier: Programming Language :: Python :: 3
@@ -13,8 +13,8 @@ Requires-Dist: p4p
 Requires-Dist: kafka-python>=2.2.11
 Requires-Dist: ess-streaming-data-types>=0.14.0
 Requires-Dist: restage>=0.10.1
-Requires-Dist: mccode-to-kafka>=0.3.1
-Requires-Dist: moreniius>=0.6.3
+Requires-Dist: mccode-to-kafka>=0.5.0
+Requires-Dist: moreniius>=0.7.0
 Requires-Dist: icecream
 Requires-Dist: ephemeral-port-reserve
 Provides-Extra: test
src/mccode_plumber.egg-info/requires.txt
@@ -2,8 +2,8 @@ p4p
 kafka-python>=2.2.11
 ess-streaming-data-types>=0.14.0
 restage>=0.10.1
-mccode-to-kafka>=0.3.1
-moreniius>=0.6.3
+mccode-to-kafka>=0.5.0
+moreniius>=0.7.0
 icecream
 ephemeral-port-reserve

tests/test_splitrun.py
@@ -70,6 +70,61 @@ class SplitrunTestCase(unittest.TestCase):
         args = args_fixup(parser.parse_args(['--broker', 'l:9092', '--source', 'm', '-n', '10000', 'inst.h5', '--', 'a=1:4', 'b=2:5', 'c=1,2,3,4,5']))
         self.assertEqual(args.parameters, ['a=1:4', 'b=2:5', 'c=1,2,3,4,5'])

+    # New tests for the --keep-after-send flag and how it controls the 'delete' kwarg
+    def test_keep_after_send_defaults_to_false_and_delete_true(self):
+        # make parser match main() which adds this argument
+        parser = make_parser()
+        parser.add_argument('--keep-after-send', action='store_true', help='Keep after sending histograms', default=False)
+        args = args_fixup(parser.parse_args(['--broker', 'l:9092', '--source', 'm', '-n', '10', 'inst.h5', '--', 'a=1:4']))
+        # flag not passed, should be False
+        self.assertFalse(args.keep_after_send)
+
+        from mccode_plumber.splitrun import monitors_to_kafka_callback_with_arguments
+        callback, callback_args = monitors_to_kafka_callback_with_arguments(
+            broker=args.broker, topic=args.topic, source=args.source, names=args.names,
+            delete_after_sending=not args.keep_after_send
+        )
+
+        # inspect closure to find the dict with 'delete'
+        delete_value = None
+        for cell in (callback.__closure__ or ()):  # pragma: no branch - defensive
+            try:
+                val = cell.cell_contents
+            except ValueError:
+                continue
+            if isinstance(val, dict) and 'delete' in val:
+                delete_value = val['delete']
+                break
+
+        self.assertIsNotNone(delete_value)
+        self.assertTrue(delete_value)
+
+    def test_keep_after_send_passed_sets_delete_false(self):
+        parser = make_parser()
+        parser.add_argument('--keep-after-send', action='store_true', help='Keep after sending histograms', default=False)
+        args = args_fixup(parser.parse_args(['--keep-after-send', '--broker', 'l:9092', '--source', 'm', '-n', '10', 'inst.h5', '--', 'a=1:4']))
+        # flag passed, should be True
+        self.assertTrue(args.keep_after_send)
+
+        from mccode_plumber.splitrun import monitors_to_kafka_callback_with_arguments
+        callback, callback_args = monitors_to_kafka_callback_with_arguments(
+            broker=args.broker, topic=args.topic, source=args.source, names=args.names,
+            delete_after_sending=not args.keep_after_send
+        )
+
+        delete_value = None
+        for cell in (callback.__closure__ or ()):  # pragma: no branch - defensive
+            try:
+                val = cell.cell_contents
+            except ValueError:
+                continue
+            if isinstance(val, dict) and 'delete' in val:
+                delete_value = val['delete']
+                break
+
+        self.assertIsNotNone(delete_value)
+        self.assertFalse(delete_value)
+

 if __name__ == '__main__':
     unittest.main()
tests/test_writer.py
@@ -61,10 +61,13 @@ class WriterUnitsTestCase(unittest.TestCase):
     def test_parse(self):
         from mccode_plumber.writer import construct_writer_pv_dicts_from_parameters
         params = construct_writer_pv_dicts_from_parameters(self.instr.parameters, 'mcstas:', 'topic')
-        self.assertEqual(len(params), 4)
-        for p, x in zip(params, [('a', 'Hz'), ('b', 'm'), ('c', None), ('d', None)]):
+        # Only non-string valued parameters should be extracted since f144 only
+        # supports numeric-valued data
+        self.assertEqual(len(params), 3)
+        for p, x in zip(params, [('a', 'Hz'), ('b', 'm'), ('c', None)]):
             self.assertEqual(p['name'], x[0])
             self.assertEqual(p['unit'], x[1])
+            self.assertEqual(p['module'], 'f144')


 if __name__ == '__main__':
     unittest.main()