mccode-plumber 0.14.0.tar.gz → 0.14.1.tar.gz

This diff compares the content of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (49)
  1. mccode_plumber-0.14.1/.github/dependabot.yml +17 -0
  2. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/.github/workflows/pip.yml +2 -2
  3. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/.github/workflows/wheels.yml +4 -4
  4. {mccode_plumber-0.14.0/src/mccode_plumber.egg-info → mccode_plumber-0.14.1}/PKG-INFO +3 -3
  5. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/pyproject.toml +2 -2
  6. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/orchestrate.py +25 -19
  7. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/splitrun.py +16 -6
  8. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1/src/mccode_plumber.egg-info}/PKG-INFO +3 -3
  9. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber.egg-info/SOURCES.txt +2 -0
  10. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber.egg-info/requires.txt +2 -2
  11. mccode_plumber-0.14.1/tests/test_orchestration_utils.py +66 -0
  12. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/tests/test_writer.py +3 -3
  13. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/.gitignore +0 -0
  14. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/README.md +0 -0
  15. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/setup.cfg +0 -0
  16. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/__init__.py +0 -0
  17. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/conductor.py +0 -0
  18. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/epics.py +0 -0
  19. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/epics_watcher.py +0 -0
  20. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/CommandChannel.py +0 -0
  21. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/CommandHandler.py +0 -0
  22. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/CommandStatus.py +0 -0
  23. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/InThreadStatusTracker.py +0 -0
  24. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/JobHandler.py +0 -0
  25. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/JobStatus.py +0 -0
  26. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/KafkaTopicUrl.py +0 -0
  27. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/StateExtractor.py +0 -0
  28. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/WorkerFinder.py +0 -0
  29. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/WorkerJobPool.py +0 -0
  30. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/WorkerStatus.py +0 -0
  31. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/WriteJob.py +0 -0
  32. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/file_writer_control/__init__.py +0 -0
  33. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/forwarder.py +0 -0
  34. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/kafka.py +0 -0
  35. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/__init__.py +0 -0
  36. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/efu.py +0 -0
  37. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/ensure.py +0 -0
  38. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/epics.py +0 -0
  39. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/forwarder.py +0 -0
  40. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/manager.py +0 -0
  41. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/manage/writer.py +0 -0
  42. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/mccode.py +0 -0
  43. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/utils.py +0 -0
  44. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber/writer.py +0 -0
  45. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber.egg-info/dependency_links.txt +0 -0
  46. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber.egg-info/entry_points.txt +0 -0
  47. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/src/mccode_plumber.egg-info/top_level.txt +0 -0
  48. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/tests/test_epics.py +0 -0
  49. {mccode_plumber-0.14.0 → mccode_plumber-0.14.1}/tests/test_splitrun.py +0 -0
.github/dependabot.yml (new file)
@@ -0,0 +1,17 @@
+ # Please see the documentation for all configuration options:
+ # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+
+ version: 2
+ updates:
+   - package-ecosystem: "github-actions" # See documentation for possible values
+     directory: "/" # Location of package manifests
+     schedule:
+       interval: "weekly"
+   - package-ecosystem: "pip"
+     directory: "/"
+     schedule:
+       interval: "daily"
+     groups:
+       python-packages:
+         patterns:
+           - "*"
.github/workflows/pip.yml
@@ -18,9 +18,9 @@ jobs:
          python-version: ["3.9", "3.10", "3.11", "3.12"]

      steps:
-     - uses: actions/checkout@v4
+     - uses: actions/checkout@v6

-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}

.github/workflows/wheels.yml
@@ -15,7 +15,7 @@ jobs:
      name: Build SDist and Wheel
      runs-on: ubuntu-latest
      steps:
-     - uses: actions/checkout@v4
+     - uses: actions/checkout@v6
        with:
          fetch-depth: 0
          submodules: true
.github/workflows/wheels.yml
@@ -26,7 +26,7 @@ jobs:
      - name: Check metadata
        run: pipx run twine check dist/*

-     - uses: actions/upload-artifact@v4
+     - uses: actions/upload-artifact@v6
        with:
          path: dist/*

.github/workflows/wheels.yml
@@ -42,9 +42,9 @@ jobs:
      if: github.event_name == 'release' && github.event.action == 'published'

      steps:
-     - uses: actions/setup-python@v5
+     - uses: actions/setup-python@v6

-     - uses: actions/download-artifact@v4
+     - uses: actions/download-artifact@v7
        with:
          name: artifact
          path: dist
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mccode-plumber
- Version: 0.14.0
+ Version: 0.14.1
  Author-email: Gregory Tucker <gregory.tucker@ess.eu>
  Classifier: License :: OSI Approved :: BSD License
  Description-Content-Type: text/markdown
PKG-INFO
@@ -8,8 +8,8 @@ Requires-Dist: p4p
  Requires-Dist: kafka-python>=2.2.11
  Requires-Dist: ess-streaming-data-types>=0.14.0
  Requires-Dist: restage>=0.9.0
- Requires-Dist: mccode-to-kafka>=0.2.2
- Requires-Dist: moreniius>=0.6.0
+ Requires-Dist: mccode-to-kafka>=0.3.0
+ Requires-Dist: moreniius>=0.6.1
  Requires-Dist: icecream
  Requires-Dist: ephemeral-port-reserve

pyproject.toml
@@ -9,8 +9,8 @@ dependencies = [
      'kafka-python>=2.2.11',
      'ess-streaming-data-types>=0.14.0',
      'restage>=0.9.0',
-     'mccode-to-kafka>=0.2.2',
-     'moreniius>=0.6.0',
+     'mccode-to-kafka>=0.3.0',
+     'moreniius>=0.6.1',
      'icecream',
      'ephemeral-port-reserve',
  ]
src/mccode_plumber/manage/orchestrate.py
@@ -121,31 +121,31 @@ def start_writer(start_time: datetime,
      return job_id, success


- def get_topics_iter(data: list | tuple):
+ def get_stream_pairs_list(data: list | tuple):
      topics = set()
      for entry in data:
          if isinstance(entry, dict):
-             topics.update(get_topics_dict(entry))
+             topics.update(get_stream_pairs_dict(entry))
          elif isinstance(entry, (list, tuple)):
-             topics.update(get_topics_iter(entry))
+             topics.update(get_stream_pairs_list(entry))
      return topics


- def get_topics_dict(data: dict):
+ def get_stream_pairs_dict(data: dict):
      topics = set()
+     if all(k in data for k in ('topic', 'source')):
+         topics.add((data['topic'], data['source']))
      for k, v in data.items():
          if isinstance(v, dict):
-             topics.update(get_topics_dict(v))
+             topics.update(get_stream_pairs_dict(v))
          elif isinstance(v, (list, tuple)):
-             topics.update(get_topics_iter(list(v)))
-         elif k == 'topic':
-             topics.add(v)
+             topics.update(get_stream_pairs_list(list(v)))
      return topics


- def get_topics_json(data: dict) -> list[str]:
-     """Traverse a loaded JSON object and return the found list of topic names"""
-     return list(get_topics_dict(data))
+ def get_stream_pairs(data: dict) -> list[tuple[str, str]]:
+     """Traverse a loaded JSON object and return the found list of (topic, source) pairs."""
+     return list(get_stream_pairs_dict(data))


  def load_file_json(file: str | Path):
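Note: the hunk above replaces the topic-only helpers with (topic, source) pair collection. A minimal sketch of how the renamed helpers behave, assuming only the definitions shown in this hunk; the nested structure below is illustrative, not a real NeXus structure file:

    from mccode_plumber.manage.orchestrate import get_stream_pairs

    # Any dict carrying both 'topic' and 'source' keys contributes one pair;
    # nested dicts, lists and tuples are traversed recursively.
    structure = {
        'children': [
            {'module': 'hs01',
             'config': {'topic': 'bifrost_beam_monitor', 'source': 'psc_monitor'}},
            [{'topic': 'bifrost_beam_monitor', 'source': 'overlap_monitor'}],
        ]
    }
    assert set(get_stream_pairs(structure)) == {
        ('bifrost_beam_monitor', 'psc_monitor'),
        ('bifrost_beam_monitor', 'overlap_monitor'),
    }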
src/mccode_plumber/manage/orchestrate.py
@@ -360,16 +360,22 @@ def main():
      instr = get_mcstas_instr(args.instrument)

      structure = load_file_json(args.structure if args.structure else Path(args.instrument).with_suffix('.json'))
+
+     streams = get_stream_pairs(structure)
+     # All monitors should use a single topic:
+     monitor_topic = f'{instr.name}_beam_monitor'
+     if {monitor_topic} != {s[0] for s in streams}:
+         raise ValueError(f'All monitor streams must use the same topic {monitor_topic}, found {streams}')
+     monitor_names = [s[1] for s in streams]
+
      broker = 'localhost:9092'
-     monitor_source = 'mccode-to-kafka'
-     callback_topics = get_topics_json(structure)  # all structure-topics might be monitor topics?
-     if len(callback_topics):
-         print(f'register {callback_topics}')
-         register_topics(broker, callback_topics)  # ensure the topics are known to Kafka
-     else:
-         print('no callback topics registered')
+     # monitor_source = 'mccode-to-kafka'  # old-style single source multi-topic
+     register_topics(broker, [monitor_topic])  # ensure the topics are known to Kafka

-     callback, callback_args = monitors_to_kafka_callback_with_arguments(broker, monitor_source, callback_topics)
+     # Configure the callback to send monitor data to Kafka, using the common topic with source names as monitor names
+     callback, callback_args = monitors_to_kafka_callback_with_arguments(
+         broker=broker, topic=monitor_topic, source=None, names=monitor_names
+     )
      splitrun_kwargs = {
          'args': args, 'parameters': parameters, 'precision': precision,
          'callback': callback, 'callback_arguments': callback_args,
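Note: the single-topic check above ties the monitor topic to the instrument name. A hypothetical worked example for an instrument named 'bifrost', with values mirroring the assertions in tests/test_orchestration_utils.py:

    # Illustrative stream pairs as returned by get_stream_pairs(structure)
    streams = [('bifrost_beam_monitor', 'psc_monitor'),
               ('bifrost_beam_monitor', 'overlap_monitor')]
    monitor_topic = 'bifrost_beam_monitor'                       # f'{instr.name}_beam_monitor'
    assert {monitor_topic} == {topic for topic, _ in streams}    # passes, so no ValueError
    monitor_names = [source for _, source in streams]            # ['psc_monitor', 'overlap_monitor']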
src/mccode_plumber/splitrun.py
@@ -5,17 +5,27 @@ def make_parser():
      parser.prog = 'mp-splitrun'
      parser.add_argument('--broker', type=str, help='The Kafka broker to send monitors to', default=None)
      parser.add_argument('--source', type=str, help='The Kafka source name to use for monitors', default=None)
-     parser.add_argument('--topic', type=str, help='The Kafka topic name(s) to use for monitors', default=None, action='append')
+     parser.add_argument('--topic', type=str, help='The Kafka topic name to use for monitors', default=None)
+     parser.add_argument('--names', type=str, help='The monitor name(s) to send to Kafka', default=None, action='append')
      parser.add_argument('-v', '--version', action='version', version=__version__)
      return parser


- def monitors_to_kafka_callback_with_arguments(broker: str, source: str, topics: list[str]):
+ def monitors_to_kafka_callback_with_arguments(
+         broker: str, topic: str | None, source: str | None, names: list[str] | None
+ ):
      from mccode_to_kafka.sender import send_histograms

-     partial_kwargs = {'broker': broker, 'source': source}
-     if topics is not None and len(topics) > 0:
-         partial_kwargs['names'] = topics
+     partial_kwargs = {'broker': broker}
+     if topic is not None and source is not None and names is not None and len(names) > 1:
+         raise ValueError("Cannot specify both topic/source and multiple names simultaneously.")
+
+     if topic is not None:
+         partial_kwargs['topic'] = topic
+     if source is not None:
+         partial_kwargs['source'] = source
+     if names is not None and len(names) > 0:
+         partial_kwargs['names'] = names

      def callback(*args, **kwargs):
          return send_histograms(*args, **partial_kwargs, **kwargs)
src/mccode_plumber/splitrun.py
@@ -28,5 +38,5 @@ def main():
      from restage.splitrun import splitrun_args, parse_splitrun
      args, parameters, precision = parse_splitrun(make_parser())
      instr = get_mcstas_instr(args.instrument)
-     callback, callback_args = monitors_to_kafka_callback_with_arguments(args.broker, args.source, args.topic)
+     callback, callback_args = monitors_to_kafka_callback_with_arguments(args.broker, args.topic, args.source, args.names)
      return splitrun_args(instr, parameters, precision, args, callback=callback, callback_arguments=callback_args)
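Note: with the reworked parser, --topic now takes a single topic and --names may be repeated (argparse action='append') to name the monitors forwarded to Kafka. A hedged usage sketch; the instrument file, broker address and monitor names are illustrative, and any remaining arguments follow restage's splitrun conventions:

    mp-splitrun bifrost.instr --broker localhost:9092 \
        --topic bifrost_beam_monitor \
        --names psc_monitor --names overlap_monitor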
src/mccode_plumber.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mccode-plumber
- Version: 0.14.0
+ Version: 0.14.1
  Author-email: Gregory Tucker <gregory.tucker@ess.eu>
  Classifier: License :: OSI Approved :: BSD License
  Description-Content-Type: text/markdown
src/mccode_plumber.egg-info/PKG-INFO
@@ -8,8 +8,8 @@ Requires-Dist: p4p
  Requires-Dist: kafka-python>=2.2.11
  Requires-Dist: ess-streaming-data-types>=0.14.0
  Requires-Dist: restage>=0.9.0
- Requires-Dist: mccode-to-kafka>=0.2.2
- Requires-Dist: moreniius>=0.6.0
+ Requires-Dist: mccode-to-kafka>=0.3.0
+ Requires-Dist: moreniius>=0.6.1
  Requires-Dist: icecream
  Requires-Dist: ephemeral-port-reserve

src/mccode_plumber.egg-info/SOURCES.txt
@@ -1,6 +1,7 @@
  .gitignore
  README.md
  pyproject.toml
+ .github/dependabot.yml
  .github/workflows/pip.yml
  .github/workflows/wheels.yml
  src/mccode_plumber/__init__.py
src/mccode_plumber.egg-info/SOURCES.txt
@@ -41,5 +42,6 @@ src/mccode_plumber/manage/manager.py
  src/mccode_plumber/manage/orchestrate.py
  src/mccode_plumber/manage/writer.py
  tests/test_epics.py
+ tests/test_orchestration_utils.py
  tests/test_splitrun.py
  tests/test_writer.py
src/mccode_plumber.egg-info/requires.txt
@@ -2,7 +2,7 @@ p4p
  kafka-python>=2.2.11
  ess-streaming-data-types>=0.14.0
  restage>=0.9.0
- mccode-to-kafka>=0.2.2
- moreniius>=0.6.0
+ mccode-to-kafka>=0.3.0
+ moreniius>=0.6.1
  icecream
  ephemeral-port-reserve
tests/test_orchestration_utils.py (new file)
@@ -0,0 +1,66 @@
+ from mccode_antlr.instr import Instr
+
+
+ def get_registries():
+     from mccode_antlr.reader import GitHubRegistry
+
+     registries = ['mcstas-chopper-lib', 'mcstas-detector-tubes', 'mcstas-frame-tof-monitor', 'mccode-mcpl-filter',]
+     registries = [GitHubRegistry(
+         name,
+         url=f'https://github.com/mcdotstar/{name}',
+         filename='pooch-registry.txt',
+         version='main'
+     ) for name in registries]
+
+     return registries
+
+
+ def instr_to_nexus_structure_json(instrument: Instr):
+     from tempfile import TemporaryDirectory
+     from pathlib import Path
+     from json import load
+     import moreniius
+     import moreniius.additions
+
+     moreniius.additions.BIFROST_DETECTOR_TOPIC = 'SimulatedEvents'
+
+     nx = moreniius.MorEniius.from_mccode(
+         instrument,
+         origin='sample_origin',
+         only_nx=False,
+         absolute_depends_on=False,
+     )
+
+     with TemporaryDirectory() as tmpdir:
+         json_file = Path(tmpdir) / f'{instrument.name}.json'
+         nx.to_json(json_file.as_posix())
+         with open(json_file, 'r') as f:
+             return load(f)
+
+
+ def test_monitor_streams():
+     from mccode_antlr import Flavor
+     from mccode_plumber.manage.orchestrate import get_stream_pairs
+     from mccode_antlr.assembler import Assembler
+     from niess.bifrost import Primary
+     from scipp import scalar
+
+     assembler = Assembler("bifrost", flavor=Flavor.MCSTAS, registries=get_registries())
+
+     primary = Primary.from_calibration()
+     primary.source.n_pulses = 1
+     primary.source.accelerator_power = scalar(2.0, unit='MW')
+
+     primary.to_mccode(assembler)
+
+     instr = instr_to_nexus_structure_json(assembler.instrument)
+     streams = get_stream_pairs(instr)
+     topics = set(t for t, _ in streams)
+     sources = set(n for _, n in streams)
+
+     assert topics == {'bifrost_beam_monitor'}
+     assert sources == {f'{x}_monitor' for x in ['psc', 'overlap', 'bandwidth', 'normalization']}
+
+
+ if __name__ == '__main__':
+     test_monitor_streams()
tests/test_writer.py
@@ -29,9 +29,9 @@ class WriterTestCase(unittest.TestCase):
      self.assertEqual(len(struct['children']), 1)
      self.assertEqual(struct['children'][0]['name'], 'entry')
      self.assertEqual(struct['children'][0]['children'][0]['name'], 'instrument')
-     self.assertEqual(struct['children'][0]['children'][0]['children'][1]['name'], '0_origin')
-     self.assertEqual(struct['children'][0]['children'][0]['children'][2]['name'], '1_source')
-     self.assertEqual(struct['children'][0]['children'][0]['children'][3]['name'], '2_monitor')
+     self.assertEqual(struct['children'][0]['children'][0]['children'][1]['name'], 'origin')
+     self.assertEqual(struct['children'][0]['children'][0]['children'][2]['name'], 'source')
+     self.assertEqual(struct['children'][0]['children'][0]['children'][3]['name'], 'monitor')
      mon = struct['children'][0]['children'][0]['children'][3]
      self.assertEqual(len(mon['children']), 4)  # removed 'mccode' property 5->4
      idx = [i for i, ch in enumerate(mon['children']) if 'name' in ch and 'data' == ch['name']]