pytrms 0.9.0__py3-none-any.whl → 0.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pytrms/__init__.py CHANGED
@@ -1,4 +1,4 @@
- _version = '0.9.0'
+ _version = '0.9.2'

  __all__ = ['load', 'connect']

@@ -6,14 +6,16 @@ __all__ = ['load', 'connect']
  def load(path):
  '''Open a datafile for post-analysis or batch processing.

- returns a `Measurement`.
+ `path` may be a glob-expression to collect a whole batch.
+
+ returns a `Measurement` instance.
  '''
- from .measurement import OfflineMeasurement
- from .readers import IoniTOFReader
+ import glob
+ from .measurement import FinishedMeasurement

- reader = IoniTOFReader(path)
+ files = glob.glob(path)

- return OfflineMeasurement(reader)
+ return FinishedMeasurement(*files)

  def connect(host=None, method='webapi'):
  '''Connect a client to a running measurement server.
@@ -23,7 +25,6 @@ def connect(host=None, method='webapi'):
  returns an `Instrument` if connected successfully.
  '''
  from .instrument import Instrument
- from .helpers import PTRConnectionError

  if method.lower() == 'webapi':
  from .clients.ioniclient import IoniClient
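
The new `load()` expands a glob-expression before constructing the measurement, so a whole batch of files can be opened in one call. A minimal usage sketch (the file pattern is an illustrative assumption; `load`, `connect` and their signatures are taken from the diff above):

    import pytrms

    # a single path still works, but a glob-expression now collects a whole batch:
    meas = pytrms.load('C:/IoniTOF/Data/2024-06-*.h5')   # hypothetical path

    # connect to a running measurement server via the WebAPI (the default method):
    instrument = pytrms.connect('localhost', method='webapi')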
@@ -6,9 +6,8 @@ from collections import deque
  from itertools import cycle
  from threading import Condition, RLock
  from datetime import datetime as dt
- from abc import ABC, abstractmethod

- import paho.mqtt.client as mqtt
+ import paho.mqtt.client

  from .ioniclient import IoniClientBase

@@ -39,7 +38,6 @@ def _on_publish(client, self, mid):
  class MqttClientBase(IoniClientBase):

  @property
- @abstractmethod
  def is_connected(self):
  '''Returns `True` if connected to the server.

@@ -49,12 +47,27 @@ class MqttClientBase(IoniClientBase):
  return (True
  and self.client.is_connected())

- def __init__(self, host, subscriber_functions,
+ def __init__(self, host, port, subscriber_functions,
  on_connect, on_subscribe, on_publish,
  connect_timeout_s=10):
- super().__init__(host, port=1883)
- # configure connection...
- self.client = mqtt.Client(clean_session=True)
+ assert len(subscriber_functions) > 0, "no subscribers: for some unknown reason this causes disconnects"
+ super().__init__(host, port)
+
+ # Note: Version 2.0 of paho-mqtt introduced versioning of the user-callback to fix
+ # some inconsistency in callback arguments and to provide better support for MQTTv5.
+ # VERSION1 of the callback is deprecated, but is still supported in version 2.x.
+ # If you want to upgrade to the newer version of the API callback, you will need
+ # to update your callbacks:
+ paho_version = int(paho.mqtt.__version__.split('.')[0])
+ if paho_version == 1:
+ self.client = paho.mqtt.client.Client(clean_session=True)
+ elif paho_version == 2:
+ self.client = paho.mqtt.client.Client(paho.mqtt.client.CallbackAPIVersion.VERSION1,
+ clean_session=True)
+ else:
+ # see https://eclipse.dev/paho/files/paho.mqtt.python/html/migrations.html
+ raise NotImplementedError("API VERSION2 for MQTTv5 (use paho-mqtt 2.x or implement user callbacks)")
+
  # clean_session is a boolean that determines the client type. If True,
  # the broker will remove all information about this client when it
  # disconnects. If False, the client is a persistent client and
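
The same version switch can be reused wherever a paho-mqtt `Client` is constructed. A standalone sketch of the compatibility shim introduced above (a minimal sketch, assuming the old VERSION1 callback signatures are kept, as the comment in the diff explains; the helper name is hypothetical):

    import paho.mqtt
    import paho.mqtt.client

    def make_client(**kwargs):
        '''Create a paho-mqtt Client that works with both the 1.x and 2.x API.'''
        major = int(paho.mqtt.__version__.split('.')[0])
        if major >= 2:
            # 2.x requires the callback-API version as the first argument;
            # VERSION1 keeps the deprecated, but still supported, callback signatures:
            return paho.mqtt.client.Client(
                paho.mqtt.client.CallbackAPIVersion.VERSION1, **kwargs)
        return paho.mqtt.client.Client(**kwargs)

    client = make_client(clean_session=True)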
@@ -7,7 +7,7 @@ assert os.path.exists(_par_id_file), "par-id file not found: please re-install P

  import logging as _logging

- _logging.TRACE = 0 # overwrites logging.NOTSET
+ _logging.TRACE = 5 # even more verbose than logging.DEBUG

  def enable_extended_logging(log_level=_logging.DEBUG):
  '''make output of http-requests more talkative.
@@ -22,7 +22,7 @@ def enable_extended_logging(log_level=_logging.DEBUG):
  requests_log.setLevel(log_level)
  requests_log.propagate = True

- if log_level == _logging.TRACE:
+ if log_level <= _logging.TRACE:
  # Enabling debugging at http.client level (requests->urllib3->http.client)
  # you will see the REQUEST, including HEADERS and DATA, and RESPONSE with
  # HEADERS but without DATA. the only thing missing will be the response.body,
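
With `TRACE = 5` the custom level now sorts below `DEBUG` (10) instead of clashing with `NOTSET` (0), and the `<=` comparison enables the very verbose `http.client` output only when explicitly requested. A hedged usage sketch (the import path is an assumption based on the surrounding files; only `enable_extended_logging` itself appears in the diff):

    import logging
    from pytrms import clients   # assumed location of enable_extended_logging()

    logging.addLevelName(5, 'TRACE')               # optional, purely cosmetic
    clients.enable_extended_logging()              # DEBUG: requests/urllib3 logging only
    clients.enable_extended_logging(log_level=5)   # TRACE: also dump http.client headers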
pytrms/clients/db_api.py CHANGED
@@ -4,6 +4,7 @@ import json
  import requests

  from . import _logging
+ from .ssevent import SSEventListener
  from .._base import IoniClientBase

  log = _logging.getLogger(__name__)
@@ -82,7 +83,7 @@ class IoniConnect(IoniClientBase):
  if not endpoint.startswith('/'):
  endpoint = '/' + endpoint
  if not isinstance(data, str):
- data = json.dumps(data)
+ data = json.dumps(data, ensure_ascii=False) # default is `True`, escapes Umlaute!
  if 'headers' not in kwargs:
  kwargs['headers'] = {'content-type': 'application/hal+json'}
  elif 'content-type' not in (k.lower() for k in kwargs['headers']):
@@ -94,93 +95,89 @@ class IoniConnect(IoniClientBase):

  return r

- def iter_events(self):
- """Follow the server-sent-events (SSE) on the DB-API."""
- r = self.session.request('GET', self.url + "/api/events",
- headers={'accept': 'text/event-stream'}, stream=True)
- r.raise_for_status()
- kv_pair = dict()
- for line in r.iter_lines():
- # empty newlines serve as keep-alive and end-of-entry:
- if not line:
- if kv_pair:
- yield kv_pair
- kv_pair = dict()
- else:
- log.debug("sse: still kept alive...")
+ def sync(self, peaktable):
+ """Compare and upload any differences in `peaktable` to the database."""
+ from pytrms.peaktable import Peak, PeakTable
+ from operator import attrgetter
+
+ # Note: a `Peak` is a hashable object that serves as a key that
+ # distinguishes between peaks as defined by PyTRMS:
+ make_key = lambda peak: Peak(center=peak['center'], label=peak['name'], shift=peak['shift'])
+
+ if isinstance(peaktable, str):
+ log.info(f"loading peaktable '{peaktable}'...")
+ peaktable = PeakTable.from_file(peaktable)
+
+ # get the PyTRMS- and IoniConnect-peaks on the same page:
+ conv = {
+ 'name': attrgetter('label'),
+ 'center': attrgetter('center'),
+ 'kRate': attrgetter('k_rate'),
+ 'low': lambda p: p.borders[0],
+ 'high': lambda p: p.borders[1],
+ 'shift': attrgetter('shift'),
+ 'multiplier': attrgetter('multiplier'),
+ }
+ # normalize the input argument and create a hashable set:
+ updates = dict()
+ for peak in peaktable:
+ payload = {k: conv[k](peak) for k in conv}
+ updates[make_key(payload)] = {'payload': payload}
+
+ log.info(f"fetching current peaktable from the server...")
+ # create a comparable collection of peaks already on the database by
+ # reducing the keys in the response to what we actually want to update:
+ db_peaks = {make_key(p): {
+ 'payload': {k: p[k] for k in conv.keys()},
+ 'self': p['_links']['self'],
+ 'parent': p['_links'].get('parent'),
+ } for p in self.get('/api/peaks')['_embedded']['peaks']}
+
+ to_update = updates.keys() & db_peaks.keys()
+ to_upload = updates.keys() - db_peaks.keys()
+ updated = 0
+ for key in sorted(to_update):
+ # check if an existing peak needs an update
+ if db_peaks[key]['payload'] == updates[key]['payload']:
+ # nothing to do..
+ log.debug(f"up-to-date: {key}")
  continue

- key, val = line.decode().split(':')
- kv_pair[key] = val.strip()
-
- def refresh_comp_dict(self):
- j = self.get('/api/components')
- self.comp_dict = {component["shortName"]: component
- for component in j["_embedded"]["components"]}
-
- def get_component(self, short_name):
- if not len(self.comp_dict):
- self.refresh_comp_dict()
-
- return self.comp_dict[short_name]
-
- def create_component(self, short_name):
- payload = {
- "shortName": short_name
+ self.put(db_peaks[key]['self']['href'], updates[key]['payload'])
+ log.info(f"updated: {key}")
+ updated += 1
+
+ if len(to_upload):
+ # Note: POSTing the embedded-collection is *miles faster*
+ # than doing separate requests for each peak!
+ payload = {'_embedded': {'peaks': [updates[key]['payload'] for key in sorted(to_upload)]}}
+ self.post('/api/peaks', payload)
+ for key in sorted(to_upload): log.info(f"added new: {key}")
+
+ # Note: this disregards the peak-parent-relationship, but in
+ # order to implement this correctly, one would need to check
+ # if the parent-peak with a specific 'parentID' is already
+ # uploaded and search it.. there's an endpoint
+ # 'LINK /api/peaks/{parentID} Location: /api/peaks/{childID}'
+ # to link a child to its parent, but it remains complicated.
+ # TODO :: maybe later implement parent-peaks!?
+
+ return {
+ 'added': len(to_upload),
+ 'updated': updated,
+ 'up-to-date': len(to_update) - updated,
  }
- self.post('/api/components', payload)
- self.refresh_comp_dict()
-
- def create_average(self, endpoint, run, step, action=0, use_mean=True):

- params = {'run': int(run), 'step': int(step), 'usemean': bool(use_mean)}
- if (action != 0):
- params['action'] = int(action)
+ def iter_events(self, event_re=r".*"):
+ """Follow the server-sent-events (SSE) on the DB-API.

- timecycles = self.get(endpoint, params)
- self.current_avg_endpoint = self.post('/api/averages', timecycles)
+ `event_re` a regular expression to filter events (default: matches everything)

- def save_component_values(self, new_values):
+ Note: This will block until a matching event is received.
+ Especially, it cannot be cancelled by KeyboardInterrupt (due to the `requests`
+ stream-implementation), unless the server sends a keep-alive at regular
+ intervals (as every well-behaved server should be doing)!
  """
- Post Components to the database.
-
- `new_values` dictionary {name~>value}
- """
- if self.current_avg_endpoint is None:
- raise Exception("create average first")
-
- payload = {
- "quantities": [
- {
- "componentID": self.get_component(name)["componentID"],
- "value": value
- } for name, value in new_values.items()
- ]
- }
- endpoint = self.current_avg_endpoint + '/component_traces'
- self.put(endpoint, payload)
-
- def save_instrument_values(self, new_values):
- """
- Post Parameters to the database.
-
- `new_values` dictionary {name~>value}
- """
- # 13.07.: SCHNELL, SCHNELL (es ist 17 Uhr 57 und ich will die Modbus-instrument
- # daten noch hochladen):
- # this expects a namedtuple as defined in Modbus client: .set, .act, .par_id
- if self.current_avg_endpoint is None:
- raise Exception("create average first")
-
- payload = {
- "quantities": [
- {
- "parameterID": item.par_id,
- "setValue": item.set,
- "actMean": item.act
- } for name, item in new_values.items()
- ]
- }
- endpoint = self.current_avg_endpoint + '/parameter_traces' # on the DB it's called parameter... :\
- self.put(endpoint, payload)
+ yield from SSEventListener(event_re, host_url=self.url, endpoint="/api/events",
+ session=self.session)

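
Taken together, the new `sync()` and the re-implemented `iter_events()` let a client push a peaktable and then watch the server's event stream. A usage sketch (constructor arguments, file name and event pattern are illustrative assumptions; `IoniConnect`, `sync` and `iter_events` are from the diff above):

    from pytrms.clients.db_api import IoniConnect

    api = IoniConnect('localhost')            # assumed IoniClientBase-style arguments
    report = api.sync('my_peaks.ionipt')      # hypothetical peaktable file
    print(report)                             # e.g. {'added': 3, 'updated': 1, 'up-to-date': 40}

    # block until the next matching server-sent event arrives:
    for event in api.iter_events(r"peak"):
        print(event)
        break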
@@ -18,17 +18,17 @@ class IoniClient:
  Access the Ionicon WebAPI.

  Usage:
- >>> client = IoniClient()
- >>> client.get('TPS_Pull_H')
+ > client = IoniClient()
+ > client.get('TPS_Pull_H')
  {'TPS_Pull_H': 123.45, ... }

- >>> client.set('TPS_Pull_H', 42)
+ > client.set('TPS_Pull_H', 42)
  {'TPS_Pull_H': 42.0, ... }

- >>> client.start_measurement()
+ > client.start_measurement()
  ACK

- >>> client.host, client.port
+ > client.host, client.port
  ('localhost', 8002)

  '''
@@ -85,19 +85,3 @@ class IoniClient:

  def stop_measurement(self):
  return self.set('ACQ_SRV_Stop_Meas', 1)
-
-
- if __name__ == '__main__':
- import sys
- client = IoniClient()
-
- if len(sys.argv) == 2:
- print(client.get(sys.argv[1]))
- elif len(sys.argv) == 3:
- print(client.set(sys.argv[1], sys.argv[2]))
- else:
- print(f"""\
- usage:
- python {sys.argv[0]} <varname> [<value>]
- """)
-
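
With the `__main__` block removed, the module can no longer be run directly to get or set a single parameter from the shell. A minimal replacement script (the file name is arbitrary; `IoniClient` and its `get`/`set` methods are as shown in the docstring above):

    # get_set.py -- hypothetical stand-in for the removed __main__ block
    import sys
    from pytrms.clients.ioniclient import IoniClient

    client = IoniClient()
    if len(sys.argv) == 2:
        print(client.get(sys.argv[1]))
    elif len(sys.argv) == 3:
        print(client.set(sys.argv[1], sys.argv[2]))
    else:
        print(f"usage: python {sys.argv[0]} <varname> [<value>]")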
pytrms/clients/modbus.py CHANGED
@@ -9,7 +9,7 @@ from collections import namedtuple
  from functools import lru_cache
  from itertools import tee

- from pyModbusTCP import client
+ import pyModbusTCP.client

  from . import _par_id_file
  from .._base.ioniclient import IoniClientBase
@@ -19,9 +19,28 @@ log = logging.getLogger(__name__)
  __all__ = ['IoniconModbus']


+ def _patch_is_open():
+ # Note: the .is_open and .timeout attributes were changed
+ # from a function to a property!
+ #
+ # 0.2.0 2022-06-05
+ #
+ # - ModbusClient: parameters are now properties instead of methods (more intuitive).
+ #
+ # from the [changelog](https://github.com/sourceperl/pyModbusTCP/blob/master/CHANGES):
+ major, minor, patch = pyModbusTCP.__version__.split('.')
+ if int(minor) < 2:
+ return lambda mc: mc.is_open()
+ else:
+ return lambda mc: mc.is_open
+
+ _is_open = _patch_is_open()
+
  with open(_par_id_file) as f:
  it = iter(f)
- assert next(it).startswith('ID\tName'), "Modbus parameter file is corrupt: " + f.name
+ assert next(it).startswith('ID\tName'), ("Modbus parameter file is corrupt: "
+ + f.name
+ + "\n\ntry re-installing the PyTRMS python package to fix it!")
  _id_to_descr = {int(id_): name for id_, name, *_ in (line.strip().split('\t') for line in it)}

  # look-up-table for c_structs (see docstring of struct-module for more info).
@@ -59,10 +78,10 @@ def _unpack(registers, format='>f'):
  representation, respectively.

  >>> _unpack([17448, 0], 'float')
- 672.
+ 672.0

  >>> _unpack([17446, 32768], 'float')
- 666.
+ 666.0

  >>> _unpack([16875, 61191, 54426, 37896], 'double')
  3749199524.83057
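
For reference, the corrected doctest values can be reproduced with the `struct` module directly: two 16-bit Modbus registers are concatenated big-endian and reinterpreted as an IEEE-754 float (a minimal sketch, independent of the `_unpack` helper):

    import struct

    # pack the two 16-bit registers big-endian, then reinterpret the bytes as a float32:
    raw = struct.pack('>HH', 17448, 0)        # b'\x44\x28\x00\x00'
    value, = struct.unpack('>f', raw)
    print(value)                              # 672.0

    raw = struct.pack('>HH', 17446, 32768)    # b'\x44\x26\x80\x00'
    value, = struct.unpack('>f', raw)
    print(value)                              # 666.0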
@@ -132,7 +151,7 @@ class IoniconModbus(IoniClientBase):

  @property
  def is_connected(self):
- if not self.mc.is_open:
+ if not _is_open(self.mc):
  return False

  # wait for the IoniTOF alive-counter to change (1 second max)...
@@ -175,7 +194,18 @@ class IoniconModbus(IoniClientBase):

  def __init__(self, host='localhost', port=502):
  super().__init__(host, port)
- self.mc = client.ModbusClient(host=self.host, port=self.port)
+ # Note: we patch the behaviour such, that it behaves like pre-0.2
+ # (from the time of development of this module), BUT we skip the
+ # auto_close-feature for the sake of speed:
+ #
+ # 0.2.0 2022-06-05
+ #
+ # - ModbusClient: now TCP auto open mode is active by default (auto_open=True, auto_close=False).
+ #
+ # from the [changelog](https://github.com/sourceperl/pyModbusTCP/blob/master/CHANGES)
+ self.mc = pyModbusTCP.client.ModbusClient(host=self.host, port=self.port,
+ auto_open = False, auto_close = False
+ )
  # try connect immediately:
  try:
  self.connect()
@@ -185,8 +215,11 @@ class IoniconModbus(IoniClientBase):

  def connect(self, timeout_s=10):
  log.info(f"[{self}] connecting to Modbus server...")
- self.mc.timeout = timeout_s
- self.mc.auto_open = True
+ # Note: .timeout-attribute changed to a property with 0.2.0 (see comments above)
+ if callable(self.mc.timeout):
+ self.mc.timeout(timeout_s)
+ else:
+ self.mc.timeout = timeout_s
  if not self.mc.open():
  raise TimeoutError(f"[{self}] no connection to modbus socket")

@@ -201,7 +234,7 @@ class IoniconModbus(IoniClientBase):
  raise TimeoutError(f"[{self}] no connection to IoniTOF");

  def disconnect(self):
- if self.mc.is_open:
+ if _is_open(self.mc):
  self.mc.close()

  @property
@@ -452,9 +485,9 @@ class IoniconModbus(IoniClientBase):
  _read = self.mc.read_holding_registers if is_holding_register else self.mc.read_input_registers

  register = _read(addr, n_bytes)
- if register is None and self.mc.is_open:
+ if register is None and _is_open(self.mc):
  raise IOError(f"unable to read ({n_bytes}) registers at [{addr}] from connection")
- elif register is None and not self.mc.is_open:
+ elif register is None and not _is_open(self.mc):
  raise IOError("trying to read from closed Modbus-connection")

  return _unpack(register, c_format)
pytrms/clients/mqtt.py CHANGED
@@ -325,9 +325,14 @@ def follow_state(client, self, msg):
  log.debug(f"[{self}] new server-state: " + str(state))
  # replace the current state with the new element:
  self._server_state.append(state)
- if state == "ACQ_Aquire": # yes, there's a typo, plz keep it :)
- self._calcconzinfo.append(_NOT_INIT) # invalidate
- # Note: this signals to the relevant thread that we need an update
+ meas_running = (state == "ACQ_Aquire") # yes, there's a typo, plz keep it :)
+ just_started = (meas_running and not msg.retain)
+ if meas_running:
+ # signal the relevant thread(s) that we need an update:
+ self._calcconzinfo.append(_NOT_INIT)
+ if just_started:
+ # invalidate the source-file until we get a new one:
+ self._sf_filename.append(_NOT_INIT)

  follow_state.topics = ["DataCollection/Act/ACQ_SRV_CurrentState"]

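
The `msg.retain` check relies on standard MQTT semantics: on subscribe, the broker replays the last message of a retained topic with the retain flag set, whereas a live publication arrives with the flag cleared, so `meas_running and not msg.retain` singles out a measurement that started just now rather than one that was already running when the client connected. A minimal sketch of the same pattern in a plain paho-mqtt callback (the topic is taken from `follow_state.topics` above; everything else is illustrative):

    def on_message(client, userdata, msg):
        # msg.topic == "DataCollection/Act/ACQ_SRV_CurrentState"
        if msg.retain:
            # replayed by the broker on subscribe: last known state, not a fresh transition
            print('retained state:', msg.payload)
        else:
            # published while we are connected: the state changed just now
            print('state just changed:', msg.payload)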
@@ -356,6 +361,10 @@ def follow_act_set_values(client, self, msg):
  # Note: this topic doesn't strictly follow the convention and is handled separately
  return

+ if server == "Sequencer":
+ # Note: this is a separate program and will be ignored (has its own AUTO_-numbers et.c.)
+ return
+
  if parID == "PTR_CalcConzInfo":
  # another "special" topic handled in 'follow_calc_conz_info' ...
  return
@@ -404,8 +413,8 @@ _NOT_INIT = object()
  class MqttClient(MqttClientBase):
  """a simplified client for the Ionicon MQTT API.

- >>> mq = MqttClient()
- >>> mq.write('TCP_MCP_B', 3400)
+ > mq = MqttClient()
+ > mq.write('TCP_MCP_B', 3400)
  ValueError()

  """
@@ -463,11 +472,27 @@ class MqttClient(MqttClientBase):

  @property
  def current_sourcefile(self):
- '''Returns the path to the hdf5-file that is currently (or soon to be) written.
+ '''Returns the path to the hdf5-file that is currently being written.

- May be an empty string if no sourcefile has yet been set.
+ Returns an empty string if no measurement is running.
  '''
- return self._sf_filename[0]
+ if not self.is_running:
+ return ""
+
+ if self._sf_filename[0] is not _NOT_INIT:
+ return self._sf_filename[0]
+
+ # Note: '_NOT_INIT' is set by us on start of acquisition, so we'd expect
+ # to receive the source-file-topic after a (generous) timeout:
+ timeout_s = 15
+ started_at = time.monotonic()
+ while time.monotonic() < started_at + timeout_s:
+ if self._sf_filename[0] is not _NOT_INIT:
+ return self._sf_filename[0]
+
+ time.sleep(10e-3)
+ else:
+ raise TimeoutError(f"[{self}] unable to retrieve source-file after ({timeout_s = })");

  @property
  def current_cycle(self):
@@ -476,9 +501,9 @@ class MqttClient(MqttClientBase):
  return self._overallcycle[0]
  return 0

- def __init__(self, host='127.0.0.1'):
+ def __init__(self, host='127.0.0.1', port=1883):
  # this sets up the mqtt connection with default callbacks:
- super().__init__(host, _subscriber_functions, None, None, None)
+ super().__init__(host, port, _subscriber_functions, None, None, None)
  log.debug(f"connection check ({self.is_connected}) :: {self._server_state = } / {self._sched_cmds = }");

  def disconnect(self):
@@ -508,12 +533,10 @@ class MqttClient(MqttClientBase):
  if _lut is self.set_values and is_read_only:
  raise ValueError(f"'{parID}' is read-only, did you mean `kind='act'`?")

- # Note: The values should need NO! time to be populated from the MQTT topics,
- # because all topics are published as *retained* by the PTR-server.
- # However, a short timeout is respected before raising a `KeyError`:
- try:
- return _lut[parID]
- except KeyError as exc:
+ if not parID in _lut:
+ # Note: The values should need NO! time to be populated from the MQTT topics,
+ # because all topics are published as *retained* by the PTR-server.
+ # However, a short timeout is respected before raising a `KeyError`:
  time.sleep(200e-3)
  rv = _lut.get(parID)
  if rv is not None:
@@ -525,6 +548,7 @@ class MqttClient(MqttClientBase):
  "set" if parID in self.set_values else
  "")
  raise KeyError(str(parID) + (' (did you mean `kind="%s"`?)' % error_hint) if error_hint else "")
+ return _lut[parID]

  def get_table(self, table_name):
  timeout_s = 10