PyAutomationIO 1.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- automation/__init__.py +46 -0
- automation/alarms/__init__.py +563 -0
- automation/alarms/states.py +192 -0
- automation/alarms/trigger.py +64 -0
- automation/buffer.py +132 -0
- automation/core.py +1792 -0
- automation/dbmodels/__init__.py +23 -0
- automation/dbmodels/alarms.py +549 -0
- automation/dbmodels/core.py +86 -0
- automation/dbmodels/events.py +178 -0
- automation/dbmodels/logs.py +155 -0
- automation/dbmodels/machines.py +181 -0
- automation/dbmodels/opcua.py +81 -0
- automation/dbmodels/opcua_server.py +174 -0
- automation/dbmodels/tags.py +921 -0
- automation/dbmodels/users.py +259 -0
- automation/extensions/__init__.py +15 -0
- automation/extensions/api.py +149 -0
- automation/extensions/cors.py +18 -0
- automation/filter/__init__.py +19 -0
- automation/iad/__init__.py +3 -0
- automation/iad/frozen_data.py +54 -0
- automation/iad/out_of_range.py +51 -0
- automation/iad/outliers.py +51 -0
- automation/logger/__init__.py +0 -0
- automation/logger/alarms.py +434 -0
- automation/logger/core.py +265 -0
- automation/logger/datalogger.py +877 -0
- automation/logger/events.py +202 -0
- automation/logger/logdict.py +53 -0
- automation/logger/logs.py +203 -0
- automation/logger/machines.py +248 -0
- automation/logger/opcua_server.py +130 -0
- automation/logger/users.py +96 -0
- automation/managers/__init__.py +4 -0
- automation/managers/alarms.py +455 -0
- automation/managers/db.py +328 -0
- automation/managers/opcua_client.py +186 -0
- automation/managers/state_machine.py +183 -0
- automation/models.py +174 -0
- automation/modules/__init__.py +14 -0
- automation/modules/alarms/__init__.py +0 -0
- automation/modules/alarms/resources/__init__.py +10 -0
- automation/modules/alarms/resources/alarms.py +280 -0
- automation/modules/alarms/resources/summary.py +81 -0
- automation/modules/events/__init__.py +0 -0
- automation/modules/events/resources/__init__.py +10 -0
- automation/modules/events/resources/events.py +85 -0
- automation/modules/events/resources/logs.py +109 -0
- automation/modules/tags/__init__.py +0 -0
- automation/modules/tags/resources/__init__.py +8 -0
- automation/modules/tags/resources/tags.py +254 -0
- automation/modules/users/__init__.py +2 -0
- automation/modules/users/resources/__init__.py +10 -0
- automation/modules/users/resources/models/__init__.py +2 -0
- automation/modules/users/resources/models/roles.py +5 -0
- automation/modules/users/resources/models/users.py +14 -0
- automation/modules/users/resources/roles.py +38 -0
- automation/modules/users/resources/users.py +113 -0
- automation/modules/users/roles.py +121 -0
- automation/modules/users/users.py +335 -0
- automation/opcua/__init__.py +1 -0
- automation/opcua/models.py +541 -0
- automation/opcua/subscription.py +259 -0
- automation/pages/__init__.py +0 -0
- automation/pages/alarms.py +34 -0
- automation/pages/alarms_history.py +21 -0
- automation/pages/assets/styles.css +7 -0
- automation/pages/callbacks/__init__.py +28 -0
- automation/pages/callbacks/alarms.py +218 -0
- automation/pages/callbacks/alarms_summary.py +20 -0
- automation/pages/callbacks/db.py +222 -0
- automation/pages/callbacks/filter.py +238 -0
- automation/pages/callbacks/machines.py +29 -0
- automation/pages/callbacks/machines_detailed.py +581 -0
- automation/pages/callbacks/opcua.py +266 -0
- automation/pages/callbacks/opcua_server.py +244 -0
- automation/pages/callbacks/tags.py +495 -0
- automation/pages/callbacks/trends.py +119 -0
- automation/pages/communications.py +129 -0
- automation/pages/components/__init__.py +123 -0
- automation/pages/components/alarms.py +151 -0
- automation/pages/components/alarms_summary.py +45 -0
- automation/pages/components/database.py +128 -0
- automation/pages/components/gaussian_filter.py +69 -0
- automation/pages/components/machines.py +396 -0
- automation/pages/components/opcua.py +384 -0
- automation/pages/components/opcua_server.py +53 -0
- automation/pages/components/tags.py +253 -0
- automation/pages/components/trends.py +66 -0
- automation/pages/database.py +26 -0
- automation/pages/filter.py +55 -0
- automation/pages/machines.py +20 -0
- automation/pages/machines_detailed.py +41 -0
- automation/pages/main.py +63 -0
- automation/pages/opcua_server.py +28 -0
- automation/pages/tags.py +40 -0
- automation/pages/trends.py +35 -0
- automation/singleton.py +30 -0
- automation/state_machine.py +1674 -0
- automation/tags/__init__.py +2 -0
- automation/tags/cvt.py +1198 -0
- automation/tags/filter.py +55 -0
- automation/tags/tag.py +418 -0
- automation/tests/__init__.py +10 -0
- automation/tests/test_alarms.py +110 -0
- automation/tests/test_core.py +257 -0
- automation/tests/test_unit.py +21 -0
- automation/tests/test_user.py +155 -0
- automation/utils/__init__.py +164 -0
- automation/utils/decorators.py +222 -0
- automation/utils/npw.py +294 -0
- automation/utils/observer.py +21 -0
- automation/utils/units.py +118 -0
- automation/variables/__init__.py +55 -0
- automation/variables/adimentional.py +30 -0
- automation/variables/current.py +71 -0
- automation/variables/density.py +115 -0
- automation/variables/eng_time.py +68 -0
- automation/variables/force.py +90 -0
- automation/variables/length.py +104 -0
- automation/variables/mass.py +80 -0
- automation/variables/mass_flow.py +101 -0
- automation/variables/percentage.py +30 -0
- automation/variables/power.py +113 -0
- automation/variables/pressure.py +93 -0
- automation/variables/temperature.py +168 -0
- automation/variables/volume.py +70 -0
- automation/variables/volumetric_flow.py +100 -0
- automation/workers/__init__.py +2 -0
- automation/workers/logger.py +164 -0
- automation/workers/state_machine.py +207 -0
- automation/workers/worker.py +36 -0
- pyautomationio-1.1.1.dist-info/METADATA +199 -0
- pyautomationio-1.1.1.dist-info/RECORD +138 -0
- pyautomationio-1.1.1.dist-info/WHEEL +5 -0
- pyautomationio-1.1.1.dist-info/licenses/LICENSE +21 -0
- pyautomationio-1.1.1.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,877 @@
|
|
|
1
|
+
# -*- coding: utf-8 -*-
|
|
2
|
+
"""pyhades/logger/datalogger.py
|
|
3
|
+
|
|
4
|
+
This module implements a database logger for the CVT instance,
|
|
5
|
+
will create a time series for each tag in a short-term memory database.
|
|
6
|
+
"""
|
|
7
|
+
import pytz, logging, math
|
|
8
|
+
from peewee import fn
|
|
9
|
+
from collections import defaultdict
|
|
10
|
+
from datetime import datetime
|
|
11
|
+
from ..tags.tag import Tag
|
|
12
|
+
from ..dbmodels import Tags, TagValue, Units, Segment, Variables
|
|
13
|
+
from ..modules.users.users import User
|
|
14
|
+
from ..tags.cvt import CVTEngine
|
|
15
|
+
from .core import BaseLogger, BaseEngine
|
|
16
|
+
from ..variables import *
|
|
17
|
+
from ..utils.decorators import db_rollback
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
|
|
21
|
+
|
|
22
|
+
class DataLogger(BaseLogger):
|
|
23
|
+
"""
|
|
24
|
+
Data Logger class.
|
|
25
|
+
|
|
26
|
+
This class serves as an API for managing tag settings and accessing logged tags.
|
|
27
|
+
|
|
28
|
+
**Usage Example**:
|
|
29
|
+
|
|
30
|
+
.. code-block:: python
|
|
31
|
+
|
|
32
|
+
>>> from automation.logger.datalogger import DataLogger
|
|
33
|
+
>>> _logger = DataLogger()
|
|
34
|
+
|
|
35
|
+
"""
|
|
36
|
+
|
|
37
|
+
def __init__(self):
    r"""Create the logger and bind the Current Value Table engine used for display units."""
    super().__init__()
    self.tag_engine = CVTEngine()
|
|
41
|
+
|
|
42
|
+
@db_rollback
def set_tag(
    self,
    id:str,
    name:str,
    unit:str,
    data_type:str,
    description:str="",
    display_name:str="",
    display_unit:str=None,
    opcua_address:str=None,
    node_namespace:str=None,
    scan_time:int=None,
    dead_band:float=None,
    manufacturer:str="",
    segment:str=""
):
    r"""
    Persist a new tag definition in the database.

    Returns None without writing anything when the database is unreachable.
    """
    if not self.check_connectivity():
        return None

    # Collect the column values once, then hand them to the model in a single call.
    record = {
        "id": id,
        "name": name,
        "unit": unit,
        "data_type": data_type,
        "description": description,
        "display_name": display_name,
        "display_unit": display_unit,
        "opcua_address": opcua_address,
        "node_namespace": node_namespace,
        "scan_time": scan_time,
        "dead_band": dead_band,
        "manufacturer": manufacturer,
        "segment": segment,
    }
    Tags.create(**record)
|
|
81
|
+
|
|
82
|
+
@db_rollback
def delete_tag(self, id:str):
    r"""
    Soft-delete a tag: the row identified by ``id`` is flagged inactive rather than removed.
    """
    if self.check_connectivity():
        record, _ = Tags.get_or_create(identifier=id)
        Tags.put(id=record.id, active=False)
|
|
93
|
+
|
|
94
|
+
@db_rollback
def get_tag_by_name(self, name:str):
    r"""
    Look up a tag record by its name.

    Returns the matching record, or None when the database is unreachable.
    """
    if self.check_connectivity():
        return Tags.read_by_name(name=name)
    return None
|
|
104
|
+
|
|
105
|
+
@db_rollback
def update_tag(self, id:str, **kwargs):
    r"""
    Update attributes of the tag identified by ``id``.

    * **id** (str): tag identifier.
    * **kwargs**: column/value pairs forwarded to ``Tags.put``. A
      ``gaussian_filter`` entry is coerced to bool: ``'1'``/``'true'``
      (case-insensitive) or ``True`` become True, anything else False.

    Returns the result of ``Tags.put``, or None when the database is unreachable.
    """
    if not self.check_connectivity():
        return None

    tag = Tags.get(identifier=id)

    if "gaussian_filter" in kwargs:
        # The previous code called .lower() directly on the value, which
        # raised AttributeError whenever a caller passed a real bool instead
        # of a string; go through str() so both spellings are accepted.
        raw = kwargs["gaussian_filter"]
        kwargs["gaussian_filter"] = str(raw).strip().lower() in ("1", "true")

    return Tags.put(id=tag.id, **kwargs)
|
|
127
|
+
|
|
128
|
+
@db_rollback
def set_tags(self, tags):
    r"""
    Create several tag definitions in one call.

    * **tags** (list): each element is a mapping of the keyword arguments
      accepted by ``set_tag`` (id, name, unit, data_type, ...).

    Returns None when the database is unreachable.
    """
    if not self.check_connectivity():
        return None

    for tag in tags:
        # The previous code called set_tag(tag), passing the whole element as
        # the ``id`` positional argument and always raising TypeError because
        # name/unit/data_type were missing. Unpack the element instead.
        if isinstance(tag, dict):
            self.set_tag(**tag)
        else:
            self.set_tag(*tag)
|
|
140
|
+
|
|
141
|
+
@db_rollback
def get_tags(self):
    r"""
    Return every tag record, or an empty list when the database is unreachable.
    """
    if self.check_connectivity():
        return Tags.read_all()
    return list()
|
|
151
|
+
|
|
152
|
+
@db_rollback
def write_tag(self, tag, value, timestamp):
    r"""
    Append one historical sample for ``tag``.

    * **tag**: tag name.
    * **value**: sample value.
    * **timestamp**: sample timestamp.

    Skipped silently when history logging is disabled or the database is unreachable.
    """
    if not (self.is_history_logged and self.check_connectivity()):
        return None

    record = Tags.read_by_name(tag)
    display_unit = Units.read_by_unit(unit=record.display_unit.unit)
    TagValue.create(tag=record, value=value, timestamp=timestamp, unit=display_unit)
|
|
168
|
+
|
|
169
|
+
@db_rollback
def write_tags(self, tags:list):
    r"""
    Bulk-append historical samples in a single insert.

    * **tags** (list): dicts holding at least a ``'tag'`` key (the tag name).
      Entries whose tag exists are enriched in place with the tag model
      instance and its display unit before one ``insert_many`` round-trip.

    Skipped silently when history logging is disabled or the database is unreachable.
    """
    if not (self.is_history_logged and self.check_connectivity()):
        return None

    rows = tags.copy()

    for index, item in enumerate(tags):
        record = Tags.read_by_name(item['tag'])
        if not record:
            continue
        rows[index].update({
            'tag': record,
            'unit': Units.get_or_none(id=record.display_unit.id)
        })

    TagValue.insert_many(rows).execute()
|
|
197
|
+
|
|
198
|
+
@db_rollback
def read_trends(self, start:str, stop:str, timezone:str, tags):
    r"""
    Return trend data for ``tags`` between ``start`` and ``stop``.

    * **start** / **stop** (str): local datetimes in ``DATETIME_FORMAT``.
    * **timezone** (str): IANA timezone name the inputs are expressed in and
      the output timestamps are formatted in.
    * **tags**: iterable of tag names.

    Returns a dict keyed by tag name:
    ``{"values": [{"x": formatted_ts, "y": value}, ...], "unit": display_unit}``.
    Samples are averaged into coarser buckets as the requested span grows,
    or None / empty dict when history logging is off / the DB is unreachable.
    """
    if not self.is_history_logged:

        return None

    if not self.check_connectivity():

        return dict()

    # Convert the caller's local datetimes to UTC epoch seconds for the range filter.
    _timezone = pytz.timezone(timezone)
    start = _timezone.localize(datetime.strptime(start, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()
    stop = _timezone.localize(datetime.strptime(stop, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()

    query = (TagValue
            .select(Tags.name, TagValue.value, TagValue.timestamp,
                    Units.unit.alias('tag_value_unit'), Variables.name.alias('variable_name'))
            .join(Tags)
            .join(Units, on=(Tags.unit == Units.id))
            .join(Variables, on=(Units.variable_id == Variables.id))
            .where((TagValue.timestamp.between(start, stop)) & (Tags.name.in_(tags)))
            .order_by(TagValue.timestamp)
            .dicts())

    # Pick an aggregation bucket size from the span so the payload stays small.
    time_span = (stop - start ) / 60  # span in minutes
    result = defaultdict(lambda: {"values": []})
    if time_span > 60 * 24 * 7:  # more than one week: one bucket per day
        result = self._agregate_data_every_seconds(query=query, result=result, seconds=3600 * 24, timezone=timezone)

    elif time_span > 60 * 24 * 2:  # more than two days: one bucket per hour
        result = self._agregate_data_every_seconds(query=query, result=result, seconds=3600, timezone=timezone)

    elif time_span > 60 * 2:  # more than two hours: one bucket per minute
        result = self._agregate_data_every_seconds(query=query, result=result, seconds=60, timezone=timezone)

    else:
        # Short spans: return the raw samples converted to the caller's timezone.

        for entry in query:

            # NOTE(review): stored timestamps are treated here as naive UTC
            # datetimes (localize would raise on aware datetimes or on floats)
            # — confirm against the TagValue model definition.
            from_timezone = pytz.timezone('UTC')
            timestamp = entry['timestamp']
            timestamp = from_timezone.localize(timestamp)
            result[entry['name']]["values"].append({
                "x": timestamp.astimezone(_timezone).strftime(self.tag_engine.DATETIME_FORMAT),
                "y": entry['value']
            })

    # Attach the display unit for every requested tag (even if it had no samples).
    for tag in tags:

        result[tag]['unit'] = self.tag_engine.get_display_unit_by_tag(tag)

    return result
|
|
259
|
+
|
|
260
|
+
@db_rollback
def read_table(self, start:str, stop:str, timezone:str, tags:list, page:int=1, limit:int=20):
    r"""
    Get historical data in table format with pagination.

    * **start** / **stop** (str): local datetimes in ``DATETIME_FORMAT``.
    * **timezone** (str): IANA timezone name for parsing the inputs and
      formatting the output timestamps.
    * **tags** (list): tag names to include.
    * **page** / **limit** (int): pagination controls (sanitized to >= 1;
      limit defaults back to 20 when non-positive).

    Returns ``{"data": [...], "pagination": {...}}`` with rows newest-first;
    an empty dict when the datetimes are malformed or the DB is unreachable;
    None when history logging is disabled.
    """
    if not self.is_history_logged:
        return None

    if not self.check_connectivity():
        return dict()

    _timezone = pytz.timezone(timezone)
    try:
        start_dt = _timezone.localize(datetime.strptime(start, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()
        stop_dt = _timezone.localize(datetime.strptime(stop, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()
    except ValueError:
        # Malformed datetime strings: return an empty result instead of raising.
        return dict()

    # Base query: newest samples first, joined with units for display.
    query = (TagValue
            .select(Tags.name, TagValue.value, TagValue.timestamp,
                    Units.unit.alias('tag_value_unit'))
            .join(Tags)
            .join(Units, on=(Tags.unit == Units.id))
            .where((TagValue.timestamp.between(start_dt, stop_dt)) & (Tags.name.in_(tags)))
            .order_by(TagValue.timestamp.desc()))

    total_records = query.count()

    # Sanitize pagination inputs.
    if limit <= 0: limit = 20
    if page <= 0: page = 1

    total_pages = math.ceil(total_records / limit)
    if total_pages == 0: total_pages = 1

    has_next = page < total_pages
    has_prev = page > 1

    paginated_query = query.paginate(page, limit).dicts()

    data = []
    utc_timezone = pytz.timezone('UTC')

    for entry in paginated_query:
        # The storage backend may hand timestamps back either as epoch numbers
        # or as (possibly naive) datetimes; normalize both to aware UTC before
        # converting to the caller's timezone.
        ts_val = entry['timestamp']
        if isinstance(ts_val, (int, float)):
            dt_object = datetime.fromtimestamp(ts_val, pytz.UTC)
        else:
            dt_object = utc_timezone.localize(ts_val) if ts_val.tzinfo is None else ts_val

        formatted_ts = dt_object.astimezone(_timezone).strftime(DATETIME_FORMAT)

        data.append({
            "timestamp": formatted_ts,
            "tag_name": entry['name'],
            "value": f"{entry['value']} {entry['tag_value_unit']}"
        })

    return {
        "data": data,
        "pagination": {
            "page": page,
            "limit": limit,
            "total_records": total_records,
            "total_pages": total_pages,
            "has_next": has_next,
            "has_prev": has_prev
        }
    }
|
|
391
|
+
|
|
392
|
+
@db_rollback
def read_tabular_data(self, start:str, stop:str, timezone:str, tags:list, sample_time:int, page:int=1, limit:int=20):
    r"""
    Get historical data in tabular format with pagination and forward-fill resampling.

    Args:
        start (str): Start datetime string in ``DATETIME_FORMAT`` (local to ``timezone``)
        stop (str): Stop datetime string in ``DATETIME_FORMAT`` (local to ``timezone``)
        timezone (str): IANA timezone name
        tags (list): List of tag names
        sample_time (int): Sample time in seconds (must be positive)
        page (int): Page number
        limit (int): Items per page

    Returns:
        dict: {
            "data": [{"timestamp": ..., "<tag>": value, ...}, ...],
            "pagination": {...}
        }
        Empty dict on bad input or unreachable DB; None when history logging is off.
    """
    if not self.is_history_logged:
        return None

    if not self.check_connectivity():
        return dict()

    _timezone = pytz.timezone(timezone)
    utc_timezone = pytz.UTC

    try:
        start_dt = _timezone.localize(datetime.strptime(start, DATETIME_FORMAT)).astimezone(utc_timezone)
        stop_dt = _timezone.localize(datetime.strptime(stop, DATETIME_FORMAT)).astimezone(utc_timezone)
        start_ts = start_dt.timestamp()
        stop_ts = stop_dt.timestamp()
    except ValueError:
        # Malformed datetime input: return an empty result instead of raising.
        return dict()

    if sample_time <= 0:
        return dict()

    # If any sample exists at or before start_dt, forward-fill can seed every
    # row; otherwise the effective start is moved up to the first real sample.
    has_history = (TagValue
                  .select()
                  .join(Tags)
                  .where(
                      (Tags.name.in_(tags)) &
                      (TagValue.timestamp <= start_dt)
                  )
                  .limit(1)
                  .count() > 0)

    if not has_history:
        # No history: find the first actual data point within the requested range.
        min_ts = (TagValue
                 .select(fn.Min(TagValue.timestamp))
                 .join(Tags)
                 .where(
                     (Tags.name.in_(tags)) &
                     (TagValue.timestamp >= start_dt) &
                     (TagValue.timestamp <= stop_dt) &
                     (TagValue.value.is_null(False))
                 )
                 .scalar())

        if min_ts is None:
            # No data in range and no history at all.
            return {"data": [], "pagination": {}}

        # Adjust start to the first data point. The DB may return either a
        # (possibly naive) datetime or an epoch number; normalize both.
        if isinstance(min_ts, datetime):
            if min_ts.tzinfo is None:
                min_ts = utc_timezone.localize(min_ts)
            start_dt = min_ts
            start_ts = start_dt.timestamp()
        elif isinstance(min_ts, (int, float)):
            start_ts = float(min_ts)
            start_dt = datetime.fromtimestamp(start_ts, pytz.UTC)

    # Row count is derived from the time range, not from stored rows: one
    # resampled row per sample_time step, endpoints inclusive.
    total_duration = stop_ts - start_ts
    if total_duration < 0:
        return {"data": [], "pagination": {}}

    total_records = math.floor(total_duration / sample_time) + 1

    # Sanitize pagination inputs.
    if limit <= 0: limit = 20
    if page <= 0: page = 1

    total_pages = math.ceil(total_records / limit)
    if total_pages == 0: total_pages = 1

    has_next = page < total_pages
    has_prev = page > 1

    # Timestamp window covered by the requested page.
    start_index = (page - 1) * limit
    end_index = min(start_index + limit, total_records)

    page_start_ts = start_ts + (start_index * sample_time)
    page_end_ts = start_ts + ((end_index - 1) * sample_time)

    data_points = []
    current_ts = page_start_ts

    # 1. Seed the forward-fill state: for each tag, the latest value recorded
    #    at or before the first step of this page (may look arbitrarily far
    #    back). One small indexed query per tag keeps this cheap relative to
    #    scanning the whole history.
    current_values = {}
    # Convert the float timestamp to a datetime for Peewee comparison.
    current_dt = datetime.fromtimestamp(current_ts, pytz.UTC)

    for tag_name in tags:
        # Latest value with timestamp <= current_dt.
        last_val_query = (TagValue
                         .select(TagValue.value)
                         .join(Tags)
                         .where((Tags.name == tag_name) & (TagValue.timestamp <= current_dt))
                         .order_by(TagValue.timestamp.desc())
                         .limit(1)
                         .dicts())

        entry = list(last_val_query)
        if entry:
            current_values[tag_name] = entry[0]['value']
        else:
            current_values[tag_name] = None

    # 2. Fetch every value change inside the page window in a single query.
    page_start_dt = datetime.fromtimestamp(page_start_ts, pytz.UTC)
    page_end_dt = datetime.fromtimestamp(page_end_ts, pytz.UTC)

    changes_query = (TagValue
                    .select(Tags.name, TagValue.value, TagValue.timestamp)
                    .join(Tags)
                    .where(
                        (Tags.name.in_(tags)) &
                        (TagValue.timestamp > page_start_dt) &
                        (TagValue.timestamp <= page_end_dt) &
                        (TagValue.value.is_null(False))
                    )
                    .order_by(TagValue.timestamp.asc())
                    .dicts())

    # Index the changes by epoch seconds, normalizing datetime vs numeric storage.
    changes_by_ts = defaultdict(dict)
    for change in changes_query:
        ts_val = change['timestamp']
        if isinstance(ts_val, datetime):
            # Treat naive datetimes as UTC.
            if ts_val.tzinfo is None:
                ts_val = utc_timezone.localize(ts_val)
            ts = ts_val.timestamp()
        else:
            ts = float(ts_val)

        changes_by_ts[ts][change['name']] = change['value']

    # 3. Walk the page's steps (at most ``limit`` of them) and replay the
    #    changes in order, so at step T each tag holds its latest value with
    #    timestamp <= T (sample-and-hold / forward fill).
    changes_iter = sorted(changes_by_ts.keys())
    change_idx = 0

    for i in range(end_index - start_index):
        step_ts = page_start_ts + (i * sample_time)

        # Consume all changes up to and including this step.
        while change_idx < len(changes_iter) and changes_iter[change_idx] <= step_ts:
            ts = changes_iter[change_idx]
            for tag, val in changes_by_ts[ts].items():
                current_values[tag] = val
            change_idx += 1

        # Build the row, formatted in the caller's timezone.
        dt_object = datetime.fromtimestamp(step_ts, pytz.UTC)
        formatted_ts = dt_object.astimezone(_timezone).strftime(DATETIME_FORMAT)

        row = {"timestamp": formatted_ts}
        has_data = False
        for tag in tags:
            val = current_values.get(tag)
            row[tag] = val  # None when the tag has no value recorded yet
            if val is not None:
                has_data = True

        # Rows where every tag is still None are skipped.
        if has_data:
            data_points.append(row)

    return {
        "data": data_points,
        "pagination": {
            "page": page,
            "limit": limit,
            "total_records": total_records,
            "total_pages": total_pages,
            "has_next": has_next,
            "has_prev": has_prev
        }
    }
|
|
620
|
+
|
|
621
|
+
def _agregate_data_every_seconds(self, query, result, seconds:int, timezone:str="UTC"):
    r"""
    Average query rows into fixed-width time buckets.

    * **query**: iterable of dicts with ``name``, ``value``, ``timestamp``,
      ``tag_value_unit`` and ``variable_name`` keys.
    * **result**: ``defaultdict(lambda: {"values": []})`` that averaged
      ``{"x", "y"}`` points are appended into, keyed by tag name.
    * **seconds** (int): bucket width in seconds.
    * **timezone** (str): timezone used to format the output "x" timestamps.

    Returns ``result`` with one averaged point per (tag, bucket); the point's
    "x" is the last sample timestamp seen in that bucket.
    """
    target_timezone = pytz.timezone(timezone)
    from_timezone = pytz.timezone('UTC')
    buffer = defaultdict(lambda: {"sum": 0, "count": 0, "last_timestamp": None})

    for entry in query:
        # Bucket by flooring the UTC epoch to a multiple of ``seconds``.
        # The previous implementation only floored the *second* component
        # (timestamp.replace(second=(second // seconds) * seconds)), which
        # degenerates to per-minute buckets whenever seconds >= 60, so the
        # hourly/daily aggregation paths never actually aggregated.
        ts = entry['timestamp']
        # Timestamps are assumed naive UTC (consistent with read_trends);
        # aware values are accepted as-is.
        epoch = (from_timezone.localize(ts) if ts.tzinfo is None else ts).timestamp()
        bucket = int(epoch // seconds) * seconds
        buffer_key = (entry['name'], bucket)
        buffer[buffer_key]["sum"] += entry['value']
        buffer[buffer_key]["count"] += 1
        buffer[buffer_key]["last_timestamp"] = ts
        buffer[buffer_key]['unit'] = entry["tag_value_unit"]
        buffer[buffer_key]['variable'] = entry['variable_name']

    for (tag_name, bucket), data in buffer.items():

        avg_value = data["sum"] / data["count"]
        last_timestamp = data["last_timestamp"]
        if last_timestamp.tzinfo is None:
            last_timestamp = from_timezone.localize(last_timestamp)
        result[tag_name]["values"].append({
            "x": last_timestamp.astimezone(target_timezone).strftime(self.tag_engine.DATETIME_FORMAT),
            "y": avg_value
        })

    return result
|
|
650
|
+
|
|
651
|
+
@db_rollback
|
|
652
|
+
def read_segments(self):
|
|
653
|
+
r"""
|
|
654
|
+
Documentation here
|
|
655
|
+
"""
|
|
656
|
+
if not self.check_connectivity():
|
|
657
|
+
|
|
658
|
+
return list()
|
|
659
|
+
|
|
660
|
+
return Segment.read_all()
|
|
661
|
+
|
|
662
|
+
|
|
663
|
+
class DataLoggerEngine(BaseEngine):
    r"""
    Thread-safe engine facade over ``DataLogger``.

    Every public method serializes its arguments into an
    ``{"action": ..., "parameters": {...}}`` request and submits it through
    ``BaseEngine.query`` so database access stays thread-safe.
    """

    def __init__(self):

        super(DataLoggerEngine, self).__init__()
        self.logger = DataLogger()

    @staticmethod
    def _build_query(action:str, parameters:dict) -> dict:
        r"""Packages *action* and *parameters* in the shape ``query`` expects."""
        return {"action": action, "parameters": parameters}

    def create_tables(self, tables):
        r"""
        Creates the default database tables (e.g. ``TagTrend``, ``TagValue``).

        **Parameters**

        * **tables** (list): list of database models to create.

        **Returns** `None`
        """
        self.logger.create_tables(tables)

    def drop_tables(self, tables:list):
        r"""
        Drops the given tables from the database if they exist.

        **Parameters**

        * **tables** (list): list of database models to drop.
        """
        self.logger.drop_tables(tables)

    def set_tag(
            self,
            tag:Tag
        ):
        r"""
        Registers a CVTEngine tag so its values get logged to the database.

        **Parameters**

        * **tag** (Tag): tag definition taken from CVTEngine.

        **Returns** the backend response from ``query``.
        """
        fields = (
            "id", "name", "unit", "data_type", "description", "display_name",
            "display_unit", "opcua_address", "node_namespace", "scan_time",
            "dead_band", "manufacturer", "segment",
        )
        parameters = {field: getattr(tag, field) for field in fields}
        return self.query(self._build_query("set_tag", parameters))

    def get_tags(self):
        r"""
        Retrieves every tag defined in the logger database.
        """
        return self.query(self._build_query("get_tags", dict()))

    def get_tag_by_name(self, name:str):
        r"""
        Retrieves a single tag record by its name.

        **Parameters**

        * **name** (str): tag name to look up.
        """
        return self.query(self._build_query("get_tag_by_name", {"name": name}))

    def update_tag(
            self,
            id:str,
            user:User|None=None,
            **kwargs
        ):
        r"""
        Updates the tag identified by *id* with the given keyword fields.

        # Parameters

        - **id** (str): tag identifier.
        - **user** (User | None): NOTE(review) accepted but not forwarded to
          the backend — confirm whether audit attribution was intended here.
        - **kwargs**: tag fields to update.

        # Returns

        - backend response from ``query``.
        """
        parameters = {"id": id}
        parameters.update(kwargs)
        return self.query(self._build_query("update_tag", parameters))

    def delete_tag(self, id:str):
        r"""
        Deletes the tag identified by *id*.

        # Parameters

        - **id** (str): tag identifier.

        # Returns

        - backend response from ``query``.
        """
        return self.query(self._build_query("delete_tag", {"id": id}))

    def write_tag(self, tag:str, value:float, timestamp:datetime):
        r"""
        Persists one value for *tag* on a thread-safe mechanism.

        **Parameters**

        * **tag** (str): tag name in database.
        * **value** (float): value to store.
        * **timestamp** (datetime): sample time of the value.
        """
        parameters = {
            "tag": tag,
            "value": value,
            "timestamp": timestamp,
        }
        return self.query(self._build_query("write_tag", parameters))

    def write_tags(self, tags:list):
        r"""
        Persists a batch of tag values on a thread-safe mechanism.

        **Parameters**

        * **tags** (list): batch of tag samples to store.
        """
        return self.query(self._build_query("write_tags", {"tags": tags}))

    def read_trends(self, start:str, stop:str, timezone:str, *tags):
        r"""
        Reads trend data for *tags* between *start* and *stop* on a
        thread-safe mechanism.

        **Parameters**

        * **start** (str): window start timestamp.
        * **stop** (str): window stop timestamp.
        * **timezone** (str): timezone for the returned timestamps.
        * **tags**: tag names to read.

        **Returns**

        * trend data as produced by the backend.
        """
        parameters = {
            "start": start,
            "stop": stop,
            "timezone": timezone,
            "tags": tags,
        }
        return self.query(self._build_query("read_trends", parameters))

    def read_tabular_data(self, start:str, stop:str, timezone:str, tags:list, sample_time:int, page:int=1, limit:int=20):
        r"""
        Gets historical data in tabular format with pagination on a
        thread-safe mechanism.
        """
        parameters = {
            "start": start,
            "stop": stop,
            "timezone": timezone,
            "tags": tags,
            "sample_time": sample_time,
            "page": page,
            "limit": limit,
        }
        return self.query(self._build_query("read_tabular_data", parameters))

    def read_segments(self):
        r"""
        Reads every segment record from the database.
        """
        return self.query(self._build_query("read_segments", dict()))