PyAutomationIO 0.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138) hide show
  1. automation/__init__.py +46 -0
  2. automation/alarms/__init__.py +563 -0
  3. automation/alarms/states.py +192 -0
  4. automation/alarms/trigger.py +64 -0
  5. automation/buffer.py +132 -0
  6. automation/core.py +1775 -0
  7. automation/dbmodels/__init__.py +23 -0
  8. automation/dbmodels/alarms.py +524 -0
  9. automation/dbmodels/core.py +86 -0
  10. automation/dbmodels/events.py +153 -0
  11. automation/dbmodels/logs.py +155 -0
  12. automation/dbmodels/machines.py +181 -0
  13. automation/dbmodels/opcua.py +81 -0
  14. automation/dbmodels/opcua_server.py +174 -0
  15. automation/dbmodels/tags.py +921 -0
  16. automation/dbmodels/users.py +259 -0
  17. automation/extensions/__init__.py +15 -0
  18. automation/extensions/api.py +149 -0
  19. automation/extensions/cors.py +18 -0
  20. automation/filter/__init__.py +19 -0
  21. automation/iad/__init__.py +3 -0
  22. automation/iad/frozen_data.py +54 -0
  23. automation/iad/out_of_range.py +51 -0
  24. automation/iad/outliers.py +51 -0
  25. automation/logger/__init__.py +0 -0
  26. automation/logger/alarms.py +426 -0
  27. automation/logger/core.py +265 -0
  28. automation/logger/datalogger.py +646 -0
  29. automation/logger/events.py +194 -0
  30. automation/logger/logdict.py +53 -0
  31. automation/logger/logs.py +203 -0
  32. automation/logger/machines.py +248 -0
  33. automation/logger/opcua_server.py +130 -0
  34. automation/logger/users.py +96 -0
  35. automation/managers/__init__.py +4 -0
  36. automation/managers/alarms.py +455 -0
  37. automation/managers/db.py +328 -0
  38. automation/managers/opcua_client.py +186 -0
  39. automation/managers/state_machine.py +183 -0
  40. automation/models.py +174 -0
  41. automation/modules/__init__.py +14 -0
  42. automation/modules/alarms/__init__.py +0 -0
  43. automation/modules/alarms/resources/__init__.py +10 -0
  44. automation/modules/alarms/resources/alarms.py +280 -0
  45. automation/modules/alarms/resources/summary.py +79 -0
  46. automation/modules/events/__init__.py +0 -0
  47. automation/modules/events/resources/__init__.py +10 -0
  48. automation/modules/events/resources/events.py +83 -0
  49. automation/modules/events/resources/logs.py +109 -0
  50. automation/modules/tags/__init__.py +0 -0
  51. automation/modules/tags/resources/__init__.py +8 -0
  52. automation/modules/tags/resources/tags.py +201 -0
  53. automation/modules/users/__init__.py +2 -0
  54. automation/modules/users/resources/__init__.py +10 -0
  55. automation/modules/users/resources/models/__init__.py +2 -0
  56. automation/modules/users/resources/models/roles.py +5 -0
  57. automation/modules/users/resources/models/users.py +14 -0
  58. automation/modules/users/resources/roles.py +38 -0
  59. automation/modules/users/resources/users.py +113 -0
  60. automation/modules/users/roles.py +121 -0
  61. automation/modules/users/users.py +335 -0
  62. automation/opcua/__init__.py +1 -0
  63. automation/opcua/models.py +541 -0
  64. automation/opcua/subscription.py +259 -0
  65. automation/pages/__init__.py +0 -0
  66. automation/pages/alarms.py +34 -0
  67. automation/pages/alarms_history.py +21 -0
  68. automation/pages/assets/styles.css +7 -0
  69. automation/pages/callbacks/__init__.py +28 -0
  70. automation/pages/callbacks/alarms.py +218 -0
  71. automation/pages/callbacks/alarms_summary.py +20 -0
  72. automation/pages/callbacks/db.py +222 -0
  73. automation/pages/callbacks/filter.py +238 -0
  74. automation/pages/callbacks/machines.py +29 -0
  75. automation/pages/callbacks/machines_detailed.py +581 -0
  76. automation/pages/callbacks/opcua.py +266 -0
  77. automation/pages/callbacks/opcua_server.py +244 -0
  78. automation/pages/callbacks/tags.py +495 -0
  79. automation/pages/callbacks/trends.py +119 -0
  80. automation/pages/communications.py +129 -0
  81. automation/pages/components/__init__.py +123 -0
  82. automation/pages/components/alarms.py +151 -0
  83. automation/pages/components/alarms_summary.py +45 -0
  84. automation/pages/components/database.py +128 -0
  85. automation/pages/components/gaussian_filter.py +69 -0
  86. automation/pages/components/machines.py +396 -0
  87. automation/pages/components/opcua.py +384 -0
  88. automation/pages/components/opcua_server.py +53 -0
  89. automation/pages/components/tags.py +253 -0
  90. automation/pages/components/trends.py +66 -0
  91. automation/pages/database.py +26 -0
  92. automation/pages/filter.py +55 -0
  93. automation/pages/machines.py +20 -0
  94. automation/pages/machines_detailed.py +41 -0
  95. automation/pages/main.py +63 -0
  96. automation/pages/opcua_server.py +28 -0
  97. automation/pages/tags.py +40 -0
  98. automation/pages/trends.py +35 -0
  99. automation/singleton.py +30 -0
  100. automation/state_machine.py +1672 -0
  101. automation/tags/__init__.py +2 -0
  102. automation/tags/cvt.py +1198 -0
  103. automation/tags/filter.py +55 -0
  104. automation/tags/tag.py +418 -0
  105. automation/tests/__init__.py +10 -0
  106. automation/tests/test_alarms.py +110 -0
  107. automation/tests/test_core.py +257 -0
  108. automation/tests/test_unit.py +21 -0
  109. automation/tests/test_user.py +155 -0
  110. automation/utils/__init__.py +164 -0
  111. automation/utils/decorators.py +222 -0
  112. automation/utils/npw.py +294 -0
  113. automation/utils/observer.py +21 -0
  114. automation/utils/units.py +118 -0
  115. automation/variables/__init__.py +55 -0
  116. automation/variables/adimentional.py +30 -0
  117. automation/variables/current.py +71 -0
  118. automation/variables/density.py +115 -0
  119. automation/variables/eng_time.py +68 -0
  120. automation/variables/force.py +90 -0
  121. automation/variables/length.py +104 -0
  122. automation/variables/mass.py +80 -0
  123. automation/variables/mass_flow.py +101 -0
  124. automation/variables/percentage.py +30 -0
  125. automation/variables/power.py +113 -0
  126. automation/variables/pressure.py +93 -0
  127. automation/variables/temperature.py +168 -0
  128. automation/variables/volume.py +70 -0
  129. automation/variables/volumetric_flow.py +100 -0
  130. automation/workers/__init__.py +2 -0
  131. automation/workers/logger.py +164 -0
  132. automation/workers/state_machine.py +207 -0
  133. automation/workers/worker.py +36 -0
  134. pyautomationio-0.0.0.dist-info/METADATA +198 -0
  135. pyautomationio-0.0.0.dist-info/RECORD +138 -0
  136. pyautomationio-0.0.0.dist-info/WHEEL +5 -0
  137. pyautomationio-0.0.0.dist-info/licenses/LICENSE +21 -0
  138. pyautomationio-0.0.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,646 @@
1
+ # -*- coding: utf-8 -*-
2
+ """pyhades/logger/datalogger.py
3
+
4
+ This module implements a database logger for the CVT instance,
5
+ will create a time-serie for each tag in a short memory data base.
6
+ """
7
+ import pytz, logging, math
8
+ from collections import defaultdict
9
+ from datetime import datetime
10
+ from ..tags.tag import Tag
11
+ from ..dbmodels import Tags, TagValue, Units, Segment, Variables
12
+ from ..modules.users.users import User
13
+ from ..tags.cvt import CVTEngine
14
+ from .core import BaseLogger, BaseEngine
15
+ from ..variables import *
16
+ from ..utils.decorators import db_rollback
17
+
18
+
19
+ DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
20
+
21
class DataLogger(BaseLogger):
    """
    Database-backed logger for tag values.

    Serves as an API for managing tag settings and for reading logged
    samples back out of the database.

    **Usage Example**:

    .. code-block:: python

        >>> from pyhades import DataLogger
        >>> _logger = DataLogger()

    """

    def __init__(self):
        # Keep a handle on the CVT engine for display-unit lookups.
        super().__init__()
        self.tag_engine = CVTEngine()
40
+
41
+ @db_rollback
42
+ def set_tag(
43
+ self,
44
+ id:str,
45
+ name:str,
46
+ unit:str,
47
+ data_type:str,
48
+ description:str="",
49
+ display_name:str="",
50
+ display_unit:str=None,
51
+ opcua_address:str=None,
52
+ node_namespace:str=None,
53
+ scan_time:int=None,
54
+ dead_band:float=None,
55
+ manufacturer:str="",
56
+ segment:str=""
57
+ ):
58
+ r"""
59
+ Documentation here
60
+ """
61
+ if not self.check_connectivity():
62
+
63
+ return None
64
+
65
+ Tags.create(
66
+ id=id,
67
+ name=name,
68
+ unit=unit,
69
+ data_type=data_type,
70
+ description=description,
71
+ display_name=display_name,
72
+ display_unit=display_unit,
73
+ opcua_address=opcua_address,
74
+ node_namespace=node_namespace,
75
+ scan_time=scan_time,
76
+ dead_band=dead_band,
77
+ manufacturer=manufacturer,
78
+ segment=segment
79
+ )
80
+
81
+ @db_rollback
82
+ def delete_tag(self, id:str):
83
+ r"""
84
+ Documentation here
85
+ """
86
+ if not self.check_connectivity():
87
+
88
+ return None
89
+
90
+ tag, _ = Tags.get_or_create(identifier=id)
91
+ Tags.put(id=tag.id, active=False)
92
+
93
+ @db_rollback
94
+ def get_tag_by_name(self, name:str):
95
+ r"""
96
+ Documentation here
97
+ """
98
+ if not self.check_connectivity():
99
+
100
+ return None
101
+
102
+ return Tags.read_by_name(name=name)
103
+
104
+ @db_rollback
105
+ def update_tag(self, id:str, **kwargs):
106
+ r"""
107
+ Documentation here
108
+ """
109
+ if not self.check_connectivity():
110
+
111
+ return None
112
+
113
+ tag = Tags.get(identifier=id)
114
+
115
+ if "gaussian_filter" in kwargs:
116
+
117
+ if kwargs['gaussian_filter'].lower() in ('1', 'true'):
118
+
119
+ kwargs['gaussian_filter'] = True
120
+
121
+ else:
122
+
123
+ kwargs['gaussian_filter'] = False
124
+
125
+ return Tags.put(id=tag.id, **kwargs)
126
+
127
+ @db_rollback
128
+ def set_tags(self, tags):
129
+ r"""
130
+ Documentation here
131
+ """
132
+ if not self.check_connectivity():
133
+
134
+ return None
135
+
136
+ for tag in tags:
137
+
138
+ self.set_tag(tag)
139
+
140
+ @db_rollback
141
+ def get_tags(self):
142
+ r"""
143
+ Documentation here
144
+ """
145
+ if not self.check_connectivity():
146
+
147
+ return list()
148
+
149
+ return Tags.read_all()
150
+
151
+ @db_rollback
152
+ def write_tag(self, tag, value, timestamp):
153
+ r"""
154
+ Documentation here
155
+ """
156
+ if not self.is_history_logged:
157
+
158
+ return None
159
+
160
+ if not self.check_connectivity():
161
+
162
+ return None
163
+
164
+ trend = Tags.read_by_name(tag)
165
+ unit = Units.read_by_unit(unit=trend.display_unit.unit)
166
+ TagValue.create(tag=trend, value=value, timestamp=timestamp, unit=unit)
167
+
168
+ @db_rollback
169
+ def write_tags(self, tags:list):
170
+ r"""
171
+ Documentation here
172
+ """
173
+ if not self.is_history_logged:
174
+
175
+ return None
176
+
177
+ if not self.check_connectivity():
178
+
179
+ return None
180
+
181
+ _tags = tags.copy()
182
+
183
+ for counter, tag in enumerate(tags):
184
+
185
+ _tag = Tags.read_by_name(tag['tag'])
186
+
187
+ if _tag:
188
+
189
+ unit = Units.get_or_none(id=_tag.display_unit.id)
190
+ _tags[counter].update({
191
+ 'tag': _tag,
192
+ 'unit': unit
193
+ })
194
+
195
+ TagValue.insert_many(_tags).execute()
196
+
197
    @db_rollback
    def read_trends(self, start:str, stop:str, timezone:str, tags):
        r"""
        Read historical values for *tags* between *start* and *stop*.

        *start*/*stop* are naive datetime strings in ``DATETIME_FORMAT``,
        interpreted in *timezone*. Depending on the span, samples are
        averaged per minute / hour / day; shorter spans return raw data.
        Returns a dict keyed by tag name with ``values`` ([{x, y}, ...])
        and ``unit``.
        """

        if not self.is_history_logged:

            return None

        if not self.check_connectivity():

            return dict()

        _timezone = pytz.timezone(timezone)
        # Convert the local wall-clock bounds to UTC epoch seconds (floats).
        # NOTE(review): the WHERE clause compares TagValue.timestamp against
        # these floats, yet below entry['timestamp'] is localized like a naive
        # datetime -- confirm the column type in dbmodels/tags.py.
        start = _timezone.localize(datetime.strptime(start, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()
        stop = _timezone.localize(datetime.strptime(stop, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()

        query = (TagValue
            .select(Tags.name, TagValue.value, TagValue.timestamp,
            Units.unit.alias('tag_value_unit'), Variables.name.alias('variable_name'))
            .join(Tags)
            .join(Units, on=(Tags.unit == Units.id))
            .join(Variables, on=(Units.variable_id == Variables.id))
            .where((TagValue.timestamp.between(start, stop)) & (Tags.name.in_(tags)))
            .order_by(TagValue.timestamp)
            .dicts())

        # Structure the data: pick an aggregation bucket from the span length.
        time_span = (stop - start ) / 60 # span in minutes
        result = defaultdict(lambda: {"values": []})
        if time_span > 60 * 24 * 7: # 1 week
            # Aggregate data every 1 day
            result = self._agregate_data_every_seconds(query=query, result=result, seconds=3600 * 24, timezone=timezone)

        elif time_span > 60 * 24 * 2: # 2 days
            # Aggregate data every 1 hour
            result = self._agregate_data_every_seconds(query=query, result=result, seconds=3600, timezone=timezone)

        elif time_span > 60 * 2: # 2 hours
            # Aggregate data every 1 minute
            result = self._agregate_data_every_seconds(query=query, result=result, seconds=60, timezone=timezone)

        else:
            # Use original data

            for entry in query:

                from_timezone = pytz.timezone('UTC')
                timestamp = entry['timestamp']
                # Stored timestamps are treated as naive UTC; stamp them
                # before converting to the requested display timezone.
                timestamp = from_timezone.localize(timestamp)
                result[entry['name']]["values"].append({
                    "x": timestamp.astimezone(_timezone).strftime(self.tag_engine.DATETIME_FORMAT),
                    "y": entry['value']
                })

        for tag in tags:

            result[tag]['unit'] = self.tag_engine.get_display_unit_by_tag(tag)

        return result
258
+
259
+ @db_rollback
260
+ def read_table(self, start:str, stop:str, timezone:str, tags:list, page:int=1, limit:int=20):
261
+ r"""
262
+ Get historical data in table format with pagination
263
+ """
264
+ if not self.is_history_logged:
265
+ return None
266
+
267
+ if not self.check_connectivity():
268
+ return dict()
269
+
270
+ _timezone = pytz.timezone(timezone)
271
+ try:
272
+ start_dt = _timezone.localize(datetime.strptime(start, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()
273
+ stop_dt = _timezone.localize(datetime.strptime(stop, DATETIME_FORMAT)).astimezone(pytz.UTC).timestamp()
274
+ except ValueError:
275
+ # Fallback or error handling if needed, though read_trends assumes correct format
276
+ return dict()
277
+
278
+ # Base query
279
+ query = (TagValue
280
+ .select(Tags.name, TagValue.value, TagValue.timestamp,
281
+ Units.unit.alias('tag_value_unit'))
282
+ .join(Tags)
283
+ .join(Units, on=(Tags.unit == Units.id))
284
+ .where((TagValue.timestamp.between(start_dt, stop_dt)) & (Tags.name.in_(tags)))
285
+ .order_by(TagValue.timestamp.desc()))
286
+
287
+ total_records = query.count()
288
+
289
+ # Safe pagination
290
+ if limit <= 0: limit = 20
291
+ if page <= 0: page = 1
292
+
293
+ total_pages = math.ceil(total_records / limit)
294
+ if total_pages == 0: total_pages = 1
295
+
296
+ has_next = page < total_pages
297
+ has_prev = page > 1
298
+
299
+ paginated_query = query.paginate(page, limit).dicts()
300
+
301
+ data = []
302
+ utc_timezone = pytz.timezone('UTC')
303
+
304
+ for entry in paginated_query:
305
+ timestamp = entry['timestamp']
306
+ # timestamp in DB is float (epoch) or datetime?
307
+ # In read_trends: timestamp = entry['timestamp']; timestamp = from_timezone.localize(timestamp)
308
+ # This implies entry['timestamp'] is NOT timezone aware or is a float?
309
+ # In read_trends: start/stop converted to .timestamp() (float).
310
+ # Peewee timestamp field usually stores whatever you give it. If float was stored, it comes back as float.
311
+ # But line 247 in read_trends: timestamp = from_timezone.localize(timestamp)
312
+ # localize() works on datetime objects.
313
+ # So TagValue.timestamp is likely a DateTimeField in Peewee, but stored as UTC?
314
+ # Wait, line 212: start = ... .timestamp().
315
+ # TagValue.timestamp.between(start, stop)
316
+ # If start/stop are floats, and TagValue.timestamp compares to them, TagValue.timestamp might be float/DoubleField?
317
+ # Or Peewee handles conversion?
318
+ # Let's check dbmodels/tags.py if possible.
319
+ # But relying on read_trends line 246-247:
320
+ # timestamp = entry['timestamp']
321
+ # timestamp = from_timezone.localize(timestamp)
322
+ # IF timestamp is float, localize() fails. localize takes datetime.
323
+ # SO timestamp must be a datetime object (naive).
324
+ # BUT line 212 converts start/stop to floats!
325
+ # If TagValue.timestamp is DateTimeField, Peewee might accept float for comparison? Or start/stop should be datetimes?
326
+ # Actually line 212: .timestamp() returns float.
327
+ # So TagValue.timestamp might be a float/DoubleField storing epoch?
328
+ # If so, line 247 `from_timezone.localize(timestamp)` would FAIL on a float.
329
+ # Let's assume read_trends logic is correct and see what it does.
330
+ # If timestamp is float, `datetime.fromtimestamp(timestamp, pytz.UTC)` is needed.
331
+ # If timestamp is datetime, `localize` is needed.
332
+
333
+ # Let's check `read_trends` carefully.
334
+ # 246| timestamp = entry['timestamp']
335
+ # 247| timestamp = from_timezone.localize(timestamp)
336
+
337
+ # This strongly suggests `entry['timestamp']` is a naive datetime object.
338
+ # THEN why line 212 converts to .timestamp()?
339
+ # `start = ... .timestamp()`
340
+ # Maybe TagValue.timestamp is Integer/Float (Epoch)?
341
+ # If so, 247 is suspicious.
342
+ # However, I should follow `read_trends` pattern OR be robust.
343
+ # If I check `write_tag`: `TagValue.create(..., timestamp=timestamp, ...)`
344
+ # where timestamp comes from `datetime.now(pytz.utc).astimezone(TIMEZONE)` (from tags.py).
345
+
346
+ # If I look at `read_trends` line 243 `for entry in query:` where query is `.dicts()`.
347
+ # I will assume `entry['timestamp']` works like in `read_trends`.
348
+ # BUT, if `read_trends` is working code, then `entry['timestamp']` is likely a datetime.
349
+ # AND `.between(start, stop)` works with floats if the column is float.
350
+ # Or maybe `start` and `stop` being floats are auto-converted?
351
+ # Actually, looking at line 246, `timestamp` variable is reused.
352
+
353
+ # I will try to support both or verify.
354
+ # Safest is `datetime.fromtimestamp(entry['timestamp'])` if it's float/int, or just use it if it's datetime.
355
+ # Given `read_trends` code, I suspect `timestamp` in DB is DateTimeField.
356
+ # If so, `start` and `stop` should probably be datetimes.
357
+ # Line 212 `... .timestamp()` makes them floats.
358
+ # Peewee `DateTimeField` vs float comparison...
359
+
360
+ # I'll stick to what I see.
361
+ # But wait, `read_trends` logic at 247 `from_timezone.localize(timestamp)` implies naive datetime.
362
+
363
+ # Implementation:
364
+ ts_val = entry['timestamp']
365
+ if isinstance(ts_val, (int, float)):
366
+ dt_object = datetime.fromtimestamp(ts_val, pytz.UTC)
367
+ else:
368
+ # Assuming naive datetime in UTC (based on read_trends using 'UTC' timezone to localize)
369
+ dt_object = utc_timezone.localize(ts_val) if ts_val.tzinfo is None else ts_val
370
+
371
+ formatted_ts = dt_object.astimezone(_timezone).strftime(DATETIME_FORMAT)
372
+
373
+ data.append({
374
+ "timestamp": formatted_ts,
375
+ "tag_name": entry['name'],
376
+ "value": f"{entry['value']} {entry['tag_value_unit']}"
377
+ })
378
+
379
+ return {
380
+ "data": data,
381
+ "pagination": {
382
+ "page": page,
383
+ "limit": limit,
384
+ "total_records": total_records,
385
+ "total_pages": total_pages,
386
+ "has_next": has_next,
387
+ "has_prev": has_prev
388
+ }
389
+ }
390
+
391
+ def _agregate_data_every_seconds(self, query, result, seconds:int, timezone:str="UTC"):
392
+ r"""Documentation here
393
+ """
394
+ # Aggregate data every 5 seconds
395
+ target_timezone = pytz.timezone(timezone)
396
+ buffer = defaultdict(lambda: {"sum": 0, "count": 0, "last_timestamp": None})
397
+
398
+ for entry in query:
399
+ bucket = entry['timestamp'].replace(second=(entry['timestamp'].second // seconds) * seconds, microsecond=0)
400
+ buffer_key = (entry['name'], bucket)
401
+ buffer[buffer_key]["sum"] += entry['value']
402
+ buffer[buffer_key]["count"] += 1
403
+ buffer[buffer_key]["last_timestamp"] = entry['timestamp']
404
+ buffer[buffer_key]['unit'] = entry["tag_value_unit"]
405
+ buffer[buffer_key]['variable'] = entry['variable_name']
406
+
407
+ for (tag_name, bucket), data in buffer.items():
408
+
409
+ avg_value = data["sum"] / data["count"]
410
+ last_timestamp = data["last_timestamp"]
411
+ from_timezone = pytz.timezone('UTC')
412
+ last_timestamp = from_timezone.localize(last_timestamp)
413
+ result[tag_name]["values"].append({
414
+ "x": last_timestamp.astimezone(target_timezone).strftime(self.tag_engine.DATETIME_FORMAT),
415
+ # "y": eval(f"{variable}.convert_value({avg_value}, from_unit={'unit'}, to_unit={'_tag.get_display_unit()'})")
416
+ "y": avg_value
417
+ })
418
+
419
+ return result
420
+
421
+ @db_rollback
422
+ def read_segments(self):
423
+ r"""
424
+ Documentation here
425
+ """
426
+ if not self.check_connectivity():
427
+
428
+ return list()
429
+
430
+ return Segment.read_all()
431
+
432
+
433
class DataLoggerEngine(BaseEngine):
    r"""
    Data logger engine for thread-safe tag database logging.

    Each public method packages its arguments into a query dict and
    submits it through ``self.query``.
    """

    def __init__(self):
        super().__init__()
        # The wrapped logger that actually executes queued actions.
        self.logger = DataLogger()
442
+
443
+ def create_tables(self, tables):
444
+ r"""
445
+ Create default PyHades database tables
446
+
447
+ ['TagTrend', 'TagValue']
448
+
449
+ **Parameters**
450
+
451
+ * **tables** (list) list of database model
452
+
453
+ **Returns** `None`
454
+ """
455
+ self.logger.create_tables(tables)
456
+
457
+ def drop_tables(self, tables:list):
458
+ r"""
459
+ Drop tables if exist in database
460
+
461
+ **Parameters**
462
+
463
+ * **tables** (list): List of database model you want yo drop
464
+ """
465
+ self.logger.drop_tables(tables)
466
+
467
+ def set_tag(
468
+ self,
469
+ tag:Tag
470
+ ):
471
+ r"""
472
+ Define tag names you want log in database, these tags must be defined in CVTEngine
473
+
474
+ **Parameters**
475
+
476
+ * **tag** (str): Tag name defined in CVTEngine
477
+ * **period** (float): Sampling time to log tag on database
478
+
479
+ **Returns** `None`
480
+ """
481
+ _query = dict()
482
+ _query["action"] = "set_tag"
483
+ _query["parameters"] = dict()
484
+ _query["parameters"]["id"] = tag.id
485
+ _query["parameters"]["name"] = tag.name
486
+ _query["parameters"]["unit"] = tag.unit
487
+ _query["parameters"]["data_type"] = tag.data_type
488
+ _query["parameters"]["description"] = tag.description
489
+ _query["parameters"]["display_name"] = tag.display_name
490
+ _query["parameters"]["display_unit"] = tag.display_unit
491
+ _query["parameters"]["opcua_address"] = tag.opcua_address
492
+ _query["parameters"]["node_namespace"] = tag.node_namespace
493
+ _query["parameters"]["scan_time"] = tag.scan_time
494
+ _query["parameters"]["dead_band"] = tag.dead_band
495
+ _query["parameters"]["manufacturer"] = tag.manufacturer
496
+ _query["parameters"]["segment"] = tag.segment
497
+
498
+ return self.query(_query)
499
+
500
+ def get_tags(self):
501
+ r"""
502
+
503
+ """
504
+ _query = dict()
505
+ _query["action"] = "get_tags"
506
+ _query["parameters"] = dict()
507
+
508
+ return self.query(_query)
509
+
510
+ def get_tag_by_name(self, name:str):
511
+ r"""
512
+
513
+ """
514
+ _query = dict()
515
+ _query["action"] = "get_tag_by_name"
516
+ _query["parameters"] = dict()
517
+ _query["parameters"]["name"] = name
518
+
519
+ return self.query(_query)
520
+
521
+ def update_tag(
522
+ self,
523
+ id:str,
524
+ user:User|None=None,
525
+ **kwargs
526
+ ):
527
+ r"""Documentation here
528
+
529
+ # Parameters
530
+
531
+ -
532
+
533
+ # Returns
534
+
535
+ -
536
+ """
537
+
538
+ _query = dict()
539
+ _query["action"] = "update_tag"
540
+ _query["parameters"] = dict()
541
+ _query["parameters"]["id"] = id
542
+ for key, value in kwargs.items():
543
+
544
+ _query["parameters"][key] = value
545
+
546
+ return self.query(_query)
547
+
548
+ def delete_tag(self, id:str):
549
+ r"""Documentation here
550
+
551
+ # Parameters
552
+
553
+ -
554
+
555
+ # Returns
556
+
557
+ -
558
+ """
559
+ _query = dict()
560
+ _query["action"] = "delete_tag"
561
+ _query["parameters"] = dict()
562
+ _query["parameters"]["id"] = id
563
+
564
+ return self.query(_query)
565
+
566
+ def write_tag(self, tag:str, value:float, timestamp:datetime):
567
+ r"""
568
+ Writes value to tag into database on a thread-safe mechanism
569
+
570
+ **Parameters**
571
+
572
+ * **tag** (str): Tag name in database
573
+ * **value** (float): Value to write in tag
574
+ """
575
+ _query = dict()
576
+ _query["action"] = "write_tag"
577
+
578
+ _query["parameters"] = dict()
579
+ _query["parameters"]["tag"] = tag
580
+ _query["parameters"]["value"] = value
581
+ _query["parameters"]["timestamp"] = timestamp
582
+
583
+ return self.query(_query)
584
+
585
+ def write_tags(self, tags:list):
586
+ r"""
587
+ Writes value to tag into database on a thread-safe mechanism
588
+
589
+ **Parameters**
590
+
591
+ * **tag** (str): Tag name in database
592
+ * **value** (float): Value to write in tag
593
+ """
594
+ _query = dict()
595
+ _query["action"] = "write_tags"
596
+
597
+ _query["parameters"] = dict()
598
+ _query["parameters"]["tags"] = tags
599
+
600
+ return self.query(_query)
601
+
602
+ def read_trends(self, start:str, stop:str, timezone:str, *tags):
603
+ r"""
604
+ Read tag value from database on a thread-safe mechanism
605
+
606
+ **Parameters**
607
+
608
+ * **tag** (str): Tag name in database
609
+
610
+ **Returns**
611
+
612
+ * **value** (float): Tag value requested
613
+ """
614
+ _query = dict()
615
+ _query["action"] = "read_trends"
616
+ _query["parameters"] = dict()
617
+ _query["parameters"]["start"] = start
618
+ _query["parameters"]["stop"] = stop
619
+ _query["parameters"]["timezone"] = timezone
620
+ _query["parameters"]["tags"] = tags
621
+ return self.query(_query)
622
+
623
+ def read_table(self, start:str, stop:str, timezone:str, tags:list, page:int=1, limit:int=20):
624
+ r"""
625
+ Get historical data in table format with pagination on a thread-safe mechanism
626
+ """
627
+ _query = dict()
628
+ _query["action"] = "read_table"
629
+ _query["parameters"] = dict()
630
+ _query["parameters"]["start"] = start
631
+ _query["parameters"]["stop"] = stop
632
+ _query["parameters"]["timezone"] = timezone
633
+ _query["parameters"]["tags"] = tags
634
+ _query["parameters"]["page"] = page
635
+ _query["parameters"]["limit"] = limit
636
+ return self.query(_query)
637
+
638
+ def read_segments(self):
639
+ r"""
640
+ Documentation here
641
+ """
642
+ _query = dict()
643
+ _query["action"] = "read_segments"
644
+ _query["parameters"] = dict()
645
+ return self.query(_query)
646
+