PyAutomationIO 1.1.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- automation/__init__.py +46 -0
- automation/alarms/__init__.py +563 -0
- automation/alarms/states.py +192 -0
- automation/alarms/trigger.py +64 -0
- automation/buffer.py +132 -0
- automation/core.py +1792 -0
- automation/dbmodels/__init__.py +23 -0
- automation/dbmodels/alarms.py +549 -0
- automation/dbmodels/core.py +86 -0
- automation/dbmodels/events.py +178 -0
- automation/dbmodels/logs.py +155 -0
- automation/dbmodels/machines.py +181 -0
- automation/dbmodels/opcua.py +81 -0
- automation/dbmodels/opcua_server.py +174 -0
- automation/dbmodels/tags.py +921 -0
- automation/dbmodels/users.py +259 -0
- automation/extensions/__init__.py +15 -0
- automation/extensions/api.py +149 -0
- automation/extensions/cors.py +18 -0
- automation/filter/__init__.py +19 -0
- automation/iad/__init__.py +3 -0
- automation/iad/frozen_data.py +54 -0
- automation/iad/out_of_range.py +51 -0
- automation/iad/outliers.py +51 -0
- automation/logger/__init__.py +0 -0
- automation/logger/alarms.py +434 -0
- automation/logger/core.py +265 -0
- automation/logger/datalogger.py +877 -0
- automation/logger/events.py +202 -0
- automation/logger/logdict.py +53 -0
- automation/logger/logs.py +203 -0
- automation/logger/machines.py +248 -0
- automation/logger/opcua_server.py +130 -0
- automation/logger/users.py +96 -0
- automation/managers/__init__.py +4 -0
- automation/managers/alarms.py +455 -0
- automation/managers/db.py +328 -0
- automation/managers/opcua_client.py +186 -0
- automation/managers/state_machine.py +183 -0
- automation/models.py +174 -0
- automation/modules/__init__.py +14 -0
- automation/modules/alarms/__init__.py +0 -0
- automation/modules/alarms/resources/__init__.py +10 -0
- automation/modules/alarms/resources/alarms.py +280 -0
- automation/modules/alarms/resources/summary.py +81 -0
- automation/modules/events/__init__.py +0 -0
- automation/modules/events/resources/__init__.py +10 -0
- automation/modules/events/resources/events.py +85 -0
- automation/modules/events/resources/logs.py +109 -0
- automation/modules/tags/__init__.py +0 -0
- automation/modules/tags/resources/__init__.py +8 -0
- automation/modules/tags/resources/tags.py +254 -0
- automation/modules/users/__init__.py +2 -0
- automation/modules/users/resources/__init__.py +10 -0
- automation/modules/users/resources/models/__init__.py +2 -0
- automation/modules/users/resources/models/roles.py +5 -0
- automation/modules/users/resources/models/users.py +14 -0
- automation/modules/users/resources/roles.py +38 -0
- automation/modules/users/resources/users.py +113 -0
- automation/modules/users/roles.py +121 -0
- automation/modules/users/users.py +335 -0
- automation/opcua/__init__.py +1 -0
- automation/opcua/models.py +541 -0
- automation/opcua/subscription.py +259 -0
- automation/pages/__init__.py +0 -0
- automation/pages/alarms.py +34 -0
- automation/pages/alarms_history.py +21 -0
- automation/pages/assets/styles.css +7 -0
- automation/pages/callbacks/__init__.py +28 -0
- automation/pages/callbacks/alarms.py +218 -0
- automation/pages/callbacks/alarms_summary.py +20 -0
- automation/pages/callbacks/db.py +222 -0
- automation/pages/callbacks/filter.py +238 -0
- automation/pages/callbacks/machines.py +29 -0
- automation/pages/callbacks/machines_detailed.py +581 -0
- automation/pages/callbacks/opcua.py +266 -0
- automation/pages/callbacks/opcua_server.py +244 -0
- automation/pages/callbacks/tags.py +495 -0
- automation/pages/callbacks/trends.py +119 -0
- automation/pages/communications.py +129 -0
- automation/pages/components/__init__.py +123 -0
- automation/pages/components/alarms.py +151 -0
- automation/pages/components/alarms_summary.py +45 -0
- automation/pages/components/database.py +128 -0
- automation/pages/components/gaussian_filter.py +69 -0
- automation/pages/components/machines.py +396 -0
- automation/pages/components/opcua.py +384 -0
- automation/pages/components/opcua_server.py +53 -0
- automation/pages/components/tags.py +253 -0
- automation/pages/components/trends.py +66 -0
- automation/pages/database.py +26 -0
- automation/pages/filter.py +55 -0
- automation/pages/machines.py +20 -0
- automation/pages/machines_detailed.py +41 -0
- automation/pages/main.py +63 -0
- automation/pages/opcua_server.py +28 -0
- automation/pages/tags.py +40 -0
- automation/pages/trends.py +35 -0
- automation/singleton.py +30 -0
- automation/state_machine.py +1674 -0
- automation/tags/__init__.py +2 -0
- automation/tags/cvt.py +1198 -0
- automation/tags/filter.py +55 -0
- automation/tags/tag.py +418 -0
- automation/tests/__init__.py +10 -0
- automation/tests/test_alarms.py +110 -0
- automation/tests/test_core.py +257 -0
- automation/tests/test_unit.py +21 -0
- automation/tests/test_user.py +155 -0
- automation/utils/__init__.py +164 -0
- automation/utils/decorators.py +222 -0
- automation/utils/npw.py +294 -0
- automation/utils/observer.py +21 -0
- automation/utils/units.py +118 -0
- automation/variables/__init__.py +55 -0
- automation/variables/adimentional.py +30 -0
- automation/variables/current.py +71 -0
- automation/variables/density.py +115 -0
- automation/variables/eng_time.py +68 -0
- automation/variables/force.py +90 -0
- automation/variables/length.py +104 -0
- automation/variables/mass.py +80 -0
- automation/variables/mass_flow.py +101 -0
- automation/variables/percentage.py +30 -0
- automation/variables/power.py +113 -0
- automation/variables/pressure.py +93 -0
- automation/variables/temperature.py +168 -0
- automation/variables/volume.py +70 -0
- automation/variables/volumetric_flow.py +100 -0
- automation/workers/__init__.py +2 -0
- automation/workers/logger.py +164 -0
- automation/workers/state_machine.py +207 -0
- automation/workers/worker.py +36 -0
- pyautomationio-1.1.1.dist-info/METADATA +199 -0
- pyautomationio-1.1.1.dist-info/RECORD +138 -0
- pyautomationio-1.1.1.dist-info/WHEEL +5 -0
- pyautomationio-1.1.1.dist-info/licenses/LICENSE +21 -0
- pyautomationio-1.1.1.dist-info/top_level.txt +1 -0
automation/core.py
ADDED
|
@@ -0,0 +1,1792 @@
|
|
|
1
|
+
import sys, logging, json, os, jwt, requests, urllib3, secrets
|
|
2
|
+
from logging.handlers import RotatingFileHandler
|
|
3
|
+
from math import ceil
|
|
4
|
+
from datetime import datetime, timezone
|
|
5
|
+
# DRIVERS IMPORTATION
|
|
6
|
+
from peewee import SqliteDatabase, MySQLDatabase, PostgresqlDatabase
|
|
7
|
+
# from peewee_migrations import Router
|
|
8
|
+
from .dbmodels.users import Roles, Users
|
|
9
|
+
from .dbmodels.machines import Machines
|
|
10
|
+
# PYAUTOMATION MODULES IMPORTATION
|
|
11
|
+
from .singleton import Singleton
|
|
12
|
+
from .workers import LoggerWorker
|
|
13
|
+
from .managers import DBManager, OPCUAClientManager, AlarmManager
|
|
14
|
+
from .opcua.models import Client
|
|
15
|
+
from .tags import CVTEngine, Tag
|
|
16
|
+
from .logger.datalogger import DataLoggerEngine
|
|
17
|
+
from .logger.events import EventsLoggerEngine
|
|
18
|
+
from .logger.alarms import AlarmsLoggerEngine
|
|
19
|
+
from .logger.logs import LogsLoggerEngine
|
|
20
|
+
from .logger.machines import MachinesLoggerEngine
|
|
21
|
+
from .logger.opcua_server import OPCUAServerLoggerEngine
|
|
22
|
+
from .alarms import Alarm
|
|
23
|
+
from .state_machine import Machine, DAQ, AutomationStateMachine, StateMachine
|
|
24
|
+
from .opcua.subscription import DAS
|
|
25
|
+
from .buffer import Buffer
|
|
26
|
+
from .models import StringType, FloatType
|
|
27
|
+
from .modules.users.users import users, User
|
|
28
|
+
from .modules.users.roles import roles, Role
|
|
29
|
+
from .dbmodels.core import BaseModel
|
|
30
|
+
from .utils.decorators import validate_types, logging_error_handler
|
|
31
|
+
from flask_socketio import SocketIO
|
|
32
|
+
from geventwebsocket.handler import WebSocketHandler
|
|
33
|
+
from .variables import VARIABLES
|
|
34
|
+
# DASH APP CONFIGURATION PAGES IMPORTATION
|
|
35
|
+
from .pages.main import ConfigView
|
|
36
|
+
from .pages.callbacks import init_callbacks
|
|
37
|
+
import dash_bootstrap_components as dbc
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class PyAutomation(Singleton):
|
|
41
|
+
r"""
|
|
42
|
+
Automation is a `singleton <https://en.wikipedia.org/wiki/Singleton_pattern>`_ class designed to develop multi-threaded web applications for Industrial Applications.
|
|
43
|
+
|
|
44
|
+
You can initialize and run PyAutomation Framework in different ways depending on your requirements.
|
|
45
|
+
|
|
46
|
+
**Example 1**: Using only PyAutomation Framework
|
|
47
|
+
|
|
48
|
+
```python
|
|
49
|
+
from automation import PyAutomation, server
|
|
50
|
+
app = PyAutomation()
|
|
51
|
+
app.define_dash_app(server=server) # This is the configuration page
|
|
52
|
+
app.run(debug=True, create_tables=True)
|
|
53
|
+
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
**Example 2**: Extending PyAutomation Framework with Flask Application
|
|
57
|
+
|
|
58
|
+
```python
|
|
59
|
+
from automation import PyAutomation
|
|
60
|
+
from app import CreateApp
|
|
61
|
+
application = CreateApp()
|
|
62
|
+
server = application() # Flask App
|
|
63
|
+
app = PyAutomation()
|
|
64
|
+
app.define_dash_app(server=server) # This is the configuration page
|
|
65
|
+
app.run(create_tables=True)
|
|
66
|
+
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
"""
|
|
70
|
+
|
|
71
|
+
PORTS = 65535
|
|
72
|
+
def __init__(self):
|
|
73
|
+
|
|
74
|
+
self.machine = Machine()
|
|
75
|
+
self.machine_manager = self.machine.get_state_machine_manager()
|
|
76
|
+
self.is_starting = True
|
|
77
|
+
self.cvt = CVTEngine()
|
|
78
|
+
self.logger_engine = DataLoggerEngine()
|
|
79
|
+
self.events_engine = EventsLoggerEngine()
|
|
80
|
+
self.alarms_engine = AlarmsLoggerEngine()
|
|
81
|
+
self.logs_engine = LogsLoggerEngine()
|
|
82
|
+
self.machines_engine = MachinesLoggerEngine()
|
|
83
|
+
self.opcua_server_engine = OPCUAServerLoggerEngine()
|
|
84
|
+
self.db_manager = DBManager()
|
|
85
|
+
self.opcua_client_manager = OPCUAClientManager()
|
|
86
|
+
self.alarm_manager = AlarmManager()
|
|
87
|
+
self.workers = list()
|
|
88
|
+
self.das = DAS()
|
|
89
|
+
self.sio = None
|
|
90
|
+
folder_path = os.path.join(".", "logs")
|
|
91
|
+
|
|
92
|
+
if not os.path.exists(folder_path):
|
|
93
|
+
|
|
94
|
+
os.makedirs(folder_path)
|
|
95
|
+
|
|
96
|
+
folder_db = os.path.join(".", "db")
|
|
97
|
+
|
|
98
|
+
if not os.path.exists(folder_db):
|
|
99
|
+
|
|
100
|
+
os.makedirs(folder_db)
|
|
101
|
+
|
|
102
|
+
folder_db_backups = os.path.join(".", "db", "backups")
|
|
103
|
+
|
|
104
|
+
if not os.path.exists(folder_db_backups):
|
|
105
|
+
|
|
106
|
+
os.makedirs(folder_db_backups)
|
|
107
|
+
|
|
108
|
+
folder_ssl = os.path.join(".", "ssl")
|
|
109
|
+
|
|
110
|
+
if not os.path.exists(folder_ssl):
|
|
111
|
+
|
|
112
|
+
os.makedirs(folder_ssl)
|
|
113
|
+
|
|
114
|
+
self.set_log(file=os.path.join(folder_path, "app.log") ,level=logging.WARNING)
|
|
115
|
+
self.__log_histories = False
|
|
116
|
+
|
|
117
|
+
    @logging_error_handler
    def define_dash_app(self, certfile:str=None, keyfile:str=None, **kwargs)->None:
        r"""
        Create the Dash configuration UI and attach a Flask-SocketIO server.

        **Parameters**

        - *certfile:* (str) path to an SSL certificate; optional.
        - *keyfile:* (str) path to the SSL private key; optional.
        - *kwargs:* forwarded to the ``ConfigView`` Dash app constructor.

        When both ``certfile`` and ``keyfile`` are given, SocketIO is created
        with an SSL context; otherwise a plain (non-TLS) SocketIO server is
        used. Also registers a ``connect`` handler that pushes the current
        application snapshot (tags, alarms, machines, recent events/logs)
        to the newly connected client.
        """
        self.dash_app = ConfigView(use_pages=True, external_stylesheets=[dbc.themes.BOOTSTRAP], prevent_initial_callbacks=True, pages_folder=".", **kwargs)
        self.dash_app.set_automation_app(self)
        init_callbacks(app=self.dash_app)
        if certfile and keyfile:

            # TLS-enabled SocketIO over gevent websockets.
            self.sio = SocketIO(
                self.dash_app.server,
                cors_allowed_origins='*',
                ping_timeout=10,
                ping_interval=10,
                async_mode='gevent',
                ssl_context=(certfile, keyfile),
                handler_class=WebSocketHandler
            )

        else:
            self.sio = SocketIO(self.dash_app.server, cors_allowed_origins='*', ping_timeout=10, ping_interval=10, async_mode='gevent', handler_class=WebSocketHandler)

        # Give the CVT a handle on the socket so tag updates can be broadcast.
        self.cvt._cvt.set_socketio(sio=self.sio)

        @self.sio.on('connect')
        def handle_connect(auth=None):

            # Initial state snapshot emitted to every client on connection.
            payload= {
                "tags": self.get_tags() or list(),
                "alarms": self.serialize_alarms() or list(),
                "machines": self.serialize_machines() or list(),
                "last_alarms": self.get_lasts_alarms(lasts=10) or list(),
                "last_active_alarms": self.get_lasts_active_alarms(lasts=3) or list(),
                "last_events": self.get_lasts_events(lasts=10) or list(),
                "last_logs": self.get_lasts_logs(lasts=10) or list()
            }
            self.sio.emit("on_connection", data=payload)
|
|
155
|
+
|
|
156
|
+
    @logging_error_handler
    @validate_types(name=StringType, output=StateMachine|None)
    def get_machine(self, name:StringType)->StateMachine:
        r"""
        Return the registered state machine with the given name.

        **Parameters**

        - *name:* (StringType) machine name to look up.

        **Returns**

        - The matching ``StateMachine``, or ``None`` if no machine with
          that name is registered (per the ``output`` validation).
        """
        return self.machine_manager.get_machine(name=name)
|
|
163
|
+
|
|
164
|
+
    @logging_error_handler
    def get_machines(self)->list[tuple[Machine, int, str]]:
        r"""
        Return every registered machine from the state machine manager.

        **Returns**

        - list of tuples; per the annotation each entry is
          ``(Machine, int, str)`` — machine instance plus two extra fields
          (presumably interval and mode; defined by the manager — TODO confirm).
        """
        return self.machine_manager.get_machines()
|
|
170
|
+
|
|
171
|
+
    @logging_error_handler
    @validate_types(output=list)
    def serialize_machines(self)->list[dict]:
        r"""
        Return a JSON-serializable representation of all registered machines.

        **Returns**

        - list of dicts, one per machine, as produced by the
          state machine manager.
        """
        return self.machine_manager.serialize_machines()
|
|
178
|
+
|
|
179
|
+
    @logging_error_handler
    @validate_types(machine=AutomationStateMachine, tag=Tag, output=dict)
    def subscribe_tag_into_automation_machine(self, machine:AutomationStateMachine, tag:Tag)->dict:
        r"""
        Subscribe *tag* into an automation state machine.

        **Parameters**

        - *machine:* (AutomationStateMachine) target machine.
        - *tag:* (Tag) tag to subscribe.

        NOTE(review): annotated to return ``dict`` but the body returns
        ``None`` (the ``subscribe_to`` result is discarded) — confirm
        whether the result should be returned or the annotation relaxed.
        """
        machine.subscribe_to(tag)
|
|
186
|
+
|
|
187
|
+
# TAGS METHODS
|
|
188
|
+
@logging_error_handler
|
|
189
|
+
@validate_types(
|
|
190
|
+
name=str,
|
|
191
|
+
unit=str,
|
|
192
|
+
display_unit=str,
|
|
193
|
+
variable=str,
|
|
194
|
+
data_type=str,
|
|
195
|
+
description=str|type(None),
|
|
196
|
+
display_name=str|type(None),
|
|
197
|
+
opcua_address=str|type(None),
|
|
198
|
+
node_namespace=str|type(None),
|
|
199
|
+
scan_time=int|float|type(None),
|
|
200
|
+
dead_band=int|float|type(None),
|
|
201
|
+
process_filter=bool,
|
|
202
|
+
gaussian_filter=bool,
|
|
203
|
+
gaussian_filter_threshold=float|int,
|
|
204
|
+
gaussian_filter_r_value=float|int,
|
|
205
|
+
outlier_detection=bool,
|
|
206
|
+
out_of_range_detection=bool,
|
|
207
|
+
frozen_data_detection=bool,
|
|
208
|
+
manufacturer=str|type(None),
|
|
209
|
+
segment=str|type(None),
|
|
210
|
+
id=str|type(None),
|
|
211
|
+
user=User|type(None),
|
|
212
|
+
reload=bool,
|
|
213
|
+
output=(Tag|None, str)
|
|
214
|
+
)
|
|
215
|
+
def create_tag(self,
|
|
216
|
+
name:str,
|
|
217
|
+
unit:str,
|
|
218
|
+
variable:str,
|
|
219
|
+
display_unit:str="",
|
|
220
|
+
data_type:str='float',
|
|
221
|
+
description:str=None,
|
|
222
|
+
display_name:str=None,
|
|
223
|
+
opcua_address:str=None,
|
|
224
|
+
node_namespace:str=None,
|
|
225
|
+
scan_time:int=None,
|
|
226
|
+
dead_band:float=None,
|
|
227
|
+
process_filter:bool=False,
|
|
228
|
+
gaussian_filter:bool=False,
|
|
229
|
+
gaussian_filter_threshold:float=1.0,
|
|
230
|
+
gaussian_filter_r_value:float=0.0,
|
|
231
|
+
outlier_detection:bool=False,
|
|
232
|
+
out_of_range_detection:bool=False,
|
|
233
|
+
frozen_data_detection:bool=False,
|
|
234
|
+
segment:str|None="",
|
|
235
|
+
manufacturer:str|None="",
|
|
236
|
+
id:str=None,
|
|
237
|
+
user:User|None=None,
|
|
238
|
+
reload:bool=False,
|
|
239
|
+
)->tuple[Tag,str]:
|
|
240
|
+
r"""
|
|
241
|
+
Create tag to automation app.
|
|
242
|
+
|
|
243
|
+
Addding tag from this way, you get the following features.
|
|
244
|
+
|
|
245
|
+
- Add tag to CVT.
|
|
246
|
+
-
|
|
247
|
+
|
|
248
|
+
```python
|
|
249
|
+
>>> from automation import PyAutomation
|
|
250
|
+
>>> app = PyAutomation()
|
|
251
|
+
>>> tag_name = "tag1"
|
|
252
|
+
>>> unit = "Pa"
|
|
253
|
+
>>> variable = "Pressure"
|
|
254
|
+
>>> app.create_tag(name=tag_name, unit=unit, variable=variable)
|
|
255
|
+
tag, message
|
|
256
|
+
|
|
257
|
+
```
|
|
258
|
+
|
|
259
|
+
"""
|
|
260
|
+
if not display_name:
|
|
261
|
+
|
|
262
|
+
display_name = name
|
|
263
|
+
|
|
264
|
+
tag, message = self.cvt.set_tag(
|
|
265
|
+
name=name,
|
|
266
|
+
unit=unit,
|
|
267
|
+
display_unit=display_unit,
|
|
268
|
+
variable=variable,
|
|
269
|
+
data_type=data_type,
|
|
270
|
+
description=description,
|
|
271
|
+
display_name=display_name,
|
|
272
|
+
opcua_address=opcua_address,
|
|
273
|
+
node_namespace=node_namespace,
|
|
274
|
+
scan_time=scan_time,
|
|
275
|
+
dead_band=dead_band,
|
|
276
|
+
process_filter=process_filter,
|
|
277
|
+
gaussian_filter=gaussian_filter,
|
|
278
|
+
gaussian_filter_threshold=gaussian_filter_threshold,
|
|
279
|
+
gaussian_filter_r_value=gaussian_filter_r_value,
|
|
280
|
+
outlier_detection=outlier_detection,
|
|
281
|
+
out_of_range_detection=out_of_range_detection,
|
|
282
|
+
frozen_data_detection=frozen_data_detection,
|
|
283
|
+
segment=segment,
|
|
284
|
+
manufacturer=manufacturer,
|
|
285
|
+
id=id,
|
|
286
|
+
user=user
|
|
287
|
+
)
|
|
288
|
+
|
|
289
|
+
# CREATE OPCUA SUBSCRIPTION
|
|
290
|
+
if tag:
|
|
291
|
+
|
|
292
|
+
if self.is_db_connected():
|
|
293
|
+
self.logger_engine.set_tag(tag=tag)
|
|
294
|
+
self.db_manager.attach(tag_name=name)
|
|
295
|
+
|
|
296
|
+
if scan_time:
|
|
297
|
+
|
|
298
|
+
self.das.buffer[name] = {
|
|
299
|
+
"timestamp": Buffer(size=ceil(10 / ceil(scan_time / 1000))),
|
|
300
|
+
"values": Buffer(size=ceil(10 / ceil(scan_time / 1000))),
|
|
301
|
+
"unit": display_unit
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
else:
|
|
305
|
+
|
|
306
|
+
self.das.buffer[name] = {
|
|
307
|
+
"timestamp": Buffer(),
|
|
308
|
+
"values": Buffer(),
|
|
309
|
+
"unit": display_unit
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
self.subscribe_opcua(tag=self.cvt.get_tag_by_name(name=name), opcua_address=opcua_address, node_namespace=node_namespace, scan_time=scan_time, reload=reload)
|
|
313
|
+
|
|
314
|
+
return tag, message
|
|
315
|
+
|
|
316
|
+
else:
|
|
317
|
+
|
|
318
|
+
return None, message
|
|
319
|
+
|
|
320
|
+
    @logging_error_handler
    @validate_types(output=list)
    def get_tags(self)->list:
        r"""Return every tag currently registered in the CVT.

        # Returns

        - list of tags as provided by ``CVTEngine.get_tags()``.
        """

        return self.cvt.get_tags()
|
|
335
|
+
|
|
336
|
+
    @logging_error_handler
    @validate_types(names=list, output=list)
    def get_tags_by_names(self, names:list)->list[Tag|None]:
        r"""
        Look up multiple tags by name.

        # Parameters

        - names: list of tag names

        # Returns

        - list of tags (``None`` entries for names with no match, per the
          return annotation)
        """
        return self.cvt.get_tags_by_names(names=names)
|
|
349
|
+
|
|
350
|
+
    @logging_error_handler
    @validate_types(name=str, output=Tag|None)
    def get_tag_by_name(self, name:str)->Tag:
        r"""
        Return the tag with the given name, or ``None`` when not found.
        """
        return self.cvt.get_tag_by_name(name=name)
|
|
355
|
+
|
|
356
|
+
    @logging_error_handler
    @validate_types(namespace=str, output=Tag|None)
    def get_tag_by_node_namespace(self, namespace:str)->Tag:
        r"""
        Return the tag bound to the given OPC UA node namespace,
        or ``None`` when no tag is bound to it.
        """
        return self.cvt.get_tag_by_node_namespace(node_namespace=namespace)
|
|
361
|
+
|
|
362
|
+
    @logging_error_handler
    def get_trends(self, start:str, stop:str, timezone:str, *tags):
        r"""
        Read historical trend data for *tags* between *start* and *stop*.

        **Parameters**

        - *start / stop:* (str) time range boundaries.
        - *timezone:* (str) timezone the range is expressed in.
        - *tags:* tag names to query.

        Delegates to ``DataLoggerEngine.read_trends``.
        """
        return self.logger_engine.read_trends(start, stop, timezone, *tags)
|
|
368
|
+
|
|
369
|
+
    @logging_error_handler
    def get_tags_tables(self, start:str, stop:str, timezone:str, tags:list, page:int=1, limit:int=20):
        r"""
        Read a paginated table of logged tag values.

        **Parameters**

        - *start / stop:* (str) time range boundaries.
        - *timezone:* (str) timezone the range is expressed in.
        - *tags:* (list) tag names to include.
        - *page / limit:* pagination controls (defaults: page 1, 20 rows).

        Delegates to ``DataLoggerEngine.read_table``.
        """
        return self.logger_engine.read_table(start, stop, timezone, tags, page, limit)
|
|
375
|
+
|
|
376
|
+
    @logging_error_handler
    def get_tabular_data(self, start:str, stop:str, timezone:str, tags:list, sample_time:int, page:int=1, limit:int=20):
        r"""
        Read paginated, resampled tabular data for *tags*.

        **Parameters**

        - *start / stop:* (str) time range boundaries.
        - *timezone:* (str) timezone the range is expressed in.
        - *tags:* (list) tag names to include.
        - *sample_time:* (int) resampling interval.
        - *page / limit:* pagination controls.

        Delegates to ``DataLoggerEngine.read_tabular_data``.
        """
        return self.logger_engine.read_tabular_data(start, stop, timezone, tags, sample_time, page, limit)
|
|
382
|
+
|
|
383
|
+
    @logging_error_handler
    def get_segments(self):
        r"""
        Return the segments known to the data logger
        (delegates to ``DataLoggerEngine.read_segments``).
        """
        return self.logger_engine.read_segments()
|
|
389
|
+
|
|
390
|
+
@logging_error_handler
|
|
391
|
+
@validate_types(id=str, output=None|str)
|
|
392
|
+
def delete_tag(self, id:str, user:User|None=None)->None|str:
|
|
393
|
+
r"""
|
|
394
|
+
Documentation here
|
|
395
|
+
"""
|
|
396
|
+
tag = self.cvt.get_tag(id=id)
|
|
397
|
+
tag_name = tag.get_name()
|
|
398
|
+
alarm = self.alarm_manager.get_alarm_by_tag(tag=tag_name)
|
|
399
|
+
if alarm:
|
|
400
|
+
|
|
401
|
+
return f"Tag {tag_name} has an alarm associated"
|
|
402
|
+
|
|
403
|
+
self.unsubscribe_opcua(tag=tag)
|
|
404
|
+
self.cvt.delete_tag(id=id, user=user)
|
|
405
|
+
self.das.buffer.pop(tag_name)
|
|
406
|
+
# Persist Tag on Database
|
|
407
|
+
if self.is_db_connected():
|
|
408
|
+
|
|
409
|
+
self.logger_engine.delete_tag(id=id)
|
|
410
|
+
|
|
411
|
+
    @logging_error_handler
    def update_tag(
        self,
        id:str,
        user:User|None=None,
        **kwargs
        )->tuple[Tag|None, str]:
        r"""
        Update tag attributes in the CVT (and database, when connected).

        **Parameters**

        - *id:* (str) tag identifier.
        - *user:* (User|None) user performing the update, for auditing.
        - *kwargs:* attributes to change. Special keys: ``name`` (rejected
          while the tag is subscribed in any machine), ``variable`` (resets
          unit/display_unit to the variable's first unit), ``R-value`` and
          ``threshold`` (normalized into the ``gaussian_filter_*`` fields),
          ``scan_time`` (re-subscribes OPC UA with the new interval).

        **Returns**

        - the result of ``CVTEngine.update_tag`` — per the annotation a
          ``(Tag|None, message)`` tuple.
        """
        tag = self.cvt.get_tag(id=id)
        if "name" in kwargs:
            # A rename is refused while any machine still subscribes the tag.
            tag_name = tag.get_name()
            machines_with_tags_subscribed = list()
            for _machine, _, _ in self.get_machines():

                if tag_name in _machine.get_subscribed_tags():

                    machines_with_tags_subscribed.append(_machine.name.value)

            if machines_with_tags_subscribed:

                return None, f"{tag_name} is subscribed into {machines_with_tags_subscribed}"

        keys_to_check = ["gaussian_filter", "threshold", "R-value"]

        # Pure filter-parameter updates don't need to touch the OPC UA
        # subscription; anything else does, so unsubscribe first.
        if not any(key in kwargs for key in keys_to_check):

            self.unsubscribe_opcua(tag)

        # Persist Tag on Database
        if "variable" in kwargs:

            # Changing the variable resets both units to that variable's
            # first defined unit.
            kwargs["unit"] = list(VARIABLES[kwargs["variable"]].values())[0]
            kwargs["display_unit"] = list(VARIABLES[kwargs["variable"]].values())[0]

        if "R-value" in kwargs:

            # UI sends R-value as a percentage; clamp invalid input back to
            # the tag's current value and store it normalized to [0, 1].
            try:
                r_value = float(kwargs.pop("R-value"))
                if r_value < 0.0 or r_value > 100.0:

                    r_value = tag.gaussian_filter_r_value * 100.0

            except Exception as err:

                r_value = tag.gaussian_filter_r_value

            kwargs['gaussian_filter_r_value'] = r_value / 100.0

        if "threshold" in kwargs:

            # Non-numeric or negative thresholds fall back to the current one.
            try:

                threshold = float(kwargs.pop("threshold"))
                if threshold < 0.0:

                    threshold = tag.gaussian_filter_threshold

            except Exception as err:

                threshold = tag.gaussian_filter_threshold

            kwargs['gaussian_filter_threshold'] = threshold


        result = self.cvt.update_tag(
            id=id,
            user=user,
            **kwargs
        )
        if self.is_db_connected():

            # 'variable' is derived state on the DB side, so it is not
            # forwarded to the logger engine.
            if 'variable' in kwargs:

                kwargs.pop("variable")

            if kwargs:

                self.logger_engine.update_tag(
                    id=id,
                    **kwargs
                )

        if "name" in kwargs:

            # Drop the buffer keyed by the old name; it is rebuilt below.
            self.das.buffer.pop(tag_name)

        keys_to_check = ["gaussian_filter", "gaussian_filter_threshold", "gaussian_filter_r_value"]

        if kwargs:

            # Filter-only updates keep the existing buffer; everything else
            # rebuilds it and re-subscribes OPC UA.
            if not any(key in kwargs for key in keys_to_check):

                self.__update_buffer(tag=tag)

            if "scan_time" in kwargs:
                scan_time = kwargs["scan_time"]
                if isinstance(scan_time, int):
                    self.subscribe_opcua(tag, opcua_address=tag.get_opcua_address(), node_namespace=tag.get_node_namespace(), scan_time=scan_time)
                else:
                    self.subscribe_opcua(tag, opcua_address=tag.get_opcua_address(), node_namespace=tag.get_node_namespace(), scan_time=tag.get_scan_time())
            else:

                self.subscribe_opcua(tag, opcua_address=tag.get_opcua_address(), node_namespace=tag.get_node_namespace(), scan_time=tag.get_scan_time())

        return result
|
|
518
|
+
|
|
519
|
+
    @logging_error_handler
    @validate_types(name=str, output=None|str)
    def delete_tag_by_name(self, name:str, user:User|None=None):
        r"""
        Delete the tag with the given name.

        Refuses (returns a message) when an alarm is still associated with
        the tag; otherwise unsubscribes it from OPC UA, deletes the DB
        record when connected, and removes it from the CVT.

        NOTE(review): unlike ``delete_tag``, this variant does not pop the
        tag's DAS buffer entry — confirm whether that is intentional.

        **Returns**

        - ``None`` on success, or a message (str) when deletion is refused.
        """
        tag = self.cvt.get_tag_by_name(name=name)
        alarm = self.alarm_manager.get_alarm_by_tag(tag=name)
        if alarm:

            return f"Tag {name} has an alarm associated"

        self.unsubscribe_opcua(tag=tag)
        # Persist Tag on Database
        if self.is_db_connected():

            self.logger_engine.delete_tag(id=tag.id)

        self.cvt.delete_tag(id=tag.id, user=user)
|
|
538
|
+
|
|
539
|
+
# USERS METHODS
|
|
540
|
+
@logging_error_handler
|
|
541
|
+
@validate_types(
|
|
542
|
+
username=str|type(None),
|
|
543
|
+
email=str|type(None),
|
|
544
|
+
password=str,
|
|
545
|
+
name=str|type(None),
|
|
546
|
+
output=tuple
|
|
547
|
+
)
|
|
548
|
+
def login(
|
|
549
|
+
self,
|
|
550
|
+
password:str,
|
|
551
|
+
username:str="",
|
|
552
|
+
email:str=""
|
|
553
|
+
)->tuple[User|None, str]:
|
|
554
|
+
# Check Token on Database
|
|
555
|
+
if self.is_db_connected():
|
|
556
|
+
|
|
557
|
+
return self.db_manager.login(password=password, username=username, email=email)
|
|
558
|
+
|
|
559
|
+
return users.login(password=password, username=username, email=email)
|
|
560
|
+
|
|
561
|
+
@logging_error_handler
|
|
562
|
+
@validate_types(
|
|
563
|
+
username=str,
|
|
564
|
+
role_name=str,
|
|
565
|
+
email=str,
|
|
566
|
+
password=str,
|
|
567
|
+
name=str|type(None),
|
|
568
|
+
lastname=str|type(None),
|
|
569
|
+
output=(User|None, str)
|
|
570
|
+
)
|
|
571
|
+
def signup(
|
|
572
|
+
self,
|
|
573
|
+
username:str,
|
|
574
|
+
role_name:str,
|
|
575
|
+
email:str,
|
|
576
|
+
password:str,
|
|
577
|
+
name:str=None,
|
|
578
|
+
lastname:str=None
|
|
579
|
+
)->tuple[User|None, str]:
|
|
580
|
+
r"""
|
|
581
|
+
Documentation here
|
|
582
|
+
"""
|
|
583
|
+
user, message = users.signup(
|
|
584
|
+
username=username,
|
|
585
|
+
role_name=role_name,
|
|
586
|
+
email=email,
|
|
587
|
+
password=password,
|
|
588
|
+
name=name,
|
|
589
|
+
lastname=lastname
|
|
590
|
+
)
|
|
591
|
+
if user:
|
|
592
|
+
|
|
593
|
+
# Persist Tag on Database
|
|
594
|
+
if self.is_db_connected():
|
|
595
|
+
|
|
596
|
+
_, message = self.db_manager.set_user(user=user)
|
|
597
|
+
|
|
598
|
+
return user, message
|
|
599
|
+
|
|
600
|
+
return None, message
|
|
601
|
+
|
|
602
|
+
@logging_error_handler
|
|
603
|
+
@validate_types(role_name=str, output=str)
|
|
604
|
+
def create_token(self, role_name:str)->str:
|
|
605
|
+
r"""
|
|
606
|
+
Documentation here
|
|
607
|
+
"""
|
|
608
|
+
from . import server
|
|
609
|
+
payload = {
|
|
610
|
+
"created_on": datetime.now(timezone.utc).strftime(self.cvt.DATETIME_FORMAT),
|
|
611
|
+
"role": role_name
|
|
612
|
+
}
|
|
613
|
+
return jwt.encode(payload, server.config['TPT_TOKEN'], algorithm="HS256")
|
|
614
|
+
|
|
615
|
+
    @logging_error_handler
    @validate_types(name=str, level=int, output=(Role|None, str))
    def set_role(self, name:str, level:int)->tuple[Role|None, str]:
        r"""
        Create a new role and, when a database is connected, persist it.

        **Parameters**

        - *name:* (str) role name; must not already exist.
        - *level:* (int) role privilege level.

        **Returns**

        - ``(role, message)`` on success, ``(None, message)`` when the
          name already exists or the registry rejects the role.
        """
        role = Role(name=name, level=level)
        # Reject duplicates before touching the registry.
        if roles.check_role_name(name=name):

            return None, f"Role {name} exists"

        role_id, message = roles.add(role=role)
        if role_id:

            # Persist the role on the database when one is attached.
            if self.is_db_connected():

                _, message = self.db_manager.set_role(name=name, level=level, identifier=role.identifier)

            return role, message

        return None, message
|
|
637
|
+
|
|
638
|
+
# OPCUA METHODS
|
|
639
|
+
@logging_error_handler
|
|
640
|
+
@validate_types(host=str|type(None), port=int|type(None), output=dict)
|
|
641
|
+
def find_opcua_servers(self, host:str='127.0.0.1', port:int=4840)->dict:
|
|
642
|
+
r"""
|
|
643
|
+
Documentation here
|
|
644
|
+
"""
|
|
645
|
+
result = {
|
|
646
|
+
"message": f"Connection refused to opc.tcp://{host}:{port}"
|
|
647
|
+
}
|
|
648
|
+
try:
|
|
649
|
+
|
|
650
|
+
server = self.opcua_client_manager.discovery(host=host, port=port)
|
|
651
|
+
result["message"] = f"Successfully connection to {server[0]['DiscoveryUrls'][0]}"
|
|
652
|
+
result["data"] = server
|
|
653
|
+
|
|
654
|
+
except Exception as err:
|
|
655
|
+
|
|
656
|
+
result["data"] = list()
|
|
657
|
+
|
|
658
|
+
return result
|
|
659
|
+
|
|
660
|
+
    @logging_error_handler
    @validate_types(output=dict)
    def get_opcua_clients(self):
        r"""
        Return a serialized (dict) view of all registered OPC UA clients.
        """
        return self.opcua_client_manager.serialize()
|
|
667
|
+
|
|
668
|
+
    @logging_error_handler
    @validate_types(client_name=str, output=Client)
    def get_opcua_client(self, client_name:str):
        r"""
        Return the OPC UA client registered under *client_name*.
        """
        return self.opcua_client_manager.get(client_name=client_name)
|
|
675
|
+
|
|
676
|
+
    @logging_error_handler
    @validate_types(opcua_address=str, output=Client|None)
    def get_opcua_client_by_address(self, opcua_address:str)->Client|None:
        r"""
        Return the OPC UA client corresponding to an address.

        Args:
            opcua_address: OPC UA server address (e.g. "opc.tcp://localhost:4840")

        Returns:
            Client: the OPC UA client if it exists and is connected, None otherwise
        """
        return self.opcua_client_manager.get_client_by_address(opcua_address=opcua_address)
|
|
689
|
+
|
|
690
|
+
    @logging_error_handler
    @validate_types(opcua_address=str, node_namespace=str, value=float|int|bool|str, output=tuple)
    def write_opcua_value(self, opcua_address:str, node_namespace:str, value:float|int|bool|str)->tuple[dict, int]:
        r"""
        Write a value to an OPC UA node.

        Args:
            opcua_address: OPC UA server address
            node_namespace: node namespace (e.g. "ns=2;i=1234")
            value: value to write (float, int, bool, str)

        Returns:
            tuple: (result dict, HTTP-style status code); 404 when no
            connected client exists for the address.
        """
        opcua_client = self.get_opcua_client_by_address(opcua_address=opcua_address)

        # No connected client for this address: report 404 without writing.
        if not opcua_client:
            return {
                'message': f'Cliente OPC UA no encontrado o no conectado para {opcua_address}',
                'opcua_address': opcua_address,
                'node_namespace': node_namespace,
                'success': False
            }, 404

        return opcua_client.write_value(node_namespace=node_namespace, value=value)
|
|
715
|
+
|
|
716
|
+
@logging_error_handler
def create_opcua_server_record(self, name:str, namespace:str, access_type:str="Read"):
    r"""
    Persists a new OPC UA server node record through the server engine.

    **Parameters:**

    * **name** (str): record name.
    * **namespace** (str): OPC UA node namespace.
    * **access_type** (str): access mode, defaults to "Read".
    """
    engine = self.opcua_server_engine
    return engine.create(name=name, namespace=namespace, access_type=access_type)
|
|
722
|
+
|
|
723
|
+
@logging_error_handler
def update_opcua_server_access_type(self, namespace:str, access_type:str):
    r"""
    Updates the access type of the OPC UA server record identified by
    *namespace*.
    """
    engine = self.opcua_server_engine
    return engine.put(namespace=namespace, access_type=access_type)
|
|
729
|
+
|
|
730
|
+
@logging_error_handler
def get_opcua_server_record_by_namespace(self, namespace:str):
    r"""
    Fetches the persisted OPC UA server record matching *namespace*.
    """
    engine = self.opcua_server_engine
    return engine.read_by_namespace(namespace=namespace)
|
|
736
|
+
|
|
737
|
+
@logging_error_handler
@validate_types(client_name=str, namespaces=list, output=list)
def get_node_values(self, client_name:str, namespaces:list)->list:
    r"""
    Reads the current values of the given node *namespaces* through the
    OPC UA client registered as *client_name*.

    **Returns:** list of node values.
    """
    manager = self.opcua_client_manager
    return manager.get_node_values(client_name=client_name, namespaces=namespaces)
|
|
745
|
+
|
|
746
|
+
@logging_error_handler
@validate_types(client_name=str, namespaces=list, output=list|None)
def get_node_attributes(self, client_name:str, namespaces:list)->list[dict]:
    r"""
    Reads the attributes of the given node *namespaces* through the
    OPC UA client registered as *client_name*.

    **Returns:** list of attribute dicts, or None on failure.
    """
    manager = self.opcua_client_manager
    return manager.get_node_attributes(client_name=client_name, namespaces=namespaces)
|
|
754
|
+
|
|
755
|
+
@logging_error_handler
def get_opcua_tree(self, client_name:str):
    r"""
    Returns the browse tree of the OPC UA server seen by the client
    registered as *client_name*.
    """
    manager = self.opcua_client_manager
    return manager.get_opcua_tree(client_name=client_name)
|
|
761
|
+
|
|
762
|
+
@logging_error_handler
@validate_types(client_name=str, host=str|type(None), port=int|type(None), output=(bool, str|dict))
def add_opcua_client(self, client_name:str, host:str="127.0.0.1", port:int=4840):
    r"""
    Registers a new OPC UA client for the server at *host*:*port*.

    **Parameters:**

    * **client_name** (str): name under which the client is registered.
    * **host** (str): server host, defaults to "127.0.0.1".
    * **port** (int): server port, defaults to 4840.

    **Returns:** the client manager's add() result when a server is
    reachable at the endpoint.
    """
    # Probe the endpoint first; only register the client when at least
    # one server responds.
    servers = self.find_opcua_servers(host=host, port=port)

    if servers:

        return self.opcua_client_manager.add(client_name=client_name, host=host, port=port)
    # NOTE(review): falls through returning None when no server is found,
    # although the declared output type is (bool, str|dict) — confirm intended.
|
|
773
|
+
|
|
774
|
+
@logging_error_handler
@validate_types(client_name=str, output=bool)
def remove_opcua_client(self, client_name:str):
    r"""
    Unregisters the OPC UA client registered as *client_name*.

    **Returns:** bool — whether the client was removed.
    """
    # The previous decorator also validated host/port, which are not
    # parameters of this method (copy-paste from add_opcua_client).
    return self.opcua_client_manager.remove(client_name=client_name)
|
|
781
|
+
|
|
782
|
+
@logging_error_handler
@validate_types(tag=Tag, opcua_address=str|type(None), node_namespace=str|type(None), scan_time=float|int|type(None), reload=bool, output=None)
def subscribe_opcua(self, tag:Tag, opcua_address:str, node_namespace:str, scan_time:float, reload:bool=False):
    r"""
    Binds *tag* to OPC UA data acquisition.

    Fast tags (scan_time missing or <= 100 ms) are subscribed through the
    DAS via a server-side subscription; slower tags are polled by a DAQ
    state machine via `subscribe_tag`.

    **Parameters:**

    * **tag** (Tag): tag to subscribe.
    * **opcua_address** (str): OPC UA server address.
    * **node_namespace** (str): node namespace for the tag.
    * **scan_time** (float): scan period in milliseconds; None/<=100 selects DAS.
    * **reload** (bool): True while restoring from DB.
    """
    if opcua_address and node_namespace:

        if not scan_time or scan_time<=100: # SUBSCRIBE BY DAS

            # Find the registered client serving this address and create a
            # server-side subscription routed to the DAS handler.
            for client_name, info in self.get_opcua_clients().items():

                if opcua_address==info["server_url"]:

                    opcua_client = self.get_opcua_client(client_name=client_name)
                    subscription = opcua_client.create_subscription(1000, self.das)
                    node_id = opcua_client.get_node_id_by_namespace(node_namespace)
                    self.das.subscribe(subscription=subscription, client_name=client_name, node_id=node_id)
                    break

        else: # SUBSCRIBE BY DAQ

            self.subscribe_tag(tag_name=tag.get_name(), scan_time=scan_time, reload=reload)

        # Record the tag's display unit in the DAS buffer entry.
        # NOTE(review): assumes the buffer entry for this tag already exists —
        # confirm creation path (__update_buffer / DAS).
        self.das.buffer[tag.get_name()].update({
            "unit": tag.get_display_unit()
        })
|
|
809
|
+
|
|
810
|
+
@logging_error_handler
@validate_types(tag_name=str, scan_time=float|int, reload=bool, output=None)
def subscribe_tag(self, tag_name:str, scan_time:float|int, reload:bool=False):
    r"""
    Subscribes a tag to the DAQ state machine matching its scan time.

    A machine named "DAQ-<scan_time>" is reused when it already exists;
    otherwise it is created, wired to the OPC UA client manager and
    appended to the machine worker (hot-joined or started, unless reloading).

    **Parameters:**

    * **tag_name** (str): name of the tag to subscribe.
    * **scan_time** (float|int): scan period in milliseconds.
    * **reload** (bool): True while restoring from DB; skips worker start/join.
    """
    scan_time = float(scan_time)
    daq_name = StringType(f"DAQ-{int(scan_time)}")
    daq = self.machine_manager.get_machine(name=daq_name)
    tag = self.cvt.get_tag_by_name(name=tag_name)
    if not daq:

        daq = DAQ(name=daq_name)
        interval = FloatType(scan_time / 1000)  # ms -> s
        daq.set_opcua_client_manager(manager=self.opcua_client_manager)
        self.machine.append_machine(machine=daq, interval=interval, mode="async")

        if not reload:

            if self.machine.state_worker:
                # Worker already running: hot-join the new machine.
                self.machine.join(machine=daq)
            else:
                self.machine.start()

    daq.subscribe_to(tag=tag)
|
|
835
|
+
|
|
836
|
+
@logging_error_handler
@validate_types(tag=Tag, output=None)
def unsubscribe_opcua(self, tag:Tag):
    r"""
    Detaches *tag* from OPC UA data acquisition and resets its DAS buffers.

    Drops the DAS node subscription (when the tag has a node namespace),
    unsubscribes the tag from its DAQ machine — removing the machine from
    the worker when it no longer serves any tag — and re-creates the
    timestamp/value buffers.
    """
    if tag.get_node_namespace():

        # Find the client bound to this tag's server and drop the DAS subscription.
        for client_name, info in self.get_opcua_clients().items():

            if tag.get_opcua_address()==info["server_url"]:

                opcua_client = self.get_opcua_client(client_name=client_name)
                node_id = opcua_client.get_node_id_by_namespace(tag.get_node_namespace())
                self.das.unsubscribe(client_name=client_name, node_id=node_id)
                break

    drop_machine_from_worker, _, _ = self.machine_manager.unsubscribe_tag(tag=tag)
    if drop_machine_from_worker:

        self.machine.drop(machine=drop_machine_from_worker)

    # CLEAR BUFFER
    scan_time = tag.get_scan_time()
    if scan_time:

        # Buffer sized to hold roughly 10 seconds of samples at the tag's
        # scan rate (scan_time is in milliseconds).
        self.das.buffer[tag.get_name()].update({
            "timestamp": Buffer(size=ceil(10 / ceil(scan_time / 1000))),
            "values": Buffer(size=ceil(10 / ceil(scan_time / 1000)))
        })
    else:
        self.das.buffer[tag.get_name()].update({
            "timestamp": Buffer(),
            "values": Buffer()
        })
|
|
872
|
+
|
|
873
|
+
@logging_error_handler
def __update_buffer(self, tag:Tag):
    r"""
    Re-creates the DAS buffer entry for *tag*.

    The entry holds a timestamp buffer, a value buffer and the tag's display
    unit. When the tag has a scan time, the buffers are sized to hold about
    10 seconds of samples at that rate; otherwise default-sized buffers are
    used.
    """
    tag_name = tag.get_name()
    scan_time = tag.get_scan_time()
    unit = tag.get_display_unit()

    if scan_time:
        # scan_time is in milliseconds; keep ~10 s worth of samples.
        size = ceil(10 / ceil(scan_time / 1000))
        timestamps, values = Buffer(size=size), Buffer(size=size)
    else:
        timestamps, values = Buffer(), Buffer()

    self.das.buffer[tag_name] = {
        "timestamp": timestamps,
        "values": values,
        "unit": unit
    }
|
|
897
|
+
|
|
898
|
+
# ERROR LOGS
@logging_error_handler
@validate_types(level=int, file=str, output=None)
def set_log(self, level:int=logging.INFO, file:str="logs/app.log"):
    r"""
    Configures the application log level and log file.

    **Parameters:**

    * **level** (int): a `logging` level constant (default: logging.INFO).
    * **file** (str): log file path (default: "logs/app.log").

    **Returns:** `None`

    Usage:

    ```python
    >>> app.set_log(file="app.log")
    ```
    """
    self._logging_level = level
    self._log_file = file
|
|
921
|
+
|
|
922
|
+
# DATABASES
@validate_types(
    dbtype=str,
    drop_table=bool,
    clear_default_tables=bool,
    dbfile=str|type(None),
    user=str|type(None),
    password=str|type(None),
    host=str|type(None),
    port=int|type(None),
    name=str|type(None),
    output=None)
def set_db(
    self,
    dbtype:str='sqlite',
    drop_table:bool=False,
    clear_default_tables:bool=False,
    **kwargs):
    r"""
    Sets the database; supports SQLite, MySQL and PostgreSQL.

    For SQLite a filename can be provided (default ":memory:"). If the app
    mode is "Development" you must use a SQLite database.

    **Parameters:**

    * **dbtype** (str): 'sqlite' | 'mysql' | 'postgresql'.
    * **drop_table** (bool): if you want to drop tables.
    * **clear_default_tables** (bool): clear the default tables first.
    * **kwargs**: SQLite `dbfile`, or the connection attributes
      (user, password, host, port, name) for MySQL/PostgreSQL.

    **Returns:** `None`

    Usage:

    ```python
    >>> app.set_db(dbfile="app.db")
    ```
    """
    from .dbmodels import proxy

    if clear_default_tables:

        self.db_manager.clear_default_tables()

    if dbtype.lower()=='sqlite':

        dbfile = kwargs.get("dbfile", ":memory:")

        if dbfile == ":memory:":
            # In-memory database: pass through untouched. Appending ".db"
            # and joining a path (previous behavior) silently created an
            # on-disk file literally named ":memory:.db".
            db_path = dbfile
        else:
            if not dbfile.endswith(".db"):
                dbfile = f"{dbfile}.db"
            db_path = os.path.join(".", "db", dbfile)

        self._db = SqliteDatabase(db_path, pragmas={
            'journal_mode': 'wal',
            'wal_checkpoint': 1,
            'cache_size': -1024 * 10,  # 10MB
            'foreign_keys': 1,
            'ignore_check_constraints': 0,
            'synchronous': 1
            }
        )

    elif dbtype.lower()=='mysql':

        db_name = kwargs.pop('name')
        self._db = MySQLDatabase(db_name, **kwargs)

    elif dbtype.lower()=='postgresql':

        db_name = kwargs.pop('name')
        self._db = PostgresqlDatabase(db_name, **kwargs)

    proxy.initialize(self._db)
    self._db.connect()
    self.db_manager.set_db(self._db, is_history_logged=self.__log_histories)
    self.db_manager.set_dropped(drop_table)
|
|
1000
|
+
|
|
1001
|
+
@logging_error_handler
@validate_types(
    dbtype=str,
    dbfile=str,
    user=str|type(None),
    password=str|type(None),
    host=str|type(None),
    port=int|str|type(None),
    name=str|type(None),
    output=None)
def set_db_config(
    self,
    dbtype:str="sqlite",
    dbfile:str="app.db",
    user:str|None="admin",
    password:str|None="admin",
    host:str|None="127.0.0.1",
    port:int|str|None=5432,
    name:str|None="app_db"
    ):
    r"""
    Writes the database configuration to ./db/db_config.json.

    SQLite only needs *dbtype* and *dbfile*; any other engine stores the
    full connection attributes (user, password, host, port, name).
    """
    if dbtype.lower()=="sqlite":

        db_config = {"dbtype": dbtype, "dbfile": dbfile}

    else:

        db_config = {
            "dbtype": dbtype,
            'user': user,
            'password': password,
            'host': host,
            'port': port,
            'name': name,
        }

    with open('./db/db_config.json', 'w') as json_file:
        json.dump(db_config, json_file)
|
|
1045
|
+
|
|
1046
|
+
@logging_error_handler
@validate_types(output=dict|None)
def get_db_config(self):
    r"""
    Loads the database configuration from ./db/db_config.json.

    **Returns:** the config dict, or None when the file is missing or
    unreadable (a warning is logged with the failure location).
    """
    try:

        with open('./db/db_config.json', 'r') as json_file:
            return json.load(json_file)

    except Exception as e:
        # Best-effort: a missing/broken config just means "not configured".
        _, _, e_traceback = sys.exc_info()
        e_filename = os.path.split(e_traceback.tb_frame.f_code.co_filename)[1]
        e_message = str(e)
        e_line_number = e_traceback.tb_lineno
        message = f"Database is not configured: {e_line_number} - {e_filename} - {e_message}"
        logging.warning(message)
        return None
|
|
1068
|
+
|
|
1069
|
+
@logging_error_handler
@validate_types(output=bool)
def is_db_connected(self):
    r"""
    Reports whether a database has been attached to the DB manager.

    **Returns:** bool — True when `db_manager` holds a database handle.
    """
    # Collapse the if/return-True/return-False ladder into a single bool().
    return bool(self.db_manager.get_db())
|
|
1080
|
+
|
|
1081
|
+
@validate_types(test=bool|type(None), reload=bool|type(None), output=None|bool)
def connect_to_db(self, test:bool=False, reload:bool=False):
    r"""
    Connects the app to its configured database and loads persisted state
    (OPC UA clients, tags, alarms, roles, users).

    **Parameters:**

    * **test** (bool): ignore the persisted config and use a scratch
      SQLite file "test.db".
    * **reload** (bool): also re-bind persisted tag subscriptions to the
      state machines.

    **Returns:** True on success, False on error, None when no DB config
    exists.
    """
    try:
        db_config = self.get_db_config()

        if test:

            db_config = {"dbtype": "sqlite", "dbfile": "test.db"}

        if db_config:

            dbtype = db_config.pop("dbtype")
            # Enable history logging before attaching the database.
            self.__log_histories = True
            self.set_db(dbtype=dbtype, **db_config)
            self.db_manager.init_database()
            self.load_opcua_clients_from_db()
            self.load_db_to_cvt()
            self.load_db_to_alarm_manager()
            self.load_db_to_roles()
            self.load_db_to_users()
            if reload:

                self.load_db_tags_to_machine()

            return True

    except Exception as err:
        logging.critical(f"CONNECTING DATABASE ERROR: {err}")
        return False
|
|
1113
|
+
|
|
1114
|
+
@validate_types(test=bool|type(None), output=None|bool)
def reconnect_to_db(self, test:bool=False):
    r"""
    Re-establishes the database connection and reloads all persisted state,
    including tag-to-machine bindings.

    **Parameters:**

    * **test** (bool): ignore the persisted config and use a scratch
      SQLite file "test.db".

    **Returns:** True on success, False when no config exists or the
    reconnection fails.
    """
    # NOTE: the previous decorator also validated a `reload` argument that
    # this method does not accept.
    try:
        db_config = self.get_db_config()

        if test:

            db_config = {"dbtype": "sqlite", "dbfile": "test.db"}

        if db_config:

            dbtype = db_config.pop("dbtype")
            self.__log_histories = True
            self.set_db(dbtype=dbtype, **db_config)
            self.db_manager.init_database()
            self.load_opcua_clients_from_db()
            self.load_db_to_cvt()
            self.load_db_to_alarm_manager()
            self.load_db_to_roles()
            self.load_db_to_users()
            self.load_db_tags_to_machine()

            return True
        else:
            return False

    except Exception as err:
        # Log the cause (previously swallowed silently) and detach the
        # logger's DB handle so later writes don't use a broken connection.
        logging.critical(f"RECONNECTING DATABASE ERROR: {err}")
        self.db_manager._logger.logger._db = None
        return False
|
|
1146
|
+
|
|
1147
|
+
@logging_error_handler
@validate_types(output=None)
def disconnect_to_db(self):
    r"""
    Detaches the application from its database: disables history logging
    and stops the logger's DB connection.
    """
    self.__log_histories = False
    logger = self.db_manager._logger.logger
    logger.stop_db()
|
|
1155
|
+
|
|
1156
|
+
@logging_error_handler
@validate_types(output=None)
def load_db_to_cvt(self):
    r"""
    Recreates every active persisted tag in the Current Value Table.
    Inactive tags are skipped.
    """
    if self.is_db_connected():

        for tag in self.db_manager.get_tags():

            # "active" is popped so it is never forwarded to create_tag.
            if tag.pop("active"):

                self.create_tag(reload=True, **tag)
|
|
1174
|
+
|
|
1175
|
+
@logging_error_handler
@validate_types(output=None)
def load_db_to_alarm_manager(self):
    r"""
    Recreates every persisted alarm in the Alarm Manager.
    """
    if self.is_db_connected():

        for alarm in (self.db_manager.get_alarms() or []):

            self.create_alarm(reload=True, **alarm)
|
|
1188
|
+
|
|
1189
|
+
@logging_error_handler
@validate_types(output=None)
def load_db_to_roles(self):
    r"""
    Populates the in-memory role registry from the persisted roles table.
    """
    if self.is_db_connected():
        Roles.fill_cvt_roles()
|
|
1198
|
+
|
|
1199
|
+
@logging_error_handler
@validate_types(output=None)
def load_db_to_users(self):
    r"""
    Populates the in-memory user registry from the persisted users table.
    """
    if self.is_db_connected():
        Users.fill_cvt_users()
|
|
1208
|
+
|
|
1209
|
+
@logging_error_handler
@validate_types(output=None)
def load_opcua_clients_from_db(self):
    r"""
    Re-registers every persisted OPC UA client.
    """
    if self.is_db_connected():

        for client in self.db_manager.get_opcua_clients():
            self.add_opcua_client(**client)
|
|
1223
|
+
|
|
1224
|
+
@logging_error_handler
def load_db_tags_to_machine(self):
    r"""
    Re-binds persisted tag subscriptions to their state machines and
    restores each machine's persisted identifier.

    Non-DAS machines must exist in the DB; when one is missing this method
    returns early with an error tuple (kept for backward compatibility).
    """
    machines = self.machine_manager.get_machines()

    for machine, _, _ in machines:

        machine_name = machine.name.value
        machine_db = Machines.get_or_none(name=machine_name)

        if machine.classification.value.lower()!="data acquisition system":

            if not machine_db:

                return f"{machine_name} not found into DB", 404

            machine.identifier.value = machine_db.identifier
            tags_machine = machine_db.get_tags()

            for tag_machine in tags_machine:

                _tag = tag_machine.serialize()
                tag_name = _tag["tag"]["name"]
                tag = self.cvt.get_tag_by_name(name=tag_name)
                machine.subscribe_to(tag=tag, default_tag_name=_tag["default_tag_name"])

        else:

            # DAS machines may not be persisted; guard against a missing
            # row (previously dereferenced machine_db unconditionally,
            # raising AttributeError on None).
            if machine_db:

                machine.identifier.value = machine_db.identifier
|
|
1255
|
+
|
|
1256
|
+
@logging_error_handler
def add_db_table(self, table:BaseModel):
    r"""
    Registers an additional model *table* with the DB manager so it is
    created/managed alongside the default tables.
    """
    self.db_manager.register_table(table)
|
|
1262
|
+
|
|
1263
|
+
@logging_error_handler
def get_db_table(self, tablename:str):
    r"""
    Returns the registered DB model whose table name is *tablename*.
    """
    manager = self.db_manager
    return manager.get_db_table(tablename=tablename)
|
|
1269
|
+
|
|
1270
|
+
# ALARMS METHODS
@logging_error_handler
@validate_types(output=AlarmManager)
def get_alarm_manager(self)->AlarmManager:
    r"""
    Returns the application's `AlarmManager` instance.
    """
    return self.alarm_manager
|
|
1278
|
+
|
|
1279
|
+
@logging_error_handler
@validate_types(
    name=str,
    tag=str,
    alarm_type=str,
    trigger_value=bool|float|int,
    description=str|type(None),
    identifier=str|type(None),
    state=str,
    timestamp=str|type(None),
    ack_timestamp=str|type(None),
    user=User|type(None),
    reload=bool,
    output=(Alarm, str)
)
def create_alarm(
    self,
    name:str,
    tag:str,
    alarm_type:str="BOOL",
    trigger_value:bool|float|int=True,
    description:str="",
    identifier:str=None,
    state:str="Normal",
    timestamp:str=None,
    ack_timestamp:str=None,
    user:User=None,
    reload:bool=False
)->tuple[Alarm, str]:
    r"""
    Appends an alarm to the Alarm Manager and, when a database is connected
    and this is not a reload, persists it through the alarms engine.

    **Parameters**

    * **name** (str): alarm name.
    * **tag** (str): tag name the alarm is bound to.
    * **alarm_type** (str): trigger type (default "BOOL").
    * **trigger_value** (bool|float|int): value that fires the alarm.
    * **description** (str): optional human-readable description.
    * **identifier** (str): optional alarm id.
    * **state** (str): initial alarm state (default "Normal").
    * **timestamp** / **ack_timestamp** (str): optional restored timestamps.
    * **user** (User): user responsible for the creation, if any.
    * **reload** (bool): True when re-loading from DB (skips persistence).

    **Returns**

    * (Alarm, str): the created alarm (None on failure) and a message.
    """
    alarm, message = self.alarm_manager.append_alarm(
        name=name,
        tag=tag,
        type=alarm_type,
        trigger_value=trigger_value,
        description=description,
        identifier=identifier,
        state=state,
        timestamp=timestamp,
        ack_timestamp=ack_timestamp,
        user=user,
        reload=reload,
        sio=self.sio
    )

    if alarm:

        # Persist Alarm on Database
        if not reload:
            if self.is_db_connected():

                # Re-fetch so the persisted record carries the
                # manager-assigned identifier.
                alarm = self.alarm_manager.get_alarm_by_name(name=name)

                self.alarms_engine.create(
                    id=alarm.identifier,
                    name=name,
                    tag=tag,
                    trigger_type=alarm_type,
                    trigger_value=trigger_value,
                    description=description
                )

        return alarm, message

    return None, message
|
|
1354
|
+
|
|
1355
|
+
@logging_error_handler
@validate_types(lasts=int, output=list)
def get_lasts_alarms(self, lasts:int=10)->list:
    r"""
    Returns the *lasts* most recent alarm summary records, or an empty
    list when no database is connected.
    """
    if not self.is_db_connected():

        return list()

    return self.alarms_engine.get_lasts(lasts=lasts)
|
|
1366
|
+
|
|
1367
|
+
@logging_error_handler
def filter_alarms_by(self, **fields):
    r"""
    Queries the alarm summary history with arbitrary filter *fields*.

    Pagination defaults are applied when omitted: page=1, limit=20.

    **Returns:** the engine's filtered result; None when no database is
    connected (kept for backward compatibility).
    """
    if self.is_db_connected():

        # Default pagination parameters when the caller omits them.
        fields.setdefault('page', 1)
        fields.setdefault('limit', 20)

        return self.alarms_engine.filter_alarm_summary_by(**fields)
|
|
1379
|
+
|
|
1380
|
+
@logging_error_handler
@validate_types(id=str, name=str|None, description=str|None, alarm_type=str|None, trigger_value=int|float|None, output=None)
def update_alarm(
    self,
    id:str,
    name:str=None,
    tag:str=None,
    description:str=None,
    alarm_type:str=None,
    trigger_value:int|float=None)->None:
    r"""
    Updates alarm attributes in the Alarm Manager and, when a database is
    connected, persists the same changes.

    **Parameters**

    * **id** (str): alarm identifier.
    * **name** (str)[Optional]: new alarm name.
    * **tag** (str)[Optional]: new bound tag name.
    * **description** (str)[Optional]: new description.
    * **alarm_type** (str)[Optional]: new trigger type.
    * **trigger_value** (int|float)[Optional]: new trigger value.

    **Returns:** `None`
    """
    self.alarm_manager.put(
        id=id,
        name=name,
        tag=tag,
        description=description,
        alarm_type=alarm_type,
        trigger_value=trigger_value
    )
    # Persist Alarm on Database
    if self.is_db_connected():

        self.alarms_engine.put(
            id=id,
            name=name,
            tag=tag,
            description=description,
            alarm_type=alarm_type,
            trigger_value=trigger_value)
|
|
1424
|
+
|
|
1425
|
+
@logging_error_handler
@validate_types(id=str, output=Alarm)
def get_alarm(self, id:str)->Alarm:
    r"""
    Gets an alarm from the Alarm Manager by its identifier.

    **Parameters**

    * **id** (str): alarm identifier.

    **Returns**

    * **alarm** (Alarm object)
    """
    manager = self.alarm_manager
    return manager.get_alarm(id=id)
|
|
1440
|
+
|
|
1441
|
+
@logging_error_handler
@validate_types(output=dict)
def get_alarms(self)->dict:
    r"""
    Gets every alarm registered in the Alarm Manager.

    **Returns**

    * **alarms** (dict): alarm objects keyed by identifier.
    """
    manager = self.alarm_manager
    return manager.get_alarms()
|
|
1452
|
+
|
|
1453
|
+
@logging_error_handler
@validate_types(output=list)
def serialize_alarms(self)->list:
    r"""
    Serializes every alarm registered in the Alarm Manager.

    **Returns**

    * **alarms** (list): list of serialized (jsonable) alarm dicts.
    """
    # Comprehension replaces the manual append loop; .values() since the
    # keys were unused.
    return [alarm.serialize() for alarm in self.alarm_manager.get_alarms().values()]
|
|
1469
|
+
|
|
1470
|
+
@logging_error_handler
@validate_types(lasts=int|None, output=list)
def get_lasts_active_alarms(self, lasts:int=None)->list:
    r"""
    Returns the most recent active alarms (all of them when *lasts* is
    None); an empty list when there are none.
    """
    active = self.alarm_manager.get_lasts_active_alarms(lasts=lasts)
    return active or list()
|
|
1477
|
+
|
|
1478
|
+
@logging_error_handler
@validate_types(name=str, output=Alarm)
def get_alarm_by_name(self, name:str)->Alarm:
    r"""
    Gets an alarm from the Alarm Manager by its name.

    **Parameters**

    * **name** (str): alarm name.

    **Returns**

    * **alarm** (Alarm object)
    """
    manager = self.alarm_manager
    return manager.get_alarm_by_name(name=name)
|
|
1493
|
+
|
|
1494
|
+
@logging_error_handler
@validate_types(tag=str, output=list)
def get_alarms_by_tag(self, tag:str)->list:
    r"""
    Gets every alarm associated with a tag.

    **Parameters**

    * **tag** (str): tag name bound to the alarms.

    **Returns**

    * **alarms** (list): alarm objects bound to *tag*.
    """
    manager = self.alarm_manager
    return manager.get_alarms_by_tag(tag=tag)
|
|
1509
|
+
|
|
1510
|
+
@logging_error_handler
@validate_types(id=str, user=User|type(None), output=None)
def delete_alarm(self, id:str, user:User=None):
    r"""
    Removes an alarm from the Alarm Manager and, when a database is
    connected, from persistence as well.

    **Parameters**

    * **id** (str): alarm identifier.
    * **user** (User)[Optional]: user performing the deletion.
    """
    self.alarm_manager.delete_alarm(id=id, user=user)

    if self.is_db_connected():
        self.alarms_engine.delete(id=id)
|
|
1524
|
+
|
|
1525
|
+
# EVENTS METHODS
@logging_error_handler
@validate_types(lasts=int, output=list)
def get_lasts_events(self, lasts:int=10)->list:
    r"""
    Returns the *lasts* most recent events, or an empty list when no
    database is connected.
    """
    if not self.is_db_connected():

        return list()

    return self.events_engine.get_lasts(lasts=lasts)
|
|
1537
|
+
|
|
1538
|
+
@logging_error_handler
def filter_events_by(
    self,
    usernames:list[str]=None,
    priorities:list[int]=None,
    criticities:list[int]=None,
    message:str="",
    classification:str="",
    description:str="",
    greater_than_timestamp:datetime=None,
    less_than_timestamp:datetime=None,
    timezone:str="UTC",
    page:int=1,
    limit:int=20)->list:
    r"""
    Filters the persisted events by the given criteria.

    **Parameters**

    * **usernames** / **priorities** / **criticities** (list)[Optional]: allowed values.
    * **message** / **classification** / **description** (str): substring filters.
    * **greater_than_timestamp** / **less_than_timestamp** (datetime)[Optional]: time window.
    * **timezone** (str): timezone for the window (default "UTC").
    * **page** / **limit** (int): pagination.

    **Returns:** the filtered events; an empty list when no database is
    connected.
    """
    if self.is_db_connected():

        return self.events_engine.filter_by(
            usernames=usernames,
            priorities=priorities,
            criticities=criticities,
            message=message,
            description=description,
            classification=classification,
            greater_than_timestamp=greater_than_timestamp,
            less_than_timestamp=less_than_timestamp,
            timezone=timezone,
            page=page,
            limit=limit
        )

    return list()
|
|
1572
|
+
|
|
1573
|
+
# LOGS METHODS
@logging_error_handler
def create_log(
    self,
    message:str,
    user:User,
    description:str=None,
    classification:str=None,
    alarm_summary_id:int=None,
    event_id:int=None,
    timestamp:datetime=None
    )->tuple:
    r"""
    Persists a log entry and broadcasts it over socket.io when available.

    **Parameters**

    * **message** (str): log message.
    * **user** (User): user the log is attributed to.
    * **description** / **classification** (str)[Optional].
    * **alarm_summary_id** / **event_id** (int)[Optional]: related records.
    * **timestamp** (datetime)[Optional].

    **Returns:** (log, message) on success; (None, "Logs DB is not up")
    when no database is connected.
    """
    if self.is_db_connected():

        log, message = self.logs_engine.create(
            message=message,
            user=user,
            description=description,
            classification=classification,
            alarm_summary_id=alarm_summary_id,
            event_id=event_id,
            timestamp=timestamp
        )

        # Push the new log to connected clients in real time.
        if self.sio:

            self.sio.emit("on.log", data=log.serialize())

        return log, message

    return None, "Logs DB is not up"
|
|
1607
|
+
|
|
1608
|
+
@logging_error_handler
def filter_logs_by(
    self,
    usernames:list[str]=None,
    alarm_names:list[str]=None,
    event_ids:list[int]=None,
    classification:str="",
    message:str="",
    description:str="",
    greater_than_timestamp:datetime=None,
    less_than_timestamp:datetime=None,
    timezone:str="UTC"
    )->list:
    r"""
    Filters the persisted logs by the given criteria.

    **Parameters**

    * **usernames** / **alarm_names** / **event_ids** (list)[Optional]: allowed values.
    * **classification** / **message** / **description** (str): substring filters.
    * **greater_than_timestamp** / **less_than_timestamp** (datetime)[Optional]: time window.
    * **timezone** (str): timezone for the window (default "UTC").

    **Returns:** the filtered logs; an empty list when no database is
    connected.
    """
    if self.is_db_connected():

        return self.logs_engine.filter_by(
            usernames=usernames,
            alarm_names=alarm_names,
            event_ids=event_ids,
            classification=classification,
            message=message,
            description=description,
            greater_than_timestamp=greater_than_timestamp,
            less_than_timestamp=less_than_timestamp,
            timezone=timezone
        )

    # Previously fell through returning None; return an empty list to honor
    # the declared ->list and match filter_events_by.
    return list()
|
|
1637
|
+
|
|
1638
|
+
@logging_error_handler
@validate_types(lasts=int, output=list)
def get_lasts_logs(self, lasts:int=10)->list:
    r"""
    Returns the *lasts* most recent log entries; an empty list when no
    database is connected or nothing is found.
    """
    if not self.is_db_connected():

        return list()

    return self.logs_engine.get_lasts(lasts=lasts) or list()
|
|
1649
|
+
|
|
1650
|
+
# INIT APP
@logging_error_handler
def run(self, debug:bool=False, test:bool=False, create_tables:bool=False, machines:tuple=None)->None:
    r"""
    Runs the main app thread, every thread defined by decorators, and the
    state machines; also starts the app logger and ensures the system user
    exists.

    **Parameters**

    * **debug** (bool): when True (and not testing) runs the Dash app in
      debug mode without the reloader.
    * **test** (bool): start in test mode (no UI server).
    * **create_tables** (bool): create DB tables on startup.
    * **machines** (tuple)[Optional]: additional machines to start.

    **Returns:** `None`
    """
    self.safe_start(test=test, create_tables=create_tables, machines=machines)
    self.create_system_user()

    if not test:

        if debug:

            # use_reloader=False: the reloader would re-run the workers.
            self.dash_app.run(debug=debug, use_reloader=False)
|
|
1666
|
+
|
|
1667
|
+
@logging_error_handler
def create_system_user(self):
    r"""
    Ensures the built-in "system" user exists, creating it with the "sudo"
    role and a randomly generated password when missing.
    """
    # Create system user
    users = Users()
    roles = Roles()

    # Check whether the "system" user already exists
    if not users.read_by_username(username="system"):
        # Fetch the administrator role; skip creation if it is missing
        admin_role = roles.read_by_name(name="sudo")
        if admin_role:
            # Generate the password dynamically (cryptographically secure,
            # never hard-coded)
            system_password = secrets.token_urlsafe(32)
            self.signup(
                username="system",
                role_name="sudo",
                email="system@intelcon.com",
                password=system_password,
                name="System",
                lastname="Intelcon"
            )
|
|
1688
|
+
|
|
1689
|
+
@logging_error_handler
|
|
1690
|
+
def safe_start(self, test:bool=False, create_tables:bool=True, machines:tuple=None):
|
|
1691
|
+
r"""
|
|
1692
|
+
Run the app without a main thread, only run the app with the threads and state machines define
|
|
1693
|
+
"""
|
|
1694
|
+
self._create_tables = create_tables
|
|
1695
|
+
self.__start_logger()
|
|
1696
|
+
self.__start_workers(test=test, machines=machines)
|
|
1697
|
+
|
|
1698
|
+
@logging_error_handler
|
|
1699
|
+
@validate_types(output=None)
|
|
1700
|
+
def safe_stop(self)->None:
|
|
1701
|
+
r"""
|
|
1702
|
+
Stops the app in safe way with the threads
|
|
1703
|
+
"""
|
|
1704
|
+
self.__stop_workers()
|
|
1705
|
+
|
|
1706
|
+
@logging_error_handler
|
|
1707
|
+
def state_machine_diagrams(self, folder_path:str):
|
|
1708
|
+
r"""
|
|
1709
|
+
Documentation here"""
|
|
1710
|
+
for machine, _, _ in self._manager.get_machines():
|
|
1711
|
+
# SAVE STATE DIAGRAM
|
|
1712
|
+
img_path = f"{folder_path}{machine.name.value}.png"
|
|
1713
|
+
machine._graph().write_png(img_path)
|
|
1714
|
+
|
|
1715
|
+
# WORKERS
|
|
1716
|
+
@logging_error_handler
|
|
1717
|
+
def __start_workers(self, test:bool=False, machines:tuple=None)->None:
|
|
1718
|
+
r"""
|
|
1719
|
+
Starts all workers.
|
|
1720
|
+
|
|
1721
|
+
* LoggerWorker
|
|
1722
|
+
* StateMachineWorker
|
|
1723
|
+
* DASWorker
|
|
1724
|
+
"""
|
|
1725
|
+
if self._create_tables:
|
|
1726
|
+
|
|
1727
|
+
self.db_worker = LoggerWorker(self.db_manager)
|
|
1728
|
+
self.connect_to_db(test=test)
|
|
1729
|
+
self.db_worker.start()
|
|
1730
|
+
|
|
1731
|
+
if machines:
|
|
1732
|
+
|
|
1733
|
+
for machine in machines:
|
|
1734
|
+
|
|
1735
|
+
machine.set_socketio(sio=self.sio)
|
|
1736
|
+
|
|
1737
|
+
self.machine.start(machines=machines)
|
|
1738
|
+
|
|
1739
|
+
if self.is_db_connected():
|
|
1740
|
+
|
|
1741
|
+
self.load_db_tags_to_machine()
|
|
1742
|
+
|
|
1743
|
+
self.is_starting = False
|
|
1744
|
+
|
|
1745
|
+
@logging_error_handler
|
|
1746
|
+
@validate_types(output=None)
|
|
1747
|
+
def __stop_workers(self)->None:
|
|
1748
|
+
r"""
|
|
1749
|
+
Safe stop workers execution
|
|
1750
|
+
"""
|
|
1751
|
+
self.machine.stop()
|
|
1752
|
+
self.db_worker.stop()
|
|
1753
|
+
if hasattr(self, 'subscription_monitor'):
|
|
1754
|
+
self.subscription_monitor.stop()
|
|
1755
|
+
|
|
1756
|
+
@logging_error_handler
|
|
1757
|
+
@validate_types(output=None)
|
|
1758
|
+
def __start_logger(self)->None:
|
|
1759
|
+
r"""
|
|
1760
|
+
Starts logger in log file
|
|
1761
|
+
"""
|
|
1762
|
+
|
|
1763
|
+
requests.urllib3.disable_warnings()
|
|
1764
|
+
urllib3.disable_warnings()
|
|
1765
|
+
logging.getLogger("urllib3").setLevel(logging.WARNING)
|
|
1766
|
+
logging.getLogger("requests").setLevel(logging.WARNING)
|
|
1767
|
+
logging.getLogger('peewee').setLevel(logging.WARNING)
|
|
1768
|
+
logging.getLogger('opcua').setLevel(logging.CRITICAL)
|
|
1769
|
+
# Configure root logger with rotating file handler (size-based)
|
|
1770
|
+
root_logger = logging.getLogger()
|
|
1771
|
+
root_logger.setLevel(self._logging_level)
|
|
1772
|
+
# Clear existing handlers to avoid duplicates
|
|
1773
|
+
for _h in list(root_logger.handlers):
|
|
1774
|
+
root_logger.removeHandler(_h)
|
|
1775
|
+
|
|
1776
|
+
max_bytes = int(os.environ.get('LOG_MAX_BYTES', 10 * 1024 * 1024)) # 10MB
|
|
1777
|
+
backup_count = int(os.environ.get('LOG_BACKUP_COUNT', 3)) # 3 backups
|
|
1778
|
+
handler = RotatingFileHandler(
|
|
1779
|
+
filename=self._log_file,
|
|
1780
|
+
maxBytes=max_bytes,
|
|
1781
|
+
backupCount=backup_count,
|
|
1782
|
+
encoding="utf-8",
|
|
1783
|
+
)
|
|
1784
|
+
log_format = "%(asctime)s:%(levelname)s:%(message)s"
|
|
1785
|
+
formatter = logging.Formatter(log_format)
|
|
1786
|
+
handler.setFormatter(formatter)
|
|
1787
|
+
root_logger.addHandler(handler)
|
|
1788
|
+
|
|
1789
|
+
# Ensure named logger propagates to root (no extra handler to avoid duplicates)
|
|
1790
|
+
app_logger = logging.getLogger("pyautomation")
|
|
1791
|
+
app_logger.setLevel(self._logging_level)
|
|
1792
|
+
app_logger.propagate = True
|