brynq-sdk-task-scheduler 1.0.0 (tar.gz)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brynq_sdk_task_scheduler-1.0.0/PKG-INFO +10 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk/task_scheduler/__init__.py +1 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk/task_scheduler/task_scheduler.py +555 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/PKG-INFO +10 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/SOURCES.txt +9 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/dependency_links.txt +1 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/not-zip-safe +1 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/requires.txt +5 -0
- brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/top_level.txt +1 -0
- brynq_sdk_task_scheduler-1.0.0/setup.cfg +4 -0
- brynq_sdk_task_scheduler-1.0.0/setup.py +21 -0
+++ brynq_sdk_task_scheduler-1.0.0/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: brynq_sdk_task_scheduler
+Version: 1.0.0
+Summary: Code to execute tasks in BrynQ.com with the task scheduler
+Home-page: UNKNOWN
+Author: BrynQ
+Author-email: support@brynq.com
+License: BrynQ License
+Description: Code to execute tasks in the BrynQ.com platform with the task scheduler
+Platform: UNKNOWN
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk/task_scheduler/__init__.py
@@ -0,0 +1 @@
+from brynq_sdk.task_scheduler.task_scheduler import TaskScheduler
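Because the __init__.py above re-exports TaskScheduler, consumer scripts can import the class from the subpackage root rather than from the module file. A minimal sketch (the task_id value is hypothetical and must exist in the customer's task_scheduler table):

from brynq_sdk.task_scheduler import TaskScheduler

# Instantiating the class immediately writes a start log row and sets the task status
# to RUNNING (see start_task in task_scheduler.py below)
scheduler = TaskScheduler(task_id=1, loglevel='INFO')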
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk/task_scheduler/task_scheduler.py
@@ -0,0 +1,555 @@
+import sys
+import os
+import datetime
+import inspect
+import time
+import typing
+import traceback
+import pandas as pd
+import json
+import requests
+from salure_helpers.mandrill import MailClient
+from brynq_sdk.functions import Functions
+from brynq_sdk.brynq import BrynQ
+from salure_helpers.mysql import MySQL
+from salure_helpers.elastic import Elastic
+import warnings
+import re
+
+
+class TaskScheduler(BrynQ):
+
+    def __init__(self, task_id: int = None, loglevel: str = 'INFO', email_after_errors: bool = False):
+        """
+        The TaskScheduler is responsible for the logging to the database. Based on this logging, the next reload
+        will start or not and a warning will be given or not
+        :param task_id: The ID of the task as saved in the task_scheduler table in the customer database
+        :param email_after_errors: a True or False value. When True, an email with the number of errors will be sent to a contact person of the customer (as given in the database)
+        :param loglevel: Choose the level at which you want to store the logs. Default is INFO, which means that a
+        log line with level DEBUG is not stored
+        """
+        super().__init__()
+        self.es = Elastic()
+        self.mysql = MySQL()
+        self.email_after_errors = email_after_errors
+        self.task_id = task_id
+        self.loglevel = loglevel
+        self.started_at = datetime.datetime.now()
+        # If the task is started via the task_scheduler, the following 3 parameters will be passed by the scheduler
+        if len(sys.argv[1:4]) > 0:
+            self.started_local = False
+            self.customer_db, self.task_id, self.run_id = sys.argv[1:4]
+        # If the task is started locally, the parameters should be set locally
+        else:
+            self.started_local = True
+            self.customer_db = 'placeholder'
+            self.run_id = int(round(time.time() * 100000))
+        print(self.task_id, self.run_id)
+        self.error_count = 0
+
+        # Check if the log tables exist in the customer database. If not, create them.
+        # MySQL throws a warning when a table already exists. We don't care, so we ignore warnings (not exceptions!)
+        warnings.filterwarnings('ignore')
+        # self.check_if_logging_tables_exists()
+
+        # Check if the task was started on schedule or manually. Store it in a variable to use later in the script
+        self.task_manual_started = self.check_if_task_manual_started()
+
+        # Creates the Elasticsearch index and data view if they don't exist
+        self.es.initialize_customer()
+
+        # Start the task and set up the data in the database
+        self.start_task()
+
+    def __count_keys(self, json_obj):
+        if not isinstance(json_obj, dict):
+            return 0
+        key_count = 0
+        for key, value in json_obj.items():
+            if not isinstance(value, dict):
+                key_count += 1  # Count the current key
+            else:
+                key_count += self.__count_keys(value)  # Recursively count keys in nested dictionaries
+        return key_count
+
+    def check_if_logging_tables_exists(self):
+        """
+        This function checks if all the tables needed for the task_scheduler exist. If they don't, this function
+        creates the needed tables
+        :return: nothing
+        """
+        # Check if the table task_scheduler exists. If not, create it
+        new_table_query = 'CREATE TABLE IF NOT EXISTS `task_scheduler` (' \
+                          '`id` int(11) NOT NULL AUTO_INCREMENT,' \
+                          '`dashboard_reload` bool NOT NULL DEFAULT \'0\',' \
+                          '`title` varchar(50) NOT NULL,' \
+                          '`description` varchar(255) NOT NULL,' \
+                          '`dashboard_guid` varchar(255) NULL DEFAULT NULL,' \
+                          '`docker_image` varchar(255) DEFAULT NULL,' \
+                          '`runfile_path` varchar(255) DEFAULT NULL,' \
+                          '`trigger_type` enum("MANUAL", "TIME", "OTHER_TASK") NOT NULL DEFAULT \'MANUAL\',' \
+                          '`next_reload` timestamp NULL DEFAULT NULL,' \
+                          '`timezone` enum("Africa/Abidjan", "Africa/Accra", "Africa/Addis_Ababa", "Africa/Algiers", "Africa/Asmara", "Africa/Bamako", "Africa/Bangui", "Africa/Banjul", "Africa/Bissau", "Africa/Blantyre", "Africa/Brazzaville", "Africa/Bujumbura", "Africa/Cairo", "Africa/Casablanca", "Africa/Ceuta", "Africa/Conakry", "Africa/Dakar", "Africa/Dar_es_Salaam", "Africa/Djibouti", "Africa/Douala", "Africa/El_Aaiun", "Africa/Freetown", "Africa/Gaborone", "Africa/Harare", "Africa/Johannesburg", "Africa/Juba", "Africa/Kampala", "Africa/Khartoum", "Africa/Kigali", "Africa/Kinshasa", "Africa/Lagos", "Africa/Libreville", "Africa/Lome", "Africa/Luanda", "Africa/Lubumbashi", "Africa/Lusaka", "Africa/Malabo", "Africa/Maputo", "Africa/Maseru", "Africa/Mbabane", "Africa/Mogadishu", "Africa/Monrovia", "Africa/Nairobi", "Africa/Ndjamena", "Africa/Niamey", "Africa/Nouakchott", "Africa/Ouagadougou", "Africa/Porto-Novo", "Africa/Sao_Tome", "Africa/Tripoli", "Africa/Tunis", "Africa/Windhoek", "America/Adak", "America/Anchorage", "America/Anguilla", "America/Antigua", "America/Araguaina", "America/Argentina/Buenos_Aires", "America/Argentina/Catamarca", "America/Argentina/Cordoba", "America/Argentina/Jujuy", "America/Argentina/La_Rioja", "America/Argentina/Mendoza", "America/Argentina/Rio_Gallegos", "America/Argentina/Salta", "America/Argentina/San_Juan", "America/Argentina/San_Luis", "America/Argentina/Tucuman", "America/Argentina/Ushuaia", "America/Aruba", "America/Asuncion", "America/Atikokan", "America/Bahia", "America/Bahia_Banderas", "America/Barbados", "America/Belem", "America/Belize", "America/Blanc-Sablon", "America/Boa_Vista", "America/Bogota", "America/Boise", "America/Cambridge_Bay", "America/Campo_Grande", "America/Cancun", "America/Caracas", "America/Cayenne", "America/Cayman", "America/Chicago", "America/Chihuahua", "America/Costa_Rica", "America/Creston", "America/Cuiaba", "America/Curacao", "America/Danmarkshavn", "America/Dawson", "America/Dawson_Creek", "America/Denver", "America/Detroit", "America/Dominica", "America/Edmonton", "America/Eirunepe", "America/El_Salvador", "America/Fort_Nelson", "America/Fortaleza", "America/Glace_Bay", "America/Godthab", "America/Goose_Bay", "America/Grand_Turk", "America/Grenada", "America/Guadeloupe", "America/Guatemala", "America/Guayaquil", "America/Guyana", "America/Halifax", "America/Havana", "America/Hermosillo", "America/Indiana/Indianapolis", "America/Indiana/Knox", "America/Indiana/Marengo", "America/Indiana/Petersburg", "America/Indiana/Tell_City", "America/Indiana/Vevay", "America/Indiana/Vincennes", "America/Indiana/Winamac", "America/Inuvik", "America/Iqaluit", "America/Jamaica", "America/Juneau", "America/Kentucky/Louisville", "America/Kentucky/Monticello", "America/Kralendijk", "America/La_Paz", "America/Lima", "America/Los_Angeles", "America/Lower_Princes", "America/Maceio", "America/Managua", "America/Manaus", "America/Marigot", "America/Martinique", "America/Matamoros", "America/Mazatlan", "America/Menominee", "America/Merida", "America/Metlakatla", "America/Mexico_City", "America/Miquelon", "America/Moncton", "America/Monterrey", "America/Montevideo", "America/Montserrat", "America/Nassau", "America/New_York", "America/Nipigon", "America/Nome", "America/Noronha", "America/North_Dakota/Beulah", "America/North_Dakota/Center", "America/North_Dakota/New_Salem", "America/Ojinaga", "America/Panama", "America/Pangnirtung", "America/Paramaribo", "America/Phoenix", "America/Port-au-Prince", "America/Port_of_Spain", "America/Porto_Velho", "America/Puerto_Rico", "America/Punta_Arenas", "America/Rainy_River", "America/Rankin_Inlet", "America/Recife", "America/Regina", "America/Resolute", "America/Rio_Branco", "America/Santarem", "America/Santiago", "America/Santo_Domingo", "America/Sao_Paulo", "America/Scoresbysund", "America/Sitka", "America/St_Barthelemy", "America/St_Johns", "America/St_Kitts", "America/St_Lucia", "America/St_Thomas", "America/St_Vincent", "America/Swift_Current", "America/Tegucigalpa", "America/Thule", "America/Thunder_Bay", "America/Tijuana", "America/Toronto", "America/Tortola", "America/Vancouver", "America/Whitehorse", "America/Winnipeg", "America/Yakutat", "America/Yellowknife", "Antarctica/Casey", "Antarctica/Davis", "Antarctica/DumontDUrville", "Antarctica/Macquarie", "Antarctica/Mawson", "Antarctica/McMurdo", "Antarctica/Palmer", "Antarctica/Rothera", "Antarctica/Syowa", "Antarctica/Troll", "Antarctica/Vostok", "Arctic/Longyearbyen", "Asia/Aden", "Asia/Almaty", "Asia/Amman", "Asia/Anadyr", "Asia/Aqtau", "Asia/Aqtobe", "Asia/Ashgabat", "Asia/Atyrau", "Asia/Baghdad", "Asia/Bahrain", "Asia/Baku", "Asia/Bangkok", "Asia/Barnaul", "Asia/Beirut", "Asia/Bishkek", "Asia/Brunei", "Asia/Chita", "Asia/Choibalsan", "Asia/Colombo", "Asia/Damascus", "Asia/Dhaka", "Asia/Dili", "Asia/Dubai", "Asia/Dushanbe", "Asia/Famagusta", "Asia/Gaza", "Asia/Hebron", "Asia/Ho_Chi_Minh", "Asia/Hong_Kong", "Asia/Hovd", "Asia/Irkutsk", "Asia/Jakarta", "Asia/Jayapura", "Asia/Jerusalem", "Asia/Kabul", "Asia/Kamchatka", "Asia/Karachi", "Asia/Kathmandu", "Asia/Khandyga", "Asia/Kolkata", "Asia/Krasnoyarsk", "Asia/Kuala_Lumpur", "Asia/Kuching", "Asia/Kuwait", "Asia/Macau", "Asia/Magadan", "Asia/Makassar", "Asia/Manila", "Asia/Muscat", "Asia/Nicosia", "Asia/Novokuznetsk", "Asia/Novosibirsk", "Asia/Omsk", "Asia/Oral", "Asia/Phnom_Penh", "Asia/Pontianak", "Asia/Pyongyang", "Asia/Qatar", "Asia/Qostanay", "Asia/Qyzylorda", "Asia/Riyadh", "Asia/Sakhalin", "Asia/Samarkand", "Asia/Seoul", "Asia/Shanghai", "Asia/Singapore", "Asia/Srednekolymsk", "Asia/Taipei", "Asia/Tashkent", "Asia/Tbilisi", "Asia/Tehran", "Asia/Thimphu", "Asia/Tokyo", "Asia/Tomsk", "Asia/Ulaanbaatar", "Asia/Urumqi", "Asia/Ust-Nera", "Asia/Vientiane", "Asia/Vladivostok", "Asia/Yakutsk", "Asia/Yangon", "Asia/Yekaterinburg", "Asia/Yerevan", "Atlantic/Azores", "Atlantic/Bermuda", "Atlantic/Canary", "Atlantic/Cape_Verde", "Atlantic/Faroe", "Atlantic/Madeira", "Atlantic/Reykjavik", "Atlantic/South_Georgia", "Atlantic/St_Helena", "Atlantic/Stanley", "Australia/Adelaide", "Australia/Brisbane", "Australia/Broken_Hill", "Australia/Currie", "Australia/Darwin", "Australia/Eucla", "Australia/Hobart", "Australia/Lindeman", "Australia/Lord_Howe", "Australia/Melbourne", "Australia/Perth", "Australia/Sydney", "Canada/Atlantic", "Canada/Central", "Canada/Eastern", "Canada/Mountain", "Canada/Newfoundland", "Canada/Pacific", "Europe/Amsterdam", "Europe/Andorra", "Europe/Astrakhan", "Europe/Athens", "Europe/Belgrade", "Europe/Berlin", "Europe/Bratislava", "Europe/Brussels", "Europe/Bucharest", "Europe/Budapest", "Europe/Busingen", "Europe/Chisinau", "Europe/Copenhagen", "Europe/Dublin", "Europe/Gibraltar", "Europe/Guernsey", "Europe/Helsinki", "Europe/Isle_of_Man", "Europe/Istanbul", "Europe/Jersey", "Europe/Kaliningrad", "Europe/Kiev", "Europe/Kirov", "Europe/Lisbon", "Europe/Ljubljana", "Europe/London", "Europe/Luxembourg", "Europe/Madrid", "Europe/Malta", "Europe/Mariehamn", "Europe/Minsk", "Europe/Monaco", "Europe/Moscow", "Europe/Oslo", "Europe/Paris", "Europe/Podgorica", "Europe/Prague", "Europe/Riga", "Europe/Rome", "Europe/Samara", "Europe/San_Marino", "Europe/Sarajevo", "Europe/Saratov", "Europe/Simferopol", "Europe/Skopje", "Europe/Sofia", "Europe/Stockholm", "Europe/Tallinn", "Europe/Tirane", "Europe/Ulyanovsk", "Europe/Uzhgorod", "Europe/Vaduz", "Europe/Vatican", "Europe/Vienna", "Europe/Vilnius", "Europe/Volgograd", "Europe/Warsaw", "Europe/Zagreb", "Europe/Zaporozhye", "Europe/Zurich", "GMT", "Indian/Antananarivo", "Indian/Chagos", "Indian/Christmas", "Indian/Cocos", "Indian/Comoro", "Indian/Kerguelen", "Indian/Mahe", "Indian/Maldives", "Indian/Mauritius", "Indian/Mayotte", "Indian/Reunion", "Pacific/Apia", "Pacific/Auckland", "Pacific/Bougainville", "Pacific/Chatham", "Pacific/Chuuk", "Pacific/Easter", "Pacific/Efate", "Pacific/Enderbury", "Pacific/Fakaofo", "Pacific/Fiji", "Pacific/Funafuti", "Pacific/Galapagos", "Pacific/Gambier", "Pacific/Guadalcanal", "Pacific/Guam", "Pacific/Honolulu", "Pacific/Kiritimati", "Pacific/Kosrae", "Pacific/Kwajalein", "Pacific/Majuro", "Pacific/Marquesas", "Pacific/Midway", "Pacific/Nauru", "Pacific/Niue", "Pacific/Norfolk", "Pacific/Noumea", "Pacific/Pago_Pago", "Pacific/Palau", "Pacific/Pitcairn", "Pacific/Pohnpei", "Pacific/Port_Moresby", "Pacific/Rarotonga", "Pacific/Saipan", "Pacific/Tahiti", "Pacific/Tarawa", "Pacific/Tongatapu", "Pacific/Wake", "Pacific/Wallis", "US/Alaska", "US/Arizona", "US/Central", "US/Eastern", "US/Hawaii", "US/Mountain", "US/Pacific", "UTC") CHARACTER SET utf8mb4 COLLATE utf8mb4_bin NOT NULL DEFAULT \"Europe/Amsterdam\",' \
+                          '`frequency` varchar(255) DEFAULT \'{"month":0,"day":0,"hour":0,"minute":0}\',' \
+                          '`start_after_task_id` int DEFAULT NULL,' \
+                          '`start_after_preceding_task` enum("FAILED", "SUCCESS") DEFAULT NULL,' \
+                          '`last_reload` timestamp NULL DEFAULT NULL,' \
+                          '`last_error_message` varchar(255) DEFAULT NULL,' \
+                          '`status` varchar(255) DEFAULT \'IDLE\',' \
+                          '`disabled` tinyint(4) DEFAULT \'1\',' \
+                          '`run_instant` tinyint(1) DEFAULT \'0\',' \
+                          '`sftp_mapping` varchar(255) NOT NULL DEFAULT \'[]\',' \
+                          '`step_nr` int NOT NULL DEFAULT \'0\',' \
+                          '`stopped_by_user` tinyint(1) DEFAULT \'0\',' \
+                          '`stop_is_allowed` bool NOT NULL DEFAULT \'0\',' \
+                          'PRIMARY KEY (`id`),' \
+                          'UNIQUE KEY `task_scheduler_id_uindex` (`id`),' \
+                          'constraint task_scheduler_task_scheduler_id_fk foreign key (start_after_task_id) references task_scheduler (id)' \
+                          ') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci'
+        self.mysql.raw_query(new_table_query)
+
+        # Check if the table task_scheduler_log exists. If not, create it
+        new_table_query = 'CREATE TABLE IF NOT EXISTS `task_scheduler_log` (' \
+                          '`reload_id` bigint NOT NULL,' \
+                          '`task_id` int NULL,' \
+                          '`reload_status` varchar(255) NULL,' \
+                          '`started_at` datetime NULL,' \
+                          '`finished_at` datetime NULL' \
+                          ') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci'
+        self.mysql.raw_query(new_table_query)
+
+        # Check if the table task_execution_log exists. If not, create it
+        new_table_query = 'CREATE TABLE IF NOT EXISTS `task_execution_log`(' \
+                          '`reload_id` bigint NOT NULL,' \
+                          '`task_id` int NULL,' \
+                          '`log_level` varchar(255) NULL,' \
+                          '`created_at` datetime NULL,' \
+                          '`line_number` int NULL,' \
+                          '`message` longtext NULL)' \
+                          'ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci'
+        self.mysql.raw_query(new_table_query)
+
+        # Check if the table task_execution_steps exists. If not, create it
+        new_table_query = 'CREATE TABLE IF NOT EXISTS `task_execution_steps`(' \
+                          '`id` bigint NOT NULL AUTO_INCREMENT,' \
+                          '`task_id` int NULL,' \
+                          '`nr` int DEFAULT 0 NOT NULL,' \
+                          '`description` varchar(255) DEFAULT \'ZzZzZz...\' NOT NULL,' \
+                          'PRIMARY KEY (`id`),' \
+                          'UNIQUE KEY `task_execution_steps_id_uindex` (`id`),' \
+                          'UNIQUE INDEX `task_execution_steps_task_id_nr_uindex` (`task_id`, `nr`))' \
+                          'ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci'
+        self.mysql.raw_query(new_table_query)
+
+        new_table_query = 'CREATE TABLE IF NOT EXISTS `task_variables`(' \
+                          'id INT NOT NULL AUTO_INCREMENT,' \
+                          'task_id INT NOT NULL,' \
+                          'name VARCHAR(150) NOT NULL,' \
+                          'description VARCHAR(255) NULL,' \
+                          'type ENUM(\'INT\', \'TINYINT\', \'BIGINT\', \'FLOAT\', \'DOUBLE\', \'DATETIME\', \'TIMESTAMP\', \'TIME\', \'VARCHAR\', \'BLOB\', \'TEXT\', \'LONGBLOB\') NOT NULL,' \
+                          'value VARCHAR(600) NULL,' \
+                          'temp_value VARCHAR(600) NULL,' \
+                          'PRIMARY KEY (`id`),' \
+                          'UNIQUE KEY `task_variables_id_uindex` (`id`),' \
+                          'UNIQUE INDEX `task_variables_name_value_uindex` (`task_id`, `name`, `value`), ' \
+                          'INDEX `task_variables_name_index` (`name`),' \
+                          'CONSTRAINT task_variables_task_scheduler_id_fk ' \
+                          'FOREIGN KEY (`task_id`) REFERENCES task_scheduler (`id`) ON DELETE CASCADE)' \
+                          'ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci'
+        self.mysql.raw_query(new_table_query)
+
+        # Add the variable 'email_errors_to' as a default to the newly added table if it doesn't exist for the current task
+        response = self.mysql.select('task_variables', 'temp_value',
+                                     f'WHERE name = \'email_errors_to\' AND task_id = {self.task_id}')
+        if len(response) == 0:
+            new_variables = f"-- INSERT INTO `task_variables` (`task_id`, `name`, `type`, `value`, `temp_value`) " \
+                            f"VALUES ({self.task_id}, 'email_errors_to', 'TEXT', 'example@brynq.com, example2@brynq.com', 'example@brynq.com, example2@brynq.com')"
+            self.mysql.raw_query(new_variables, insert=True)
+
+    def create_task_execution_steps(self, step_details: list):
+        """
+        Check if the given steps already exist in the task_execution_steps table. If not, update or insert the values in the table
+        :param step_details: list of dicts. Each dict must contain task details according to required_fields.
+        Example: step_details = [
+            {'nr': 1, 'description': 'test'},
+            {'nr': 2, 'description': 'test2'}
+        ]
+        :return: error (str) or response of mysql
+        """
+        # Check if the required fields are available in the given list
+        required_fields = ['nr', 'description']
+        for step in step_details:
+            for field in required_fields:
+                if field not in step.keys():
+                    return 'Field {field} is required in step {step}. Required fields are: {required_fields}'.format(
+                        field=field, step=step, required_fields=tuple(required_fields))
+
+        # Reformat the list of dictionaries to a valid MySQL query
+        values = ','.join(str((self.task_id, step['nr'], step['description'])) for step in step_details)
+        response = self.mysql.raw_query("INSERT INTO task_execution_steps (`task_id`, `nr`, `description`) "
+                                        "VALUES {step_values} ON DUPLICATE KEY UPDATE `description` = VALUES(description)".format(
+                                            step_values=values), insert=True)
+        return response
+
+    def check_if_task_manual_started(self):
+        """
+        Check if the task was started manually or on schedule. If it was started manually, that's important for the variables in the db_variable function.
+        In that case the dynamic variables should be used instead of the static ones
+        :return: True or False
+        """
+        response = self.mysql.select('task_scheduler', 'run_instant', f'WHERE id = {self.task_id}')[0][0]
+        if response == 1:
+            # Reset the 1 back to 0 before sending the result
+            self.mysql.update('task_scheduler', ['run_instant'], [0], 'WHERE `id` = {}'.format(self.task_id))
+            return True
+        else:
+            return False
+
+    def start_task(self):
+        """
+        Start the task and write this to the database. While the status is running, the task will not start again
+        :return: whether the update to the database is successful or not
+        """
+        # If the task is started from a local instance (not the task_scheduler), create a start log row in the task_scheduler_log
+        if self.started_local:
+            self.mysql.raw_query(f"INSERT INTO `task_scheduler_log` (reload_id, task_id, reload_status, started_at, finished_at) VALUES ({self.run_id}, {self.task_id}, 'Running', '{self.started_at}', null)", insert=True)
+        return self.mysql.update('task_scheduler', ['status', 'step_nr'], ['RUNNING', 1], 'WHERE `id` = {}'.format(self.task_id))
+
+    def db_variable(self, variable_name: str, default_value_if_temp_is_empty: bool = False):
+        """
+        Get a value from the task_variables table corresponding with the given name. If the task is manually started
+        (run_instant = 1), then the temp_value will be returned. This gives users in the frontend the possibility to run
+        a task once manually with other values than normal, without overwriting the normal values.
+        :param variable_name: the name of the variable
+        :param default_value_if_temp_is_empty: bool to determine whether the default value should be used if the temp value is empty when manually started
+        :return: the value of the given variable.
+        """
+        if self.task_manual_started is True:
+            response = self.mysql.select('task_variables', 'temp_value, value',
+                                         f'WHERE name = \'{variable_name}\' AND task_id = {self.task_id}')
+        else:
+            response = self.mysql.select('task_variables', 'value',
+                                         f'WHERE name = \'{variable_name}\' AND task_id = {self.task_id}')
+        if len(response) == 0:
+            raise Exception(f'Variable with name \'{variable_name}\' does not exist')
+        else:
+            value = response[0][0]
+            if value is None and default_value_if_temp_is_empty is True and len(response[0]) > 0:
+                value = response[0][1]
+            return value
+
+    def write_execution_log(self, message: str, data, loglevel: str = 'INFO', full_extract: bool = False):
+        """
+        Writes messages to the database. Give the message and the level of the log
+        :param message: A string with a message for the log
+        :param loglevel: You can choose between DEBUG, INFO, ERROR or CRITICAL (DEBUG is the most granular, CRITICAL the least)
+        :param data: Data uploaded by the interface that has to be logged in ElasticSearch; if you have nothing to log, use None
+        :param full_extract: If the data is a full load, set this to True. This will prevent the payload from being logged in ElasticSearch
+        :return: whether writing to the database is successful or not
+        """
+
+        # Validate that the provided loglevel is valid
+        allowed_loglevels = ['DEBUG', 'INFO', 'ERROR', 'CRITICAL']
+        if loglevel not in allowed_loglevels:
+            raise Exception('You\'ve entered a not allowed loglevel. Choose one of: {}'.format(allowed_loglevels))
+
+        # Handle the different data types and prepare extra payload information based on the data type
+        if isinstance(data, pd.Series):
+            dataframe = pd.DataFrame(data).T
+            extra_payload = {
+                'rows': len(dataframe),
+                'columns': len(dataframe.columns),
+                'cells': len(dataframe) * len(dataframe.columns),
+            }
+            if not full_extract:
+                extra_payload['payload'] = dataframe.to_json(orient='records')
+        elif isinstance(data, dict):
+            records = self.__count_keys(data)
+            extra_payload = {
+                'rows': 1,
+                'columns': records,
+                'cells': records,
+            }
+            if not full_extract:
+                extra_payload['payload'] = data
+        elif isinstance(data, pd.DataFrame):
+            extra_payload = {
+                'rows': len(data),
+                'columns': len(data.columns),
+                'cells': len(data) * len(data.columns),
+            }
+            if not full_extract:
+                extra_payload['payload'] = data.to_json(orient='records')
+        elif isinstance(data, requests.Response):
+            records = 1
+            if data.request.body is not None:
+                records = self.__count_keys(json.loads(data.request.body))
+                if isinstance(data.request.body, bytes):
+                    data.request.body = data.request.body.decode('utf-8')
+            extra_payload = {
+                'response': data.text,
+                'status_code': data.status_code,
+                'url': data.url,
+                'method': data.request.method,
+                'rows': 1,
+                'columns': records,
+                'cells': records,
+            }
+            if not full_extract:
+                extra_payload['payload'] = data.request.body
+        elif data is None:
+            extra_payload = {}
+        else:
+            extra_payload = {
+                'data_type': str(type(data)),
+            }
+            if not full_extract:
+                extra_payload['payload'] = data
+
+        # Modify the payload based on the 'full_load' flag
+        if data is not None and full_extract is True:
+            extra_payload['full_load'] = True
+        elif data is not None and full_extract is False:
+            extra_payload['full_load'] = False
+
+        # Prepare the primary payload with log details
+        payload = {
+            'reload_id': self.run_id,
+            'task_id': self.task_id,
+            'customer_id': os.getenv('BRYNQ_CUSTOMER_NAME').lower().replace(' ', '_'),
+            'started_at': datetime.datetime.now().isoformat(),
+            'loglevel': loglevel,
+            'message': message
+        }
+        payload.update(extra_payload)
+
+        # Send the payload to ElasticSearch
+        self.es.task_execution_log(payload)
+
+        # Get the line number from which the log line is executed. Get the stack trace of this action, jump 1 file up and then pick the line number (second item)
+        linenumber = inspect.getouterframes(inspect.currentframe())[1][2]
+        # Write the log line to the database, depending on the loglevel chosen in the task
+        print('{} at line: {}'.format(message, linenumber))
+        # Remove quotes from the message since these break the query
+        message = re.sub("[']", '', message)
+        timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
+        information = {
+            'reload_id': self.run_id,
+            'task_id': self.task_id,
+            'log_level': loglevel,
+            'line_number': linenumber,
+            'message': message,
+            'created_at': timestamp
+        }
+        if self.loglevel == 'DEBUG':
+            # Count the errors
+            if loglevel == 'ERROR' or loglevel == 'CRITICAL':
+                self.error_count += 1
+            return self.mysql.raw_query(
+                "INSERT INTO `task_execution_log` (reload_id, task_id, log_level, created_at, line_number, message) VALUES ({}, {}, '{}', '{}', {}, '{}')".format(self.run_id, self.task_id, loglevel, datetime.datetime.now(), linenumber, message), insert=True)
+            return self.mysql.update(table='task_execution_log',
+                                     columns=['reload_id', 'task_id', 'log_level', 'created_at', 'line_number', 'message'],
+                                     values=[self.run_id, self.task_id, loglevel, datetime.datetime.now(), linenumber, message])
+        elif self.loglevel == 'INFO' and (loglevel == 'INFO' or loglevel == 'ERROR' or loglevel == 'CRITICAL'):
+            # Count the errors
+            if loglevel == 'ERROR' or loglevel == 'CRITICAL':
+                self.error_count += 1
+            return self.mysql.raw_query(
+                "INSERT INTO `task_execution_log` (reload_id, task_id, log_level, created_at, line_number, message) VALUES ({}, {}, '{}', '{}', {}, '{}')".format(self.run_id, self.task_id, loglevel, datetime.datetime.now(), linenumber, message), insert=True)
+        elif self.loglevel == 'ERROR' and (loglevel == 'ERROR' or loglevel == 'CRITICAL'):
+            self.error_count += 1
+            return self.mysql.raw_query(
+                "INSERT INTO `task_execution_log` (reload_id, task_id, log_level, created_at, line_number, message) VALUES ({}, {}, '{}', '{}', {}, '{}')".format(self.run_id, self.task_id, loglevel, datetime.datetime.now(), linenumber, message), insert=True)
+        elif self.loglevel == 'CRITICAL' and loglevel == 'CRITICAL':
+            self.error_count += 1
+            return self.mysql.raw_query(
+                "INSERT INTO `task_execution_log` (reload_id, task_id, log_level, created_at, line_number, message) VALUES ({}, {}, '{}', '{}', {}, '{}')".format(self.run_id, self.task_id, loglevel, datetime.datetime.now(), linenumber, message), insert=True)
+
+    def update_execution_step(self, step_number: int):
+        """
+        Update the current step number in the task_scheduler table so that users in the frontend of BrynQ can see where a task is at any moment
+        :param step_number: Give only a number
+        :return: nothing
+        """
+        # Update the step number in the task_scheduler table
+        return self.mysql.update('task_scheduler', ['step_nr'], [step_number], 'WHERE `id` = {}'.format(self.task_id))
+
+    def error_handling(self, e: Exception, breaking=True, send_to_teams=False):
+        """
+        This function handles errors that occur in the scheduler. It logs the traceback, updates run statuses and notifies users
+        :param e: the Exception that is to be handled
+        :param breaking: Determines if the error is breaking or if the code will continue
+        :param send_to_teams: Determines if a notification is sent to Teams
+        :return: nothing
+        """
+
+        # Prepare the primary payload with error details for upload to Elastic
+        payload = {
+            'reload_id': self.run_id,
+            'task_id': self.task_id,
+            'customer_id': os.getenv('BRYNQ_CUSTOMER_NAME').lower().replace(' ', '_'),
+            'started_at': datetime.datetime.now().isoformat(),
+            'loglevel': 'CRITICAL',
+            'message': str(e),
+            'traceback': traceback.format_exc()
+        }
+
+        # Send the payload to ElasticSearch
+        self.es.task_execution_log(payload)
+
+        # Format the error to a somewhat readable format
+        exc_type, exc_obj, exc_tb = sys.exc_info()
+        error = str(e)[:400].replace('\'', '').replace('\"', '') + ' | Line: {}'.format(exc_tb.tb_lineno)
+        # Get the scheduler task details for logging
+        task_details = \
+            self.mysql.select('task_scheduler', 'docker_image, runfile_path', 'WHERE id = {}'.format(self.task_id))[0]
+        taskname = task_details[0]
+        customer = task_details[1].split('/')[-1].split('.')[0]
+
+        if breaking:
+            # Set the scheduler status to failed
+            self.mysql.update('task_scheduler', ['status', 'last_reload', 'last_error_message', 'step_nr'],
+                              ['IDLE', datetime.datetime.now(), 'Failed', 0],
+                              'WHERE `id` = {}'.format(self.task_id))
+            # Log to the database
+            timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
+            information = {
+                'reload_id': self.run_id,
+                'task_id': self.task_id,
+                'log_level': 'CRITICAL',
+                'line_number': exc_tb.tb_lineno,
+                'message': error,
+                'created_at': timestamp
+            }
+            self.mysql.raw_query(
+                "INSERT INTO `task_execution_log` (reload_id, task_id, log_level, created_at, line_number, message) VALUES ({}, {}, 'CRITICAL', '{}', {}, '{}')".format(self.run_id,
+                                                                                                                                                                        self.task_id,
+                                                                                                                                                                        datetime.datetime.now(),
+                                                                                                                                                                        exc_tb.tb_lineno,
+                                                                                                                                                                        error),
+                insert=True)
+            self.mysql.update(table='task_scheduler_log',
+                              columns=['reload_status', 'finished_at'],
+                              values=['Failed', f'{datetime.datetime.now()}'],
+                              filter=f'WHERE `reload_id` = {self.run_id}')
+            # Notify users on Teams and, if the variable self.email_after_errors is set to True, send an email with the message that the task has failed
+            if send_to_teams:
+                Functions.send_error_to_teams(database=customer, task_number=self.task_id, task_title=taskname)
+            if self.email_after_errors:
+                self.email_errors(failed=True)
+            # Remove the temp values from the variables table
+            self.mysql.raw_query(f'UPDATE `task_variables` SET temp_value = null WHERE task_id = {self.task_id}', insert=True)
+
+            # Start the chained tasks if there are tasks which should start when this one has failed
+            self.start_chained_tasks(finished_task_status='FAILED')
+
+            raise Exception(error)
+        else:
+            timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
+            information = {
+                'reload_id': self.run_id,
+                'task_id': self.task_id,
+                'log_level': 'CRITICAL',
+                'line_number': exc_tb.tb_lineno,
+                'message': error,
+                'created_at': timestamp
+            }
+            self.mysql.raw_query(
+                "INSERT INTO `task_execution_log` (reload_id, task_id, log_level, created_at, line_number, message) VALUES ({}, {}, 'CRITICAL', '{}', {}, '{}')".format(self.run_id,
+                                                                                                                                                                        self.task_id,
+                                                                                                                                                                        datetime.datetime.now(),
+                                                                                                                                                                        exc_tb.tb_lineno,
+                                                                                                                                                                        error),
+                insert=True)
+            if send_to_teams:
+                Functions.send_error_to_teams(database=customer, task_number=self.task_id, task_title=taskname)
+            self.error_count += 1
+
+    def finish_task(self, reload_instant=False, log_limit: typing.Optional[int] = 10000, log_date_limit: datetime.date = None):
+        """
+        At the end of the script, write the outcome to the database: whether the task finished with or without errors. Email a contact person if this variable is given in the
+        variables table. Also clean up the task_execution_log table, keeping at most log_limit lines
+        :return:
+        """
+        # If reload_instant is true, this adds an extra field 'run_instant' to the update query and sets the value to 1. This makes the task reload immediately after it's finished
+        field = ['run_instant', 'next_reload'] if reload_instant else []
+        value = ['1', datetime.datetime.now()] if reload_instant else []
+        if self.error_count > 0:
+            self.mysql.update('task_scheduler', ['status', 'last_reload', 'last_error_message', 'step_nr'],
+                              ['IDLE', datetime.datetime.now(), 'FinishedWithErrors', 0],
+                              'WHERE `id` = {}'.format(self.task_id))
+            self.mysql.update(table='task_scheduler_log',
+                              columns=['reload_status', 'finished_at'],
+                              values=['FinishedWithErrors', f'{datetime.datetime.now()}'],
+                              filter=f'WHERE `reload_id` = {self.run_id}')
+            # If the variable self.email_after_errors is set to True, send an email with the number of errors to the given user
+            if self.email_after_errors:
+                self.email_errors(failed=False)
+        else:
+            self.mysql.update(table='task_scheduler',
+                              columns=['status', 'last_reload', 'last_error_message', 'step_nr', 'stopped_by_user'] + field,
+                              values=['IDLE', datetime.datetime.now(), 'FinishedSucces', 0, 0] + value,
+                              filter='WHERE `id` = {}'.format(self.task_id))
+
+            self.mysql.update(table='task_scheduler_log',
+                              columns=['reload_status', 'finished_at'],
+                              values=['FinishedSuccess', f'{datetime.datetime.now()}'],
+                              filter=f'WHERE `reload_id` = {self.run_id}')
+
+        # Remove the temp values from the variables table
+        self.mysql.raw_query(f'UPDATE `task_variables` SET temp_value = null WHERE task_id = {self.task_id}', insert=True)
+
+        # Start the new task if there is a task which should start when this one is finished
+        self.start_chained_tasks(finished_task_status='SUCCESS')
+
+        # Clean up the execution log.
+        # Set this date filter above the actual delete filter because of the many uncooperative quotation marks involved in the whole filter
+        log_date_limit_filter = f"AND created_at >= \'{log_date_limit.strftime('%Y-%m-%d')}\'" if log_date_limit is not None else None
+        delete_filter = f"WHERE task_id = {self.task_id} " \
+                        f"AND reload_id NOT IN (SELECT reload_id FROM (SELECT reload_id FROM `task_execution_log` WHERE task_id = {self.task_id} " \
+                        f"AND log_level != 'CRITICAL' " \
+                        f"AND log_level != 'ERROR' " \
+                        f"{log_date_limit_filter if log_date_limit_filter is not None else ''} " \
+                        f"ORDER BY created_at DESC {f' LIMIT {log_limit} ' if log_limit is not None else ''}) temp)"
+
+        resp = self.mysql.delete(table="task_execution_log",
+                                 filter=delete_filter)
+        print(resp)
+
+    def start_chained_tasks(self, finished_task_status: str):
+        filter = f'WHERE start_after_task_id = \'{self.task_id}\' AND start_after_preceding_task = \'{finished_task_status}\''
+        response = self.mysql.select(table='task_scheduler', selection='id', filter=filter)
+        if len(response) > 0:
+            tasks_to_run = [str(task[0]) for task in response]
+            self.mysql.update(table='task_scheduler', columns=['run_instant'], values=['1'], filter=f'WHERE id IN({",".join(tasks_to_run)})')
+
+    def email_errors(self, failed):
+        # The addresses to email to should be stored in the task_variables table in the variable email_errors_to
+        email_variable = self.db_variable('email_errors_to')
+        if email_variable is not None:
+            email_to = email_variable.split(',')
+            if isinstance(email_to, list):
+                # The email_errors_to variable is a simple string. Convert it to a list and add a name because Mandrill asks for it
+                email_list = []
+                for i in email_to:
+                    email_list.append({'name': 'BrynQ User', 'mail': i.strip()})
+                # Set the content of the mail and everything else
+                task = self.mysql.select(table='task_scheduler', selection='title', filter=f'WHERE id = {self.task_id}')[0][0]
+                finished_at = \
+                    self.mysql.select(table='task_scheduler', selection='last_reload', filter=f'WHERE id = {self.task_id}')[0][0]
+                if failed:
+                    subject = f'Task \'{task}\' has failed'
+                    content = f'Task \'{task}\' with task ID \'{self.task_id}\' failed during its last run and was stopped at {finished_at}. ' \
+                              f'To visit the BrynQ scheduler, click <a href="https://app.brynq.com/interfaces/">here</a>. There you can find the logs and more information on why this task failed.'
+                else:
+                    subject = f'Task \'{task}\' finished with errors'
+                    content = f'Task \'{task}\' with ID \'{self.task_id}\' has run and finished at {finished_at}. ' \
+                              f'The task finished with {self.error_count} errors. ' \
+                              f'To visit the BrynQ scheduler, click <a href="https://app.brynq.com/interfaces/">here</a>. There you can find the logs and more information on why this task had errors.'
+                MailClient().send_mail(email_to=email_list, subject=subject, content=content, language='EN')
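Taken together, TaskScheduler implies a fixed lifecycle for an interface script: construct the scheduler (which writes the start log and sets the status to RUNNING), register the steps shown in the frontend, log progress per step, route exceptions through error_handling, and close out with finish_task. The sketch below is not part of the package; it is a hedged usage example assuming a reachable MySQL/Elasticsearch setup and an existing task row with id 1 (both hypothetical):

from brynq_sdk.task_scheduler import TaskScheduler

ts = TaskScheduler(task_id=1, loglevel='INFO', email_after_errors=True)

# Register the steps shown in the BrynQ frontend; 'nr' and 'description' are the required fields
ts.create_task_execution_steps([
    {'nr': 1, 'description': 'Extract data'},
    {'nr': 2, 'description': 'Upload data'},
])

try:
    ts.update_execution_step(1)
    recipients = ts.db_variable('email_errors_to')  # returns temp_value on manual runs, value otherwise
    ts.write_execution_log(message='Extraction finished', data=None, loglevel='INFO')
    ts.update_execution_step(2)
    # ... the actual interface work would go here ...
    ts.finish_task()
except Exception as e:
    # Logs the traceback to Elastic and MySQL, marks the run as Failed and re-raises because breaking=True
    ts.error_handling(e, breaking=True)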
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/PKG-INFO
@@ -0,0 +1,10 @@
+Metadata-Version: 1.0
+Name: brynq-sdk-task-scheduler
+Version: 1.0.0
+Summary: Code to execute tasks in BrynQ.com with the task scheduler
+Home-page: UNKNOWN
+Author: BrynQ
+Author-email: support@brynq.com
+License: BrynQ License
+Description: Code to execute tasks in the BrynQ.com platform with the task scheduler
+Platform: UNKNOWN
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/SOURCES.txt
@@ -0,0 +1,9 @@
+setup.py
+brynq_sdk/task_scheduler/__init__.py
+brynq_sdk/task_scheduler/task_scheduler.py
+brynq_sdk_task_scheduler.egg-info/PKG-INFO
+brynq_sdk_task_scheduler.egg-info/SOURCES.txt
+brynq_sdk_task_scheduler.egg-info/dependency_links.txt
+brynq_sdk_task_scheduler.egg-info/not-zip-safe
+brynq_sdk_task_scheduler.egg-info/requires.txt
+brynq_sdk_task_scheduler.egg-info/top_level.txt
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
+++ brynq_sdk_task_scheduler-1.0.0/brynq_sdk_task_scheduler.egg-info/top_level.txt
@@ -0,0 +1 @@
+brynq_sdk
+++ brynq_sdk_task_scheduler-1.0.0/setup.py
@@ -0,0 +1,21 @@
+from setuptools import setup
+
+
+setup(
+    name='brynq_sdk_task_scheduler',
+    version='1.0.0',
+    description='Code to execute tasks in BrynQ.com with the task scheduler',
+    long_description='Code to execute tasks in the BrynQ.com platform with the task scheduler',
+    author='BrynQ',
+    author_email='support@brynq.com',
+    packages=["brynq_sdk.task_scheduler"],
+    license='BrynQ License',
+    install_requires=[
+        'brynq-sdk-brynq>=1',
+        'brynq-sdk-functions>=1',
+        'salure-helpers-mysql>=1',
+        'salure-helpers-mandrill>=0',
+        'salure-helpers-elastic>=1'
+    ],
+    zip_safe=False,
+)