brynq-sdk-elastic 2.0.1__tar.gz → 2.2.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/PKG-INFO +1 -1
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk/elastic/elastic.py +139 -79
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk_elastic.egg-info/PKG-INFO +1 -1
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/setup.py +1 -1
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk/elastic/__init__.py +0 -0
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk_elastic.egg-info/SOURCES.txt +0 -0
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk_elastic.egg-info/dependency_links.txt +0 -0
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk_elastic.egg-info/not-zip-safe +0 -0
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk_elastic.egg-info/requires.txt +0 -0
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk_elastic.egg-info/top_level.txt +0 -0
- {brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/setup.cfg +0 -0
{brynq_sdk_elastic-2.0.1 → brynq_sdk_elastic-2.2.1}/brynq_sdk/elastic/elastic.py

@@ -9,17 +9,33 @@ import os
 
 
 class Elastic:
-    def __init__(self, api_key: str = None, customer_name: str = None, space_name: str = None):
+    def __init__(self, api_key: str = None, customer_name: str = None, space_name: str = None, disabled: bool = False):
         """
         A package to create indexes, users, roles, getting data, etc.
         :param api_key: The api key to connect to elasticsearch if not provided in the .env file
         """
         try:
             self.verify = False
-
-
-
-
+            self.disabled = disabled
+            elasticsearch_host = os.getenv("ELASTIC_HOST")
+            elasticsearch_port = os.getenv("ELASTIC_PORT")
+            kibana_port = os.getenv("KIBANA_PORT")
+            elastic_token = os.getenv('ELASTIC_API_KEY', api_key)
+
+            if not self.disabled:
+                # Check for missing environment variables and show warnings
+                if elasticsearch_host is None:
+                    raise KeyError("Environment variable ELASTIC_HOST is not set. Please set it and try again")
+                if elasticsearch_port is None:
+                    elasticsearch_port = 9200
+                    warnings.warn("Environment variable ELASTIC_PORT is not set. Using default port 9200")
+                if kibana_port is None:
+                    kibana_port = 5601
+                    warnings.warn("Environment variable KIBANA_PORT is not set. Using default port 5601")
+                if elastic_token is None:
+                    raise KeyError("Environment variable ELASTIC_API_KEY is not set and no api_key is provided. Please specify either one and try again")
+                if os.getenv("ELASTIC_SPACE") is None:
+                    warnings.warn("Environment variable ELASTIC_SPACE is not set. Using 'default'")
 
             # Build the host URLs
             self.elasticsearch_host = f'https://{elasticsearch_host}:{elasticsearch_port}'
@@ -28,17 +44,6 @@ class Elastic:
             self.space_name = os.getenv('ELASTIC_SPACE', 'default') if space_name is None else space_name
             self.client_user = os.getenv('BRYNQ_SUBDOMAIN', 'default').lower().replace(' ', '_') if customer_name is None else customer_name.lower().replace(' ', '_')
 
-            # Check for missing environment variables and show warnings
-            if os.getenv("ELASTIC_HOST") is None:
-                warnings.warn("Environment variable ELASTIC_HOST is not set. Using default: localhost")
-            if os.getenv("KIBANA_PORT") is None:
-                warnings.warn("Environment variable ELASTIC_PORT is not set. Using default: 9200")
-            if os.getenv("KIBANA_PORT") is None:
-                warnings.warn("Environment variable KIBANA_PORT is not set. Using default: 5601")
-            if elastic_token is None:
-                raise KeyError("Environment variable ELASTIC_API_KEY is not set and no api_key is provided. Please specify either one and try again")
-            if os.getenv("ELASTIC_SPACE") is None:
-                warnings.warn("Environment variable ELASTIC_SPACE is not set. Using 'default'")
             if self.client_user == 'default':
                 warnings.warn("Environment variable BRYNQ_SUBDOMAIN is not set and customer_name is not specified. Using 'default'")
 
@@ -54,8 +59,9 @@ class Elastic:
                 'Authorization': f'ApiKey {self.elastic_token}',
                 'kbn-xsrf': 'true'
             }
-            self.
-
+            if not self.disabled:
+                self.get_health()
+                self.create_space(space_name=self.space_name)
         except Exception as e:
             raise ConnectionError('Could not establish a connection: {}'.format(str(e)))
 
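Taken together, the constructor hunks above introduce a disabled switch: when it is set, the environment-variable validation, the startup health check and the automatic space creation are all skipped. A minimal usage sketch follows; the import path is an assumption based on the file location brynq_sdk/elastic/elastic.py, and the host values are placeholders.

import os

from brynq_sdk.elastic.elastic import Elastic  # assumed import path

# Environment variables read by the 2.2.1 constructor (names taken from the diff above)
os.environ["ELASTIC_HOST"] = "elastic.example.com"   # required unless disabled=True
os.environ["ELASTIC_PORT"] = "9200"                  # optional, falls back to 9200 with a warning
os.environ["KIBANA_PORT"] = "5601"                   # optional, falls back to 5601 with a warning
os.environ["ELASTIC_API_KEY"] = "<api-key>"          # or pass api_key=... to the constructor

# Regular client: __init__ calls get_health() and create_space(), so it needs a reachable cluster
elastic = Elastic(customer_name="Example Customer", space_name="example-space")

# New in 2.2.1: a no-op client for local runs and tests; all startup network calls are skipped
offline = Elastic(disabled=True)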
@@ -65,6 +71,9 @@ class Elastic:
         :return: if the connection is established or not
         """
         # Get the health of the database connection
+        if self.disabled:
+            return 'Healthy connection established with elasticsearch!'
+
         try:
             health = requests.get(url=f'{self.elasticsearch_host}/_cat/health?', headers=self.elastic_headers, verify=self.verify).status_code
             if health != 200:
@@ -87,26 +96,34 @@ class Elastic:
         :param space_name: The name of the space
         :return: The status of the creation of the space
         """
-
-
-
-
-
-
-
-
-
+        try:
+            if self.disabled:
+                return 'Space creation disabled'
+
+            url = f'{self.kibana_host}/api/spaces/space'
+            data = {
+                "id": space_name,
+                "name": space_name,
+                "description": f"This is the space for {space_name}",
+                "color": "#aabbcc",
+                "initials": space_name[0:2].upper(),
+                "disabledFeatures": [],
+            }
 
-
+            response = requests.head(url=url + fr'/{space_name}', headers=self.kibana_headers, verify=self.verify)
 
-        if response.status_code == 200:
-            return f'Index \'{space_name}\' already exists'
-        else:
-            response = requests.post(url=url, headers=self.kibana_headers, data=json.dumps(data), verify=self.verify)
             if response.status_code == 200:
-                return f'
+                return f'Index \'{space_name}\' already exists'
             else:
-
+                response = requests.post(url=url, headers=self.kibana_headers, data=json.dumps(data), verify=self.verify)
+                if response.status_code == 200:
+                    return f'space {space_name} created'
+                else:
+                    raise ConnectionError(f'Could not create space {space_name} with status code: {response.status_code}. Response: {response.text}')
+        except:
+            message = "Could not create space, since this is not strictly necessary to write logs, continue without it"
+            print(message)
+            return message
 
     def create_data_view(self, space_name: str, view_name: str, name: str, time_field: str) -> str:
         """
@@ -116,26 +133,34 @@ class Elastic:
         :param time_field: The name of the time field
         :return: The status of the creation of the data view
         """
-
-
-
-
-
-
-        "
+        try:
+            if self.disabled:
+                return 'Data view creation disabled'
+
+            url = f'{self.kibana_host}/s/{space_name}/api/data_views/data_view'
+            data = {
+                "data_view": {
+                    "title": f'{view_name}*',
+                    "id": f'{view_name}',
+                    "name": f'{name}',
+                    "timeFieldName": time_field
+                }
             }
-        }
 
-
+            response = requests.head(url=url + fr'/{view_name}', headers=self.kibana_headers, verify=self.verify)
 
-        if response.status_code == 200:
-            return f'Data view \'{view_name}\' already exists'
-        else:
-            response = requests.post(url=url, headers=self.kibana_headers, data=json.dumps(data), verify=self.verify)
             if response.status_code == 200:
-                return f'
+                return f'Data view \'{view_name}\' already exists'
             else:
-
+                response = requests.post(url=url, headers=self.kibana_headers, data=json.dumps(data), verify=self.verify)
+                if response.status_code == 200:
+                    return f'data view {view_name} created'
+                else:
+                    raise ConnectionError(f'Could not create data view {view_name} with status code: {response.status_code}. Response: {response.text}')
+        except:
+            message = "Could not create data view, since this is not strictly necessary to write logs, continue without it"
+            print(message)
+            return message
 
     def get_all_docs_from_index(self, index: str) -> pd.DataFrame:
         """
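create_space and create_data_view now share the same shape: an early return when the client is disabled, a HEAD probe to detect an existing space or view, and a bare except that turns any failure into a printed message instead of an exception, since neither step is required for writing logs. A hedged sketch of how a caller might use the two methods; the elastic instance, the space and view names, and the time_field value are illustrative only:

# `elastic` is an Elastic instance as constructed above (illustrative names)
space_status = elastic.create_space(space_name="example-space")
print(space_status)  # e.g. "space example-space created", "Index 'example-space' already exists",
                     # "Space creation disabled", or the fallback "Could not create space, ..." message

view_status = elastic.create_data_view(
    space_name="example-space",
    view_name="task_execution_log_example_customer",  # matched against indices as '<view_name>*'
    name="Task execution log",
    time_field="timestamp",  # assumed field name; the diff only shows it being passed as timeFieldName
)
print(view_status)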
@@ -143,6 +168,9 @@ class Elastic:
         :param index: the name of the index
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return pd.DataFrame()
+
         size = 10000
 
         # Get all indices with the given index from the function parameter. For each day a new index.
@@ -185,6 +213,9 @@ class Elastic:
         :param index_name: The index you want to delete
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return 'Index deletion disabled'
+
         # Check if index exists
         url = f'{self.elasticsearch_host}/{index_name}'
         response = requests.head(url=url, headers=self.elastic_headers, verify=self.verify)
@@ -205,6 +236,9 @@ class Elastic:
         :param index_name: The name of the desired index
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return 'Index creation disabled'
+
         url = f'{self.elasticsearch_host}/{index_name}'
         response = requests.head(url=url, headers=self.elastic_headers, verify=self.verify)
 
@@ -224,47 +258,58 @@ class Elastic:
         :param index: one or more index names in a list.
         :return: The response of the request to elasticsearch
         """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            'discover': ['read']
+        try:
+            if self.disabled:
+                return 'Role creation disabled'
+
+            url = f'{self.kibana_host}/api/security/role/{role_name}'
+            # Set the body
+            body = {
+                'elasticsearch': {
+                    'cluster': ['transport_client'],
+                    'indices': [
+                        {
+                            'names': [index],
+                            'privileges': ['read', 'write', 'read_cross_cluster', 'view_index_metadata', 'index']
+                        }
+                    ]
                 },
-        '
-
-
-
+                'kibana': [{
+                    'feature': {
+                        'dashboard': ['read'],
+                        'discover': ['read']
+                    },
+                    'spaces': [role_name],
+                }],
+                'metadata': {
+                    'version': 1
+                }
             }
-
-        body = json.dumps(body)
+            body = json.dumps(body)
 
-
+            response = requests.head(url=url, headers=self.kibana_headers, verify=self.verify)
 
-
-
-        else:
-            response = requests.put(url=url, data=body, headers=self.kibana_headers, verify=self.verify)
-            if response.status_code == 204:
-                return f'Role {role_name} created'
+            if response.status_code == 200:
+                return f'Role \'{role_name}\' already exists'
             else:
-
+                response = requests.put(url=url, data=body, headers=self.kibana_headers, verify=self.verify)
+                if response.status_code == 204:
+                    return f'Role {role_name} created'
+                else:
+                    raise ConnectionError(f'Could not create role {role_name} with status code: {response.status_code}. Response: {response.text}')
+        except:
+            message = "Could not create role, since this is not strictly necessary to write logs, continue without it"
+            print(message)
+            return message
 
     def get_indices(self) -> dict:
         """
         Get all the indices in the elasticsearch instance
         :return: A dictionary with all the indices
         """
+        if self.disabled:
+            return {}
+
         indices = requests.get(url=f'{self.elasticsearch_host}/_cat/indices?format=json', headers=self.elastic_headers, verify=self.verify).json()
         return indices
 
@@ -277,6 +322,9 @@ class Elastic:
         :param roles: Give the roles to which the user belongs in a list. Most often the same role_name as the user_name
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return 'User creation disabled'
+
         url = f'{self.elasticsearch_host}/_security/user/{user_name}'
         body = {
             'password': f'{password}',
@@ -303,6 +351,9 @@ class Elastic:
         :param document: The document to be posted
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return None
+
         url = f'{self.elasticsearch_host}/{index_name}/_doc/'
         body = json.dumps(document)
         response = requests.post(url=url, data=body, headers=self.elastic_headers, verify=self.verify)
@@ -315,6 +366,9 @@ class Elastic:
         :param document_id: The id of the document to be retrieved
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return None
+
         url = f'{self.elasticsearch_host}/{index_name}/_doc/{document_id}'
         response = requests.get(url=url, headers=self.elastic_headers, verify=self.verify)
         return response
@@ -326,6 +380,9 @@ class Elastic:
         :param document_id: The id of the document to be deleted
         :return: The response of the request to elasticsearch
         """
+        if self.disabled:
+            return None
+
         url = f'{self.elasticsearch_host}/{index_name}/_doc/{document_id}'
         response = requests.delete(url=url, headers=self.elastic_headers, verify=self.verify)
         return response
@@ -336,6 +393,9 @@ class Elastic:
         :param information: the information to be inserted into the database.
         :return: the response of the post request
        """
+        if self.disabled:
+            return None
+
         # Add new document
         url = f'{self.elasticsearch_host}/task_execution_log_{self.client_user}/_doc/'
         body = json.dumps(information)
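The remaining hunks give every document-level helper (posting, fetching and deleting documents, and the task-execution-log insert) the same guard: when the client was constructed with disabled=True the method returns early with None instead of a requests.Response, so callers should check before reading the response. A small defensive sketch; post_document is a hypothetical method name used purely for illustration, since the hunks above show the request logic but not the methods' names:

# Hypothetical method name; the diff shows the _doc POST logic but not what the method is called.
response = elastic.post_document(
    index_name="task_execution_log_example_customer",
    document={"task": "sync", "status": "ok"},
)

# In 2.2.1 the document helpers return None on a disabled client, so guard before
# touching the requests.Response
if response is not None and response.status_code in (200, 201):
    print("document indexed")
else:
    print("skipped (disabled client) or indexing failed")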