deltafi 0.109.0__py3-none-any.whl → 2.40.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- deltafi/__init__.py +3 -1
- deltafi/action.py +262 -102
- deltafi/actioneventqueue.py +29 -4
- deltafi/actiontype.py +7 -11
- deltafi/domain.py +241 -88
- deltafi/exception.py +1 -11
- deltafi/genericmodel.py +38 -0
- deltafi/input.py +6 -163
- deltafi/logger.py +16 -4
- deltafi/lookuptable.py +292 -0
- deltafi/metric.py +2 -2
- deltafi/plugin.py +374 -87
- deltafi/result.py +174 -172
- deltafi/resultmessage.py +56 -0
- deltafi/storage.py +20 -90
- deltafi/test_kit/__init__.py +19 -0
- deltafi/test_kit/assertions.py +56 -0
- deltafi/test_kit/compare_helpers.py +293 -0
- deltafi/test_kit/constants.py +23 -0
- deltafi/test_kit/egress.py +54 -0
- deltafi/test_kit/framework.py +390 -0
- deltafi/test_kit/timed_ingress.py +104 -0
- deltafi/test_kit/transform.py +103 -0
- deltafi/types.py +31 -0
- deltafi-2.40.0.dist-info/METADATA +82 -0
- deltafi-2.40.0.dist-info/RECORD +27 -0
- {deltafi-0.109.0.dist-info → deltafi-2.40.0.dist-info}/WHEEL +1 -1
- deltafi-0.109.0.dist-info/METADATA +0 -41
- deltafi-0.109.0.dist-info/RECORD +0 -15
deltafi/plugin.py
CHANGED
@@ -1,7 +1,7 @@
 #
 # DeltaFi - Data transformation and enrichment platform
 #
-# Copyright 2021-
+# Copyright 2021-2025 DeltaFi Contributors <deltafi@deltafi.org>
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,57 +16,189 @@
 # limitations under the License.
 #
 
+import importlib
+import inspect
 import json
 import os
+import pkgutil
 import sys
 import threading
 import time
 import traceback
+from datetime import datetime, timezone, timedelta
+from importlib import metadata
 from os.path import isdir, isfile, join
 from pathlib import Path
-from typing import List
+from typing import List, NamedTuple
 
-import pkg_resources
 import requests
+import yaml
+
+from deltafi.action import Action, Join
 from deltafi.actioneventqueue import ActionEventQueue
-from deltafi.domain import Event
-from deltafi.exception import ExpectedContentException,
-    MissingMetadataException
+from deltafi.domain import Event, ActionExecution
+from deltafi.exception import ExpectedContentException, MissingMetadataException
 from deltafi.logger import get_logger
-from deltafi.
+from deltafi.lookuptable import LookupTable, LookupTableClient, LookupTableEvent, LookupTableEventResult, LookupTableSupplier
+from deltafi.result import ErrorResult, IngressResult, TransformResult, TransformResults
 from deltafi.storage import ContentService
+from deltafi.types import PluginCoordinates
 
 
 def _coordinates():
-    return
-
-
-
-
+    return PluginCoordinates(os.getenv('PROJECT_GROUP'), os.getenv('PROJECT_NAME'), os.getenv('PROJECT_VERSION'))
+
+
+def _valid_file(filename: str):
+    return isfile(filename) and \
+        (filename.endswith(".json")
+         or filename.endswith(".yaml")
+         or filename.endswith(".yml"))
+
+
+def _read_valid_files(path: str):
+    """
+    Read the contents of a directory, and returns a filtered list of files
+    that can be read/parsed for plugin usage, and ignores everything else.
+    :param path: name of the directory to scan
+    :return: list of filtered, parsable files
+    """
+    files = []
+    if isdir(path):
+        files = [f for f in os.listdir(path) if _valid_file(join(path, f))]
+    return files
+
+
+def _load_resource(path: str, filename: str):
+    """
+    Read the content of a JSON or YAML file, and return a Python
+    object of its contents, typically as a dict or list.
+    To avoid exceptions, use only files returned by _read_valid_files().
+    :param path: directory which contains the file to load
+    :param filename: name of the file to load
+    :return: dict or list of file contents
+    """
+    with open(join(path, filename)) as file_in:
+        if filename.endswith(".json"):
+            return json.load(file_in)
+        elif filename.endswith(".yaml") or filename.endswith(".yml"):
+            results = []
+            yaml_docs = yaml.safe_load_all(file_in)
+            for doc_iter in yaml_docs:
+                # yaml_docs must be iterated
+                results.append(doc_iter)
+            if len(results) == 1:
+                # Single document YAML file
+                return results[0]
+            else:
+                # Multi-document YAML file
+                return results
+    raise RuntimeError(f"File type not supported: {filename}")
+
+
+def _load__all_resource(path: str, file_list: List[str]):
+    resources = []
+    for f in file_list:
+        r = _load_resource(path, f)
+        if isinstance(r, list):
+            resources.extend(r)
+        else:
+            resources.append(r)
+    return resources
+
+
+def _find_variables_filename(names: List[str]):
+    if 'variables.json' in names:
+        return 'variables.json'
+    elif 'variables.yaml' in names:
+        return 'variables.yaml'
+    elif 'variables.yml' in names:
+        return 'variables.yml'
+    else:
+        return None
 
 
 def _setup_queue(max_connections):
-
-    password = os.getenv('
-
+    url = os.getenv('VALKEY_URL', 'http://localhost:6379')
+    password = os.getenv('VALKEY_PASSWORD')
+    app_name = os.getenv('APP_NAME')
+    return ActionEventQueue(url, max_connections, password, app_name)
 
 
 def _setup_content_service():
-    minio_url = os.getenv('MINIO_URL', 'http://
+    minio_url = os.getenv('MINIO_URL', 'http://localhost:9000')
+    bucket_name = os.getenv('STORAGE_BUCKET_NAME', 'storage')
     return ContentService(minio_url,
                           os.getenv('MINIO_ACCESSKEY'),
-                          os.getenv('MINIO_SECRETKEY')
+                          os.getenv('MINIO_SECRETKEY'),
+                          bucket_name)
+
+
+class ActionThread(object):
+    def __init__(self, clazz: Action, thread_num: int, name: str, execution: ActionExecution = None):
+        self.clazz = clazz
+        self.thread_num = thread_num
+        self.name = name
+        self.execution = execution
+
+    def logger_name(self):
+        return f"{self.name}#{self.thread_num}"
+
+
+LONG_RUNNING_TASK_DURATION = timedelta(seconds=5)
 
 
 class Plugin(object):
-    def __init__(self,
-
-
-
+    def __init__(self, description: str, plugin_name: str = None, plugin_coordinates: PluginCoordinates = None,
+                 actions: List = None, action_package: str = None, lookup_table_suppliers_package: str = None,
+                 thread_config: dict = None):
+        """
+        Initialize the plugin object
+        :param plugin_name: Name of the plugin project
+        :param description: Description of the plugin
+        :param plugin_coordinates: plugin coordinates of the plugin, if None the coordinates must be defined in
+        environment variables
+        :param actions: list of action classes to run
+        :param action_package: name of the package containing the actions to run
+        :param lookup_table_suppliers_package name of the package containing lookup table suppliers
+        :param thread_config: map of action class name and thread count. Actions not found default to 1 thread.
+        """
+        self.logger = get_logger()
 
-        self.
-        self.queue =
-        self.
+        self.content_service = None
+        self.queue = None
+        self.singleton_actions = []
+        self.action_threads = []
+        self.thread_config = {}
+        if thread_config is not None:
+            self.thread_config = thread_config
+        self.core_url = os.getenv('CORE_URL', 'http://127.0.0.1:8042')
+        self.image = os.getenv('IMAGE')
+        self.image_pull_secret = os.getenv('IMAGE_PULL_SECRET')
+        action_classes = []
+        if actions is not None and len(actions):
+            action_classes.extend(actions)
+
+        if action_package is not None:
+            found_actions = Plugin.find_classes(action_package, Action)
+            if len(found_actions):
+                action_classes.extend(found_actions)
+
+        unique_actions = dict.fromkeys(action_classes)
+        self.singleton_actions = [action() for action in unique_actions]
+
+        self.lookup_table_suppliers = {}
+        if lookup_table_suppliers_package is not None:
+            lookup_table_client = LookupTableClient()
+            lookup_table_supplier_classes = Plugin.find_classes(lookup_table_suppliers_package, LookupTableSupplier)
+            self.logger.info(f"Found {len(lookup_table_supplier_classes)} suppliers")
+            for lookup_table_supplier_class in lookup_table_supplier_classes:
+                lookup_table_supplier = lookup_table_supplier_class(lookup_table_client)
+                self.lookup_table_suppliers[lookup_table_supplier.lookup_table.name] = lookup_table_supplier
+
+        self.description = description
+        self.display_name = os.getenv('PROJECT_NAME') if plugin_name is None else plugin_name
+        self.coordinates = _coordinates() if plugin_coordinates is None else plugin_coordinates
 
         if os.getenv('ACTIONS_HOSTNAME'):
            self.hostname = os.getenv('ACTIONS_HOSTNAME')
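The new module-level helpers above let flow and variable resources be JSON, single-document YAML, or multi-document YAML files. A minimal standalone sketch of that loader contract, assuming PyYAML is installed; the file name in the commented usage is hypothetical:

    import json
    import yaml  # PyYAML, now imported by plugin.py as well


    def load_resource(path: str):
        """Mirror of _load_resource: JSON and single-document YAML return one
        object; multi-document YAML returns a list of documents."""
        with open(path) as file_in:
            if path.endswith(".json"):
                return json.load(file_in)
            if path.endswith((".yaml", ".yml")):
                docs = list(yaml.safe_load_all(file_in))
                return docs[0] if len(docs) == 1 else docs
        raise RuntimeError(f"File type not supported: {path}")


    # Hypothetical usage: a multi-document flows/transform.yaml comes back as a list.
    # flows = load_resource("flows/transform.yaml")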
@@ -77,52 +209,102 @@ class Plugin(object):
         else:
             self.hostname = 'UNKNOWN'
 
-        self.logger
+        self.logger.debug(f"Initialized ActionRunner with actions {self.singleton_actions}")
+
+    @staticmethod
+    def find_classes(package_name: str, base_class: type) -> List[object]:
+        """
+        Find all concrete classes that extend the base class in the given package
+        :param package_name: name of the package to load and scan
+        :param base_class: the base class
+        :return: list of classes that extend the base class
+        """
+        package = importlib.import_module(package_name)
+        classes = []
+        visited = set()
+
+        # Iterate over all submodules in the package
+        for _, module_name, _ in pkgutil.walk_packages(package.__path__):
+            try:
+                module = importlib.import_module(package.__name__ + '.' + module_name)
+            except ModuleNotFoundError:
+                continue
+
+            # Iterate over all members in the module
+            for name, obj in inspect.getmembers(module):
+                if inspect.isclass(obj) and obj.__module__.startswith(package_name) and obj not in visited:
+                    if not inspect.isabstract(obj) and issubclass(obj, base_class):
+                        classes.append(obj)
+                        visited.add(obj)
 
-
+        return classes
 
     def action_name(self, action):
-        return f"{self.coordinates
+        return f"{self.coordinates.group_id}.{action.__class__.__name__}"
 
     def _action_json(self, action):
         return {
             'name': self.action_name(action),
-            'description': action.description,
             'type': action.action_type.name,
-            '
-            '
-            '
+            'supportsJoin': isinstance(action, Join),
+            'schema': action.param_class().model_json_schema(),
+            'actionOptions': action.action_options.json()
         }
 
-
+    @staticmethod
+    def load_integration_tests(tests_path: str):
+        test_files = _read_valid_files(tests_path)
+        return _load__all_resource(tests_path, test_files)
+
+    @staticmethod
+    def load_variables(flows_path: str, flow_files: List[str]):
+        variables = []
+        variables_filename = _find_variables_filename(flow_files)
+        if variables_filename is not None:
+            flow_files.remove(variables_filename)
+            variables = _load__all_resource(flows_path, [variables_filename])
+        return variables
+
+    def registration_json(self):
         flows_path = str(Path(os.path.dirname(os.path.abspath(sys.argv[0]))) / 'flows')
+        tests_path = str(Path(os.path.dirname(os.path.abspath(sys.argv[0]))) / 'integration')
 
-        flow_files = []
         variables = []
-
-
-
-
-            variables = json.load(open(join(flows_path, 'variables.json')))
+        flow_files = _read_valid_files(flows_path)
+        if len(flow_files) == 0:
+            self.logger.warning(
+                f"Flows directory ({flows_path}) does not exist or contains no valid files. No flows will be installed.")
         else:
-            self.
+            variables = self.load_variables(flows_path, flow_files)
 
-        flows =
-        actions = [self._action_json(action) for action in self.
+        flows = _load__all_resource(flows_path, flow_files)
+        actions = [self._action_json(action) for action in self.singleton_actions]
+        lookup_tables = [lookup_table_supplier.lookup_table.json() for lookup_table_supplier in self.lookup_table_suppliers.values()]
 
+        test_files = self.load_integration_tests(tests_path)
+        if len(test_files) == 0:
+            self.logger.warning(
+                f"tests directory ({tests_path}) does not exist or contains no valid files. No tests will be installed.")
+
+        return {
+            'pluginCoordinates': self.coordinates.json(),
+            'displayName': self.display_name,
+            'description': self.description,
+            'actionKitVersion': metadata.version('deltafi'),
+            'image': self.image,
+            'imagePullSecret': self.image_pull_secret,
+            'dependencies': [],
+            'actions': actions,
+            'lookupTables': lookup_tables,
+            'variables': variables,
+            'flowPlans': flows,
+            'integrationTests': test_files
+        }
+
+    def _register(self):
         url = f"{self.core_url}/plugins"
-        headers = {'Content-
-        registration_json =
-        {
-            'pluginCoordinates': self.coordinates,
-            'displayName': os.getenv('PROJECT_NAME'),
-            'description': self.description,
-            'actionKitVersion': pkg_resources.get_distribution('deltafi').version,
-            'dependencies': [],
-            'actions': actions,
-            'variables': variables,
-            'flowPlans': flows
-        }
+        headers = {'Content-Type': 'application/json'}
+        registration_json = self.registration_json()
 
         self.logger.info(f"Registering plugin:\n{registration_json}")
 
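The `action_package` and `lookup_table_suppliers_package` constructor arguments rely on the new `find_classes` helper above, which walks a package's submodules and collects every concrete subclass of a base class. A standalone sketch of that same discovery pattern; the package name in the commented usage is a stand-in, not taken from the diff:

    import importlib
    import inspect
    import pkgutil


    def find_concrete_subclasses(package_name: str, base_class: type) -> list:
        """Walk a package's submodules and collect concrete subclasses of base_class."""
        package = importlib.import_module(package_name)
        found = []
        for _, module_name, _ in pkgutil.walk_packages(package.__path__):
            try:
                module = importlib.import_module(f"{package.__name__}.{module_name}")
            except ModuleNotFoundError:
                continue
            for _, obj in inspect.getmembers(module, inspect.isclass):
                if (obj.__module__.startswith(package_name)
                        and not inspect.isabstract(obj)
                        and issubclass(obj, base_class)
                        and obj not in found):
                    found.append(obj)
        return found


    # Hypothetical usage: discover every concrete Action under my_plugin.actions.
    # from deltafi.action import Action
    # actions = find_concrete_subclasses("my_plugin.actions", Action)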
@@ -131,59 +313,116 @@ class Plugin(object):
             self.logger.error(f"Failed to register plugin ({response.status_code}):\n{response.content}")
             exit(1)
 
-        self.logger.info(
+        self.logger.info("Plugin registered")
 
     def run(self):
-        self.logger.info(
+        self.logger.info("Plugin starting")
+
+        for action in self.singleton_actions:
+            num_threads = 1
+            if self.action_name(action) in self.thread_config:
+                maybe_num_threads = self.thread_config[self.action_name(action)]
+                if type(maybe_num_threads) is int and maybe_num_threads > 0:
+                    num_threads = maybe_num_threads
+                else:
+                    self.logger.error(f"Ignoring non-int or invalid thread value {maybe_num_threads}")
+            for i in range(num_threads):
+                action_thread = ActionThread(action, i, self.action_name(action))
+                self.action_threads.append(action_thread)
+
+        total_threads = len(self.action_threads) + 1
+        if len(self.lookup_table_suppliers) > 0:
+            total_threads += 1
+
+        self.queue = _setup_queue(total_threads)
+        self.content_service = _setup_content_service()
+
+        if len(self.lookup_table_suppliers) > 0:
+            threading.Thread(target=self._handle_lookup_table_supplier_events).start()
+
+        for action_thread in self.action_threads:
+            threading.Thread(target=self._do_action, args=(action_thread,)).start()
+
         self._register()
-        for action in self.actions:
-            threading.Thread(target=self._do_action, args=(action,)).start()
 
-        threading.Thread(target=self._heartbeat)
+        hb_thread = threading.Thread(target=self._heartbeat)
+        hb_thread.start()
 
-        self.logger.info(
+        self.logger.info("All threads running")
 
         f = open("/tmp/running", "w")
         f.close()
 
-        self.logger.info(
+        self.logger.info("Application initialization complete")
+        hb_thread.join()
 
     def _heartbeat(self):
+        long_running_actions = set()
         while True:
             try:
-
-
+                # Set heartbeats
+                for action_thread in self.action_threads:
+                    self.queue.heartbeat(action_thread.name)
+
+                # Record long running tasks
+                new_long_running_actions = set()
+                for action_thread in self.action_threads:
+                    action_execution = action_thread.execution
+                    if action_execution and action_execution.exceeds_duration(LONG_RUNNING_TASK_DURATION):
+                        new_long_running_actions.add(action_execution)
+                        self.queue.record_long_running_task(action_execution)
+
+                # Remove old long running tasks
+                tasks_to_remove = long_running_actions - new_long_running_actions
+                for action_execution in tasks_to_remove:
+                    self.queue.remove_long_running_task(action_execution)
+
+                long_running_actions = new_long_running_actions
+
             except Exception as e:
-                self.logger.error(f"Failed to register action queue heartbeat: {e}", e)
+                self.logger.error(f"Failed to register action queue heartbeat or record long running tasks: {e}", e)
             finally:
                 time.sleep(10)
 
-
-
+    @staticmethod
+    def to_response(event, start_time, stop_time, result):
+        response = {
+            'did': event.context.did,
+            'flowName': event.context.flow_name,
+            'flowId': event.context.flow_id,
+            'actionName': event.context.action_name,
+            'start': start_time,
+            'stop': stop_time,
+            'type': result.result_type,
+            'messages': [message.json() for message in result.messages],
+            'metrics': [metric.json() for metric in result.metrics]
+        }
+        if result.result_key is not None:
+            response[result.result_key] = result.response()
+        return response
+
+    def _do_action(self, action_thread: ActionThread):
+        action_logger = get_logger(action_thread.logger_name())
+        action_logger.info(f"Listening on {action_thread.name}")
 
-        action_logger.info(f"Listening on {self.action_name(action)}")
         while True:
             try:
-                event_string = self.queue.take(
-                event = Event.create(json.loads(event_string), self.
+                event_string = self.queue.take(action_thread.name)
+                event = Event.create(json.loads(event_string), self.content_service, action_logger)
                 start_time = time.time()
                 action_logger.debug(f"Processing event for did {event.context.did}")
 
+                action_thread.execution = ActionExecution(action_thread.name, event.context.action_name,
+                                                          action_thread.thread_num, event.context.did,
+                                                          datetime.now(timezone.utc))
+
                 try:
-                    result =
+                    result = action_thread.clazz.execute_action(event)
                 except ExpectedContentException as e:
                     result = ErrorResult(event.context,
                                          f"Action attempted to look up element {e.index + 1} (index {e.index}) from "
                                          f"content list of size {e.size}",
                                          f"{str(e)}\n{traceback.format_exc()}")
-                except MissingDomainException as e:
-                    result = ErrorResult(event.context,
-                                         f"Action attempted to access domain {e.name}, which does not exist",
-                                         f"{str(e)}\n{traceback.format_exc()}")
-                except MissingEnrichmentException as e:
-                    result = ErrorResult(event.context,
-                                         f"Action attempted to access enrichment {e.name}, which does not exist",
-                                         f"{str(e)}\n{traceback.format_exc()}")
                 except MissingMetadataException as e:
                     result = ErrorResult(event.context,
                                          f"Missing metadata with key {e.key}",
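`run()` now creates one `ActionThread` per configured worker. The `thread_config` keys must match the fully qualified names produced by `action_name()`, i.e. `{group_id}.{ClassName}`; missing entries or non-positive values fall back to a single thread. A sketch of such a mapping, with hypothetical group and class names:

    # Keys must equal Plugin.action_name(action): f"{group_id}.{ClassName}".
    # The group id and action class names below are illustrative only.
    thread_config = {
        "org.deltafi.example.HelloWorldTransformAction": 4,  # four worker threads
        "org.deltafi.example.SlowEgressAction": 2,           # two worker threads
        # Any action not listed here defaults to 1 thread.
    }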
@@ -192,16 +431,12 @@ class Plugin(object):
                     result = ErrorResult(event.context,
                                          f"Action execution {type(e)} exception", f"{str(e)}\n{traceback.format_exc()}")
 
-
-
-
-
-
-
-                    'metrics': [metric.json() for metric in result.metrics]
-                }
-                if result.result_key is not None:
-                    response[result.result_key] = result.response()
+                action_thread.execution = None
+
+                response = Plugin.to_response(
+                    event, start_time, time.time(), result)
+
+                Plugin.orphaned_content_check(action_logger, event.context, result, response)
 
                 topic = 'dgs'
                 if event.return_address:
@@ -210,3 +445,55 @@ class Plugin(object):
             except BaseException as e:
                 action_logger.error(f"Unexpected {type(e)} error: {str(e)}\n{traceback.format_exc()}")
                 time.sleep(1)
+
+    def _handle_lookup_table_supplier_events(self):
+        event_keys = ["lookup-table-event-" + key for key in self.lookup_table_suppliers.keys()]
+        self.logger.debug(f"Listening for the following lookup table supplier events: {event_keys}")
+        while True:
+            event_string = self.queue.take(event_keys)
+            lookup_table_event = LookupTableEvent.create(json.loads(event_string))
+            rows = self.lookup_table_suppliers[lookup_table_event.lookup_table_name].get_rows(
+                lookup_table_event.variables, lookup_table_event.matching_column_values,
+                lookup_table_event.result_columns)
+            lookup_table_event_result = LookupTableEventResult(lookup_table_event_id=lookup_table_event.id,
+                                                               lookup_table_name=lookup_table_event.lookup_table_name, rows=rows)
+            self.queue.put(lookup_table_event.id, json.dumps(lookup_table_event_result.json()))
+
+    @staticmethod
+    def orphaned_content_check(logger, context, result, response):
+        if len(context.saved_content) > 0:
+            to_delete = Plugin.find_unused_content(context.saved_content, result)
+            if len(to_delete) > 0:
+                errors = context.content_service.delete_all(to_delete)
+                for e in errors:
+                    logger.error(f"Unable to delete object(s), {e}")
+                logger.warning(
+                    f"Deleted {len(to_delete)} unused content entries for did {context.did} due to a {response['type']} event by {response['actionName']}")
+
+    @staticmethod
+    def find_unused_content(saved_content, result):
+        segments_in_use = Plugin.used_segment_names(result)
+        saved_segments = Plugin.get_segment_names(saved_content)
+        to_delete = []
+        for key, value in saved_segments.items():
+            if key not in segments_in_use:
+                to_delete.append(value)
+        return to_delete
+
+    @staticmethod
+    def used_segment_names(result):
+        segment_names = {}
+        if isinstance(result, TransformResult):
+            segment_names.update(result.get_segment_names())
+        elif isinstance(result, TransformResults):
+            segment_names.update(result.get_segment_names())
+        elif isinstance(result, IngressResult):
+            segment_names.update(result.get_segment_names())
+        return segment_names
+
+    @staticmethod
+    def get_segment_names(content_list):
+        segment_names = {}
+        for content in content_list:
+            segment_names.update(content.get_segment_names())
+        return segment_names