flowcept 0.8.10__py3-none-any.whl → 0.8.12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowcept/__init__.py +7 -4
- flowcept/agents/__init__.py +5 -0
- flowcept/agents/agent_client.py +58 -0
- flowcept/agents/agents_utils.py +181 -0
- flowcept/agents/dynamic_schema_tracker.py +191 -0
- flowcept/agents/flowcept_agent.py +30 -0
- flowcept/agents/flowcept_ctx_manager.py +175 -0
- flowcept/agents/gui/__init__.py +5 -0
- flowcept/agents/gui/agent_gui.py +76 -0
- flowcept/agents/gui/gui_utils.py +239 -0
- flowcept/agents/llms/__init__.py +1 -0
- flowcept/agents/llms/claude_gcp.py +139 -0
- flowcept/agents/llms/gemini25.py +119 -0
- flowcept/agents/prompts/__init__.py +1 -0
- flowcept/agents/prompts/general_prompts.py +69 -0
- flowcept/agents/prompts/in_memory_query_prompts.py +297 -0
- flowcept/agents/tools/__init__.py +1 -0
- flowcept/agents/tools/general_tools.py +102 -0
- flowcept/agents/tools/in_memory_queries/__init__.py +1 -0
- flowcept/agents/tools/in_memory_queries/in_memory_queries_tools.py +704 -0
- flowcept/agents/tools/in_memory_queries/pandas_agent_utils.py +309 -0
- flowcept/cli.py +459 -17
- flowcept/commons/daos/docdb_dao/mongodb_dao.py +47 -0
- flowcept/commons/daos/keyvalue_dao.py +19 -23
- flowcept/commons/daos/mq_dao/mq_dao_base.py +49 -38
- flowcept/commons/daos/mq_dao/mq_dao_kafka.py +20 -3
- flowcept/commons/daos/mq_dao/mq_dao_mofka.py +4 -0
- flowcept/commons/daos/mq_dao/mq_dao_redis.py +38 -5
- flowcept/commons/daos/redis_conn.py +47 -0
- flowcept/commons/flowcept_dataclasses/task_object.py +50 -27
- flowcept/commons/flowcept_dataclasses/workflow_object.py +9 -1
- flowcept/commons/settings_factory.py +2 -4
- flowcept/commons/task_data_preprocess.py +400 -0
- flowcept/commons/utils.py +26 -7
- flowcept/configs.py +48 -29
- flowcept/flowcept_api/flowcept_controller.py +102 -18
- flowcept/flowceptor/adapters/base_interceptor.py +24 -11
- flowcept/flowceptor/adapters/brokers/__init__.py +1 -0
- flowcept/flowceptor/adapters/brokers/mqtt_interceptor.py +132 -0
- flowcept/flowceptor/adapters/mlflow/mlflow_interceptor.py +3 -3
- flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py +3 -3
- flowcept/flowceptor/consumers/agent/__init__.py +1 -0
- flowcept/flowceptor/consumers/agent/base_agent_context_manager.py +125 -0
- flowcept/flowceptor/consumers/base_consumer.py +94 -0
- flowcept/flowceptor/consumers/consumer_utils.py +5 -4
- flowcept/flowceptor/consumers/document_inserter.py +135 -36
- flowcept/flowceptor/telemetry_capture.py +6 -3
- flowcept/instrumentation/flowcept_agent_task.py +294 -0
- flowcept/instrumentation/flowcept_decorator.py +43 -0
- flowcept/instrumentation/flowcept_loop.py +3 -3
- flowcept/instrumentation/flowcept_task.py +64 -24
- flowcept/instrumentation/flowcept_torch.py +5 -5
- flowcept/instrumentation/task_capture.py +87 -4
- flowcept/version.py +1 -1
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/METADATA +48 -11
- flowcept-0.8.12.dist-info/RECORD +101 -0
- resources/sample_settings.yaml +46 -14
- flowcept/flowceptor/adapters/zambeze/__init__.py +0 -1
- flowcept/flowceptor/adapters/zambeze/zambeze_dataclasses.py +0 -41
- flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py +0 -102
- flowcept-0.8.10.dist-info/RECORD +0 -75
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/WHEEL +0 -0
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/entry_points.txt +0 -0
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/licenses/LICENSE +0 -0
flowcept/commons/daos/mq_dao/mq_dao_base.py:

```diff
@@ -1,15 +1,12 @@
 """MQ base module."""
 
-from abc import ABC, abstractmethod
+from abc import abstractmethod
 from typing import Union, List, Callable
 import csv
 import msgpack
 from time import time
 import flowcept.commons
 from flowcept.commons.autoflush_buffer import AutoflushBuffer
-
-from flowcept.commons.daos.keyvalue_dao import KeyValueDAO
-
 from flowcept.commons.utils import chunked
 from flowcept.commons.flowcept_logger import FlowceptLogger
 from flowcept.configs import (
@@ -20,12 +17,15 @@ from flowcept.configs import (
     MQ_CHUNK_SIZE,
     MQ_TYPE,
     MQ_TIMING,
+    KVDB_ENABLED,
+    MQ_ENABLED,
+    DUMP_BUFFER_PATH,
 )
 
 from flowcept.commons.utils import GenericJSONEncoder
 
 
-class MQDao(ABC):
+class MQDao(object):
     """MQ base class."""
 
     ENCODER = GenericJSONEncoder if JSON_SERIALIZER == "complex" else None
@@ -34,6 +34,9 @@ class MQDao(ABC):
     @staticmethod
     def build(*args, **kwargs) -> "MQDao":
         """Build it."""
+        if not MQ_ENABLED:
+            return MQDao()
+
         if MQ_TYPE == "redis":
             from flowcept.commons.daos.mq_dao.mq_dao_redis import MQDaoRedis
 
```
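With `MQ_ENABLED` off, `build()` now short-circuits before any broker-specific import runs. A minimal sketch of the effect (assumes `MQ_ENABLED` was set to `False` via flowcept's settings before `flowcept.configs` was loaded; the config mechanics are not shown in this diff):

```python
from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao

dao = MQDao.build()
# With MQ disabled, this is a bare MQDao rather than a broker-backed subclass
# such as MQDaoRedis or MQDaoKafka, so no connection is opened. Dropping the
# ABC base class (see the hunk above) is what makes this instantiation legal.
print(type(dao).__name__)  # "MQDao"
```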
flowcept/commons/daos/mq_dao/mq_dao_base.py (continued):

```diff
@@ -67,7 +70,12 @@
         self.logger = FlowceptLogger()
         self.started = False
         self._adapter_settings = adapter_settings
-        self._keyvalue_dao = KeyValueDAO()
+        if KVDB_ENABLED:
+            from flowcept.commons.daos.keyvalue_dao import KeyValueDAO
+
+            self._keyvalue_dao = KeyValueDAO()
+        else:
+            self._keyvalue_dao = None
         self._time_based_flushing_started = False
         self.buffer: Union[AutoflushBuffer, List] = None
         if MQ_TIMING:
@@ -88,11 +96,21 @@
     def bulk_publish(self, buffer):
         """Publish it."""
         # self.logger.info(f"Going to flush {len(buffer)} to MQ...")
-        if MQ_CHUNK_SIZE > 1:
-            for chunk in chunked(buffer, MQ_CHUNK_SIZE):
-                self._bulk_publish(chunk)
+        if flowcept.configs.DB_FLUSH_MODE == "offline":
+            if DUMP_BUFFER_PATH is not None:
+                import orjson
+
+                with open(DUMP_BUFFER_PATH, "wb", buffering=1_048_576) as f:
+                    for obj in buffer:
+                        f.write(orjson.dumps(obj))
+                        f.write(b"\n")
+                self.logger.info(f"Saved Flowcept messages into {DUMP_BUFFER_PATH}.")
         else:
-            self._bulk_publish(buffer)
+            if MQ_CHUNK_SIZE > 1:
+                for chunk in chunked(buffer, MQ_CHUNK_SIZE):
+                    self._bulk_publish(chunk)
+            else:
+                self._bulk_publish(buffer)
```
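In offline flush mode the buffer is now dumped to `DUMP_BUFFER_PATH` as JSON Lines, one `orjson`-encoded message per line. A sketch of reading such a dump back (the path below is hypothetical; in flowcept it comes from the `DUMP_BUFFER_PATH` setting):

```python
import orjson

dump_path = "/tmp/flowcept_buffer.jsonl"  # hypothetical example path

with open(dump_path, "rb") as f:
    # One JSON document per line, exactly as bulk_publish wrote them.
    messages = [orjson.loads(line) for line in f if line.strip()]
print(f"Recovered {len(messages)} buffered messages")
```

Note that, as written, each call to `bulk_publish` reopens the dump file with mode `"wb"`, so a later flush appears to overwrite an earlier one rather than append to it.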
flowcept/commons/daos/mq_dao/mq_dao_base.py (continued):

```diff
@@ -138,7 +156,7 @@
         """
         self._keyvalue_dao.delete_key("current_campaign_id")
 
-    def init_buffer(self, interceptor_instance_id: str, exec_bundle_id=None):
+    def init_buffer(self, interceptor_instance_id: str, exec_bundle_id=None, check_safe_stops=True):
         """Create the buffer."""
         if not self.started:
             if flowcept.configs.DB_FLUSH_MODE == "online":
@@ -147,7 +165,8 @@
                     max_size=MQ_BUFFER_SIZE,
                     flush_interval=MQ_INSERTION_BUFFER_TIME,
                 )
-                self.register_time_based_thread_init(interceptor_instance_id, exec_bundle_id)
+                if check_safe_stops:
+                    self.register_time_based_thread_init(interceptor_instance_id, exec_bundle_id)
                 self._time_based_flushing_started = True
             else:
                 self.buffer = list()
@@ -164,9 +183,9 @@
             self.bulk_publish(self.buffer)
             self.buffer = list()
 
-    def _stop_timed(self, interceptor_instance_id: str, bundle_exec_id: int = None):
+    def _stop_timed(self, interceptor_instance_id: str, check_safe_stops: bool = True, bundle_exec_id: int = None):
         t1 = time()
-        self._stop(interceptor_instance_id, bundle_exec_id)
+        self._stop(interceptor_instance_id, check_safe_stops, bundle_exec_id)
         t2 = time()
         self._flush_events.append(["final", t1, t2, t2 - t1, "n/a"])
 
@@ -175,14 +194,14 @@
             writer.writerow(["type", "start", "end", "duration", "size"])
             writer.writerows(self._flush_events)
 
-    def _stop(self, interceptor_instance_id: str, bundle_exec_id: int = None):
-        """Stop
-
-        self.logger.debug(msg0 + f"{bundle_exec_id}; interceptor id: {interceptor_instance_id}")
+    def _stop(self, interceptor_instance_id: str = None, check_safe_stops: bool = True, bundle_exec_id: int = None):
+        """Stop MQ publisher."""
+        self.logger.debug(f"MQ pub received stop sign: bundle={bundle_exec_id}, interceptor={interceptor_instance_id}")
         self._close_buffer()
-
-
-
+        self.logger.debug("Flushed MQ for the last time!")
+        if check_safe_stops:
+            self.logger.debug(f"Sending stop msg. Bundle: {bundle_exec_id}; interceptor id: {interceptor_instance_id}")
+            self._send_mq_dao_time_thread_stop(interceptor_instance_id, bundle_exec_id)
         self.started = False
 
     def _send_mq_dao_time_thread_stop(self, interceptor_instance_id, exec_bundle_id=None):
@@ -197,10 +216,10 @@
         # self.logger.info("Control msg sent: " + str(msg))
         self.send_message(msg)
 
-    def send_document_inserter_stop(self):
+    def send_document_inserter_stop(self, exec_bundle_id=None):
         """Send the document."""
         # These control_messages are handled by the document inserter
-        msg = {"type": "flowcept_control", "info": "stop_document_inserter"}
+        msg = {"type": "flowcept_control", "info": "stop_document_inserter", "exec_bundle_id": exec_bundle_id}
         self.send_message(msg)
 
     @abstractmethod
@@ -223,20 +242,12 @@
         """Subscribe to the interception channel."""
         raise NotImplementedError()
 
+    @abstractmethod
+    def unsubscribe(self):
+        """Subscribe to the interception channel."""
+        raise NotImplementedError()
+
     @abstractmethod
     def liveness_test(self) -> bool:
-        """
-
-        """
-        try:
-            response = self._keyvalue_dao.redis_conn.ping()
-            if response:
-                return True
-            else:
-                return False
-        except ConnectionError as e:
-            self.logger.exception(e)
-            return False
-        except Exception as e:
-            self.logger.exception(e)
-            return False
+        """Checks if the MQ system is alive."""
+        raise NotImplementedError()
```
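`liveness_test` used to ping Redis through the key-value DAO even in the base class; it is now a broker-specific hook, alongside the new `unsubscribe`. A toy sketch of what a custom backend would supply (the class below is illustrative, not part of flowcept; since `MQDao` is no longer an ABC, a subclass only needs to override what it actually uses, and the publishing stubs here are assumptions):

```python
from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao


class MQDaoInMemory(MQDao):  # hypothetical backend, for illustration only
    """Toy MQ that 'publishes' into a local list."""

    def __init__(self, adapter_settings=None):
        super().__init__(adapter_settings)  # KVDB branch depends on settings
        self._published = []
        self._subscribed = False

    def subscribe(self):
        self._subscribed = True

    def unsubscribe(self):
        self._subscribed = False

    def liveness_test(self) -> bool:
        return True  # nothing external to ping

    def _bulk_publish(self, buffer):
        self._published.extend(buffer)

    def send_message(self, message):
        self._published.append(message)
```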
flowcept/commons/daos/mq_dao/mq_dao_kafka.py:

```diff
@@ -108,12 +108,29 @@ class MQDaoKafka(MQDao):
     def liveness_test(self):
         """Get the livelyness of it."""
         try:
-            if not super().liveness_test():
-                self.logger.error("KV Store not alive!")
-                return False
             admin_client = AdminClient(self._kafka_conf)
             kafka_metadata = admin_client.list_topics(timeout=5)
             return MQ_CHANNEL in kafka_metadata.topics
         except Exception as e:
             self.logger.exception(e)
             return False
+
+    def unsubscribe(self):
+        """Unsubscribes from Kafka topic and closes consumer if open."""
+        if self._consumer is None:
+            self.logger.warning("No Kafka consumer to unsubscribe.")
+            return
+
+        try:
+            self._consumer.unsubscribe()
+            self.logger.info("Unsubscribed from Kafka topics.")
+        except RuntimeError as e:
+            self.logger.debug(f"Consumer already closed while unsubscribing: {e}")
+        except Exception as e:
+            self.logger.exception(f"Error while unsubscribing from Kafka: {e}")
+        finally:
+            try:
+                self._consumer.close()
+            except Exception as e:
+                self.logger.debug(f"Error closing consumer after unsubscribe: {e}")
+            self._consumer = None
```
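Because the `finally` block always closes and clears the consumer, shutdown is idempotent. A usage sketch (assumes Kafka settings are configured; `subscribe` is implemented elsewhere in this class and not shown in this diff):

```python
dao = MQDaoKafka()

dao.unsubscribe()   # no consumer yet: logs a warning and returns

dao.subscribe()
dao.unsubscribe()   # unsubscribes, closes, and sets self._consumer = None
dao.unsubscribe()   # safe to call again: warns and returns
```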
flowcept/commons/daos/mq_dao/mq_dao_redis.py:

```diff
@@ -7,7 +7,8 @@ import msgpack
 from time import time, sleep
 
 from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao
-from flowcept.
+from flowcept.commons.daos.redis_conn import RedisConn
+from flowcept.configs import MQ_CHANNEL, MQ_HOST, MQ_PORT, MQ_PASSWORD, MQ_URI, MQ_SETTINGS, KVDB_ENABLED
 
 
 class MQDaoRedis(MQDao):
@@ -17,16 +18,32 @@ class MQDaoRedis(MQDao):
 
     def __init__(self, adapter_settings=None):
         super().__init__(adapter_settings)
-
+
         self._consumer = None
+        use_same_as_kv = MQ_SETTINGS.get("same_as_kvdb", False)
+        if use_same_as_kv:
+            if KVDB_ENABLED:
+                self._producer = self._keyvalue_dao.redis_conn
+            else:
+                raise Exception("You have same_as_kvdb in your settings, but kvdb is disabled.")
+        else:
+            self._producer = RedisConn.build_redis_conn_pool(
+                host=MQ_HOST, port=MQ_PORT, password=MQ_PASSWORD, uri=MQ_URI
+            )
 
     def subscribe(self):
         """
         Subscribe to interception channel.
         """
-        self._consumer = self.
+        self._consumer = self._producer.pubsub()
         self._consumer.psubscribe(MQ_CHANNEL)
 
+    def unsubscribe(self):
+        """
+        Unsubscribe to interception channel.
+        """
+        self._consumer.unsubscribe(MQ_CHANNEL)
+
     def message_listener(self, message_handler: Callable):
         """Get message listener with automatic reconnection."""
         max_retrials = 10
```
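One subtlety worth knowing when reading `subscribe` and `unsubscribe` together: `subscribe` registers a pattern subscription (`psubscribe`), while `unsubscribe` issues a channel-level `unsubscribe`. Under redis-py semantics, pattern subscriptions are removed with `punsubscribe`, as this standalone demonstration shows (assumes a local Redis; not flowcept code):

```python
import redis

r = redis.Redis()                # assumes redis://localhost:6379
p = r.pubsub()
p.psubscribe("interception")     # pattern subscription, as in MQDaoRedis.subscribe
p.unsubscribe("interception")    # channel-level; the pattern stays registered
print(p.patterns)                # {b'interception': None}
p.punsubscribe("interception")   # this is what removes a pattern subscription
```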
flowcept/commons/daos/mq_dao/mq_dao_redis.py (continued):

```diff
@@ -37,13 +54,22 @@
             for message in self._consumer.listen():
                 if message and message["type"] in MQDaoRedis.MESSAGE_TYPES_IGNORE:
                     continue
+
+                if not isinstance(message["data"], (bytes, bytearray)):
+                    self.logger.warning(
+                        f"Skipping message with unexpected data type: {type(message['data'])} - {message['data']}"
+                    )
+                    continue
+
                 try:
                     msg_obj = msgpack.loads(message["data"], strict_map_key=False)
+                    # self.logger.debug(f"In mq dao redis, received msg! {msg_obj}")
                     if not message_handler(msg_obj):
                         should_continue = False  # Break While loop
                         break  # Break For loop
                 except Exception as e:
-                    self.logger.error(f"Failed to process message
+                    self.logger.error(f"Failed to process message {message}")
+                    self.logger.exception(e)
 
                 current_trials = 0
             except (redis.exceptions.ConnectionError, redis.exceptions.TimeoutError) as e:
```
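The new `isinstance` guard protects `msgpack.loads`, which only accepts bytes-like input; Redis pub/sub control events (subscription acknowledgements, for instance) carry an integer in `data`, and anything like that slipping past `MESSAGE_TYPES_IGNORE` would previously have landed in the generic `except`. A minimal demonstration:

```python
import msgpack

payload = msgpack.dumps({"type": "task", "task_id": "t1"})
print(msgpack.loads(payload, strict_map_key=False))  # round-trips fine

try:
    msgpack.loads(1)  # an int, like a subscribe-ack's data field
except Exception as e:
    print(type(e).__name__)  # rejected before any handler logic runs
```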
flowcept/commons/daos/mq_dao/mq_dao_redis.py (continued):

```diff
@@ -103,7 +129,14 @@
     def liveness_test(self):
         """Get the livelyness of it."""
         try:
-
+            response = self._producer.ping()
+            if response:
+                return True
+            else:
+                return False
+        except ConnectionError as e:
+            self.logger.exception(e)
+            return False
         except Exception as e:
             self.logger.exception(e)
             return False
```
flowcept/commons/daos/redis_conn.py (new file):

```diff
@@ -0,0 +1,47 @@
+"""RedisConn module."""
+
+from redis import Redis, ConnectionPool
+
+
+class RedisConn:
+    """RedisConn DAO class."""
+
+    @staticmethod
+    def build_redis_conn_pool(host: str = None, port: str = None, password: str = None, uri: str = None) -> Redis:
+        """
+        Create a Redis connection using either a URI or host/port.
+
+        If `uri` is provided, it will be used to initialize the Redis connection.
+        Otherwise, the connection will fall back to using `host` and `port`.
+
+        Parameters
+        ----------
+        host : str, optional
+            Redis host address. Used only if `uri` is not provided.
+        port : str, optional
+            Redis port. Used only if `uri` is not provided.
+        uri : str, optional
+            Full Redis URI. Takes precedence over `host` and `port` if defined.
+        password : str, optional
+            Password for authenticating with Redis.
+
+        Returns
+        -------
+        Redis
+            An instance of the Redis client with a configured connection pool.
+        """
+        pool_kwargs = {
+            "db": 0,
+            "password": password,
+            "decode_responses": False,
+            "max_connections": 10000,
+            "socket_keepalive": True,
+            "retry_on_timeout": True,
+        }
+
+        if uri:
+            pool = ConnectionPool.from_url(uri, **pool_kwargs)
+        else:
+            pool = ConnectionPool(host=host, port=port, **pool_kwargs)
+
+        return Redis(connection_pool=pool)
```
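A usage sketch for the new helper (assumes a reachable local Redis; the URI form wins when both styles are given):

```python
from flowcept.commons.daos.redis_conn import RedisConn

r = RedisConn.build_redis_conn_pool(uri="redis://localhost:6379")
print(r.ping())  # True if the server is reachable

# Host/port form, used only when no URI is provided:
r2 = RedisConn.build_redis_conn_pool(host="localhost", port=6379)
```

Each client gets its own pool configured with the settings shown above (`db=0`, raw bytes responses, keepalive, retry on timeout).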
flowcept/commons/flowcept_dataclasses/task_object.py:

```diff
@@ -24,13 +24,14 @@ class TaskObject:
     utc_timestamp: float = None
     adapter_id: AnyStr = None
     user: AnyStr = None
+    data: Any = None
     used: Dict[AnyStr, Any] = None  # Used parameter and files
     campaign_id: AnyStr = None
     generated: Dict[AnyStr, Any] = None  # Generated results and files
     submitted_at: float = None
     started_at: float = None
     ended_at: float = None
-    registered_at: float = None
+    registered_at: float = None  # Leave this for dates generated at the DocInserter
     telemetry_at_start: Telemetry = None
     telemetry_at_end: Telemetry = None
     workflow_name: AnyStr = None
@@ -52,6 +53,8 @@ class TaskObject:
     address: AnyStr = None
     dependencies: List = None
     dependents: List = None
+    tags: List = None
+    agent_id: str = None
 
     _DEFAULT_ENRICH_VALUES = {
         "node_name": NODE_NAME,
@@ -103,20 +106,16 @@
         if self.utc_timestamp is None:
             self.utc_timestamp = flowcept.commons.utils.get_utc_now()
 
-        if self.node_name is None and NODE_NAME is not None:
-            self.node_name = NODE_NAME
-
-        if self.login_name is None and LOGIN_NAME is not None:
-            self.login_name = LOGIN_NAME
-
-        if self.public_ip is None and PUBLIC_IP is not None:
-            self.public_ip = PUBLIC_IP
-
-        if self.private_ip is None and PRIVATE_IP is not None:
-            self.private_ip = PRIVATE_IP
+        for key, fallback_value in TaskObject._DEFAULT_ENRICH_VALUES.items():
+            if getattr(self, key) is None and fallback_value is not None:
+                setattr(self, key, fallback_value)
 
-
-
+    @staticmethod
+    def enrich_task_dict(task_dict: dict):
+        """Enrich the task."""
+        for key, fallback_value in TaskObject._DEFAULT_ENRICH_VALUES.items():
+            if (key not in task_dict or task_dict[key] is None) and fallback_value is not None:
+                task_dict[key] = fallback_value
 
     def to_dict(self):
         """Convert to dictionary."""
```
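`enrich_task_dict` applies the same fallback table the instance path now iterates, so plain dict tasks get the node/login/IP backfill too. A sketch (assumes `NODE_NAME` is set in `flowcept.configs`):

```python
from flowcept.commons.flowcept_dataclasses.task_object import TaskObject

task_dict = {"task_id": "t1", "node_name": None}
TaskObject.enrich_task_dict(task_dict)
# Keys in _DEFAULT_ENRICH_VALUES that are missing or None get the configured
# fallback (e.g., node_name <- NODE_NAME); keys already set are left alone.
print(task_dict["node_name"])
```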
flowcept/commons/flowcept_dataclasses/task_object.py (continued):

```diff
@@ -139,16 +138,40 @@ class TaskObject:
         return msgpack.dumps(self.to_dict())
 
     @staticmethod
-    def
-        """
-
-
-
-
-
-
-
-
-
-
+    def from_dict(task_obj_dict: Dict[AnyStr, Any]) -> "TaskObject":
+        """Create a TaskObject from a dictionary.
+
+        Parameters
+        ----------
+        task_obj_dict : Dict[AnyStr, Any]
+            Dictionary containing task attributes.
+
+        Returns
+        -------
+        TaskObject
+            A TaskObject instance populated with available data.
+        """
+        task = TaskObject()
+
+        for key, value in task_obj_dict.items():
+            if hasattr(task, key):
+                if key == "status" and isinstance(value, str):
+                    setattr(task, key, Status(value))
+                else:
+                    setattr(task, key, value)
+
+        return task
+
+    def __str__(self):
+        """Return a user-friendly string representation of the TaskObject."""
+        return self.__repr__()
+
+    def __repr__(self):
+        """Return an unambiguous string representation of the TaskObject."""
+        attrs = ["task_id", "workflow_id", "campaign_id", "activity_id", "started_at", "ended_at"]
+        optionals = ["subtype", "parent_task_id", "agent_id"]
+        for opt in optionals:
+            if getattr(self, opt) is not None:
+                attrs.append(opt)
+        attr_str = ", ".join(f"{attr}={repr(getattr(self, attr))}" for attr in attrs)
+        return f"TaskObject({attr_str})"
```
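`from_dict` ignores keys that are not `TaskObject` attributes and coerces a string `status` through the `Status` enum, so dictionaries coming off the wire deserialize safely. A sketch (the `"FINISHED"` value is hypothetical; `Status` members are not shown in this diff):

```python
from flowcept.commons.flowcept_dataclasses.task_object import TaskObject

task = TaskObject.from_dict(
    {
        "task_id": "t1",
        "status": "FINISHED",    # hypothetical value, coerced via Status(value)
        "not_an_attribute": 42,  # silently dropped by the hasattr() check
    }
)
print(task)  # the new __repr__: TaskObject(task_id='t1', workflow_id=None, ...)
restored = TaskObject.from_dict(task.to_dict())  # round-trip for known fields
```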
flowcept/commons/flowcept_dataclasses/workflow_object.py:

```diff
@@ -5,7 +5,7 @@ import msgpack
 from omegaconf import OmegaConf, DictConfig
 
 from flowcept.version import __version__
-from flowcept.commons.utils import get_utc_now
+from flowcept.commons.utils import get_utc_now, get_git_info
 from flowcept.configs import (
     settings,
     FLOWCEPT_USER,
@@ -38,6 +38,7 @@ class WorkflowObject:
     sys_name: str = None
     extra_metadata: str = None
     used: Dict = None
+    code_repository: Dict = None
     generated: Dict = None
 
     def __init__(self, workflow_id=None, name=None, used=None, generated=None):
@@ -93,6 +94,13 @@
         )
         self.extra_metadata = _extra_metadata
 
+        if self.code_repository is None:
+            try:
+                self.code_repository = get_git_info()
+            except Exception as e:
+                print(e)
+                pass
+
         if self.flowcept_version is None:
             self.flowcept_version = __version__
 
```
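Workflow enrichment now records which code produced the run: `get_git_info()` is called once (failures are swallowed, e.g., when running outside a git repository) and stored under `code_repository`. The exact dict shape it returns is not shown in this diff; a guarded usage sketch:

```python
from flowcept.commons.utils import get_git_info

try:
    repo_info = get_git_info()  # metadata about the current git repository
except Exception:
    repo_info = None            # e.g., not inside a git working tree
print(repo_info)
```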
flowcept/commons/settings_factory.py:

```diff
@@ -7,9 +7,6 @@ from flowcept.commons.flowcept_dataclasses.base_settings_dataclasses import (
     BaseSettings,
     KeyValue,
 )
-from flowcept.flowceptor.adapters.zambeze.zambeze_dataclasses import (
-    ZambezeSettings,
-)
 from flowcept.flowceptor.adapters.mlflow.mlflow_dataclasses import (
     MLFlowSettings,
 )
@@ -22,7 +19,6 @@ from flowcept.flowceptor.adapters.dask.dask_dataclasses import (
 
 
 SETTINGS_CLASSES = {
-    Vocabulary.Settings.ZAMBEZE_KIND: ZambezeSettings,
     Vocabulary.Settings.MLFLOW_KIND: MLFlowSettings,
     Vocabulary.Settings.TENSORBOARD_KIND: TensorboardSettings,
     Vocabulary.Settings.DASK_KIND: DaskSettings,
@@ -30,6 +26,8 @@ SETTINGS_CLASSES = {
 
 
 def _build_base_settings(kind: str, settings_dict: dict) -> BaseSettings:
+    if kind not in SETTINGS_CLASSES:
+        return settings_dict
     settings_obj = SETTINGS_CLASSES[kind](**settings_dict)
     return settings_obj
 
```
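With the Zambeze adapter removed, `_build_base_settings` no longer assumes every adapter kind has a settings dataclass: an unknown kind now returns the raw dict instead of raising a `KeyError`. An illustration (the `"mqtt"` kind is an assumption here, chosen because this release adds an MQTT broker adapter; whether it registers a `SETTINGS_CLASSES` entry is not shown):

```python
from flowcept.commons.settings_factory import _build_base_settings

raw = {"host": "localhost", "port": 1883}
result = _build_base_settings("mqtt", raw)  # no SETTINGS_CLASSES entry assumed
assert result is raw  # the dict passes through unchanged
```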