flowcept-0.8.10-py3-none-any.whl → flowcept-0.8.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. flowcept/__init__.py +7 -4
  2. flowcept/agents/__init__.py +5 -0
  3. flowcept/agents/agent_client.py +58 -0
  4. flowcept/agents/agents_utils.py +181 -0
  5. flowcept/agents/dynamic_schema_tracker.py +191 -0
  6. flowcept/agents/flowcept_agent.py +30 -0
  7. flowcept/agents/flowcept_ctx_manager.py +175 -0
  8. flowcept/agents/gui/__init__.py +5 -0
  9. flowcept/agents/gui/agent_gui.py +76 -0
  10. flowcept/agents/gui/gui_utils.py +239 -0
  11. flowcept/agents/llms/__init__.py +1 -0
  12. flowcept/agents/llms/claude_gcp.py +139 -0
  13. flowcept/agents/llms/gemini25.py +119 -0
  14. flowcept/agents/prompts/__init__.py +1 -0
  15. flowcept/agents/prompts/general_prompts.py +69 -0
  16. flowcept/agents/prompts/in_memory_query_prompts.py +297 -0
  17. flowcept/agents/tools/__init__.py +1 -0
  18. flowcept/agents/tools/general_tools.py +102 -0
  19. flowcept/agents/tools/in_memory_queries/__init__.py +1 -0
  20. flowcept/agents/tools/in_memory_queries/in_memory_queries_tools.py +704 -0
  21. flowcept/agents/tools/in_memory_queries/pandas_agent_utils.py +309 -0
  22. flowcept/cli.py +459 -17
  23. flowcept/commons/daos/docdb_dao/mongodb_dao.py +47 -0
  24. flowcept/commons/daos/keyvalue_dao.py +19 -23
  25. flowcept/commons/daos/mq_dao/mq_dao_base.py +49 -38
  26. flowcept/commons/daos/mq_dao/mq_dao_kafka.py +20 -3
  27. flowcept/commons/daos/mq_dao/mq_dao_mofka.py +4 -0
  28. flowcept/commons/daos/mq_dao/mq_dao_redis.py +38 -5
  29. flowcept/commons/daos/redis_conn.py +47 -0
  30. flowcept/commons/flowcept_dataclasses/task_object.py +50 -27
  31. flowcept/commons/flowcept_dataclasses/workflow_object.py +9 -1
  32. flowcept/commons/settings_factory.py +2 -4
  33. flowcept/commons/task_data_preprocess.py +400 -0
  34. flowcept/commons/utils.py +26 -7
  35. flowcept/configs.py +48 -29
  36. flowcept/flowcept_api/flowcept_controller.py +102 -18
  37. flowcept/flowceptor/adapters/base_interceptor.py +24 -11
  38. flowcept/flowceptor/adapters/brokers/__init__.py +1 -0
  39. flowcept/flowceptor/adapters/brokers/mqtt_interceptor.py +132 -0
  40. flowcept/flowceptor/adapters/mlflow/mlflow_interceptor.py +3 -3
  41. flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py +3 -3
  42. flowcept/flowceptor/consumers/agent/__init__.py +1 -0
  43. flowcept/flowceptor/consumers/agent/base_agent_context_manager.py +125 -0
  44. flowcept/flowceptor/consumers/base_consumer.py +94 -0
  45. flowcept/flowceptor/consumers/consumer_utils.py +5 -4
  46. flowcept/flowceptor/consumers/document_inserter.py +135 -36
  47. flowcept/flowceptor/telemetry_capture.py +6 -3
  48. flowcept/instrumentation/flowcept_agent_task.py +294 -0
  49. flowcept/instrumentation/flowcept_decorator.py +43 -0
  50. flowcept/instrumentation/flowcept_loop.py +3 -3
  51. flowcept/instrumentation/flowcept_task.py +64 -24
  52. flowcept/instrumentation/flowcept_torch.py +5 -5
  53. flowcept/instrumentation/task_capture.py +87 -4
  54. flowcept/version.py +1 -1
  55. {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/METADATA +48 -11
  56. flowcept-0.8.12.dist-info/RECORD +101 -0
  57. resources/sample_settings.yaml +46 -14
  58. flowcept/flowceptor/adapters/zambeze/__init__.py +0 -1
  59. flowcept/flowceptor/adapters/zambeze/zambeze_dataclasses.py +0 -41
  60. flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py +0 -102
  61. flowcept-0.8.10.dist-info/RECORD +0 -75
  62. {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/WHEEL +0 -0
  63. {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/entry_points.txt +0 -0
  64. {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/licenses/LICENSE +0 -0
flowcept/flowcept_api/flowcept_controller.py
@@ -1,6 +1,7 @@
 """Controller module."""
 
-from typing import List
+import os.path
+from typing import List, Dict
 from uuid import uuid4
 
 from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao
@@ -9,7 +10,16 @@ from flowcept.commons.flowcept_dataclasses.workflow_object import (
 )
 from flowcept.commons.flowcept_logger import FlowceptLogger
 from flowcept.commons.utils import ClassProperty
-from flowcept.configs import MQ_INSTANCES, INSTRUMENTATION_ENABLED, MONGO_ENABLED, SETTINGS_PATH
+from flowcept.configs import (
+    MQ_INSTANCES,
+    INSTRUMENTATION_ENABLED,
+    MONGO_ENABLED,
+    SETTINGS_PATH,
+    LMDB_ENABLED,
+    KVDB_ENABLED,
+    MQ_ENABLED,
+    DUMP_BUFFER_PATH,
+)
 from flowcept.flowceptor.adapters.base_interceptor import BaseInterceptor
 
 
@@ -37,8 +47,9 @@ class Flowcept(object):
         campaign_id: str = None,
         workflow_id: str = None,
         workflow_name: str = None,
-        workflow_args: str = None,
+        workflow_args: Dict = None,
         start_persistence=True,
+        check_safe_stops=True,  # TODO add to docstring
         save_workflow=True,
         *args,
         **kwargs,
@@ -86,6 +97,8 @@ class Flowcept(object):
         self.logger.debug(f"Using settings file: {SETTINGS_PATH}")
         self._enable_persistence = start_persistence
         self._db_inserters: List = []
+        self.buffer = None
+        self._check_safe_stops = check_safe_stops
         if bundle_exec_id is None:
             self._bundle_exec_id = id(self)
         else:
@@ -140,9 +153,9 @@ class Flowcept(object):
             Flowcept.current_workflow_id = self.current_workflow_id
 
             interceptor_inst = BaseInterceptor.build(interceptor)
-            interceptor_inst.start(bundle_exec_id=self._bundle_exec_id)
+            interceptor_inst.start(bundle_exec_id=self._bundle_exec_id, check_safe_stops=self._check_safe_stops)
             self._interceptor_instances.append(interceptor_inst)
-
+            self.buffer = interceptor_inst._mq_dao.buffer
             if self._should_save_workflow and not self._workflow_saved:
                 self.save_workflow(interceptor, interceptor_inst)
 
@@ -152,6 +165,68 @@ class Flowcept(object):
         self.logger.debug("Flowcept started successfully.")
         return self
 
+    def _publish_buffer(self):
+        self._interceptor_instances[0]._mq_dao.bulk_publish(self.buffer)
+
+    @staticmethod
+    def read_messages_file(file_path: str = None) -> List[Dict]:
+        """
+        Read a JSON Lines (JSONL) file containing captured Flowcept messages.
+
+        This function loads a file where each line is a serialized JSON object.
+        It joins the lines into a single JSON array and parses them efficiently
+        with ``orjson``.
+
+        Parameters
+        ----------
+        file_path : str, optional
+            Path to the messages file. If not provided, defaults to the
+            value of ``DUMP_BUFFER_PATH`` from the configuration.
+            If neither is provided, an assertion error is raised.
+
+        Returns
+        -------
+        List[dict]
+            A list of message objects (dictionaries) parsed from the file.
+
+        Raises
+        ------
+        AssertionError
+            If no ``file_path`` is provided and ``DUMP_BUFFER_PATH`` is not set.
+        FileNotFoundError
+            If the specified file does not exist.
+        orjson.JSONDecodeError
+            If the file contents cannot be parsed as valid JSON.
+
+        Examples
+        --------
+        Read messages from a file explicitly:
+
+        >>> msgs = read_messages_file("offline_buffer.jsonl")
+        >>> print(len(msgs))
+        128
+
+        Use the default dump buffer path from config:
+
+        >>> msgs = read_messages_file()
+        >>> for m in msgs[:2]:
+        ...     print(m["type"], m.get("workflow_id"))
+        task_start wf_123
+        task_end wf_123
+        """
+        import orjson
+
+        _buffer = []
+        if file_path is None:
+            file_path = DUMP_BUFFER_PATH
+        assert file_path is not None, "Please indicate file_path either in the argument or in the config file."
+        if not os.path.exists(file_path):
+            raise FileNotFoundError(f"File {file_path} has not been created. It will only be created if you run in fully offline mode.")
+        with open(file_path, "rb") as f:
+            lines = [ln for ln in f.read().splitlines() if ln]
+        _buffer = orjson.loads(b"[" + b",".join(lines) + b"]")
+        return _buffer
+
     def save_workflow(self, interceptor: str, interceptor_instance: BaseInterceptor):
         """
         Save the current workflow and send its metadata using the provided interceptor.
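A minimal sketch of how the new offline-buffer reader could be used, mirroring the docstring above (assuming `Flowcept` is exported from the top-level `flowcept` package; the path is illustrative):

    # Sketch only: read an offline message dump produced in fully offline mode.
    from flowcept import Flowcept

    # "offline_buffer.jsonl" is a hypothetical path; omit the argument to fall
    # back to DUMP_BUFFER_PATH from the configuration.
    msgs = Flowcept.read_messages_file("offline_buffer.jsonl")
    for m in msgs:
        print(m.get("type"), m.get("workflow_id"))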
@@ -190,19 +265,20 @@ class Flowcept(object):
         else:
             raise Exception("You must provide the argument `dask_client` so we can correctly link the workflow.")
 
-        interceptor_instance._mq_dao.set_campaign_id(Flowcept.campaign_id)
+        if KVDB_ENABLED:
+            interceptor_instance._mq_dao.set_campaign_id(Flowcept.campaign_id)
         interceptor_instance.send_workflow_message(wf_obj)
         self._workflow_saved = True
 
     def _init_persistence(self, mq_host=None, mq_port=None):
+        if not LMDB_ENABLED and not MONGO_ENABLED:
+            return
+
         from flowcept.flowceptor.consumers.document_inserter import DocumentInserter
 
-        self._db_inserters.append(
-            DocumentInserter(
-                check_safe_stops=True,
-                bundle_exec_id=self._bundle_exec_id,
-            ).start()
-        )
+        doc_inserter = DocumentInserter(check_safe_stops=self._check_safe_stops, bundle_exec_id=self._bundle_exec_id)
+        doc_inserter.start()
+        self._db_inserters.append(doc_inserter)
 
     def stop(self):
         """Stop it."""
@@ -214,7 +290,7 @@ class Flowcept(object):
         for interceptor in self._interceptor_instances:
             if interceptor is None:
                 continue
-            interceptor.stop()
+            interceptor.stop(check_safe_stops=self._check_safe_stops)
 
         if len(self._db_inserters):
             self.logger.info("Stopping DB Inserters...")
@@ -259,17 +335,25 @@ class Flowcept(object):
         ...     print("One or more services are not ready.")
         """
         logger = FlowceptLogger()
-        if not MQDao.build().liveness_test():
-            logger.error("MQ Not Ready!")
-            return False
+        mq = MQDao.build()
+        if MQ_ENABLED:
+            if not mq.liveness_test():
+                logger.error("MQ Not Ready!")
+                return False
+
+        if KVDB_ENABLED:
+            if not mq._keyvalue_dao.liveness_test():
+                logger.error("KVDB is enabled but is not ready!")
+                return False
 
+        logger.info("MQ is alive!")
         if MONGO_ENABLED:
             from flowcept.commons.daos.docdb_dao.mongodb_dao import MongoDBDAO
 
             if not MongoDBDAO(create_indices=False).liveness_test():
-                logger.error("DocDB Not Ready!")
+                logger.error("MongoDB is enabled but DocDB is not Ready!")
                 return False
-        logger.info("MQ and DocDB are alive!")
+        logger.info("DocDB is alive!")
         return True
 
     @staticmethod
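The refactored check probes each backend only when its feature flag is enabled, so a fully disabled setup reports ready without opening any connections; usage stays as in the docstring example:

    # Sketch only: liveness is now gated per backend (MQ_ENABLED, KVDB_ENABLED,
    # MONGO_ENABLED); disabled backends are skipped rather than failing.
    from flowcept import Flowcept

    if Flowcept.services_alive():
        print("All enabled services are ready.")
    else:
        print("One or more services are not ready.")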
flowcept/flowceptor/adapters/base_interceptor.py
@@ -9,14 +9,13 @@ from flowcept.commons.flowcept_dataclasses.workflow_object import (
 )
 from flowcept.configs import (
     ENRICH_MESSAGES,
+    TELEMETRY_ENABLED,
 )
 from flowcept.commons.flowcept_logger import FlowceptLogger
 from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao
 from flowcept.commons.flowcept_dataclasses.task_object import TaskObject
 from flowcept.commons.settings_factory import get_settings
 
-from flowcept.flowceptor.telemetry_capture import TelemetryCapture
-
 
 # TODO :base-interceptor-refactor: :ml-refactor: :code-reorg: :usability:
 # Consider creating a new concept for instrumentation-based 'interception'.
@@ -28,8 +27,6 @@ from flowcept.flowceptor.telemetry_capture import TelemetryCapture
 class BaseInterceptor(object):
     """Base interceptor class."""
 
-    # KINDS_TO_NOT_EXPLICITLY_CONTROL = {"dask"}
-
     @staticmethod
     def build(kind: str) -> "BaseInterceptor":
         """Build the Interceptor."""
@@ -42,6 +39,11 @@ class BaseInterceptor(object):
             from flowcept.flowceptor.adapters.tensorboard.tensorboard_interceptor import TensorboardInterceptor
 
             return TensorboardInterceptor()
+
+        elif kind == "broker_mqtt":
+            from flowcept.flowceptor.adapters.brokers.mqtt_interceptor import MQTTBrokerInterceptor
+
+            return MQTTBrokerInterceptor()
         elif kind == "dask_worker":
             from flowcept.flowceptor.adapters.dask.dask_interceptor import DaskWorkerInterceptor
 
@@ -71,7 +73,14 @@ class BaseInterceptor(object):
         self._bundle_exec_id = None
         self.started = False
         self._interceptor_instance_id = str(id(self))
-        self.telemetry_capture = TelemetryCapture()
+
+        if TELEMETRY_ENABLED:
+            from flowcept.flowceptor.telemetry_capture import TelemetryCapture
+
+            self.telemetry_capture = TelemetryCapture()
+        else:
+            self.telemetry_capture = None
+
         self._saved_workflows = set()
         self._generated_workflow_id = False
         self.kind = kind
@@ -80,17 +89,21 @@ class BaseInterceptor(object):
         """Prepare a task."""
         raise NotImplementedError()
 
-    def start(self, bundle_exec_id) -> "BaseInterceptor":
+    def start(self, bundle_exec_id, check_safe_stops: bool = True) -> "BaseInterceptor":
         """Start an interceptor."""
         if not self.started:
             self._bundle_exec_id = bundle_exec_id
-            self._mq_dao.init_buffer(self._interceptor_instance_id, bundle_exec_id)
+            self._mq_dao.init_buffer(self._interceptor_instance_id, bundle_exec_id, check_safe_stops)
             self.started = True
         return self
 
-    def stop(self):
+    def stop(self, check_safe_stops: bool = True):
         """Stop an interceptor."""
-        self._mq_dao.stop(self._interceptor_instance_id, self._bundle_exec_id)
+        self._mq_dao.stop(
+            interceptor_instance_id=self._interceptor_instance_id,
+            check_safe_stops=check_safe_stops,
+            bundle_exec_id=self._bundle_exec_id,
+        )
         self.started = False
 
     def observe(self, *args, **kwargs):
@@ -122,8 +135,8 @@ class BaseInterceptor(object):
             # TODO :base-interceptor-refactor: :code-reorg: :usability:
             raise Exception(f"This interceptor {id(self)} has never been started!")
         workflow_obj.interceptor_ids = [self._interceptor_instance_id]
-        machine_info = self.telemetry_capture.capture_machine_info()
-        if machine_info is not None:
+        if self.telemetry_capture:
+            machine_info = self.telemetry_capture.capture_machine_info()
             if workflow_obj.machine_info is None:
                 workflow_obj.machine_info = dict()
             # TODO :refactor-base-interceptor: we might want to register
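For adapter authors, the signature changes above mean subclasses should accept and forward `check_safe_stops`; a hedged sketch (the class name and observe logic are hypothetical, only the signatures come from this diff):

    # Sketch only: a custom adapter against the new BaseInterceptor API.
    from flowcept.flowceptor.adapters.base_interceptor import BaseInterceptor

    class MyInterceptor(BaseInterceptor):
        def start(self, bundle_exec_id, check_safe_stops: bool = True) -> "MyInterceptor":
            super().start(bundle_exec_id, check_safe_stops)  # initializes the MQ buffer
            # begin observing the external system here
            return self

        def stop(self, check_safe_stops: bool = True):
            # stop observing first, then let the base class flush/stop the MQ DAO
            super().stop(check_safe_stops)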
flowcept/flowceptor/adapters/brokers/__init__.py (new file)
@@ -0,0 +1 @@
+"""Brokers' adapters subpackage."""
flowcept/flowceptor/adapters/brokers/mqtt_interceptor.py (new file)
@@ -0,0 +1,132 @@
+"""MQTT broker interceptor module."""
+
+import uuid
+from threading import Thread
+from time import sleep
+import paho.mqtt.client as mqtt
+import json
+from typing import Dict
+from flowcept.commons.flowcept_dataclasses.task_object import TaskObject
+from flowcept.flowcept_api.flowcept_controller import Flowcept
+from flowcept.flowceptor.adapters.base_interceptor import (
+    BaseInterceptor,
+)
+
+
+class MQTTBrokerInterceptor(BaseInterceptor):
+    """MQTT broker interceptor."""
+
+    def __init__(self, plugin_key="broker_mqtt"):
+        super().__init__(plugin_key)
+
+        assert self.settings.get("protocol") == "mqtt3.1.1", "We only support mqtt3.1.1 for this interceptor."
+        self._host = self.settings.get("host", "localhost")
+        self._port = self.settings.get("port", 1883)
+        self._username = self.settings.get("username", "username")
+        self._password = self.settings.get("password", None)
+        self._queues = self.settings.get("queues")
+        self._qos = self.settings.get("qos", 2)
+        self._id = str(id(self))
+
+        self._tracked_keys = self.settings.get("tracked_keys")
+        self._task_subtype = self.settings.get("task_subtype", None)
+        self._client: mqtt.Client = None
+
+        self._observer_thread: Thread = None
+
+    def _connect(self):
+        """Establish a connection to the MQTT broker."""
+        try:
+            self._client = mqtt.Client(client_id=self._id, clean_session=False, protocol=mqtt.MQTTv311)
+            self._client.username_pw_set(self._username, self._password)
+
+            self._client.on_message = self.callback
+            self._client.on_connect = self._on_connect
+            self._client.on_disconnect = self._on_disconnect
+
+            self.logger.debug("Connecting to MQTT broker...")
+            self._client.connect(self._host, self._port, 60)
+            self.logger.debug("Connected.")
+        except Exception as e:
+            self.logger.error(f"Connection failed: {e}")
+            raise e
+
+    def _on_connect(self, *_):
+        """Handle connection events and subscribe to the topics."""
+        for q in self._queues:
+            self.logger.debug(f"Client {self._id} connected to MQTT queue {q}. Waiting for messages...")
+            self._client.subscribe(q, qos=self._qos)
+
+    def callback(self, _, __, msg):
+        """Implement the callback."""
+        msg_str = msg.payload.decode()
+        topic = msg.topic
+        self.logger.debug(f"Received message: '{msg_str}' on topic '{topic}'")
+
+        msg_dict = json.loads(msg_str)
+        msg_dict["topic"] = topic
+
+        task_msg = self.prepare_task_msg(msg_dict)
+        self.intercept(task_msg.to_dict())
+
+    def _on_disconnect(self, *_):
+        """Handle disconnections and attempt reconnection."""
+        self.logger.warning("MQTT Observer Client Disconnected.")
+
+    def start(self, bundle_exec_id) -> "MQTTBrokerInterceptor":
+        """Start it."""
+        super().start(bundle_exec_id)
+        self._observer_thread = Thread(target=self.observe, daemon=True)
+        self._observer_thread.start()
+        return self
+
+    def observe(self):
+        """Start the MQTT loop."""
+        self._connect()
+        self._client.loop_forever()
+
+    def prepare_task_msg(self, msg: Dict) -> TaskObject:
+        """Prepare a task."""
+        task_dict = {}
+        custom_metadata = {"topic": msg.get("topic", None)}
+        for key in self._tracked_keys:
+            if key != "custom_metadata":
+                if self._tracked_keys.get(key):
+                    task_dict[key] = msg.get(self._tracked_keys.get(key), None)
+            else:
+                cm = self._tracked_keys.get("custom_metadata", None)
+                if cm and len(cm):
+                    for k in cm:
+                        custom_metadata[k] = msg[k]
+        task_dict["custom_metadata"] = custom_metadata
+
+        if isinstance(task_dict.get("used"), str):
+            task_dict["used"] = {"payload": task_dict.get("used")}
+
+        if "task_id" not in task_dict:
+            task_dict["task_id"] = str(uuid.uuid4())
+
+        task_obj = TaskObject.from_dict(task_dict)
+        task_obj.subtype = self._task_subtype
+
+        if task_obj.campaign_id is None:
+            task_obj.campaign_id = Flowcept.campaign_id
+
+        if task_obj.workflow_id is None:
+            task_obj.workflow_id = Flowcept.current_workflow_id
+
+        print(task_obj)
+        return task_obj
+
+    def stop(self) -> bool:
+        """Stop it."""
+        self.logger.debug("Interceptor stopping...")
+        super().stop()
+        try:
+            self._client.disconnect()
+        except Exception as e:
+            self.logger.warning(f"This exception is expected to occur after channel.basic_cancel: {e}")
+        sleep(2)
+        self._observer_thread.join()
+        self.logger.debug("Interceptor stopped.")
+        return True
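The interceptor above is driven entirely by its settings section; the keys its `__init__` reads are sketched below as a Python dict. All values are hypothetical, and the authoritative layout is the adapter section in resources/sample_settings.yaml (also updated in this release):

    # Sketch only: settings keys consumed by MQTTBrokerInterceptor.__init__.
    broker_mqtt_settings = {
        "protocol": "mqtt3.1.1",            # asserted: only MQTT 3.1.1 is supported
        "host": "localhost",
        "port": 1883,
        "username": "username",
        "password": None,
        "queues": ["flowcept/tasks"],       # hypothetical topics, subscribed on connect
        "qos": 2,
        "task_subtype": "mqtt_task",        # hypothetical subtype stamped on each TaskObject
        "tracked_keys": {
            "used": "payload",              # hypothetical: TaskObject field -> message key
            "custom_metadata": ["sender"],  # hypothetical: message keys copied verbatim
        },
    }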
flowcept/flowceptor/adapters/mlflow/mlflow_interceptor.py
@@ -64,17 +64,17 @@ class MLFlowInterceptor(BaseInterceptor):
         task_msg = self.prepare_task_msg(run_data).to_dict()
         self.intercept(task_msg)
 
-    def start(self, bundle_exec_id) -> "MLFlowInterceptor":
+    def start(self, bundle_exec_id, check_safe_stops) -> "MLFlowInterceptor":
         """Start it."""
         super().start(bundle_exec_id)
         self._observer_thread = Thread(target=self.observe, daemon=True)
         self._observer_thread.start()
         return self
 
-    def stop(self) -> bool:
+    def stop(self, check_safe_stops: bool = True) -> bool:
         """Stop it."""
         sleep(1)
-        super().stop()
+        super().stop(check_safe_stops)
         self.logger.debug("Interceptor stopping...")
         self._observer.stop()
         self._observer_thread.join()
flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py
@@ -91,17 +91,17 @@ class TensorboardInterceptor(BaseInterceptor):
         self.intercept(task_msg.to_dict())
         self.state_manager.add_element_id(child_event.log_path)
 
-    def start(self, bundle_exec_id) -> "TensorboardInterceptor":
+    def start(self, bundle_exec_id, check_safe_stops: bool = True) -> "TensorboardInterceptor":
         """Start it."""
         super().start(bundle_exec_id)
         self.observe()
         return self
 
-    def stop(self) -> bool:
+    def stop(self, check_safe_stops: bool = True) -> bool:
         """Stop it."""
         sleep(1)
         self.logger.debug("Interceptor stopping...")
-        super().stop()
+        super().stop(check_safe_stops)
         self._observer.stop()
         self.logger.debug("Interceptor stopped.")
         return True
flowcept/flowceptor/consumers/agent/__init__.py (new file)
@@ -0,0 +1 @@
+"""Flowcept agent and Flowcept-enabled agent module."""
flowcept/flowceptor/consumers/agent/base_agent_context_manager.py (new file)
@@ -0,0 +1,125 @@
+from contextlib import asynccontextmanager
+from dataclasses import dataclass
+from typing import Dict, List
+from uuid import uuid4
+
+from flowcept.flowcept_api.flowcept_controller import Flowcept
+from flowcept.flowceptor.consumers.base_consumer import BaseConsumer
+
+
+@dataclass
+class BaseAppContext:
+    """
+    Container for storing agent context data during the lifespan of an application session.
+
+    Attributes
+    ----------
+    tasks : list of dict
+        A list of task messages received from the message queue. Each task message is stored as a dictionary.
+    """
+
+    tasks: List[Dict]
+
+
+class BaseAgentContextManager(BaseConsumer):
+    """
+    Base class for any MCP Agent that wants to participate in the Flowcept ecosystem.
+
+    Agents inheriting from this class can:
+    - Subscribe to and consume messages from the Flowcept-compatible message queue (MQ)
+    - Handle task-related messages and accumulate them in context
+    - Gracefully manage their lifecycle using an async context manager
+    - Interact with Flowcept’s provenance system to read/write messages, query databases, and store chat history
+
+    To integrate with Flowcept:
+    - Inherit from `BaseAgentContextManager`
+    - Override `message_handler()` if custom message handling is needed
+    - Access shared state via `self.context` during execution
+    """
+
+    agent_id = None
+
+    def __init__(self):
+        """
+        Initializes the agent and resets its context state.
+        """
+        self._started = False
+        super().__init__()
+        self.context = None
+        self.reset_context()
+        self.agent_id = BaseAgentContextManager.agent_id
+
+    def message_handler(self, msg_obj: Dict) -> bool:
+        """
+        Handles a single message received from the message queue.
+
+        Parameters
+        ----------
+        msg_obj : dict
+            The message received, typically structured with a "type" field.
+
+        Returns
+        -------
+        bool
+            Return True to continue listening for messages, or False to stop the loop.
+
+        Notes
+        -----
+        This default implementation stores messages of type 'task' in the internal context.
+        Override this method in a subclass to handle other message types or implement custom logic.
+        """
+        msg_type = msg_obj.get("type", None)
+        msg_subtype = msg_obj.get("subtype", "")
+        if msg_type == "task":
+            self.logger.debug("Received task msg!")
+            if msg_subtype not in {"llm_query"}:
+                self.context.tasks.append(msg_obj)
+
+        return True
+
+    def reset_context(self):
+        """
+        Resets the internal context, clearing all stored task data.
+        """
+        self.context = BaseAppContext(tasks=[])
+
+    @asynccontextmanager
+    async def lifespan(self, app):
+        """
+        Async context manager to handle the agent’s lifecycle within an application.
+
+        Starts the message consumption when the context is entered and stops it when exited.
+
+        Parameters
+        ----------
+        app : Any
+            The application instance using this context (typically unused but included for compatibility).
+
+        Yields
+        ------
+        BaseAppContext
+            The current application context, including collected tasks.
+        """
+        if not self._started:
+            self.agent_id = BaseAgentContextManager.agent_id = str(uuid4())
+            self.logger.info(f"Starting lifespan for agent {BaseAgentContextManager.agent_id}.")
+            self._started = True
+
+            f = Flowcept(
+                start_persistence=False,
+                save_workflow=True,
+                check_safe_stops=False,
+                workflow_name="agent_workflow",
+                workflow_args={"agent_id": self.agent_id},
+            )
+            self.agent_workflow_id = f.current_workflow_id
+            f.start()
+            f.logger.info(
+                f"This section's workflow_id={Flowcept.current_workflow_id}, campaign_id={Flowcept.campaign_id}"
+            )
+            self.start()
+
+        try:
+            yield self.context
+        finally:
+            self.stop_consumption()
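A hedged sketch of the subclassing pattern the class docstring describes (the class name and subtype filter are hypothetical):

    # Sketch only: a custom agent that keeps only tasks of one subtype.
    from flowcept.flowceptor.consumers.agent.base_agent_context_manager import (
        BaseAgentContextManager,
    )

    class MyAgentContext(BaseAgentContextManager):
        def message_handler(self, msg_obj):
            if msg_obj.get("type") == "task" and msg_obj.get("subtype") == "my_subtype":
                self.context.tasks.append(msg_obj)
            return True  # keep listening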
flowcept/flowceptor/consumers/base_consumer.py (new file)
@@ -0,0 +1,94 @@
+from abc import abstractmethod
+from threading import Thread
+from typing import Callable, Dict, Tuple, Optional
+
+from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao
+from flowcept.commons.flowcept_logger import FlowceptLogger
+
+
+class BaseConsumer(object):
+    """
+    Abstract base class for message consumers in a pub-sub architecture.
+
+    This class provides a standard interface and shared logic for subscribing to
+    message queues and dispatching messages to a handler.
+    """
+
+    def __init__(self):
+        """Initialize the message queue DAO and logger."""
+        self._mq_dao = MQDao.build()
+        self.logger = FlowceptLogger()
+        self._main_thread: Optional[Thread] = None
+
+    @abstractmethod
+    def message_handler(self, msg_obj: Dict) -> bool:
+        """
+        Handle a single incoming message.
+
+        Parameters
+        ----------
+        msg_obj : dict
+            The parsed message object received from the queue.
+
+        Returns
+        -------
+        bool
+            Return False to break the message listener loop.
+            Return True to continue listening.
+        """
+        pass
+
+    def start(self, target: Callable = None, args: Tuple = (), threaded: bool = True, daemon=False):
+        """
+        Start the consumer by subscribing and launching the message handler.
+
+        Parameters
+        ----------
+        target : Callable
+            The function to run for listening to messages (usually the message loop).
+        args : tuple, optional
+            Arguments to pass to the target function.
+        threaded : bool, default=True
+            Whether to run the target function in a background thread.
+        daemon : bool, default=False
+
+        Returns
+        -------
+        BaseConsumer
+            The current instance (to allow chaining).
+        """
+        if target is None:
+            target = self.default_thread_target
+        self._mq_dao.subscribe()
+        if threaded:
+            self._main_thread = Thread(target=target, args=args, daemon=daemon)
+            self._main_thread.start()
+        else:
+            target(*args)
+        return self
+
+    def default_thread_target(self):
+        """
+        The default message consumption loop.
+
+        This method is used as the default thread target when starting the consumer. It listens for
+        messages from the message queue and passes them to the consumer's `message_handler`.
+
+        Typically run in a background thread when `start()` is called without a custom target.
+
+        See Also
+        --------
+        start : Starts the consumer and optionally spawns a background thread to run this method.
+        """
+        self.logger.debug("Going to wait for new messages!")
+        self._mq_dao.message_listener(self.message_handler)
+        self.logger.debug("Broke main message listening loop!")
+        # self._mq_dao.stop(check_safe_stops=False)  # TODO Do we need to stop mq_dao here?
+        self.stop_consumption()
+        self.logger.debug("MQ stopped.")
+
+    def stop_consumption(self):
+        """
+        Stop consuming messages by unsubscribing from the message queue.
+        """
+        self._mq_dao.unsubscribe()
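A hedged sketch of a minimal concrete consumer built on this base class (the class name and the "stop" message type are hypothetical):

    # Sketch only: a consumer that prints messages and exits on a "stop" message.
    from flowcept.flowceptor.consumers.base_consumer import BaseConsumer

    class PrintingConsumer(BaseConsumer):
        def message_handler(self, msg_obj):
            print(msg_obj)
            # Returning False breaks message_listener's loop, which then calls
            # stop_consumption() in default_thread_target.
            return msg_obj.get("type") != "stop"

    consumer = PrintingConsumer().start(threaded=True, daemon=True)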