flowcept 0.9.9__py3-none-any.whl → 0.9.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
flowcept/__init__.py CHANGED
@@ -41,6 +41,11 @@ def __getattr__(name):
  return FlowceptLoop

+ elif name == "FlowceptLightweightLoop":
+ from flowcept.instrumentation.flowcept_loop import FlowceptLightweightLoop
+
+ return FlowceptLightweightLoop
+
  elif name == "telemetry_flowcept_task":
  from flowcept.instrumentation.flowcept_task import telemetry_flowcept_task

@@ -59,7 +64,7 @@ def __getattr__(name):
  return FlowceptDaskWorkerAdapter

  elif name == "SETTINGS_PATH":
- from configs import SETTINGS_PATH
+ from flowcept.configs import SETTINGS_PATH

  return SETTINGS_PATH
  elif name == "TaskQueryAPI":
@@ -74,6 +79,7 @@ __all__ = [
  "TaskQueryAPI",
  "flowcept_task",
  "FlowceptLoop",
+ "FlowceptLightweightLoop",
  "FlowceptTask",
  "telemetry_flowcept_task",
  "lightweight_flowcept_task",
@@ -39,9 +39,10 @@ def main():
  st.caption(
  "💡 Quick help\n"
  "Ask about workflow metrics, plots, or summaries.\n\n"
- "Inputs used, outputs generated.\n"
- "Commands: @record <note> add @show records list reset context clear • save context save.\n"
- "Tip: set result = df to run ad-hoc Python on the in-memory DataFrame."
+ "I have an internal DataFrame in my context to which you can ask direct questions."
+ "Tasks inputs are mapped to `used.*` fields, and outputs to `generated.*`\n"
+ "Commands: `@record <note>`; \n `@show records`; \n `reset context` ; `save context` \n"
+ "Tip: Inputs like `result = df[some valid df query]` will run direct queries to the df in context."
  )

  user_input = st.chat_input("Send a message")
@@ -104,25 +104,6 @@ def reset_records() -> ToolResult:
  except Exception as e:
  return ToolResult(code=499, result=str(e))

- @mcp_flowcept.tool()
- def show_records() -> ToolResult:
- """
- Lists all recorded user guidance.
- """
- try:
- ctx = mcp_flowcept.get_context()
- custom_guidance: List = ctx.request_context.lifespan_context.custom_guidance
- if not custom_guidance:
- message = "There is no recorded user guidance."
- else:
- message = "This is the list of custom guidance I have in my memory:"
- message += "\n".join(f"- {msg}" for msg in custom_guidance)
-
- return ToolResult(code=201, result=message)
- except Exception as e:
- return ToolResult(code=499, result=str(e))
-
-

  @mcp_flowcept.tool()
  def prompt_handler(message: str) -> ToolResult:
flowcept/cli.py CHANGED
@@ -106,7 +106,7 @@ def stream_messages(messages_file_path: Optional[str] = None, keys_to_show: List
  Listen to Flowcept's message stream and optionally echo/save messages.

  Parameters.
- ----------
+ -----------
  messages_file_path : str, optional
  If provided, append each message as JSON (one per line) to this file.
  If the file already exists, a new timestamped file is created instead.
@@ -350,7 +350,7 @@ def agent_client(tool_name: str, kwargs: str = None):
  """Agent Client.

  Parameters.
- ----------
+ -----------
  tool_name : str
  Name of the tool
  kwargs : str, optional
@@ -459,23 +459,85 @@ def start_mongo() -> None:
  databases:
  mongodb:
  - bin : str (required) path to the mongod executable
+ - db_path: str, required path to the db data directory
  - log_path : str, optional (adds --fork --logpath)
  - lock_file_path : str, optional (adds --pidfilepath)

+
  Builds and runs the startup command.
  """
+ import time
+ import socket
+ from flowcept.configs import MONGO_HOST, MONGO_PORT, MONGO_URI
+
+ def _port_open(host: str, port: int, timeout: float = 0.5) -> bool:
+ try:
+ with socket.create_connection((host, port), timeout=timeout):
+ return True
+ except OSError:
+ return False
+
+ def _await_mongo(host: str, port: int, uri: str | None, timeout: float = 20.0) -> bool:
+ """Wait until MongoDB is accepting connections (and ping if pymongo is available)."""
+ deadline = time.time() + timeout
+ have_pymongo = False
+ try:
+ from pymongo import MongoClient # optional
+
+ have_pymongo = True
+ except Exception:
+ pass
+
+ while time.time() < deadline:
+ if not _port_open(host, port):
+ time.sleep(0.25)
+ continue
+
+ if not have_pymongo:
+ return True # port is open; assume OK
+
+ try:
+ from pymongo import MongoClient
+
+ client = MongoClient(uri or f"mongodb://{host}:{port}", serverSelectionTimeoutMS=800)
+ client.admin.command("ping")
+ return True
+ except Exception:
+ time.sleep(0.25)
+
+ return False
+
+ def _tail(path: str, lines: int = 40) -> str:
+ try:
+ with open(path, "rb") as f:
+ f.seek(0, os.SEEK_END)
+ size = f.tell()
+ block = 1024
+ data = b""
+ while size > 0 and data.count(b"\n") <= lines:
+ size = max(0, size - block)
+ f.seek(size)
+ data = f.read(min(block, size)) + data
+ return data.decode(errors="replace").splitlines()[-lines:]
+ except Exception:
+ return []
+
  # Safe nested gets
  settings = getattr(configs, "settings", {}) or {}
  databases = settings.get("databases") or {}
  mongodb = databases.get("mongodb") or {}

  bin_path = mongodb.get("bin")
- log_path = mongodb.get("log_path")
- lock_file_path = mongodb.get("lock_file_path")
+ db_path = mongodb.get("db_path")
+ log_path = mongodb.get("log_path", None)
+ lock_file_path = mongodb.get("lock_file_path", None)

  if not bin_path:
  print("Error: settings['databases']['mongodb']['bin'] is required.")
  return
+ if not db_path:
+ print("Error: settings['databases']['mongodb']['db_path'] is required.")
+ return

  # Build command
  parts = [shlex.quote(str(bin_path))]
@@ -483,12 +545,29 @@ def start_mongo() -> None:
  parts += ["--fork", "--logpath", shlex.quote(str(log_path))]
  if lock_file_path:
  parts += ["--pidfilepath", shlex.quote(str(lock_file_path))]
+ if db_path:
+ parts += ["--dbpath", shlex.quote(str(db_path))]

  cmd = " ".join(parts)
  try:
+ # Background start returns immediately because --fork is set
  out = _run_command(cmd, check_output=True)
  if out:
  print(out)
+ print(f"mongod launched (logs: {log_path}). Waiting for readiness on {MONGO_HOST}:{MONGO_PORT} ...")
+
+ ok = _await_mongo(MONGO_HOST, MONGO_PORT, MONGO_URI, timeout=20.0)
+ if ok:
+ print("✅ MongoDB is up and responding.")
+ else:
+ print("❌ MongoDB did not become ready in time.")
+ if log_path:
+ last_lines = _tail(log_path, 60)
+ if last_lines:
+ print("---- mongod last log lines ----")
+ for line in last_lines:
+ print(line)
+ print("---- end ----")
  except subprocess.CalledProcessError as e:
  print(f"Failed to start MongoDB: {e}")
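The `start_mongo` command now launches `mongod` with `--fork` and then waits for readiness instead of returning immediately: poll the TCP port until it accepts connections, confirm with a `ping` once a client can be built, and print the tail of the mongod log on failure. A standalone sketch of the port-polling part, under assumed host and port values (the CLI itself reads `MONGO_HOST`/`MONGO_PORT` from `flowcept.configs`):

```python
import socket
import time


def wait_for_tcp(host: str, port: int, timeout: float = 20.0) -> bool:
    """Poll until a TCP connection to host:port succeeds or the deadline passes."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            with socket.create_connection((host, port), timeout=0.5):
                return True
        except OSError:
            time.sleep(0.25)
    return False


# Illustrative values only; the CLI uses the configured MongoDB host and port.
if wait_for_tcp("localhost", 27017):
    print("MongoDB port is accepting connections.")
else:
    print("MongoDB did not become ready in time.")
```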
@@ -19,7 +19,6 @@ from flowcept.configs import (
  MQ_TIMING,
  KVDB_ENABLED,
  MQ_ENABLED,
- DUMP_BUFFER_PATH,
  )

  from flowcept.commons.utils import GenericJSONEncoder
@@ -96,22 +95,11 @@ class MQDao(object):
  def bulk_publish(self, buffer):
  """Publish it."""
  # self.logger.info(f"Going to flush {len(buffer)} to MQ...")
- if flowcept.configs.DB_FLUSH_MODE == "offline":
- if DUMP_BUFFER_PATH is not None:
- import orjson
-
- with open(DUMP_BUFFER_PATH, "wb", buffering=1_048_576) as f:
- for obj in buffer:
- obj.pop("data", None) # We are not going to store data in the buffer file.
- f.write(orjson.dumps(obj))
- f.write(b"\n")
- self.logger.info(f"Saved Flowcept messages into {DUMP_BUFFER_PATH}.")
+ if MQ_CHUNK_SIZE > 1:
+ for chunk in chunked(buffer, MQ_CHUNK_SIZE):
+ self._bulk_publish(chunk)
  else:
- if MQ_CHUNK_SIZE > 1:
- for chunk in chunked(buffer, MQ_CHUNK_SIZE):
- self._bulk_publish(chunk)
- else:
- self._bulk_publish(buffer)
+ self._bulk_publish(buffer)

  def register_time_based_thread_init(self, interceptor_instance_id: str, exec_bundle_id=None):
  """Register the time."""
@@ -174,6 +162,12 @@ class MQDao(object):
  self.started = True

  def _close_buffer(self):
+ if flowcept.configs.DUMP_BUFFER_ENABLED and flowcept.configs.DUMP_BUFFER_PATH is not None:
+ from flowcept.commons.utils import buffer_to_disk
+
+ _buf = self.buffer.current_buffer if isinstance(self.buffer, AutoflushBuffer) else self.buffer
+ buffer_to_disk(_buf, flowcept.configs.DUMP_BUFFER_PATH, self.logger)
+
  if flowcept.configs.DB_FLUSH_MODE == "online":
  if self._time_based_flushing_started:
  self.buffer.stop()
@@ -181,7 +175,6 @@ class MQDao(object):
  else:
  self.logger.error("MQ time-based flushing is not started")
  else:
- self.bulk_publish(self.buffer)
  self.buffer = list()

  def _stop_timed(self, interceptor_instance_id: str, check_safe_stops: bool = True, bundle_exec_id: int = None):
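`bulk_publish` is now only responsible for chunking; the offline dump moved into `_close_buffer` via `buffer_to_disk`. When `MQ_CHUNK_SIZE > 1`, the buffer is split into fixed-size slices before each `_bulk_publish` call. A rough sketch of that slicing behavior with a hypothetical `chunked` helper (Flowcept imports its own; this stand-in only illustrates the idea):

```python
from typing import Dict, Iterator, List


def chunked(items: List[Dict], size: int) -> Iterator[List[Dict]]:
    """Yield consecutive slices of at most `size` items (illustrative helper)."""
    for start in range(0, len(items), size):
        yield items[start:start + size]


buffer = [{"task_id": i} for i in range(7)]
for chunk in chunked(buffer, 3):
    # Each chunk would correspond to one _bulk_publish call: sizes 3, 3, 1.
    print(len(chunk))
```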
flowcept/commons/utils.py CHANGED
@@ -4,7 +4,7 @@ import argparse
  from datetime import datetime, timedelta, timezone
  import json
  from time import time, sleep
- from typing import Callable
+ from typing import Callable, List, Dict
  import os
  import platform
  import subprocess
@@ -245,6 +245,24 @@ def get_current_config_values():
  return _vars


+ def buffer_to_disk(buffer: List[Dict], path: str, logger):
+ """
+ Append the in-memory buffer to a JSON Lines (JSONL) file on disk.
+ """
+ if not buffer:
+ logger.warning("The buffer is currently empty.")
+ return
+ with open(path, "ab", buffering=1_048_576) as f:
+ for obj in buffer:
+ obj.pop("data", None) # We are not going to store data in the buffer file.
+ from orjson import orjson
+
+ f.write(orjson.dumps(obj))
+ f.write(b"\n")
+
+ logger.info(f"Saved Flowcept buffer into {path}.")
+
+
  class GenericJSONDecoder(json.JSONDecoder):
  """JSON decoder class."""
@@ -35,3 +35,23 @@ class Status(str, Enum):
  def get_finished_statuses():
  """Get finished status."""
  return [Status.FINISHED, Status.ERROR]
+
+
+ class MimeType(Enum):
+ """MimeTypes used in Flowcept."""
+
+ JPEG = "image/jpeg"
+ PNG = "image/png"
+ GIF = "image/gif"
+ BMP = "image/bmp"
+ TIFF = "image/tiff"
+ WEBP = "image/webp"
+ SVG = "image/svg+xml"
+
+ # Documents
+ PDF = "application/pdf"
+
+ # Data formats
+ JSON = "application/json"
+ CSV = "text/csv"
+ JSONL = "application/x-ndjson" # standard for JSON Lines
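`buffer_to_disk` above writes one JSON object per line, and the new `MimeType.JSONL` member names exactly that format. A minimal sketch of reading such a buffer file back with the standard library; the file path is illustrative and the `MimeType` import path is assumed from this diff:

```python
import json

from flowcept.commons.vocabulary import MimeType  # import path assumed from this diff

print(MimeType.JSONL.value)  # "application/x-ndjson", the JSON Lines media type

records = []
with open("flowcept_buffer.jsonl", "rb") as f:  # illustrative path
    for line in f:
        if line.strip():
            records.append(json.loads(line))
print(f"{len(records)} messages restored from the buffer file")
```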
flowcept/configs.py CHANGED
@@ -11,7 +11,7 @@ PROJECT_NAME = "flowcept"
  DEFAULT_SETTINGS = {
  "version": __version__,
  "log": {"log_file_level": "disable", "log_stream_level": "disable"},
- "project": {"dump_buffer_path": "flowcept_messages.jsonl"},
+ "project": {"dump_buffer": {"enabled": True}},
  "telemetry_capture": {},
  "instrumentation": {},
  "experiment": {},
@@ -27,7 +27,9 @@
  "agent": {},
  }

- USE_DEFAULT = os.getenv("FLOWCEPT_USE_DEFAULT", "False").lower() == "true"
+ _TRUE_VALUES = {"1", "true", "yes", "y", "t"}
+
+ USE_DEFAULT = os.getenv("FLOWCEPT_USE_DEFAULT", "False").lower() in _TRUE_VALUES

  if USE_DEFAULT:
  settings = DEFAULT_SETTINGS.copy()
@@ -86,8 +88,8 @@ MQ_PASSWORD = settings["mq"].get("password", None)
  MQ_HOST = os.getenv("MQ_HOST", settings["mq"].get("host", "localhost"))
  MQ_PORT = int(os.getenv("MQ_PORT", settings["mq"].get("port", "6379")))
  MQ_URI = os.getenv("MQ_URI", settings["mq"].get("uri", None))
- MQ_BUFFER_SIZE = settings["mq"].get("buffer_size", None)
- MQ_INSERTION_BUFFER_TIME = settings["mq"].get("insertion_buffer_time_secs", None)
+ MQ_BUFFER_SIZE = settings["mq"].get("buffer_size", 1)
+ MQ_INSERTION_BUFFER_TIME = settings["mq"].get("insertion_buffer_time_secs", 1)
  MQ_TIMING = settings["mq"].get("timing", False)
  MQ_CHUNK_SIZE = int(settings["mq"].get("chunk_size", -1))
@@ -158,10 +160,21 @@ PERF_LOG = settings["project"].get("performance_logging", False)
  JSON_SERIALIZER = settings["project"].get("json_serializer", "default")
  REPLACE_NON_JSON_SERIALIZABLE = settings["project"].get("replace_non_json_serializable", True)
  ENRICH_MESSAGES = settings["project"].get("enrich_messages", True)
- DUMP_BUFFER_PATH = settings["project"].get("dump_buffer_path", "flowcept_messages.jsonl")
+
+ _DEFAULT_DUMP_BUFFER_ENABLED = DB_FLUSH_MODE == "offline"
+ DUMP_BUFFER_ENABLED = (
+ os.getenv(
+ "DUMP_BUFFER", str(settings["project"].get("dump_buffer", {}).get("enabled", _DEFAULT_DUMP_BUFFER_ENABLED))
+ )
+ .strip()
+ .lower()
+ in _TRUE_VALUES
+ )
+ DUMP_BUFFER_PATH = settings["project"].get("dump_buffer", {}).get("path", "flowcept_buffer.jsonl")

  TELEMETRY_CAPTURE = settings.get("telemetry_capture", None)
- TELEMETRY_ENABLED = TELEMETRY_CAPTURE is not None and len(TELEMETRY_CAPTURE)
+ TELEMETRY_ENABLED = os.getenv("TELEMETRY_ENABLED", "true").strip().lower() in _TRUE_VALUES
+ TELEMETRY_ENABLED = TELEMETRY_ENABLED and (TELEMETRY_CAPTURE is not None) and (len(TELEMETRY_CAPTURE) > 0)

  ######################
  # SYS METADATA #
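With `_TRUE_VALUES`, the boolean-ish toggles (`FLOWCEPT_USE_DEFAULT`, `DUMP_BUFFER`, `TELEMETRY_ENABLED`, `AGENT_AUDIO`) all accept the same spellings. A minimal sketch of the parsing rule; the example value is illustrative:

```python
import os

_TRUE_VALUES = {"1", "true", "yes", "y", "t"}

# "1", "true", "YES", " y " (after strip/lower) all enable the flag; anything else disables it.
os.environ["DUMP_BUFFER"] = "Yes"
dump_buffer_enabled = os.getenv("DUMP_BUFFER", "false").strip().lower() in _TRUE_VALUES
print(dump_buffer_enabled)  # True
```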
@@ -235,13 +248,9 @@ INSTRUMENTATION = settings.get("instrumentation", {})
  INSTRUMENTATION_ENABLED = INSTRUMENTATION.get("enabled", True)

  AGENT = settings.get("agent", {})
- AGENT_AUDIO = os.getenv("AGENT_AUDIO", str(settings["agent"].get("audio_enabled", "false"))).strip().lower() in {
- "1",
- "true",
- "yes",
- "y",
- "t",
- }
+ AGENT_AUDIO = (
+ os.getenv("AGENT_AUDIO", str(settings["agent"].get("audio_enabled", "false"))).strip().lower() in _TRUE_VALUES
+ )
  AGENT_HOST = os.getenv("AGENT_HOST", settings["agent"].get("mcp_host", "localhost"))
  AGENT_PORT = int(os.getenv("AGENT_PORT", settings["agent"].get("mcp_port", "8000")))
@@ -1,5 +1,6 @@
  """Controller module."""

+ import os
  from typing import List, Dict, Any
  from uuid import uuid4

@@ -9,7 +10,7 @@ from flowcept.commons.flowcept_dataclasses.workflow_object import (
  WorkflowObject,
  )
  from flowcept.commons.flowcept_logger import FlowceptLogger
- from flowcept.commons.utils import ClassProperty
+ from flowcept.commons.utils import ClassProperty, buffer_to_disk
  from flowcept.configs import (
  MQ_INSTANCES,
  INSTRUMENTATION_ENABLED,
@@ -44,7 +45,7 @@ class Flowcept(object):
  def __init__(
  self,
  interceptors: List[str] = None,
- bundle_exec_id=None,
+ bundle_exec_id: str = None,
  campaign_id: str = None,
  workflow_id: str = None,
  workflow_name: str = None,
@@ -52,6 +53,7 @@
  start_persistence=True,
  check_safe_stops=True, # TODO add to docstring
  save_workflow=True,
+ delete_buffer_file=True,
  *args,
  **kwargs,
  ):
@@ -68,7 +70,7 @@ class Flowcept(object):
  Examples: "instrumentation", "dask", "mlflow", ...
  The order of interceptors matters — place the outer-most interceptor first,

- bundle_exec_id : Any, optional
+ bundle_exec_id : str, optional
  Identifier for grouping interceptors in a bundle, essential for the correct initialization and stop of
  interceptors. If not provided, a unique ID is assigned.

@@ -90,6 +92,9 @@
  save_workflow : bool, default=True
  If True, a workflow object message is sent.

+ delete_buffer_file : bool, default=True
+ if True, deletes an existing existing buffer file or ignores if it doesn't exist.
+
  Additional arguments (`*args`, `**kwargs`) are used for specific adapters.
  For example, when using the Dask interceptor, the `dask_client` argument
  should be provided in `kwargs` to enable saving the Dask workflow, which is recommended.
@@ -101,9 +106,9 @@ class Flowcept(object):
  self.buffer = None
  self._check_safe_stops = check_safe_stops
  if bundle_exec_id is None:
- self._bundle_exec_id = id(self)
+ self.bundle_exec_id = str(id(self))
  else:
- self._bundle_exec_id = bundle_exec_id
+ self.bundle_exec_id = str(bundle_exec_id)

  self.enabled = True
  self.is_started = False
@@ -129,8 +134,11 @@
  self.workflow_name = workflow_name
  self.workflow_args = workflow_args

+ if delete_buffer_file:
+ Flowcept.delete_buffer_file()
+
  def start(self):
- """Start it."""
+ """Start Flowcept Controller."""
  if self.is_started or not self.enabled:
  self.logger.warning("DB inserter may be already started or instrumentation is not set")
  return self
@@ -154,7 +162,7 @@
  Flowcept.current_workflow_id = self.current_workflow_id

  interceptor_inst = BaseInterceptor.build(interceptor)
- interceptor_inst.start(bundle_exec_id=self._bundle_exec_id, check_safe_stops=self._check_safe_stops)
+ interceptor_inst.start(bundle_exec_id=self.bundle_exec_id, check_safe_stops=self._check_safe_stops)
  self._interceptor_instances.append(interceptor_inst)
  if isinstance(interceptor_inst._mq_dao.buffer, AutoflushBuffer):
  Flowcept.buffer = self.buffer = interceptor_inst._mq_dao.buffer.current_buffer
@@ -170,11 +178,88 @@ class Flowcept(object):
  self.logger.debug("Flowcept started successfully.")
  return self

+ def get_buffer(self, return_df: bool = False):
+ """
+ Retrieve the in-memory message buffer.
+
+ Parameters
+ ----------
+ return_df : bool, optional
+ If False (default), return the raw buffer as a list of dictionaries.
+ If True, normalize the buffer into a pandas DataFrame with dotted
+ notation for nested keys. Requires ``pandas`` to be installed.
+
+ Returns
+ -------
+ list of dict or pandas.DataFrame
+ - If ``return_df=False``: the buffer as a list of dictionaries.
+ - If ``return_df=True``: the buffer as a normalized DataFrame.
+
+ Raises
+ ------
+ ModuleNotFoundError
+ If ``return_df=True`` but ``pandas`` is not installed.
+
+ Examples
+ --------
+ >>> buf = flowcept.get_buffer()
+ >>> isinstance(buf, list)
+ True
+
+ >>> df = flowcept.get_buffer(return_df=True)
+ >>> "generated.attention" in df.columns
+ True
+ """
+ if return_df:
+ try:
+ import pandas as pd
+ except ModuleNotFoundError as e:
+ raise ModuleNotFoundError("pandas is required when return_df=True. Please install pandas.") from e
+ return pd.json_normalize(self.buffer, sep=".")
+ return self.buffer
+
  def _publish_buffer(self):
  self._interceptor_instances[0]._mq_dao.bulk_publish(self.buffer)

+ def dump_buffer(self, path: str = None):
+ """
+ Dump the current in-memory buffer to a JSON Lines (JSONL) file.
+
+ Each element of the buffer (a dictionary) is serialized as a single line
+ of JSON. If no path is provided, the default path from the settings file
+ is used.
+
+ Parameters
+ ----------
+ path : str, optional
+ Destination file path for the JSONL output. If not provided,
+ defaults to ``DUMP_BUFFER_PATH`` as configured in the settings.
+
+ Returns
+ -------
+ None
+ The buffer is written to disk, no value is returned.
+
+ Notes
+ -----
+ - The buffer is expected to be a list of dictionaries.
+ - Existing files at the specified path will be overwritten.
+ - Logging is performed through the class logger.
+
+ Examples
+ --------
+ >>> flowcept.dump_buffer("buffer.jsonl")
+ # Writes buffer contents to buffer.jsonl
+
+ >>> flowcept.dump_buffer()
+ # Writes buffer contents to the default path defined in settings
+ """
+ if path is None:
+ path = DUMP_BUFFER_PATH
+ buffer_to_disk(self.buffer, path, self.logger)
+
  @staticmethod
- def read_messages_file(file_path: str | None = None, return_df: bool = False):
+ def read_buffer_file(file_path: str | None = None, return_df: bool = False, normalize_df: bool = False):
  """
  Read a JSON Lines (JSONL) file containing captured Flowcept messages.
@@ -187,12 +272,15 @@ class Flowcept(object):
  Parameters
  ----------
  file_path : str, optional
- Path to the messages file. If not provided, defaults to the value of
+ Path to the buffer file. If not provided, defaults to the value of
  ``DUMP_BUFFER_PATH`` from the configuration. If neither is provided,
  an assertion error is raised.
  return_df : bool, default False
  If True, return a normalized pandas DataFrame. If False, return the
  parsed list of dictionaries.
+ normalize_df: bool, default False
+ If True, normalize the inner dicts (e.g., used, generated, custom_metadata) as individual columns in the
+ returned DataFrame.

  Returns
  -------
@@ -215,13 +303,13 @@
  --------
  Read messages as a list:

- >>> msgs = read_messages_file("offline_buffer.jsonl")
+ >>> msgs = read_buffer_file("offline_buffer.jsonl")
  >>> len(msgs) > 0
  True

  Read messages as a normalized DataFrame:

- >>> df = read_messages_file("offline_buffer.jsonl", return_df=True)
+ >>> df = read_buffer_file("offline_buffer.jsonl", return_df=True)
  >>> "generated.attention" in df.columns
  True
  """
@@ -232,7 +320,7 @@ class Flowcept(object):
  file_path = DUMP_BUFFER_PATH
  assert file_path is not None, "Please indicate file_path either in the argument or in the config file."
  if not os.path.exists(file_path):
- raise FileNotFoundError(f"File '{file_path}' was not found. It is created only in fully offline mode.")
+ raise FileNotFoundError(f"Flowcept buffer file '{file_path}' was not found.")

  with open(file_path, "rb") as f:
  lines = [ln for ln in f.read().splitlines() if ln]
@@ -244,10 +332,57 @@ class Flowcept(object):
  import pandas as pd
  except ModuleNotFoundError as e:
  raise ModuleNotFoundError("pandas is required when return_df=True. Please install pandas.") from e
- return pd.json_normalize(buffer, sep=".")
+ if normalize_df:
+ return pd.json_normalize(buffer, sep=".")
+ else:
+ return pd.read_json(file_path, lines=True)

  return buffer

+ @staticmethod
+ def delete_buffer_file(path: str = None):
+ """
+ Delete the buffer file from disk if it exists.
+
+ If no path is provided, the default path from the settings file
+ is used. Logs whether the file was successfully removed or not found.
+
+ Parameters
+ ----------
+ path : str, optional
+ Path to the buffer JSONL file. If not provided,
+ defaults to ``DUMP_BUFFER_PATH`` as configured in the settings.
+
+ Returns
+ -------
+ None
+ The file is deleted from disk if it exists, no value is returned.
+
+ Notes
+ -----
+ - This operation only affects the file on disk. It does not clear
+ the in-memory buffer.
+ - Logging is performed through the class logger.
+
+ Examples
+ --------
+ >>> flowcept.delete_buffer_file("buffer.jsonl")
+ # Deletes buffer.jsonl if it exists
+
+ >>> flowcept.delete_buffer_file()
+ # Deletes the default buffer file defined in settings
+ """
+ if path is None:
+ path = DUMP_BUFFER_PATH
+
+ try:
+ if os.path.exists(path):
+ os.remove(path)
+ FlowceptLogger().info(f"Buffer file deleted: {path}")
+ except Exception as e:
+ FlowceptLogger().error(f"Failed to delete buffer file: {path}")
+ FlowceptLogger().exception(e)
+
  def save_workflow(self, interceptor: str, interceptor_instance: BaseInterceptor):
  """
  Save the current workflow and send its metadata using the provided interceptor.
@@ -297,12 +432,12 @@ class Flowcept(object):
  from flowcept.flowceptor.consumers.document_inserter import DocumentInserter

- doc_inserter = DocumentInserter(check_safe_stops=self._check_safe_stops, bundle_exec_id=self._bundle_exec_id)
+ doc_inserter = DocumentInserter(check_safe_stops=self._check_safe_stops, bundle_exec_id=self.bundle_exec_id)
  doc_inserter.start()
  self._db_inserters.append(doc_inserter)

  def stop(self):
- """Stop it."""
+ """Stop Flowcept controller."""
  if not self.is_started or not self.enabled:
  self.logger.warning("Flowcept is already stopped or may never have been started!")
  return
@@ -316,7 +451,7 @@
  if len(self._db_inserters):
  self.logger.info("Stopping DB Inserters...")
  for db_inserter in self._db_inserters:
- db_inserter.stop(bundle_exec_id=self._bundle_exec_id)
+ db_inserter.stop(bundle_exec_id=self.bundle_exec_id)

  Flowcept.buffer = self.buffer = None
  self.is_started = False
@@ -346,8 +481,7 @@
  - The method tests the liveness of the MQ service using `MQDao`.
  - If `MONGO_ENABLED` is True, it also checks the liveness of the MongoDB service
  using `MongoDBDAO`.
- - Logs errors if any service is not ready, and logs success when both services are
- operational.
+ - Logs errors if any service is not ready, and logs success when both services are operational.

  Examples
  --------
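Together with `delete_buffer_file`, the controller now exposes the in-memory buffer directly through `get_buffer` and `dump_buffer`. A usage sketch, assuming a working Flowcept setup; the constructor arguments here are illustrative and `pandas` is only needed for the DataFrame form:

```python
from flowcept import Flowcept

f = Flowcept(workflow_name="buffer_demo", start_persistence=False)  # illustrative arguments
f.start()

raw = f.get_buffer()              # list of message dicts currently held in memory
f.dump_buffer("my_buffer.jsonl")  # append the buffer to a JSONL file on disk

f.stop()
Flowcept.delete_buffer_file("my_buffer.jsonl")  # remove the file again, if present
```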
@@ -10,6 +10,7 @@ from flowcept.commons.flowcept_dataclasses.workflow_object import (
  from flowcept.configs import (
  ENRICH_MESSAGES,
  TELEMETRY_ENABLED,
+ TELEMETRY_CAPTURE,
  )
  from flowcept.commons.flowcept_logger import FlowceptLogger
  from flowcept.commons.daos.mq_dao.mq_dao_base import MQDao
@@ -135,7 +136,7 @@ class BaseInterceptor(object):
  # TODO :base-interceptor-refactor: :code-reorg: :usability:
  raise Exception(f"This interceptor {id(self)} has never been started!")
  workflow_obj.interceptor_ids = [self._interceptor_instance_id]
- if self.telemetry_capture:
+ if self.telemetry_capture and TELEMETRY_CAPTURE.get("machine_info", False):
  machine_info = self.telemetry_capture.capture_machine_info()
  if workflow_obj.machine_info is None:
  workflow_obj.machine_info = dict()
@@ -1,4 +1,4 @@
- """FlowCept Loop module."""
+ """Flowcept Loop module."""

  import uuid
  from time import time
@@ -49,6 +49,7 @@ class FlowceptLoop:
  ----------
  items : Union[Sized, Iterator, int]
  The items to iterate over. Can be:
+
  - A sized iterable (e.g., list, range).
  - An integer (interpreted as ``range(items)``).
  - An iterator (requires ``items_length`` if length cannot be inferred).
@@ -216,8 +217,7 @@ class FlowceptLightweightLoop:

  The `FlowceptLightweightLoop` class supports iterating over a collection of items or a numeric
  range while capturing metadata for each iteration and for the loop as a whole.
- This is particularly useful in scenarios where tracking and instrumentation of loop executions
- is required.
+ This is particularly useful in scenarios where tracking and instrumentation of loop executions is required.

  Parameters
  ----------
@@ -269,6 +269,7 @@
  ----------
  items : Union[Sized, Iterator]
  The items to iterate over. Must either be:
+
  - A sized iterable (with ``__len__``).
  - An explicit iterator (length must be given by ``items_length``).
  loop_name : str, optional
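For context, both loop classes wrap an iterable and capture per-iteration metadata, and `FlowceptLightweightLoop` is now importable straight from the top-level package thanks to the lazy `__getattr__` branch added earlier in this diff. A usage sketch limited to the parameters documented above (`items`, `loop_name`); anything beyond them is assumed:

```python
from flowcept import FlowceptLightweightLoop  # lazily exported by flowcept/__init__.py

# Iterate over a sized iterable; each iteration is captured as loop metadata.
for step in FlowceptLightweightLoop(items=range(3), loop_name="epochs"):
    result = step * 2  # illustrative loop body
```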
flowcept/version.py CHANGED
@@ -4,4 +4,4 @@
  # The expected format is: <Major>.<Minor>.<Patch>
  # This file is supposed to be automatically modified by the CI Bot.
  # See .github/workflows/version_bumper.py
- __version__ = "0.9.9"
+ __version__ = "0.9.11"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: flowcept
- Version: 0.9.9
+ Version: 0.9.11
  Summary: Capture and query workflow provenance data using data observability
  Author: Oak Ridge National Laboratory
  License-Expression: MIT
@@ -56,6 +56,7 @@ Requires-Dist: tensorflow; extra == 'all'
  Requires-Dist: tomli; extra == 'all'
  Requires-Dist: watchdog; extra == 'all'
  Provides-Extra: analytics
+ Requires-Dist: matplotlib; extra == 'analytics'
  Requires-Dist: plotly; extra == 'analytics'
  Requires-Dist: scipy; extra == 'analytics'
  Requires-Dist: seaborn; extra == 'analytics'
@@ -219,7 +220,7 @@ def main():
  if __name__ == "__main__":
  main()

- prov_messages = Flowcept.read_messages_file()
+ prov_messages = Flowcept.read_buffer_file()
  assert len(prov_messages) == 2
  print(json.dumps(prov_messages, indent=2))
  ```
@@ -1,7 +1,7 @@
- flowcept/__init__.py,sha256=urpwIEJeikV0P6ORXKsM5Lq4o6wCwhySS9A487BYGy4,2241
- flowcept/cli.py,sha256=eVnUrmZtVhZ1ldRMGB1QsqBzNC1Pf2CX33efnlaZ4gs,22842
- flowcept/configs.py,sha256=aXgBkBpTs4_4MpvAe76aQ5lXl1gTmgk92bFiNqMQXPM,8382
- flowcept/version.py,sha256=ueMAMza92zxBIjD4eZdTMYArVaKDr6QL5eZ23IZjyd4,306
+ flowcept/__init__.py,sha256=tvVZKyymdqv3qOsgpAyDppBlUiBc0ag4QF21IcS-mVk,2449
+ flowcept/cli.py,sha256=AU6EuawCboCv933Ghb1xFL74UMIJeVTzNocPOoaJS0Q,25696
+ flowcept/configs.py,sha256=DBkYx0CAaDSl8x2EJY1665PFY80eCp9PEriYH-BNwL4,8781
+ flowcept/version.py,sha256=czeTBejrSGB7lfhNV-3pEINXGkwZD73Z8SRmBycnbvI,307
  flowcept/agents/__init__.py,sha256=8eeD2CiKBtHiDsWdrHK_UreIkKlTq4dUbhHDyzw372o,175
  flowcept/agents/agent_client.py,sha256=UiBQkC9WE2weLZR2OTkEOEQt9-zqQOkPwRA17HfI-jk,2027
  flowcept/agents/agents_utils.py,sha256=Az5lvWTsBHs_3sWWwy7jSdDjNn-PvZ7KmYd79wxvdyU,6666
@@ -9,7 +9,7 @@ flowcept/agents/dynamic_schema_tracker.py,sha256=TsmXRRkyUkqB-0bEgmeqSms8xj1tMMJ
  flowcept/agents/flowcept_agent.py,sha256=1sidjnNMdG0S6lUKBvml7ZfIb6o3u7zc6HNogsJbl9g,871
  flowcept/agents/flowcept_ctx_manager.py,sha256=-WmkddzzFY2dnU9LbZaoY4-5RcSAQH4FziEJgcC5LEI,7083
  flowcept/agents/gui/__init__.py,sha256=Qw9YKbAzgZqBjMQGnF7XWmfUo0fivtkDISQRK3LA3gU,113
- flowcept/agents/gui/agent_gui.py,sha256=44G6AXM9zJbVcJUziZcU5aA6mVDwtvgO8H58nTCFC0M,2855
+ flowcept/agents/gui/agent_gui.py,sha256=VpwhQamzFKBfrmibxOIc-8wXtZnd2Cq7tbKahZZOp7c,2995
  flowcept/agents/gui/audio_utils.py,sha256=piA_dc36io1sYqLF6QArS4AMl-cfDa001jGhYz5LkB4,4279
  flowcept/agents/gui/gui_utils.py,sha256=cQVhOgnfxJNUVZyXyO8f40nB1yaKAKVtBrwQmJjL0B0,14933
  flowcept/agents/llms/__init__.py,sha256=kzOaJic5VhMBnGvy_Fr5C6sRKVrRntH1ZnYz7f5_4-s,23
@@ -19,7 +19,7 @@ flowcept/agents/prompts/__init__.py,sha256=7ICsNhLYzvPS1esG3Vg519s51b1c4yN0WegJU
  flowcept/agents/prompts/general_prompts.py,sha256=b0QhnF-ytIE1_WWrgpamC4VybjS8KuS051DgVVt8r2U,3961
  flowcept/agents/prompts/in_memory_query_prompts.py,sha256=iRaGySybNxZf5vuQ3n9cb14VNk6bMQ0z3tn2mVVke0E,19817
  flowcept/agents/tools/__init__.py,sha256=Xqz2E4-LL_7DDcm1XYJFx2f5RdAsjeTpOJb_DPC7xyc,27
- flowcept/agents/tools/general_tools.py,sha256=yOxNb7ZgrP93Ky85cdjiZkCZSH_1biB0pA2K1e9bzFQ,5038
+ flowcept/agents/tools/general_tools.py,sha256=JSMG_UGdRKcQfC4_ixzDXDHW92UX5i0UsLTzFq0fmZg,4402
  flowcept/agents/tools/in_memory_queries/__init__.py,sha256=K8-JI_lXUgquKkgga8Nef8AntGg_logQtjjQjaEE7yI,39
  flowcept/agents/tools/in_memory_queries/in_memory_queries_tools.py,sha256=GcfAiUBhQ1DU3QKk0kAy9TSq8XmZw691Xs0beZoO76A,25984
  flowcept/agents/tools/in_memory_queries/pandas_agent_utils.py,sha256=xyrZupR86qoUptnnQ7PeF0LTzSOquEK2cjc0ghT1KBs,9018
@@ -33,8 +33,8 @@ flowcept/commons/flowcept_logger.py,sha256=0asRucrDMeRXvsdhuCmH6lWO7lAt_Z5o5uW7r
  flowcept/commons/query_utils.py,sha256=3tyK5VYA10iDtmtzNwa8OQGn93DBxsu6rTjHDphftSc,2208
  flowcept/commons/settings_factory.py,sha256=bMTjgXRfb5HsL2lPnLfem-9trqELbNWE04Ie7lSlxYM,1731
  flowcept/commons/task_data_preprocess.py,sha256=-ceLexv2ZfZOAYF43DPagGwQPgt_L_lNKuK8ZCpnzXs,13914
- flowcept/commons/utils.py,sha256=gF6ENWlTpR2ZSw3yVNPNBTVzSpcgy-WuzYzwWSXXsug,9252
- flowcept/commons/vocabulary.py,sha256=_GzHJ1wSYJlLsu_uu1Am6N3zvc59S4FCuT5yp7lynPw,713
+ flowcept/commons/utils.py,sha256=okCShkcuWhzznBtADDDusTdfPXO0W041b2f4Aog-7SE,9831
+ flowcept/commons/vocabulary.py,sha256=0psC4NulNFn88mjTcoT_aT4QxX8ljMFgTOF3FxzM40A,1118
  flowcept/commons/daos/__init__.py,sha256=RO51svfHOg9naN676zuQwbj_RQ6IFHu-RALeefvtwwk,23
  flowcept/commons/daos/keyvalue_dao.py,sha256=g7zgC9hVC1NTllwUAqGt44YqdqYUgAKgPlX8_G4BRGw,3599
  flowcept/commons/daos/redis_conn.py,sha256=gFyW-5yf6B8ExEYopCmbap8ki-iEwuIw-KH9f6o7UGQ,1495
@@ -43,7 +43,7 @@ flowcept/commons/daos/docdb_dao/docdb_dao_base.py,sha256=YbfSVJPwZGK2GBYkeapRC83
  flowcept/commons/daos/docdb_dao/lmdb_dao.py,sha256=ZuCsdEhI2wGAmjAf82j-1t3tbR6YMmDeaJ_C3HcsLYo,10461
  flowcept/commons/daos/docdb_dao/mongodb_dao.py,sha256=5x0un15uCDTcnuITOyOhvF9mKj_bUmF2du0AHQfjN9k,40055
  flowcept/commons/daos/mq_dao/__init__.py,sha256=Xxm4FmbBUZDQ7XIAmSFbeKE_AdHsbgFmSuftvMWSykQ,21
- flowcept/commons/daos/mq_dao/mq_dao_base.py,sha256=EL8eQedvNLsVLMz4oHemBAsR1S6xFZiezM8dIqKmmCA,9696
+ flowcept/commons/daos/mq_dao/mq_dao_base.py,sha256=VXqXzesU01dCHE5i0urnYQppixUNGZbJMRmm4jSAcgM,9424
  flowcept/commons/daos/mq_dao/mq_dao_kafka.py,sha256=kjZqPLIu5PaNeM4IDvOxkDRVGTd5UWwq3zhDvVirqW8,5067
  flowcept/commons/daos/mq_dao/mq_dao_mofka.py,sha256=tRdMGYDzdeIJxad-B4-DE6u8Wzs61eTzOW4ojZrnTxs,4057
  flowcept/commons/daos/mq_dao/mq_dao_redis.py,sha256=ejBMxImA-h2KuMEAk3l7aU0chCcObCbUXEOXM6L4Zhc,5571
@@ -54,7 +54,7 @@ flowcept/commons/flowcept_dataclasses/telemetry.py,sha256=9_5ONCo-06r5nKHXmi5HfI
  flowcept/commons/flowcept_dataclasses/workflow_object.py,sha256=cauWtXHhBv9lHS-q6cb7yUsNiwQ6PkZPuSinR1TKcqU,6161
  flowcept/flowcept_api/__init__.py,sha256=T1ty86YlocQ5Z18l5fUqHj_CC6Unq_iBv0lFyiI7Ao8,22
  flowcept/flowcept_api/db_api.py,sha256=hKXep-n50rp9cAzV0ljk2QVEF8O64yxi3ujXv5_Ibac,9723
- flowcept/flowcept_api/flowcept_controller.py,sha256=jfssXUvG55RVXJBziq-lXekt7Dog3mAalo5Zsp_7_to,16060
+ flowcept/flowcept_api/flowcept_controller.py,sha256=az1bktiL8_xs4pc97Zqgd1ezsg-cD0whf3XWA1ZN08Q,20652
  flowcept/flowcept_api/task_query_api.py,sha256=SrwB0OCVtbpvCPECkE2ySM10G_g8Wlk5PJ8h-0xEaNc,23821
  flowcept/flowcept_webserver/__init__.py,sha256=8411GIXGddKTKoHUvbo_Rq6svosNG7tG8VzvUEBd7WI,28
  flowcept/flowcept_webserver/app.py,sha256=VUV8_JZbIbx9u_1O7m7XtRdhZb_7uifUa-iNlPhmZws,658
@@ -64,7 +64,7 @@ flowcept/flowcept_webserver/resources/task_messages_rsrc.py,sha256=0u68it2W-9NzU
  flowcept/flowceptor/__init__.py,sha256=wVxRXUv07iNx6SMRRma2vqhR_GIcRl0re_WCYG65PUs,29
  flowcept/flowceptor/telemetry_capture.py,sha256=CWyR8E1rTAjFbUFI9BxaGfJyDd2UbiK0uLGt4m8BnSU,13932
  flowcept/flowceptor/adapters/__init__.py,sha256=SuZbSZVVQeBJ9zXW-M9jF09dw3XIjre3lSGrUO1Y8Po,27
- flowcept/flowceptor/adapters/base_interceptor.py,sha256=kbdYW6VuvmBibOVy7Pg3OzeD3OUaHc6jnAhRBpj9f14,6517
+ flowcept/flowceptor/adapters/base_interceptor.py,sha256=oqnD19TNqi1FHlwAsbyEkiLgywpm5HYLHT1gYlsOHTk,6589
  flowcept/flowceptor/adapters/instrumentation_interceptor.py,sha256=DhK2bBnpghqPSeA62BUqRg6pl8zxuYrP33dK4x6PhRE,733
  flowcept/flowceptor/adapters/interceptor_state_manager.py,sha256=xRzmi5YFKBEqNtX8F5s6XlMTRe27ml4BmQtBO4WtG2c,919
  flowcept/flowceptor/adapters/brokers/__init__.py,sha256=mhQXVmh0JklvL93GUtJZLJnPRYX9Nmb8IqcyKJGQBzk,36
@@ -90,13 +90,13 @@ flowcept/flowceptor/consumers/agent/base_agent_context_manager.py,sha256=5fBPYs-
  flowcept/instrumentation/__init__.py,sha256=M5bTmg80E4QyN91gUX3qfw_nbtJSXwGWcKxdZP3vJz0,34
  flowcept/instrumentation/flowcept_agent_task.py,sha256=XN9JU4LODca0SgojUm4F5iU_V8tuWkOt1fAKcoOAG34,10757
  flowcept/instrumentation/flowcept_decorator.py,sha256=X4Lp_FSsoL08K8ZhRM4mC0OjKupbQtbMQR8zxy3ezDY,1350
- flowcept/instrumentation/flowcept_loop.py,sha256=jea_hYPuXg5_nOWf-nNb4vx8A__OBM4m96_92-J51o4,15670
+ flowcept/instrumentation/flowcept_loop.py,sha256=nF7Sov-DCDapyYvS8zx-1ZFrnjc3CPg2VsjDaxFs0Cc,15667
  flowcept/instrumentation/flowcept_task.py,sha256=EmKODpjl8usNklKSVmsKYyCa6gC_QMqKhAr3DKaw44s,8199
  flowcept/instrumentation/flowcept_torch.py,sha256=kkZQRYq6cDBpdBU6J39_4oKRVkhyF3ODlz8ydV5WGKw,23455
  flowcept/instrumentation/task_capture.py,sha256=1g9EtLdqsTB0RHsF-eRmA2Xh9l_YqTd953d4v89IC24,8287
- resources/sample_settings.yaml,sha256=2IxbkbpFEymoUk-qcR70wsdXX2Y2cVP31cVWDVy0BNg,6779
- flowcept-0.9.9.dist-info/METADATA,sha256=NoHey7ehIO4kN0PrOCbz4z-TQ1Susn_yFOTsbd464uk,32439
- flowcept-0.9.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- flowcept-0.9.9.dist-info/entry_points.txt,sha256=i8q67WE0201rVxYI2lyBtS52shvgl93x2Szp4q8zMlw,47
- flowcept-0.9.9.dist-info/licenses/LICENSE,sha256=r5-2P6tFTuRGWT5TiX32s1y0tnp4cIqBEC1QjTaXe2k,1086
- flowcept-0.9.9.dist-info/RECORD,,
+ resources/sample_settings.yaml,sha256=5insrkZdP-DHoB6rKxO9QkSqZUBVMaVlkTKWh_8TjIg,6881
+ flowcept-0.9.11.dist-info/METADATA,sha256=CIU3PnotF8ynCtOLhTYofW7CYWz5aGZhOhOP1h-hb8c,32486
+ flowcept-0.9.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ flowcept-0.9.11.dist-info/entry_points.txt,sha256=i8q67WE0201rVxYI2lyBtS52shvgl93x2Szp4q8zMlw,47
+ flowcept-0.9.11.dist-info/licenses/LICENSE,sha256=r5-2P6tFTuRGWT5TiX32s1y0tnp4cIqBEC1QjTaXe2k,1086
+ flowcept-0.9.11.dist-info/RECORD,,
@@ -1,13 +1,15 @@
- flowcept_version: 0.9.9 # Version of the Flowcept package. This setting file is compatible with this version.
+ flowcept_version: 0.9.11 # Version of the Flowcept package. This setting file is compatible with this version.

  project:
  debug: true # Toggle debug mode. This will add a property `debug: true` to all saved data, making it easier to retrieve/delete them later.
  json_serializer: default # JSON serialization mode: default or complex. If "complex", Flowcept will deal with complex python dicts that may contain JSON unserializable values
  replace_non_json_serializable: true # Replace values that can't be JSON serialized
  performance_logging: false # Enable performance logging if true. Particularly useful for MQ flushes.
- enrich_messages: true # Add extra metadata to task messages, such as IP addresses and UTC timestamps.
+ enrich_messages: true # Add extra metadata to task messages, such as IP addresses of the node that executed the task, UTC timestamps, GitHub repo metadata.
  db_flush_mode: online # Mode for flushing DB entries: "online" or "offline". If online, flushes to the DB will happen before the workflow ends.
- # dump_buffer_path: flowcept_messages.jsonl # This is useful if you need to run completely offline. If you omit this, even offline, buffer data will not be persisted.
+ dump_buffer: # This is particularly useful if you need to run completely offline. If you omit this, even offline, buffer data will not be persisted.
+ enabled: false
+ path: flowcept_buffer.jsonl

  log:
  log_path: "default" # Path for log file output; "default" will write the log in the directory where the main executable is running from.
@@ -106,6 +108,7 @@ databases:
  db: flowcept
  create_collection_index: true # Whether flowcept should create collection indices if they haven't been created yet. This is done only at the Flowcept start up.
  # bin: /usr/bin/mongod
+ # db_path:
  # log_path: /var/log/mongodb/mongod.log
  # lock_file_path: /var/run/mongod.pid
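Taken together, the `dump_buffer` settings block, the `DUMP_BUFFER` environment variable, and the renamed `read_buffer_file` support a fully offline round trip: capture tasks, persist the buffer to JSONL, and read it back without any database. A sketch under those assumptions, mirroring the README snippet shown earlier in this diff:

```python
from flowcept import Flowcept, flowcept_task


@flowcept_task
def double(x: int):
    return {"y": 2 * x}


# Assumes project.dump_buffer.enabled is true in the settings (or DUMP_BUFFER=true),
# so captured messages are persisted to the JSONL buffer file when the controller stops.
f = Flowcept(workflow_name="offline_demo")
f.start()
double(21)
f.stop()

msgs = Flowcept.read_buffer_file()  # list of dicts from flowcept_buffer.jsonl
df = Flowcept.read_buffer_file(return_df=True, normalize_df=True)
print(len(msgs), list(df.columns)[:5])
```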