flowcept-0.8.10-py3-none-any.whl → flowcept-0.8.12-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowcept/__init__.py +7 -4
- flowcept/agents/__init__.py +5 -0
- flowcept/agents/agent_client.py +58 -0
- flowcept/agents/agents_utils.py +181 -0
- flowcept/agents/dynamic_schema_tracker.py +191 -0
- flowcept/agents/flowcept_agent.py +30 -0
- flowcept/agents/flowcept_ctx_manager.py +175 -0
- flowcept/agents/gui/__init__.py +5 -0
- flowcept/agents/gui/agent_gui.py +76 -0
- flowcept/agents/gui/gui_utils.py +239 -0
- flowcept/agents/llms/__init__.py +1 -0
- flowcept/agents/llms/claude_gcp.py +139 -0
- flowcept/agents/llms/gemini25.py +119 -0
- flowcept/agents/prompts/__init__.py +1 -0
- flowcept/agents/prompts/general_prompts.py +69 -0
- flowcept/agents/prompts/in_memory_query_prompts.py +297 -0
- flowcept/agents/tools/__init__.py +1 -0
- flowcept/agents/tools/general_tools.py +102 -0
- flowcept/agents/tools/in_memory_queries/__init__.py +1 -0
- flowcept/agents/tools/in_memory_queries/in_memory_queries_tools.py +704 -0
- flowcept/agents/tools/in_memory_queries/pandas_agent_utils.py +309 -0
- flowcept/cli.py +459 -17
- flowcept/commons/daos/docdb_dao/mongodb_dao.py +47 -0
- flowcept/commons/daos/keyvalue_dao.py +19 -23
- flowcept/commons/daos/mq_dao/mq_dao_base.py +49 -38
- flowcept/commons/daos/mq_dao/mq_dao_kafka.py +20 -3
- flowcept/commons/daos/mq_dao/mq_dao_mofka.py +4 -0
- flowcept/commons/daos/mq_dao/mq_dao_redis.py +38 -5
- flowcept/commons/daos/redis_conn.py +47 -0
- flowcept/commons/flowcept_dataclasses/task_object.py +50 -27
- flowcept/commons/flowcept_dataclasses/workflow_object.py +9 -1
- flowcept/commons/settings_factory.py +2 -4
- flowcept/commons/task_data_preprocess.py +400 -0
- flowcept/commons/utils.py +26 -7
- flowcept/configs.py +48 -29
- flowcept/flowcept_api/flowcept_controller.py +102 -18
- flowcept/flowceptor/adapters/base_interceptor.py +24 -11
- flowcept/flowceptor/adapters/brokers/__init__.py +1 -0
- flowcept/flowceptor/adapters/brokers/mqtt_interceptor.py +132 -0
- flowcept/flowceptor/adapters/mlflow/mlflow_interceptor.py +3 -3
- flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py +3 -3
- flowcept/flowceptor/consumers/agent/__init__.py +1 -0
- flowcept/flowceptor/consumers/agent/base_agent_context_manager.py +125 -0
- flowcept/flowceptor/consumers/base_consumer.py +94 -0
- flowcept/flowceptor/consumers/consumer_utils.py +5 -4
- flowcept/flowceptor/consumers/document_inserter.py +135 -36
- flowcept/flowceptor/telemetry_capture.py +6 -3
- flowcept/instrumentation/flowcept_agent_task.py +294 -0
- flowcept/instrumentation/flowcept_decorator.py +43 -0
- flowcept/instrumentation/flowcept_loop.py +3 -3
- flowcept/instrumentation/flowcept_task.py +64 -24
- flowcept/instrumentation/flowcept_torch.py +5 -5
- flowcept/instrumentation/task_capture.py +87 -4
- flowcept/version.py +1 -1
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/METADATA +48 -11
- flowcept-0.8.12.dist-info/RECORD +101 -0
- resources/sample_settings.yaml +46 -14
- flowcept/flowceptor/adapters/zambeze/__init__.py +0 -1
- flowcept/flowceptor/adapters/zambeze/zambeze_dataclasses.py +0 -41
- flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py +0 -102
- flowcept-0.8.10.dist-info/RECORD +0 -75
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/WHEEL +0 -0
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/entry_points.txt +0 -0
- {flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/licenses/LICENSE +0 -0
flowcept/instrumentation/flowcept_task.py CHANGED

@@ -2,6 +2,7 @@
 
 import threading
 from time import time
+import inspect
 from functools import wraps
 import argparse
 from flowcept.commons.flowcept_dataclasses.task_object import (
@@ -11,10 +12,7 @@ from flowcept.commons.vocabulary import Status
 from flowcept.commons.flowcept_logger import FlowceptLogger
 
 from flowcept.commons.utils import replace_non_serializable
-from flowcept.configs import (
-    REPLACE_NON_JSON_SERIALIZABLE,
-    INSTRUMENTATION_ENABLED,
-)
+from flowcept.configs import REPLACE_NON_JSON_SERIALIZABLE, INSTRUMENTATION_ENABLED, HOSTNAME, TELEMETRY_ENABLED
 from flowcept.flowcept_api.flowcept_controller import Flowcept
 from flowcept.flowceptor.adapters.instrumentation_interceptor import InstrumentationInterceptor
 
@@ -54,8 +52,8 @@ def telemetry_flowcept_task(func=None):
             _thread_local._flowcept_current_context_task_id = task_obj["task_id"]
             task_obj["workflow_id"] = kwargs.pop("workflow_id", Flowcept.current_workflow_id)
             task_obj["used"] = kwargs
-
-
+            if TELEMETRY_ENABLED:
+                tel = interceptor.telemetry_capture.capture()
             task_obj["telemetry_at_start"] = tel.to_dict()
             try:
                 result = func(*args, **kwargs)
@@ -65,8 +63,8 @@ def telemetry_flowcept_task(func=None):
                 result = None
                 task_obj["stderr"] = str(e)
             # task_obj["ended_at"] = time()
-
-
+            if TELEMETRY_ENABLED:
+                tel = interceptor.telemetry_capture.capture()
             task_obj["telemetry_at_end"] = tel.to_dict()
             task_obj["generated"] = result
             interceptor.intercept(task_obj)
@@ -107,39 +105,52 @@ def lightweight_flowcept_task(func=None):
     return decorator(func)
 
 
-# def flowcept_task_switch(mode=None):
-#     if mode is None:
-#         return flowcept_task
-#     elif mode == "disable":
-#         return lambda _: _
-#     else:
-#         raise NotImplementedError
-
-
 def flowcept_task(func=None, **decorator_kwargs):
-    """
+    """Flowcept task decorator."""
     if INSTRUMENTATION_ENABLED:
        interceptor = InstrumentationInterceptor.get_instance()
     logger = FlowceptLogger()
 
     def decorator(func):
+        # Precompute once (perf)
+        sig = inspect.signature(func)
+        args_handler = decorator_kwargs.get("args_handler", default_args_handler)
+        custom_metadata = decorator_kwargs.get("custom_metadata", None)
+        tags = decorator_kwargs.get("tags", None)
+        subtype = decorator_kwargs.get("subtype", None)
+        output_names = decorator_kwargs.get("output_names", None)
+
         @wraps(func)
         def wrapper(*args, **kwargs):
             if not INSTRUMENTATION_ENABLED:
                 return func(*args, **kwargs)
 
-
-
+            # Bind inputs to parameter names
+            try:
+                bound_args = sig.bind(*args, **kwargs)
+                bound_args.apply_defaults()
+                handled_args = args_handler(**dict(bound_args.arguments))
+            except Exception as e:
+                if isinstance(e, TypeError):
+                    raise e
+                else:
+                    handled_args = args_handler(*args, **kwargs)
 
+            task_obj = TaskObject()
+            task_obj.subtype = subtype
             task_obj.activity_id = func.__name__
-            handled_args = args_handler(*args, **kwargs)
             task_obj.workflow_id = handled_args.pop("workflow_id", Flowcept.current_workflow_id)
             task_obj.campaign_id = handled_args.pop("campaign_id", Flowcept.campaign_id)
             task_obj.used = handled_args
+            task_obj.tags = tags
             task_obj.started_at = time()
+            task_obj.custom_metadata = custom_metadata
+            task_obj.hostname = HOSTNAME
             task_obj.task_id = str(task_obj.started_at)
             _thread_local._flowcept_current_context_task_id = task_obj.task_id
-
+            if TELEMETRY_ENABLED:
+                task_obj.telemetry_at_start = interceptor.telemetry_capture.capture()
+
             try:
                 result = func(*args, **kwargs)
                 task_obj.status = Status.FINISHED
@@ -148,13 +159,42 @@ def flowcept_task(func=None, **decorator_kwargs):
                 result = None
                 logger.exception(e)
                 task_obj.stderr = str(e)
+
             task_obj.ended_at = time()
-
+            if TELEMETRY_ENABLED:
+                task_obj.telemetry_at_end = interceptor.telemetry_capture.capture()
+
+            # Output handling: only use output_names if provided
             try:
                 if result is not None:
+                    named = None
+
                     if isinstance(result, dict):
-
+                        # User already returned a mapping; pass through
+                        try:
+                            task_obj.generated = args_handler(**result)
+                        except Exception:
+                            task_obj.generated = result
+                    elif output_names:
+                        # If output_names provided, map scalar or tuple/list to names
+                        if isinstance(result, (tuple, list)):
+                            if len(output_names) == len(result):
+                                named = {k: v for k, v in zip(output_names, result)}
+                        elif isinstance(output_names, str):
+                            named = {output_names: result}
+                        elif isinstance(output_names, (tuple, list)) and len(output_names) == 1:
+                            named = {output_names[0]: result}
+
+                        if isinstance(named, dict):
+                            try:
+                                task_obj.generated = args_handler(**named)
+                            except Exception:
+                                task_obj.generated = named
+                        else:
+                            # Mismatch or no mapping possible -> original behavior
+                            task_obj.generated = args_handler(result)
                     else:
+                        # No output_names: original behavior
                         task_obj.generated = args_handler(result)
             except Exception as e:
                 logger.exception(e)
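Note on the flowcept_task changes above: the decorator now reads its options (args_handler, custom_metadata, tags, subtype, output_names) once per decorated function, binds call arguments to parameter names via inspect.signature, and, when output_names is given, maps a returned scalar or tuple onto named outputs. A minimal usage sketch follows; the Flowcept(workflow_name=...) context-manager call mirrors the project README and is an assumption, not something shown in this diff.

    from flowcept import Flowcept
    from flowcept.instrumentation.flowcept_task import flowcept_task

    @flowcept_task(output_names=["total", "mean"], tags=["example"], subtype="arithmetic")
    def summarize(values):
        # A tuple with the same length as output_names is recorded as
        # generated = {"total": ..., "mean": ...} instead of a bare tuple.
        return sum(values), sum(values) / len(values)

    with Flowcept(workflow_name="demo_wf"):
        summarize([1, 2, 3])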
flowcept/instrumentation/flowcept_torch.py CHANGED

@@ -18,9 +18,9 @@ from flowcept.commons.flowcept_dataclasses.workflow_object import (
 from flowcept.commons.vocabulary import Status
 from flowcept.configs import (
     INSTRUMENTATION,
-    TELEMETRY_CAPTURE,
     REPLACE_NON_JSON_SERIALIZABLE,
     INSTRUMENTATION_ENABLED,
+    TELEMETRY_ENABLED,
 )
 from flowcept.flowcept_api.flowcept_controller import Flowcept
 from flowcept.flowceptor.adapters.base_interceptor import BaseInterceptor
@@ -192,8 +192,8 @@ def flowcept_torch(cls):
             if self._current_epoch < 1:
                 forward_task["generated"] = {"tensor": _inspect_torch_tensor(y)}
 
-
-
+            if TELEMETRY_ENABLED:
+                tel = TorchModuleWrapper._interceptor.telemetry_capture.capture()
             forward_task["telemetry_at_end"] = tel.to_dict()
 
             TorchModuleWrapper._interceptor.intercept(forward_task)
@@ -322,9 +322,9 @@ def flowcept_torch(cls):
 
     def _get_our_child_forward_func(mode):
         """Pick the torch_task function."""
-        if "telemetry" in mode and
+        if "telemetry" in mode and not TELEMETRY_ENABLED:
             raise Exception(
-                "Your telemetry settings are
+                "Your telemetry settings are disabled but you chose a telemetry mode. Please revise your settings."
             )
         elif mode == "lightweight":
             return _our_forward_lightweight
flowcept/instrumentation/task_capture.py CHANGED

@@ -1,5 +1,5 @@
 from time import time
-from typing import Dict
+from typing import Dict, Any
 import os
 import threading
 import random
@@ -8,7 +8,7 @@ from flowcept.commons.flowcept_dataclasses.task_object import (
     TaskObject,
 )
 from flowcept.commons.vocabulary import Status
-from flowcept.configs import INSTRUMENTATION_ENABLED
+from flowcept.configs import INSTRUMENTATION_ENABLED, TELEMETRY_ENABLED
 from flowcept.flowcept_api.flowcept_controller import Flowcept
 from flowcept.flowceptor.adapters.instrumentation_interceptor import InstrumentationInterceptor
 
@@ -58,24 +58,88 @@ class FlowceptTask(object):
         workflow_id: str = None,
         campaign_id: str = None,
         activity_id: str = None,
+        agent_id: str = None,
+        parent_task_id: str = None,
         used: Dict = None,
+        data: Any = None,
+        subtype: str = None,
         custom_metadata: Dict = None,
+        generated: Dict = None,
+        ended_at: float = None,
+        stdout: str = None,
+        stderr: str = None,
+        status: Status = None,
     ):
+        """
+        Initializes a FlowceptTask and optionally finalizes it.
+
+        If any of the following optional arguments are provided — `generated`, `ended_at`, `stdout`,
+        `stderr`, or `status` — the task will be automatically finalized by calling `end()` during
+        initialization. This is useful when the task's outcome is already known at the moment of
+        instantiation.
+
+        Parameters
+        ----------
+        task_id : str, optional
+            Unique identifier for the task. Defaults to a generated ID.
+        workflow_id : str, optional
+            ID of the workflow to which this task belongs.
+        campaign_id : str, optional
+            ID of the campaign to which this task belongs.
+        activity_id : str, optional
+            Describes the specific activity this task captures.
+        used : Dict, optional
+            Metadata about resources or data used during the task.
+        subtype : str, optional
+            Optional string categorizing the task subtype.
+        custom_metadata : Dict, optional
+            Additional user-defined metadata to associate with the task.
+        generated : Dict, optional
+            Output data generated during the task execution.
+        ended_at : float, optional
+            Timestamp indicating when the task ended.
+        stdout : str, optional
+            Captured standard output from the task.
+        stderr : str, optional
+            Captured standard error from the task.
+        status : Status, optional
+            Task completion status. If provided, defaults to Status.FINISHED if unspecified.
+        """
         if not INSTRUMENTATION_ENABLED:
             self._ended = True
             return
+
         self._task = TaskObject()
         self._interceptor = InstrumentationInterceptor.get_instance()
-
+
+        if TELEMETRY_ENABLED:
+            tel = self._interceptor.telemetry_capture.capture()
+            self._task.telemetry_at_start = tel
+
         self._task.activity_id = activity_id
         self._task.started_at = time()
         self._task.task_id = task_id or self._gen_task_id()
         self._task.workflow_id = workflow_id or Flowcept.current_workflow_id
         self._task.campaign_id = campaign_id or Flowcept.campaign_id
+        self._task.parent_task_id = parent_task_id
         self._task.used = used
+        self._task.data = data
+        self._task.subtype = subtype
+        self._task.agent_id = agent_id
         self._task.custom_metadata = custom_metadata
+
         self._ended = False
 
+        # Check if any of the end-like fields were provided. If yes, end it.
+        if any([generated, ended_at, stdout, stderr is not None]):
+            self.end(
+                generated=generated,
+                ended_at=ended_at,
+                stdout=stdout,
+                stderr=stderr,
+                status=status or Status.FINISHED,
+            )
+
     def __enter__(self):
         return self
 
@@ -124,11 +188,30 @@ class FlowceptTask(object):
         """
         if not INSTRUMENTATION_ENABLED:
             return
-
+        if TELEMETRY_ENABLED:
+            tel = self._interceptor.telemetry_capture.capture()
+            self._task.telemetry_at_end = tel
         self._task.ended_at = ended_at or time()
         self._task.status = status
         self._task.stderr = stderr
         self._task.stdout = stdout
         self._task.generated = generated
+        if self._interceptor._mq_dao.buffer is None:
+            raise Exception("Did you start Flowcept?")
         self._interceptor.intercept(self._task.to_dict())
         self._ended = True
+
+    def send(self):
+        """
+        Finalizes and sends the task data if not already ended.
+
+        This method acts as a simple alias for finalizing the task without requiring additional
+        arguments. It sends the task object to the interceptor for capture and marks it as ended
+        to prevent multiple submissions.
+        """
+        if not self._ended:
+            if self._interceptor._mq_dao.buffer is None:
+                raise Exception("Did you start Flowcept?")
+            self._task.ended_at = self._task.started_at  # message sents are not going to be analyzed for task duration
+            self._interceptor.intercept(self._task.to_dict())
+            self._ended = True
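Note on the FlowceptTask changes above: the constructor now accepts end-like fields (generated, ended_at, stdout, stderr, status) that trigger immediate finalization, and the new send() method pushes a task that was never explicitly ended. A short sketch under the assumption that Flowcept has already been started (end() and send() raise "Did you start Flowcept?" otherwise); the context-manager call follows the project README and is not part of this diff.

    from flowcept import Flowcept
    from flowcept.instrumentation.task_capture import FlowceptTask

    with Flowcept(workflow_name="demo_wf"):
        # Passing `generated` (an end-like field) makes __init__ call end()
        # right away, so no explicit end() is needed for this task.
        FlowceptTask(
            activity_id="preprocess",
            used={"input_rows": 1000},
            generated={"output_rows": 987},
        )

        # Alternatively, build a task and push it immediately with send().
        FlowceptTask(activity_id="notify", used={"channel": "email"}).send()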
flowcept/version.py CHANGED
{flowcept-0.8.10.dist-info → flowcept-0.8.12.dist-info}/METADATA CHANGED

@@ -1,44 +1,52 @@
 Metadata-Version: 2.4
 Name: flowcept
-Version: 0.8.10
+Version: 0.8.12
 Summary: Capture and query workflow provenance data using data observability
-Project-URL: GitHub, https://github.com/ORNL/flowcept
 Author: Oak Ridge National Laboratory
 License-Expression: MIT
 License-File: LICENSE
-Keywords: ai,big-data,dask,data-analytics,data-integration,databases,lineage,machine-learning,ml,mlflow,model-management,parallel-processing,provenance,reproducibility,responsible-ai,scientific-workflows,tensorboard,workflows
+Keywords: agentic-ai,agentic-workflows,ai,big-data,dask,data-analytics,data-integration,databases,lineage,llm,machine-learning,ml,mlflow,model-management,parallel-processing,provenance,reproducibility,responsible-ai,scientific-workflows,tensorboard,workflows
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Requires-Python: >=3.10
-Requires-Dist: flask-restful
-Requires-Dist: lmdb
 Requires-Dist: msgpack
+Requires-Dist: numpy
 Requires-Dist: omegaconf
-Requires-Dist:
-Requires-Dist: psutil>=6.1.1
-Requires-Dist: py-cpuinfo
-Requires-Dist: pyarrow
-Requires-Dist: redis
-Requires-Dist: requests
+Requires-Dist: orjson
 Provides-Extra: all
 Requires-Dist: alembic; extra == 'all'
 Requires-Dist: confluent-kafka<=2.8.0; extra == 'all'
+Requires-Dist: cryptography; extra == 'all'
 Requires-Dist: dask[distributed]<=2024.10.0; extra == 'all'
+Requires-Dist: flask-restful; extra == 'all'
 Requires-Dist: furo; extra == 'all'
+Requires-Dist: gitpython; extra == 'all'
+Requires-Dist: google-genai; extra == 'all'
 Requires-Dist: jupyterlab; extra == 'all'
+Requires-Dist: langchain-community; extra == 'all'
+Requires-Dist: lmdb; extra == 'all'
+Requires-Dist: mcp[cli]; extra == 'all'
 Requires-Dist: mlflow-skinny; extra == 'all'
 Requires-Dist: nbmake; extra == 'all'
+Requires-Dist: paho-mqtt; extra == 'all'
+Requires-Dist: pandas; extra == 'all'
 Requires-Dist: pika; extra == 'all'
 Requires-Dist: plotly; extra == 'all'
+Requires-Dist: psutil>=6.1.1; extra == 'all'
+Requires-Dist: py-cpuinfo; extra == 'all'
+Requires-Dist: pyarrow; extra == 'all'
 Requires-Dist: pymongo; extra == 'all'
 Requires-Dist: pytest; extra == 'all'
 Requires-Dist: pyyaml; extra == 'all'
+Requires-Dist: redis; extra == 'all'
+Requires-Dist: requests; extra == 'all'
 Requires-Dist: ruff; extra == 'all'
 Requires-Dist: scipy; extra == 'all'
 Requires-Dist: seaborn; extra == 'all'
 Requires-Dist: sphinx; extra == 'all'
 Requires-Dist: sqlalchemy; extra == 'all'
+Requires-Dist: streamlit; extra == 'all'
 Requires-Dist: tbparse; extra == 'all'
 Requires-Dist: tensorboard; extra == 'all'
 Requires-Dist: tensorflow; extra == 'all'
@@ -63,8 +71,28 @@ Requires-Dist: sphinx; extra == 'dev'
 Provides-Extra: docs
 Requires-Dist: furo; extra == 'docs'
 Requires-Dist: sphinx; extra == 'docs'
+Provides-Extra: extras
+Requires-Dist: flask-restful; extra == 'extras'
+Requires-Dist: gitpython; extra == 'extras'
+Requires-Dist: lmdb; extra == 'extras'
+Requires-Dist: pandas; extra == 'extras'
+Requires-Dist: psutil>=6.1.1; extra == 'extras'
+Requires-Dist: py-cpuinfo; extra == 'extras'
+Requires-Dist: redis; extra == 'extras'
+Requires-Dist: requests; extra == 'extras'
 Provides-Extra: kafka
 Requires-Dist: confluent-kafka<=2.8.0; extra == 'kafka'
+Provides-Extra: llm-agent
+Requires-Dist: langchain-community; extra == 'llm-agent'
+Requires-Dist: mcp[cli]; extra == 'llm-agent'
+Requires-Dist: streamlit; extra == 'llm-agent'
+Provides-Extra: llm-google
+Requires-Dist: google-genai; extra == 'llm-google'
+Requires-Dist: langchain-community; extra == 'llm-google'
+Requires-Dist: mcp[cli]; extra == 'llm-google'
+Requires-Dist: streamlit; extra == 'llm-google'
+Provides-Extra: lmdb
+Requires-Dist: lmdb; extra == 'lmdb'
 Provides-Extra: ml-dev
 Requires-Dist: datasets==2.17.0; extra == 'ml-dev'
 Requires-Dist: nltk; extra == 'ml-dev'
@@ -75,13 +103,22 @@ Requires-Dist: torchtext==0.17.2; extra == 'ml-dev'
 Requires-Dist: torchvision==0.17.2; extra == 'ml-dev'
 Provides-Extra: mlflow
 Requires-Dist: alembic; extra == 'mlflow'
+Requires-Dist: cryptography; extra == 'mlflow'
 Requires-Dist: mlflow-skinny; extra == 'mlflow'
 Requires-Dist: sqlalchemy; extra == 'mlflow'
 Requires-Dist: watchdog; extra == 'mlflow'
 Provides-Extra: mongo
+Requires-Dist: pyarrow; extra == 'mongo'
 Requires-Dist: pymongo; extra == 'mongo'
+Provides-Extra: mqtt
+Requires-Dist: paho-mqtt; extra == 'mqtt'
 Provides-Extra: nvidia
 Requires-Dist: nvidia-ml-py; extra == 'nvidia'
+Provides-Extra: redis
+Requires-Dist: redis; extra == 'redis'
+Provides-Extra: telemetry
+Requires-Dist: psutil>=6.1.1; extra == 'telemetry'
+Requires-Dist: py-cpuinfo; extra == 'telemetry'
 Provides-Extra: tensorboard
 Requires-Dist: tbparse; extra == 'tensorboard'
 Requires-Dist: tensorboard; extra == 'tensorboard'
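Taken together, the METADATA changes above shrink the mandatory dependencies to a small core (msgpack, numpy, omegaconf, orjson) and move everything else into optional extras, introducing new groups named extras, llm-agent, llm-google, lmdb, mqtt, redis, and telemetry. Presumably these are installed with pip's standard extras syntax, for example `pip install "flowcept[llm-agent]"` or `pip install "flowcept[mqtt,redis]"`; the recommended combinations are not stated in this diff.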
flowcept-0.8.12.dist-info/RECORD ADDED

@@ -0,0 +1,101 @@
+flowcept/__init__.py,sha256=ZDHSYTpv7qNrCgx7km3mCNRaJ2jfc0KRKKvRXdVxFwA,2101
+flowcept/cli.py,sha256=NB7rzu38Rc8Zyb8ou1XNa7X2NN--EQ7GKdyh0_Kx1Ts,22852
+flowcept/configs.py,sha256=DOpwjKMGE-4GDT22DhNrYbeGPMgPOlngDJnuC2rQuDM,8195
+flowcept/version.py,sha256=bMjQQHKMyz43zE-bJkgh3iaVPOlS5qK_sCzyrr5fH9Y,307
+flowcept/agents/__init__.py,sha256=8eeD2CiKBtHiDsWdrHK_UreIkKlTq4dUbhHDyzw372o,175
+flowcept/agents/agent_client.py,sha256=UiBQkC9WE2weLZR2OTkEOEQt9-zqQOkPwRA17HfI-jk,2027
+flowcept/agents/agents_utils.py,sha256=Az5lvWTsBHs_3sWWwy7jSdDjNn-PvZ7KmYd79wxvdyU,6666
+flowcept/agents/dynamic_schema_tracker.py,sha256=TsmXRRkyUkqB-0bEgmeqSms8xj1tMMJeYvjoaO2mtwI,6829
+flowcept/agents/flowcept_agent.py,sha256=1sidjnNMdG0S6lUKBvml7ZfIb6o3u7zc6HNogsJbl9g,871
+flowcept/agents/flowcept_ctx_manager.py,sha256=-WYulunHE62w61z8cy3u7TEnvgV1hflNEXsnm8YxwFw,6840
+flowcept/agents/gui/__init__.py,sha256=Qw9YKbAzgZqBjMQGnF7XWmfUo0fivtkDISQRK3LA3gU,113
+flowcept/agents/gui/agent_gui.py,sha256=8sTG3MjWBi6oc4tnfHa-duTBXWEE6RBxBE5uHooGkzI,2501
+flowcept/agents/gui/gui_utils.py,sha256=Qex0G9Asgb_UnLTySB8cYNEEy9ZnmLYnLddbornoDcI,7861
+flowcept/agents/llms/__init__.py,sha256=kzOaJic5VhMBnGvy_Fr5C6sRKVrRntH1ZnYz7f5_4-s,23
+flowcept/agents/llms/claude_gcp.py,sha256=fzz7235DgzVueuFj5odsr93jWtYHpYlXkSGW1kmmJwU,4915
+flowcept/agents/llms/gemini25.py,sha256=VARrjb3tITIh3_Wppmocp_ocSKVZNon0o0GeFEwTnTI,4229
+flowcept/agents/prompts/__init__.py,sha256=7ICsNhLYzvPS1esG3Vg519s51b1c4yN0WegJUb6Qvww,26
+flowcept/agents/prompts/general_prompts.py,sha256=5UYBGti2Mdr5VIPm2Ewn1wxZsVXgRE8jWNvQ-8HZ0Oo,3685
+flowcept/agents/prompts/in_memory_query_prompts.py,sha256=oWvZQNUHBBrGq-f94ulhIZW4bkkze02EzAuHY5640QM,17934
+flowcept/agents/tools/__init__.py,sha256=Xqz2E4-LL_7DDcm1XYJFx2f5RdAsjeTpOJb_DPC7xyc,27
+flowcept/agents/tools/general_tools.py,sha256=Dw1vYNzVUp8dIB48KFPNxGenERoS8UqJj0HIEfhjQeA,2752
+flowcept/agents/tools/in_memory_queries/__init__.py,sha256=K8-JI_lXUgquKkgga8Nef8AntGg_logQtjjQjaEE7yI,39
+flowcept/agents/tools/in_memory_queries/in_memory_queries_tools.py,sha256=hrVal1ktf6lvBmVWS7cR_lQy4cIz7ZNYLC-MN61WNRg,25450
+flowcept/agents/tools/in_memory_queries/pandas_agent_utils.py,sha256=xi69oywlGb6IUkhQKXoKoswYuWK5FyiWHy2MnRjTzds,9055
+flowcept/analytics/__init__.py,sha256=46q-7vsHq_ddPNrzNnDgEOiRgvlx-5Ggu2ocyROMV0w,641
+flowcept/analytics/analytics_utils.py,sha256=FRJdBtQa7Hrk2oR_FFhmhmMf3X6YyZ4nbH5RIYh7KL4,8753
+flowcept/analytics/data_augmentation.py,sha256=Dyr5x316Zf-k1e8rVoQMCpFOrklYVHjfejRPrtoycmc,1641
+flowcept/analytics/plot.py,sha256=L56y1HRnTE6-Fxs62Y0rV2OtDwjSwgSP3yLdalkiRBQ,2932
+flowcept/commons/__init__.py,sha256=W94CqapS0IGuuIGHHaz4sNuuiYhgtJWtpDEbnI0pGwI,26
+flowcept/commons/autoflush_buffer.py,sha256=8M0fcIeHck-mSGQ2HFpW3_Af8-dHswhIbUMX5FATm48,2589
+flowcept/commons/flowcept_logger.py,sha256=0asRucrDMeRXvsdhuCmH6lWO7lAt_Z5o5uW7rrQhcjc,1857
+flowcept/commons/query_utils.py,sha256=3tyK5VYA10iDtmtzNwa8OQGn93DBxsu6rTjHDphftSc,2208
+flowcept/commons/settings_factory.py,sha256=bMTjgXRfb5HsL2lPnLfem-9trqELbNWE04Ie7lSlxYM,1731
+flowcept/commons/task_data_preprocess.py,sha256=yxLOq3PhfJYDeOUrbBzLc-x7zDrKqB30pwk1nIqtdgo,13552
+flowcept/commons/utils.py,sha256=gF6ENWlTpR2ZSw3yVNPNBTVzSpcgy-WuzYzwWSXXsug,9252
+flowcept/commons/vocabulary.py,sha256=_GzHJ1wSYJlLsu_uu1Am6N3zvc59S4FCuT5yp7lynPw,713
+flowcept/commons/daos/__init__.py,sha256=RO51svfHOg9naN676zuQwbj_RQ6IFHu-RALeefvtwwk,23
+flowcept/commons/daos/keyvalue_dao.py,sha256=g7zgC9hVC1NTllwUAqGt44YqdqYUgAKgPlX8_G4BRGw,3599
+flowcept/commons/daos/redis_conn.py,sha256=gFyW-5yf6B8ExEYopCmbap8ki-iEwuIw-KH9f6o7UGQ,1495
+flowcept/commons/daos/docdb_dao/__init__.py,sha256=qRvXREeUJ4mkhxdC9bzpOsVX6M2FB5hDyLFxhMxTGhs,30
+flowcept/commons/daos/docdb_dao/docdb_dao_base.py,sha256=YbfSVJPwZGK2GBYkeapRC83HkmP0c6Msv5TriD88RcI,11812
+flowcept/commons/daos/docdb_dao/lmdb_dao.py,sha256=dJOLgCx_lwdz6MKiMpM_UE4rm0angDCPaVz_WU5KqIA,10407
+flowcept/commons/daos/docdb_dao/mongodb_dao.py,sha256=5x0un15uCDTcnuITOyOhvF9mKj_bUmF2du0AHQfjN9k,40055
+flowcept/commons/daos/mq_dao/__init__.py,sha256=Xxm4FmbBUZDQ7XIAmSFbeKE_AdHsbgFmSuftvMWSykQ,21
+flowcept/commons/daos/mq_dao/mq_dao_base.py,sha256=jo98CIyaEjMMHtaw9XIQRPhnN8IgKj2x-cTmWV4u0Ws,9596
+flowcept/commons/daos/mq_dao/mq_dao_kafka.py,sha256=kjZqPLIu5PaNeM4IDvOxkDRVGTd5UWwq3zhDvVirqW8,5067
+flowcept/commons/daos/mq_dao/mq_dao_mofka.py,sha256=tRdMGYDzdeIJxad-B4-DE6u8Wzs61eTzOW4ojZrnTxs,4057
+flowcept/commons/daos/mq_dao/mq_dao_redis.py,sha256=WKPoMPBSce4shqbBkgsnuqJAJoZZ4U_hdebhyFqtejQ,5535
+flowcept/commons/flowcept_dataclasses/__init__.py,sha256=8KkiJh0WSRAB50waVluxCSI8Tb9X1L9nup4c8RN3ulc,30
+flowcept/commons/flowcept_dataclasses/base_settings_dataclasses.py,sha256=Cjw2PGYtZDfnwecz6G3S42Ncmxj7AIZVEBx05bsxRUo,399
+flowcept/commons/flowcept_dataclasses/task_object.py,sha256=ITTfGNRCPhHdbM9kJxb-4_ROR1yrJuQQ8kM780oe8NQ,5610
+flowcept/commons/flowcept_dataclasses/telemetry.py,sha256=9_5ONCo-06r5nKHXmi5HfIhiZSuPgmTECiq_u9MlxXM,2822
+flowcept/commons/flowcept_dataclasses/workflow_object.py,sha256=JHvPo1BfF38fxRqZO1OTA3rTMINnPlkNw8e3l2fWn-M,4624
+flowcept/flowcept_api/__init__.py,sha256=T1ty86YlocQ5Z18l5fUqHj_CC6Unq_iBv0lFyiI7Ao8,22
+flowcept/flowcept_api/db_api.py,sha256=hKXep-n50rp9cAzV0ljk2QVEF8O64yxi3ujXv5_Ibac,9723
+flowcept/flowcept_api/flowcept_controller.py,sha256=D5-HeG3LKIqtFvIQ54b9B6qyk1Vx_lWAxS_9_I5MKF4,14895
+flowcept/flowcept_api/task_query_api.py,sha256=SrwB0OCVtbpvCPECkE2ySM10G_g8Wlk5PJ8h-0xEaNc,23821
+flowcept/flowcept_webserver/__init__.py,sha256=8411GIXGddKTKoHUvbo_Rq6svosNG7tG8VzvUEBd7WI,28
+flowcept/flowcept_webserver/app.py,sha256=VUV8_JZbIbx9u_1O7m7XtRdhZb_7uifUa-iNlPhmZws,658
+flowcept/flowcept_webserver/resources/__init__.py,sha256=XOk5yhLeLU6JmVXxbl3TY2zksqz7Yh2hfC3OMhkALn8,28
+flowcept/flowcept_webserver/resources/query_rsrc.py,sha256=Mk1XDC_wVYkMk0eaazqWWrTC07gQU9U0toKfip0ihZE,1353
+flowcept/flowcept_webserver/resources/task_messages_rsrc.py,sha256=0u68it2W-9NzUUx5fWOZCqvRKe5EsLI8oyvto9634Ng,666
+flowcept/flowceptor/__init__.py,sha256=wVxRXUv07iNx6SMRRma2vqhR_GIcRl0re_WCYG65PUs,29
+flowcept/flowceptor/telemetry_capture.py,sha256=9-Q09LjANAntG6dAz3L1rHWkb7zqtqU9GSFj__FCyyc,13810
+flowcept/flowceptor/adapters/__init__.py,sha256=SuZbSZVVQeBJ9zXW-M9jF09dw3XIjre3lSGrUO1Y8Po,27
+flowcept/flowceptor/adapters/base_interceptor.py,sha256=kbdYW6VuvmBibOVy7Pg3OzeD3OUaHc6jnAhRBpj9f14,6517
+flowcept/flowceptor/adapters/instrumentation_interceptor.py,sha256=DhK2bBnpghqPSeA62BUqRg6pl8zxuYrP33dK4x6PhRE,733
+flowcept/flowceptor/adapters/interceptor_state_manager.py,sha256=xRzmi5YFKBEqNtX8F5s6XlMTRe27ml4BmQtBO4WtG2c,919
+flowcept/flowceptor/adapters/brokers/__init__.py,sha256=mhQXVmh0JklvL93GUtJZLJnPRYX9Nmb8IqcyKJGQBzk,36
+flowcept/flowceptor/adapters/brokers/mqtt_interceptor.py,sha256=wx3STMYPHeXB6ilUn-UQYsxesGp2hF7TRlfn52hNJtY,4845
+flowcept/flowceptor/adapters/dask/__init__.py,sha256=GKreb5L_nliD2BEckyB943zOQ-b6Gn1fLDj81FqSK2Y,23
+flowcept/flowceptor/adapters/dask/dask_dataclasses.py,sha256=6LTG-kdcc6AUuVINvkqB5QHw6pchg1aMqj0sdWt2Ef8,580
+flowcept/flowceptor/adapters/dask/dask_interceptor.py,sha256=uBQpLluYXzlT1gBDfTe4_WueC_fWBEs5Xr8ntpOmljE,5869
+flowcept/flowceptor/adapters/dask/dask_plugins.py,sha256=s1ENAi9N61PC_6RiFvOYhJsgWzSm_lFWm3w87V-R1YY,2473
+flowcept/flowceptor/adapters/mlflow/__init__.py,sha256=3mzHrvh1XQOy68qx1A3so9Nq27tIb0i2mSXfv3F6gZg,25
+flowcept/flowceptor/adapters/mlflow/interception_event_handler.py,sha256=-SsIRdOcZjQUTzWgsZ41ouqpla4Qd32jIWXIAGU1pPw,494
+flowcept/flowceptor/adapters/mlflow/mlflow_dao.py,sha256=dPEgCduiw14_pzT5WCjuokwaN7p5Tu7UvWS2rtGh4qk,4589
+flowcept/flowceptor/adapters/mlflow/mlflow_dataclasses.py,sha256=vbijpDW6npHdsA9-28otXw94O4a9R-PWtq3xlJapsyY,690
+flowcept/flowceptor/adapters/mlflow/mlflow_interceptor.py,sha256=OLmVBdOCMS3GPcdxSdCD794RDbW6p4f8eBh1PXWcvHE,3799
+flowcept/flowceptor/adapters/tensorboard/__init__.py,sha256=LrcR4WCIlBwwHIUSteQ8k8JBdCJTFqLvvgAfnoLeREw,30
+flowcept/flowceptor/adapters/tensorboard/tensorboard_dataclasses.py,sha256=lSfDd6TucVNzGxbm69BYyCVgMr2p9iUEQjnsS4jIfeI,554
+flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py,sha256=PUKGlCsYcybsk1HK573Brs6FiXQRoaj6MKgZ3Oyeec4,4881
+flowcept/flowceptor/consumers/__init__.py,sha256=foxtVEb2ZEe9g1slfYIKM4tIFv-He1l7XS--SYs7nlQ,28
+flowcept/flowceptor/consumers/base_consumer.py,sha256=cKEkZAmfzirBcnVNjx3To57zP1Qwdz4lkMbjeZ8D4Q8,3163
+flowcept/flowceptor/consumers/consumer_utils.py,sha256=a7GJYgYiTZnxsm3W3MOalgnC8oyQSs7OjqF4LWYI_vI,5704
+flowcept/flowceptor/consumers/document_inserter.py,sha256=IeVl6Y4Q1KlpYGvE7uDI0vKQf-MGf2pgnIpxCYtyzKE,13392
+flowcept/flowceptor/consumers/agent/__init__.py,sha256=R1uvjBPeTLw9SpYgyUc6Qmo16pE84PFHcELTTFvyTWU,56
+flowcept/flowceptor/consumers/agent/base_agent_context_manager.py,sha256=5fBPYs-k4bsKDcIXyUbps9KoiQkfAWLHJB52lypYKas,4161
+flowcept/instrumentation/__init__.py,sha256=M5bTmg80E4QyN91gUX3qfw_nbtJSXwGWcKxdZP3vJz0,34
+flowcept/instrumentation/flowcept_agent_task.py,sha256=XN9JU4LODca0SgojUm4F5iU_V8tuWkOt1fAKcoOAG34,10757
+flowcept/instrumentation/flowcept_decorator.py,sha256=X4Lp_FSsoL08K8ZhRM4mC0OjKupbQtbMQR8zxy3ezDY,1350
+flowcept/instrumentation/flowcept_loop.py,sha256=7hkcolXxbwwccNzoSbAeCCEu02i4zT317YeJ6dO1MDs,12208
+flowcept/instrumentation/flowcept_task.py,sha256=EmKODpjl8usNklKSVmsKYyCa6gC_QMqKhAr3DKaw44s,8199
+flowcept/instrumentation/flowcept_torch.py,sha256=kkZQRYq6cDBpdBU6J39_4oKRVkhyF3ODlz8ydV5WGKw,23455
+flowcept/instrumentation/task_capture.py,sha256=la4VaMuihpDycJjHMb490RgujJTgn8s5ilv8o7ZJ5MA,8317
+resources/sample_settings.yaml,sha256=QcQi1ivLAfPaFsx5BkIrVuUXxFzGYiEWU6aOULxnOS0,6690
+flowcept-0.8.12.dist-info/METADATA,sha256=M2o70v7MgX3K99iWJaeBZzwtU4fEprWtgvtr8BFP3Jo,20051
+flowcept-0.8.12.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+flowcept-0.8.12.dist-info/entry_points.txt,sha256=i8q67WE0201rVxYI2lyBtS52shvgl93x2Szp4q8zMlw,47
+flowcept-0.8.12.dist-info/licenses/LICENSE,sha256=r5-2P6tFTuRGWT5TiX32s1y0tnp4cIqBEC1QjTaXe2k,1086
+flowcept-0.8.12.dist-info/RECORD,,
resources/sample_settings.yaml CHANGED
@@ -1,4 +1,4 @@
-flowcept_version: 0.8.10
+flowcept_version: 0.8.12 # Version of the Flowcept package. This setting file is compatible with this version.
 
 project:
   debug: true # Toggle debug mode. This will add a property `debug: true` to all saved data, making it easier to retrieve/delete them later.
@@ -7,6 +7,7 @@ project:
   performance_logging: false # Enable performance logging if true. Particularly useful for MQ flushes.
   enrich_messages: true # Add extra metadata to task messages, such as IP addresses and UTC timestamps.
   db_flush_mode: online # Mode for flushing DB entries: "online" or "offline". If online, flushes to the DB will happen before the workflow ends.
+  # dump_buffer_path: flowcept_messages.jsonl # This is useful if you need to run completely offline.
 
 log:
   log_path: "default" # Path for log file output; "default" will write the log in the directory where the main executable is running from.
@@ -39,18 +40,24 @@ experiment:
 mq:
   type: redis # or kafka or mofka; Please adjust the port (kafka's default is 9092; redis is 6379). If mofka, adjust the group_file.
   host: localhost
-  #
+  # uri: ?
+  # instances: ["localhost:6379"] # We can have multiple MQ instances being accessed by the consumers but each interceptor will currently access one single MQ..
   port: 6379
   # group_file: mofka.json
   channel: interception
   buffer_size: 50
   insertion_buffer_time_secs: 5
   timing: false
+  # uri: use Redis connection uri here
   chunk_size: -1 # use 0 or -1 to disable this. Or simply omit this from the config file.
+  same_as_kvdb: false # Set this to true if you are using the same Redis instance both as an MQ and as the KV_DB. In that case, no need to repeat connection parameters in MQ. Use only what you define in KV_DB.
+  # bin: /usr/local/bin/redis-server # Use this if you want to start redis using the flowcept-cli.
+  # conf_file: /etc/redis/redis.conf
 
-kv_db:
+kv_db:
   host: localhost
   port: 6379
+  enabled: true
   # uri: use Redis connection uri here
 
 web_server:
@@ -72,8 +79,18 @@ db_buffer:
   insertion_buffer_time_secs: 5 # Time interval (in seconds) to buffer incoming records before flushing to the database
   buffer_size: 50 # Maximum number of records to hold in the buffer before forcing a flush
   remove_empty_fields: false # If true, fields with null/empty values will be removed before insertion
-  stop_max_trials:
-  stop_trials_sleep: 0.
+  stop_max_trials: 300 # Maximum number of trials before giving up when waiting for a fully safe stop (i.e., all records have been inserted as expected).
+  stop_trials_sleep: 0.1 # Sleep duration (in seconds) between trials when waiting for a fully safe stop.
+
+agent:
+  enabled: false
+  mcp_host: localhost
+  mcp_port: 8000
+  llm_server_url: '?'
+  api_key: '?'
+  model: '?'
+  service_provider: '?'
+  model_kwargs: {}
 
 databases:
 
@@ -87,19 +104,33 @@ databases:
     port: 27017
     db: flowcept
     create_collection_index: true # Whether flowcept should create collection indices if they haven't been created yet. This is done only at the Flowcept start up.
+    # bin: /usr/bin/mongod
+    # log_path: /var/log/mongodb/mongod.log
+    # lock_file_path: /var/run/mongod.pid
+
 
 adapters:
   # For each key below, you can have multiple instances. Like mlflow1, mlflow2; zambeze1, zambeze2. Use an empty dict, {}, if you won't use any adapter.
-
-
+
+  broker_mqtt:
+    kind: broker
     host: localhost
-    port:
-
-
-
-
-
-
+    port: 30011
+    protocol: mqtt3.1.1
+    queues: ["#"]
+    username: postman
+    password: p
+    qos: 2
+    task_subtype: intersect_msg
+    tracked_keys:
+      used: payload
+      generated: ~
+      custom_metadata: [headers, msgId]
+      activity_id: operationId
+      submitted_at: ~
+      started_at: ~
+      ended_at: ~
+      registered_at: ~
 
   mlflow:
     kind: mlflow
@@ -122,3 +153,4 @@ adapters:
     worker_should_get_output: true
     scheduler_create_timestamps: true
     worker_create_timestamps: false
+
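The sample settings changes above add an agent section, new MQ/Redis options (same_as_kvdb, optional bin and conf_file paths), and a broker_mqtt adapter example. Below is an illustrative sketch that reads the new keys with OmegaConf, which the METADATA above lists as a core dependency; loading the sample file directly by path is only for illustration, since Flowcept resolves its settings file internally.

    from omegaconf import OmegaConf

    # Load a local copy of the sample settings shipped with the wheel.
    conf = OmegaConf.load("resources/sample_settings.yaml")

    print(conf.agent.enabled)                            # false by default in the sample
    print(conf.agent.mcp_host, conf.agent.mcp_port)      # localhost 8000
    print(conf.adapters.broker_mqtt.port)                # 30011 in the sample
    print(conf.adapters.broker_mqtt.tracked_keys.used)   # payload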
flowcept/flowceptor/adapters/zambeze/__init__.py DELETED

@@ -1 +0,0 @@
-"""Zambeze subpackage."""