flowcept 0.8.9__py3-none-any.whl → 0.8.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowcept/cli.py +460 -0
- flowcept/commons/daos/keyvalue_dao.py +19 -23
- flowcept/commons/daos/mq_dao/mq_dao_base.py +29 -29
- flowcept/commons/daos/mq_dao/mq_dao_kafka.py +4 -3
- flowcept/commons/daos/mq_dao/mq_dao_mofka.py +4 -0
- flowcept/commons/daos/mq_dao/mq_dao_redis.py +38 -5
- flowcept/commons/daos/redis_conn.py +47 -0
- flowcept/commons/flowcept_dataclasses/task_object.py +36 -8
- flowcept/commons/settings_factory.py +2 -4
- flowcept/commons/task_data_preprocess.py +200 -0
- flowcept/commons/utils.py +1 -1
- flowcept/configs.py +11 -9
- flowcept/flowcept_api/flowcept_controller.py +30 -13
- flowcept/flowceptor/adapters/agents/__init__.py +1 -0
- flowcept/flowceptor/adapters/agents/agents_utils.py +89 -0
- flowcept/flowceptor/adapters/agents/flowcept_agent.py +292 -0
- flowcept/flowceptor/adapters/agents/flowcept_llm_prov_capture.py +186 -0
- flowcept/flowceptor/adapters/agents/prompts.py +51 -0
- flowcept/flowceptor/adapters/base_interceptor.py +17 -19
- flowcept/flowceptor/adapters/brokers/__init__.py +1 -0
- flowcept/flowceptor/adapters/brokers/mqtt_interceptor.py +132 -0
- flowcept/flowceptor/adapters/mlflow/mlflow_interceptor.py +3 -3
- flowcept/flowceptor/adapters/tensorboard/tensorboard_interceptor.py +3 -3
- flowcept/flowceptor/consumers/agent/__init__.py +1 -0
- flowcept/flowceptor/consumers/agent/base_agent_context_manager.py +101 -0
- flowcept/flowceptor/consumers/agent/client_agent.py +48 -0
- flowcept/flowceptor/consumers/agent/flowcept_agent_context_manager.py +145 -0
- flowcept/flowceptor/consumers/agent/flowcept_qa_manager.py +112 -0
- flowcept/flowceptor/consumers/base_consumer.py +90 -0
- flowcept/flowceptor/consumers/document_inserter.py +138 -53
- flowcept/flowceptor/telemetry_capture.py +1 -1
- flowcept/instrumentation/task_capture.py +19 -9
- flowcept/version.py +1 -1
- {flowcept-0.8.9.dist-info → flowcept-0.8.11.dist-info}/METADATA +18 -6
- {flowcept-0.8.9.dist-info → flowcept-0.8.11.dist-info}/RECORD +39 -25
- flowcept-0.8.11.dist-info/entry_points.txt +2 -0
- resources/sample_settings.yaml +44 -23
- flowcept/flowceptor/adapters/zambeze/__init__.py +0 -1
- flowcept/flowceptor/adapters/zambeze/zambeze_dataclasses.py +0 -41
- flowcept/flowceptor/adapters/zambeze/zambeze_interceptor.py +0 -102
- {flowcept-0.8.9.dist-info → flowcept-0.8.11.dist-info}/WHEEL +0 -0
- {flowcept-0.8.9.dist-info → flowcept-0.8.11.dist-info}/licenses/LICENSE +0 -0
flowcept/cli.py
ADDED
@@ -0,0 +1,460 @@

```python
"""
Flowcept CLI.

How to add a new command:
--------------------------
1. Write a function with type-annotated arguments and a NumPy-style docstring.
2. Add it to one of the groups in `COMMAND_GROUPS`.
3. It will automatically become available as `flowcept --<function-name>` (underscores become hyphens).

Supports:
- `flowcept --command`
- `flowcept --command --arg=value`
- `flowcept -h` or `flowcept` for full help
- `flowcept --help --command` for command-specific help
"""

import subprocess
from time import sleep
from typing import Dict, Optional
import argparse
import os
import sys
import json
import textwrap
import inspect
from functools import wraps
from importlib import resources
from pathlib import Path
from typing import List

from flowcept import Flowcept, configs


def no_docstring(func):
    """Decorator to silence linter for missing docstrings."""

    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return wrapper


def show_config():
    """
    Show Flowcept configuration.
    """
    config_data = {
        "session_settings_path": configs.SETTINGS_PATH,
        "env_FLOWCEPT_SETTINGS_PATH": os.environ.get("FLOWCEPT_SETTINGS_PATH", None),
    }
    print(f"This is the settings path in this session: {configs.SETTINGS_PATH}")
    print(
        f"This is your FLOWCEPT_SETTINGS_PATH environment variable value: {config_data['env_FLOWCEPT_SETTINGS_PATH']}"
    )


def init_settings():
    """
    Create a new settings.yaml file in your home directory under ~/.flowcept.
    """
    dest_path = Path(os.path.join(configs._SETTINGS_DIR, "settings.yaml"))

    if dest_path.exists():
        overwrite = input(f"{dest_path} already exists. Overwrite? (y/N): ").strip().lower()
        if overwrite != "y":
            print("Operation aborted.")
            return

    os.makedirs(configs._SETTINGS_DIR, exist_ok=True)

    SAMPLE_SETTINGS_PATH = str(resources.files("resources").joinpath("sample_settings.yaml"))

    with open(SAMPLE_SETTINGS_PATH, "rb") as src_file, open(dest_path, "wb") as dst_file:
        dst_file.write(src_file.read())
    print(f"Copied {configs.SETTINGS_PATH} to {dest_path}")


def start_consumption_services(bundle_exec_id: str = None, check_safe_stops: bool = False, consumers: List[str] = None):
    """
    Start services that consume data from a queue or other source.

    Parameters
    ----------
    bundle_exec_id : str, optional
        The ID of the bundle execution to associate with the consumers.
    check_safe_stops : bool, optional
        Whether to check for safe stopping conditions before starting.
    consumers : list of str, optional
        List of consumer IDs to start. If not provided, all consumers will be started.
    """
    print("Starting consumption services...")
    print(f"  bundle_exec_id: {bundle_exec_id}")
    print(f"  check_safe_stops: {check_safe_stops}")
    print(f"  consumers: {consumers or []}")

    Flowcept.start_consumption_services(
        bundle_exec_id=bundle_exec_id,
        check_safe_stops=check_safe_stops,
        consumers=consumers,
    )


def stop_consumption_services():
    """
    Stop the document inserter.
    """
    print("Not implemented yet.")


def start_services(with_mongo: bool = False):
    """
    Start Flowcept services (optionally including MongoDB).

    Parameters
    ----------
    with_mongo : bool, optional
        Whether to also start MongoDB.
    """
    print(f"Starting services{' with Mongo' if with_mongo else ''}")
    print("Not implemented yet.")


def stop_services():
    """
    Stop Flowcept services.
    """
    print("Not implemented yet.")


def workflow_count(workflow_id: str):
    """
    Count number of documents in the DB.

    Parameters
    ----------
    workflow_id : str
        The ID of the workflow to count tasks for.
    """
    result = {
        "workflow_id": workflow_id,
        "tasks": len(Flowcept.db.query({"workflow_id": workflow_id})),
        "workflows": len(Flowcept.db.query({"workflow_id": workflow_id}, collection="workflows")),
        "objects": len(Flowcept.db.query({"workflow_id": workflow_id}, collection="objects")),
    }
    print(json.dumps(result, indent=2))


def query(filter: str, project: str = None, sort: str = None, limit: int = 0):
    """
    Query the MongoDB task collection with an optional projection, sort, and limit.

    Parameters
    ----------
    filter : str
        A JSON string representing the MongoDB filter query.
    project : str, optional
        A JSON string specifying fields to include or exclude in the result (MongoDB projection).
    sort : str, optional
        A JSON string specifying sorting criteria (e.g., '[["started_at", -1]]').
    limit : int, optional
        Maximum number of documents to return. Default is 0 (no limit).

    Returns
    -------
    List[dict]
        A list of task documents matching the query.
    """
    _filter = json.loads(filter)
    _project = json.loads(project) or None
    _sort = list(sort) or None
    print(
        json.dumps(Flowcept.db.query(filter=_filter, project=_project, sort=_sort, limit=limit), indent=2, default=str)
    )


def get_task(task_id: str):
    """
    Query the Document DB to retrieve a task.

    Parameters
    ----------
    task_id : str
        The identifier of the task.
    """
    _query = {"task_id": task_id}
    print(json.dumps(Flowcept.db.query(_query), indent=2, default=str))


def start_agent():
    """Start Flowcept agent."""
    from flowcept.flowceptor.adapters.agents.flowcept_agent import main

    main()


def agent_client(tool_name: str, kwargs: str = None):
    """Agent Client.

    Parameters.
    ----------
    tool_name : str
        Name of the tool
    kwargs : str, optional
        A stringfied JSON containing the kwargs for the tool, if needed.
    """
    print(kwargs)
    if kwargs is not None:
        kwargs = json.loads(kwargs)

    print(f"Going to run agent tool '{tool_name}'.")
    if kwargs:
        print(f"Using kwargs: {kwargs}")
    print("-----------------")
    from flowcept.flowceptor.consumers.agent.client_agent import run_tool

    result = run_tool(tool_name, kwargs)[0]

    print(result.text)


def check_services():
    """
    Run a full diagnostic test on the Flowcept system and its dependencies.

    This function:
    - Prints the current configuration path.
    - Checks if required services (e.g., MongoDB, agent) are alive.
    - Runs a test function wrapped with Flowcept instrumentation.
    - Verifies MongoDB insertion (if enabled).
    - Verifies agent communication and LLM connectivity (if enabled).

    Returns
    -------
    None
        Prints diagnostics to stdout; returns nothing.
    """
    print(f"Testing with settings at: {configs.SETTINGS_PATH}")
    from flowcept.configs import MONGO_ENABLED, AGENT, KVDB_ENABLED, INSERTION_BUFFER_TIME

    if not Flowcept.services_alive():
        print("Some of the enabled services are not alive!")
        return

    check_safe_stops = KVDB_ENABLED

    from uuid import uuid4
    from flowcept.instrumentation.flowcept_task import flowcept_task

    workflow_id = str(uuid4())

    @flowcept_task
    def test_function(n: int) -> Dict[str, int]:
        return {"output": n + 1}

    with Flowcept(workflow_id=workflow_id, check_safe_stops=check_safe_stops):
        test_function(2)

    if MONGO_ENABLED:
        print("MongoDB is enabled, so we are testing it too.")
        tasks = Flowcept.db.query({"workflow_id": workflow_id})
        if len(tasks) != 1:
            print(f"The query result, {len(tasks)}, is not what we expected.")
            return

    if AGENT.get("enabled", False):
        print("Agent is enabled, so we are testing it too.")
        from flowcept.flowceptor.consumers.agent.client_agent import run_tool

        try:
            print(run_tool("check_liveness"))
        except Exception as e:
            print(e)
            return

        print("Testing LLM connectivity")
        check_llm_result = run_tool("check_llm")[0]
        print(check_llm_result.text)

        if "error" in check_llm_result.text.lower():
            print("There is an error with the LLM communication.")
            return
        elif MONGO_ENABLED:
            print("Testing if llm chat was stored in MongoDB.")
            response_metadata = json.loads(check_llm_result.text.split("\n")[0])
            print(response_metadata)
            sleep(INSERTION_BUFFER_TIME * 1.05)
            chats = Flowcept.db.query({"workflow_id": response_metadata["agent_id"]})
            if chats:
                print(chats)
            else:
                print("Could not find chat history. Make sure that the DB Inserter service is on.")
    print("\n\nAll expected services seem to be working properly!")
    return


COMMAND_GROUPS = [
    ("Basic Commands", [check_services, show_config, init_settings, start_services, stop_services]),
    ("Consumption Commands", [start_consumption_services, stop_consumption_services]),
    ("Database Commands", [workflow_count, query, get_task]),
    ("Agent Commands", [start_agent, agent_client]),
]

COMMANDS = set(f for _, fs in COMMAND_GROUPS for f in fs)


def _run_command(cmd_str: str, check_output: bool = True, popen_kwargs: Optional[Dict] = None) -> Optional[str]:
    """
    Run a shell command with optional output capture.

    Parameters
    ----------
    cmd_str : str
        The command to execute.
    check_output : bool, optional
        If True, capture and return the command's standard output.
        If False, run interactively (stdout/stderr goes to terminal).
    popen_kwargs : dict, optional
        Extra keyword arguments to pass to subprocess.run.

    Returns
    -------
    output : str or None
        The standard output of the command if check_output is True, else None.

    Raises
    ------
    subprocess.CalledProcessError
        If the command exits with a non-zero status.
    """
    if popen_kwargs is None:
        popen_kwargs = {}

    kwargs = {"shell": True, "check": True, **popen_kwargs}

    if check_output:
        kwargs.update({"capture_output": True, "text": True})
        result = subprocess.run(cmd_str, **kwargs)
        return result.stdout.strip()
    else:
        subprocess.run(cmd_str, **kwargs)
        return None


def _parse_numpy_doc(docstring: str):
    parsed = {}
    lines = docstring.splitlines() if docstring else []
    in_params = False
    for line in lines:
        line = line.strip()
        if line.lower().startswith("parameters"):
            in_params = True
            continue
        if in_params:
            if " : " in line:
                name, typeinfo = line.split(" : ", 1)
                parsed[name.strip()] = {"type": typeinfo.strip(), "desc": ""}
            elif parsed:
                last = list(parsed)[-1]
                parsed[last]["desc"] += " " + line
    return parsed


@no_docstring
def main():  # noqa: D103
    parser = argparse.ArgumentParser(
        description="Flowcept CLI", formatter_class=argparse.RawTextHelpFormatter, add_help=False
    )

    for func in COMMANDS:
        doc = func.__doc__ or ""
        func_name = func.__name__
        flag = f"--{func_name.replace('_', '-')}"
        short_help = doc.strip().splitlines()[0] if doc else ""
        parser.add_argument(flag, action="store_true", help=short_help)

        for pname, param in inspect.signature(func).parameters.items():
            arg_name = f"--{pname.replace('_', '-')}"
            params_doc = _parse_numpy_doc(doc).get(pname, {})
            help_text = f"{params_doc.get('type', '')} - {params_doc.get('desc', '').strip()}"
            if isinstance(param.annotation, bool):
                parser.add_argument(arg_name, action="store_true", help=help_text)
            elif param.annotation == List[str]:
                parser.add_argument(arg_name, type=lambda s: s.split(","), help=help_text)
            else:
                parser.add_argument(arg_name, type=str, help=help_text)

    # Handle --help --command
    help_flag = "--help" in sys.argv
    command_flags = {f"--{f.__name__.replace('_', '-')}" for f in COMMANDS}
    matched_command_flag = next((arg for arg in sys.argv if arg in command_flags), None)

    if help_flag and matched_command_flag:
        command_func = next(f for f in COMMANDS if f"--{f.__name__.replace('_', '-')}" == matched_command_flag)
        doc = command_func.__doc__ or ""
        sig = inspect.signature(command_func)
        print(f"\nHelp for `flowcept {matched_command_flag}`:\n")
        print(textwrap.indent(doc.strip(), "    "))
        print("\n  Arguments:")
        params = _parse_numpy_doc(doc)
        for pname, p in sig.parameters.items():
            meta = params.get(pname, {})
            opt = p.default != inspect.Parameter.empty
            print(
                f"    --{pname:<18} {meta.get('type', 'str')}, "
                f"{'optional' if opt else 'required'} - {meta.get('desc', '').strip()}"
            )
        print()
        sys.exit(0)

    if len(sys.argv) == 1 or help_flag:
        print("\nFlowcept CLI\n")
        for group, funcs in COMMAND_GROUPS:
            print(f"{group}:\n")
            for func in funcs:
                name = func.__name__
                flag = f"--{name.replace('_', '-')}"
                doc = func.__doc__ or ""
                summary = doc.strip().splitlines()[0] if doc else ""
                sig = inspect.signature(func)
                print(f"  flowcept {flag}", end="")
                for pname, p in sig.parameters.items():
                    is_opt = p.default != inspect.Parameter.empty
                    print(f" [--{pname.replace('_', '-')}] " if is_opt else f" --{pname.replace('_', '-')}", end="")
                print(f"\n    {summary}")
                params = _parse_numpy_doc(doc)
                if params:
                    print("    Arguments:")
                    for argname, meta in params.items():
                        opt = sig.parameters[argname].default != inspect.Parameter.empty
                        print(
                            f"      --"
                            f"{argname:<18} {meta['type']}, "
                            f"{'optional' if opt else 'required'} - {meta['desc'].strip()}"
                        )
                print()
        print("Run `flowcept --<command>` to invoke a command.\n")
        sys.exit(0)

    args = vars(parser.parse_args())

    for func in COMMANDS:
        flag = f"--{func.__name__.replace('_', '-')}"
        if args.get(func.__name__.replace("-", "_")):
            sig = inspect.signature(func)
            kwargs = {}
            for pname in sig.parameters:
                val = args.get(pname.replace("-", "_"))
                if val is not None:
                    kwargs[pname] = val
            func(**kwargs)
            break
    else:
        print("Unknown command. Use `flowcept -h` to see available commands.")
        sys.exit(1)


if __name__ == "__main__":
    main()
    # check_services()
```
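The module docstring above spells out the extension mechanism: any type-annotated function with a NumPy-style docstring that is registered in `COMMAND_GROUPS` is automatically exposed as `flowcept --<function-name>`. As a hedged illustration of that mechanism (the `workflow_info` command below is hypothetical and not part of this release), a new command could be written like this and added to the "Database Commands" group:

```python
# Hypothetical new CLI command, shown only to illustrate the extension
# mechanism described in the cli.py module docstring; it is NOT shipped
# in flowcept 0.8.11.
import json


def workflow_info(workflow_id: str, limit: int = 10):
    """
    Print up to `limit` task documents of a workflow.

    Parameters
    ----------
    workflow_id : str
        The ID of the workflow to inspect.
    limit : int, optional
        Maximum number of task documents to print.
    """
    from flowcept import Flowcept

    # CLI arguments arrive as strings (the parser uses type=str), so cast here.
    tasks = Flowcept.db.query({"workflow_id": workflow_id}, limit=int(limit))
    print(json.dumps(tasks, indent=2, default=str))


# Registration: add `workflow_info` to the "Database Commands" tuple in
# COMMAND_GROUPS inside flowcept/cli.py; it then becomes callable as
# `flowcept --workflow-info --workflow-id=<id> [--limit=<n>]`.
```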
flowcept/commons/daos/keyvalue_dao.py

```diff
@@ -1,6 +1,6 @@
 """Key value module."""
 
-from
+from flowcept.commons.daos.redis_conn import RedisConn
 
 from flowcept.commons.flowcept_logger import FlowceptLogger
 from flowcept.configs import (
@@ -24,32 +24,13 @@ class KeyValueDAO:
         cls._instance = super(KeyValueDAO, cls).__new__(cls)
         return cls._instance
 
-    @staticmethod
-    def build_redis_conn_pool():
-        """Utility function to build Redis connection."""
-        pool = ConnectionPool(
-            host=KVDB_HOST,
-            port=KVDB_PORT,
-            db=0,
-            password=KVDB_PASSWORD,
-            decode_responses=False,
-            max_connections=10000,  # TODO: Config file
-            socket_keepalive=True,
-            retry_on_timeout=True,
-        )
-        return Redis(connection_pool=pool)
-        # return Redis()
-
     def __init__(self):
         if not hasattr(self, "_initialized"):
             self._initialized = True
             self.logger = FlowceptLogger()
-
-
-
-            else:
-                # Otherwise, use the host, port, and password settings
-            self.redis_conn = KeyValueDAO.build_redis_conn_pool()
+            self.redis_conn = RedisConn.build_redis_conn_pool(
+                host=KVDB_HOST, port=KVDB_PORT, password=KVDB_PASSWORD, uri=KVDB_URI
+            )
 
     def delete_set(self, set_name: str):
         """Delete it."""
@@ -133,3 +114,18 @@ class KeyValueDAO:
         None
         """
         self.redis_conn.delete(key)
+
+    def liveness_test(self):
+        """Get the livelyness of it."""
+        try:
+            response = self.redis_conn.ping()
+            if response:
+                return True
+            else:
+                return False
+        except ConnectionError as e:
+            self.logger.exception(e)
+            return False
+        except Exception as e:
+            self.logger.exception(e)
+            return False
```
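The Redis connection-pool helper removed from `KeyValueDAO` above now lives in the new module `flowcept/commons/daos/redis_conn.py` (listed in this diff but not shown here). Below is a minimal sketch of what that helper plausibly looks like, reconstructed from the removed code and from the call site `RedisConn.build_redis_conn_pool(host=..., port=..., password=..., uri=...)`; the URI handling in particular is an assumption, and the actual module may differ:

```python
# Sketch only: reconstructed from the removed KeyValueDAO.build_redis_conn_pool
# and the new call site; the real flowcept/commons/daos/redis_conn.py may differ.
from redis import ConnectionPool, Redis


class RedisConn:
    """Shared helper for building Redis connections used by the KVDB and MQ DAOs."""

    @staticmethod
    def build_redis_conn_pool(host=None, port=None, password=None, uri=None):
        if uri:
            # Assumption: a full Redis URI, when provided, takes precedence
            # over the individual host/port/password settings.
            return Redis.from_url(uri)
        pool = ConnectionPool(
            host=host,
            port=port,
            db=0,
            password=password,
            decode_responses=False,
            max_connections=10000,
            socket_keepalive=True,
            retry_on_timeout=True,
        )
        return Redis(connection_pool=pool)
```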
flowcept/commons/daos/mq_dao/mq_dao_base.py

```diff
@@ -20,6 +20,7 @@ from flowcept.configs import (
     MQ_CHUNK_SIZE,
     MQ_TYPE,
     MQ_TIMING,
+    KVDB_ENABLED,
 )
 
 from flowcept.commons.utils import GenericJSONEncoder
@@ -67,7 +68,13 @@ class MQDao(ABC):
         self.logger = FlowceptLogger()
         self.started = False
         self._adapter_settings = adapter_settings
-
+        if KVDB_ENABLED:
+            self._keyvalue_dao = KeyValueDAO()
+        else:
+            self._keyvalue_dao = None
+            self.logger.warning(
+                "We are going to run without KVDB. If you are running a workflow, this may lead to errors."
+            )
         self._time_based_flushing_started = False
         self.buffer: Union[AutoflushBuffer, List] = None
         if MQ_TIMING:
@@ -138,7 +145,7 @@ class MQDao(ABC):
         """
         self._keyvalue_dao.delete_key("current_campaign_id")
 
-    def init_buffer(self, interceptor_instance_id: str, exec_bundle_id=None):
+    def init_buffer(self, interceptor_instance_id: str, exec_bundle_id=None, check_safe_stops=True):
         """Create the buffer."""
         if not self.started:
             if flowcept.configs.DB_FLUSH_MODE == "online":
@@ -147,7 +154,8 @@ class MQDao(ABC):
                     max_size=MQ_BUFFER_SIZE,
                     flush_interval=MQ_INSERTION_BUFFER_TIME,
                 )
-
+                if check_safe_stops:
+                    self.register_time_based_thread_init(interceptor_instance_id, exec_bundle_id)
                 self._time_based_flushing_started = True
             else:
                 self.buffer = list()
@@ -164,9 +172,9 @@ class MQDao(ABC):
             self.bulk_publish(self.buffer)
             self.buffer = list()
 
-    def _stop_timed(self, interceptor_instance_id: str, bundle_exec_id: int = None):
+    def _stop_timed(self, interceptor_instance_id: str, check_safe_stops: bool = True, bundle_exec_id: int = None):
         t1 = time()
-        self._stop(interceptor_instance_id, bundle_exec_id)
+        self._stop(interceptor_instance_id, check_safe_stops, bundle_exec_id)
         t2 = time()
         self._flush_events.append(["final", t1, t2, t2 - t1, "n/a"])
 
@@ -175,14 +183,14 @@ class MQDao(ABC):
             writer.writerow(["type", "start", "end", "duration", "size"])
             writer.writerows(self._flush_events)
 
-    def _stop(self, interceptor_instance_id: str, bundle_exec_id: int = None):
-        """Stop
-
-        self.logger.debug(msg0 + f"{bundle_exec_id}; interceptor id: {interceptor_instance_id}")
+    def _stop(self, interceptor_instance_id: str, check_safe_stops: bool = True, bundle_exec_id: int = None):
+        """Stop MQ publisher."""
+        self.logger.debug(f"MQ pub received stop sign: bundle={bundle_exec_id}, interceptor={interceptor_instance_id}")
         self._close_buffer()
-
-
-
+        self.logger.debug("Flushed MQ for the last time!")
+        if check_safe_stops:
+            self.logger.debug(f"Sending stop msg. Bundle: {bundle_exec_id}; interceptor id: {interceptor_instance_id}")
+            self._send_mq_dao_time_thread_stop(interceptor_instance_id, bundle_exec_id)
         self.started = False
 
     def _send_mq_dao_time_thread_stop(self, interceptor_instance_id, exec_bundle_id=None):
@@ -197,10 +205,10 @@ class MQDao(ABC):
         # self.logger.info("Control msg sent: " + str(msg))
         self.send_message(msg)
 
-    def send_document_inserter_stop(self):
+    def send_document_inserter_stop(self, exec_bundle_id=None):
         """Send the document."""
         # These control_messages are handled by the document inserter
-        msg = {"type": "flowcept_control", "info": "stop_document_inserter"}
+        msg = {"type": "flowcept_control", "info": "stop_document_inserter", "exec_bundle_id": exec_bundle_id}
         self.send_message(msg)
 
     @abstractmethod
@@ -223,20 +231,12 @@ class MQDao(ABC):
         """Subscribe to the interception channel."""
         raise NotImplementedError()
 
+    @abstractmethod
+    def unsubscribe(self):
+        """Subscribe to the interception channel."""
+        raise NotImplementedError()
+
     @abstractmethod
     def liveness_test(self) -> bool:
-        """
-
-        """
-        try:
-            response = self._keyvalue_dao.redis_conn.ping()
-            if response:
-                return True
-            else:
-                return False
-        except ConnectionError as e:
-            self.logger.exception(e)
-            return False
-        except Exception as e:
-            self.logger.exception(e)
-            return False
+        """Checks if the MQ system is alive."""
+        raise NotImplementedError()
```
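Two behavioral changes stand out in `MQDao`: the KVDB dependency is now optional (guarded by `KVDB_ENABLED`, with the base-class Redis ping replaced by an abstract `liveness_test()` and a new abstract `unsubscribe()`), and the `check_safe_stops` flag now threads through `init_buffer`, `_stop_timed`, and `_stop` to decide whether the stop-control handshake is performed. A hedged sketch of what a concrete backend must now provide is below; it is illustrative only, and the shipped Redis/Kafka/Mofka DAOs differ and implement further abstract members such as `send_message`, `bulk_publish`, and `subscribe`:

```python
# Illustrative subclass satisfying the abstract methods added in this release;
# not the actual implementation of any flowcept MQ backend.
class InMemoryMQDao(MQDao):
    def unsubscribe(self):
        # Tear down whatever subscribe() set up for the interception channel.
        pass

    def liveness_test(self) -> bool:
        # Report only the MQ backend's own health; the key-value store is now
        # checked separately via KeyValueDAO.liveness_test().
        return True
```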
flowcept/commons/daos/mq_dao/mq_dao_kafka.py

```diff
@@ -108,12 +108,13 @@ class MQDaoKafka(MQDao):
     def liveness_test(self):
         """Get the livelyness of it."""
         try:
-            if not super().liveness_test():
-                self.logger.error("KV Store not alive!")
-                return False
             admin_client = AdminClient(self._kafka_conf)
             kafka_metadata = admin_client.list_topics(timeout=5)
             return MQ_CHANNEL in kafka_metadata.topics
         except Exception as e:
             self.logger.exception(e)
             return False
+
+    def unsubscribe(self):
+        """Unsubscribes from Kafka topic."""
+        raise NotImplementedError()
```
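Since `MQDaoKafka.liveness_test` no longer pings the key-value store itself, a caller that wants the previous "MQ and KVDB both alive" semantics has to combine the two checks explicitly. A small hedged sketch follows; the helper is illustrative and not part of the package, where `Flowcept.services_alive()` presumably performs this kind of aggregation:

```python
# Illustrative helper, not part of flowcept: combines the two liveness checks
# that were previously coupled inside MQDaoKafka.liveness_test().
def all_services_alive(mq_dao, kv_dao) -> bool:
    # mq_dao: any MQDao subclass instance (e.g., MQDaoKafka); kv_dao: a KeyValueDAO.
    return mq_dao.liveness_test() and kv_dao.liveness_test()
```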