flock-core 0.4.0b38__py3-none-any.whl → 0.4.0b40__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flock-core might be problematic.
- flock/config.py +6 -1
- flock/core/api/custom_endpoint.py +40 -0
- flock/core/api/main.py +120 -6
- flock/core/api/runner.py +9 -3
- flock/core/execution/batch_executor.py +88 -55
- flock/core/flock.py +6 -3
- flock/core/flock_registry.py +18 -4
- flock/core/logging/telemetry.py +32 -0
- {flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/METADATA +1 -1
- {flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/RECORD +13 -12
- {flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/WHEEL +0 -0
- {flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/entry_points.txt +0 -0
- {flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/licenses/LICENSE +0 -0
flock/config.py
CHANGED
@@ -36,7 +36,9 @@ OTEL_FILE_NAME = config("OTEL_FILE_NAME", "flock_events.jsonl")
 OTEL_ENABLE_SQL: bool = config("OTEL_ENABLE_SQL", True) == "True"
 OTEL_ENABLE_FILE: bool = config("OTEL_ENABLE_FILE", True) == "True"
 OTEL_ENABLE_JAEGER: bool = config("OTEL_ENABLE_JAEGER", False) == "True"
-
+OTEL_ENABLE_OTLP: bool = config("OTEL_ENABLE_OTLP", False) == "True"
+OTEL_EXPORTER_OTLP_PROTOCOL: str = config("OTEL_EXPORTER_OTLP_PROTOCOL", "grpc")
+OTEL_EXPORTER_OTLP_ENDPOINT: str = config("OTEL_EXPORTER_OTLP_ENDPOINT", "http://localhost:4317")
 
 TELEMETRY = TelemetryConfig(
     OTEL_SERVICE_NAME,
@@ -48,4 +50,7 @@ TELEMETRY = TelemetryConfig(
     OTEL_ENABLE_JAEGER,
     OTEL_ENABLE_FILE,
     OTEL_ENABLE_SQL,
+    OTEL_ENABLE_OTLP,
+    OTEL_EXPORTER_OTLP_PROTOCOL,
+    OTEL_EXPORTER_OTLP_ENDPOINT,
 )
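The three new settings follow the existing OTEL_* pattern and are read from the environment, with the boolean compared against the literal string "True". A minimal sketch of switching OTLP export on without touching code (it assumes the values are picked up at import time by the config() helper shown above):

```python
# Sketch: enabling the new OTLP exporter via environment variables.
# Variable names and defaults come from the flock/config.py hunk above.
import os

os.environ["OTEL_ENABLE_OTLP"] = "True"                         # parsed with == "True"
os.environ["OTEL_EXPORTER_OTLP_PROTOCOL"] = "grpc"              # or "http"
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "http://localhost:4317"

# Import after the environment is set so flock.config reads the new values.
from flock.config import TELEMETRY  # noqa: E402

print(TELEMETRY.enable_otlp, TELEMETRY.otlp_protocol, TELEMETRY.otlp_endpoint)
```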
flock/core/api/custom_endpoint.py
ADDED
@@ -0,0 +1,40 @@
+"""Lightweight helper object for declaring additional REST routes.
+
+Developers can pass instances of :class:`FlockEndpoint` to
+``Flock.start_api(custom_endpoints=[...])`` instead of the terse dictionary
+syntax. The class carries optional Pydantic request/response models plus
+OpenAPI metadata so the generated docs look perfect.
+"""
+from __future__ import annotations
+
+from collections.abc import Callable
+from typing import Any
+
+from pydantic import BaseModel
+
+__all__ = [
+    "FlockEndpoint",
+]
+
+
+class FlockEndpoint(BaseModel):
+    """Declarative description of an extra API route."""
+
+    path: str
+    methods: list[str] = ["GET"]
+    callback: Callable[..., Any]
+
+    # Optional schema models
+    request_model: type[BaseModel] | None = None
+    response_model: type[BaseModel] | None = None
+
+    # OpenAPI / Swagger metadata
+    summary: str | None = None
+    description: str | None = None
+    name: str | None = None  # Route name in FastAPI
+    include_in_schema: bool = True
+
+    model_config = {
+        "arbitrary_types_allowed": True,
+        "validate_default": True,
+    }
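FlockEndpoint is a plain Pydantic model, so extra routes can be declared up front and handed to the API layer later. A short sketch; StatusReply and flock_status are illustrative names, not part of the package:

```python
from pydantic import BaseModel

from flock.core.api.custom_endpoint import FlockEndpoint


class StatusReply(BaseModel):
    # Hypothetical response model used only for this example.
    name: str
    ok: bool


def flock_status(flock) -> dict:
    # `flock` is injected by the API layer when the route is called.
    return {"name": flock.name, "ok": True}


status_route = FlockEndpoint(
    path="/api/status",
    methods=["GET"],
    callback=flock_status,
    response_model=StatusReply,
    summary="Basic liveness information",
    description="Returns the Flock name and a static ok flag.",
)
```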
flock/core/api/main.py
CHANGED
@@ -1,17 +1,22 @@
 # src/flock/core/api/main.py
 """Main Flock API server class and setup."""
 
-from
+from collections.abc import Callable, Sequence
+from typing import TYPE_CHECKING, Any
 
 import uvicorn
 from fastapi import FastAPI
 from fastapi.responses import RedirectResponse
+from pydantic import BaseModel
 
 # Flock core imports
 from flock.core.api.models import FlockBatchRequest
-from flock.core.flock import Flock
 from flock.core.logging.logging import get_logger
 
+if TYPE_CHECKING:
+    # These imports are only for type hints
+    from flock.core.flock import Flock
+
 logger = get_logger("api.main")
 
 from .endpoints import create_api_router
@@ -63,14 +68,57 @@ except ImportError:
     def format_result_to_html(*args, **kwargs):
         return ""
 
+from flock.core.api.custom_endpoint import FlockEndpoint
 
-class FlockAPI:
-    """Coordinates the Flock API server, including endpoints and UI."""
 
-
+class FlockAPI:
+    """Coordinates the Flock API server, including endpoints and UI.
+
+    A user can provide custom FastAPI-style routes via the ``custom_endpoints`` dict.
+    Each key is a tuple of ``(<path:str>, <methods:list[str] | None>)`` and the
+    value is a callback ``Callable``. ``methods`` can be ``None`` or an empty
+    list to default to ``["GET"]``. The callback can be synchronous or
+    ``async``. At execution time we provide the following keyword arguments and
+    filter them to the callback's signature:
+
+    • ``body`` – request json/plain payload (for POST/PUT/PATCH)
+    • ``query`` – dict of query parameters
+    • ``flock`` – current :class:`Flock` instance
+    • any path parameters extracted from the route pattern
+    """
+
+    def __init__(
+        self,
+        flock: "Flock",
+        custom_endpoints: Sequence[FlockEndpoint] | dict[tuple[str, list[str] | None], Callable[..., Any]] | None = None,
+    ):
         self.flock = flock
+        # Normalize into list[FlockEndpoint]
+        self.custom_endpoints: list[FlockEndpoint] = []
+        if custom_endpoints:
+            merged: list[FlockEndpoint] = []
+            if isinstance(custom_endpoints, dict):
+                for (path, methods), cb in custom_endpoints.items():
+                    merged.append(
+                        FlockEndpoint(path=path, methods=list(methods) if methods else ["GET"], callback=cb)
+                    )
+            else:
+                merged.extend(list(custom_endpoints))
+
+            pending_endpoints = merged
+        else:
+            pending_endpoints = []
+
+        # FastAPI app instance
         self.app = FastAPI(title="Flock API")
-
+
+        # Store run information
+        self.run_store = RunStore()
+
+        # Register any pending custom endpoints collected before app creation
+        if pending_endpoints:
+            self.custom_endpoints.extend(pending_endpoints)
+
         self._setup_routes()
 
     def _setup_routes(self):
@@ -81,6 +129,71 @@ class FlockAPI:
 
         # Root redirect (if UI is enabled later) will be added in start()
 
+        # --- Register user-supplied custom endpoints ---------------------
+        if self.custom_endpoints:
+            import inspect
+
+            from fastapi import Request
+
+            # Register any endpoints collected during __init__ (self.custom_endpoints)
+            if self.custom_endpoints:
+                from fastapi import Body
+
+                def _create_handler_factory(callback: Callable[..., Any], req_model: type[BaseModel] | None):
+
+                    if req_model is not None:
+
+                        async def _route_handler(body: req_model = Body(...), request: Request = None):  # type: ignore[arg-type,valid-type]
+                            payload: dict[str, Any] = {
+                                "body": body,
+                                "query": dict(request.query_params) if request else {},
+                                "flock": self.flock,
+                                **(request.path_params if request else {}),
+                            }
+
+                            sig = inspect.signature(callback)
+                            filtered_kwargs = {k: v for k, v in payload.items() if k in sig.parameters}
+
+                            if inspect.iscoroutinefunction(callback):
+                                return await callback(**filtered_kwargs)
+                            return callback(**filtered_kwargs)
+
+                    else:
+
+                        async def _route_handler(request: Request):
+                            payload: dict[str, Any] = {
+                                "query": dict(request.query_params),
+                                "flock": self.flock,
+                                **request.path_params,
+                            }
+
+                            if request.method in {"POST", "PUT", "PATCH"}:
+                                try:
+                                    payload["body"] = await request.json()
+                                except Exception:
+                                    payload["body"] = await request.body()
+
+                            sig = inspect.signature(callback)
+                            filtered_kwargs = {k: v for k, v in payload.items() if k in sig.parameters}
+
+                            if inspect.iscoroutinefunction(callback):
+                                return await callback(**filtered_kwargs)
+                            return callback(**filtered_kwargs)
+
+                    return _route_handler
+
+                for ep in self.custom_endpoints:
+                    self.app.add_api_route(
+                        ep.path,
+                        _create_handler_factory(ep.callback, ep.request_model),
+                        methods=ep.methods or ["GET"],
+                        name=ep.name or f"custom:{ep.path}",
+                        include_in_schema=ep.include_in_schema,
+                        response_model=ep.response_model,
+                        summary=ep.summary,
+                        description=ep.description,
+                    )
+
     # --- Core Execution Helper Methods ---
     # These remain here as they need access to self.flock and self.run_store
 
@@ -431,6 +544,7 @@ class FlockAPI:
         port: int = 8344,
         server_name: str = "Flock API",
         create_ui: bool = False,
+        #custom_endpoints: Sequence[FlockEndpoint] | dict[tuple[str, list[str] | None], Callable[..., Any]] | None = None,
     ):
         """Start the API server. If create_ui is True, it mounts the new webapp or the old FastHTML UI at the root."""
         if create_ui:
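The docstring above also keeps the terse dictionary form: keys are (path, methods) tuples, values are callables, and the generated handler forwards only the keyword arguments (body, query, flock, path parameters) that the callback's signature declares. A hypothetical sketch, with my_flock standing in for an existing Flock instance:

```python
from flock.core.api.main import FlockAPI


async def echo(body, query):
    # `flock` and path parameters are simply omitted from the signature,
    # so FlockAPI will not pass them.
    return {"received": body, "query": query}


def ping():
    # A callback may also take no arguments at all.
    return {"pong": True}


def build_api(my_flock) -> FlockAPI:
    return FlockAPI(
        my_flock,
        custom_endpoints={
            ("/api/echo", ["POST"]): echo,
            ("/api/ping", None): ping,   # None (or []) falls back to ["GET"]
        },
    )
```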
flock/core/api/runner.py
CHANGED
@@ -1,8 +1,10 @@
 # src/flock/api/runner.py
 """Provides functionality to start the Flock API server."""
 
-from
+from collections.abc import Callable, Sequence
+from typing import TYPE_CHECKING, Any
 
+from flock.core.api.custom_endpoint import FlockEndpoint
 from flock.core.logging.logging import get_logger
 
 if TYPE_CHECKING:
@@ -17,6 +19,7 @@ def start_flock_api(
     port: int = 8344,
     server_name: str = "Flock API",
     create_ui: bool = False,
+    custom_endpoints: Sequence[FlockEndpoint] | dict[tuple[str, list[str] | None], Callable[..., Any]] | None = None,
 ) -> None:
     """Start a REST API server for the given Flock instance."""
     try:
@@ -32,7 +35,10 @@ def start_flock_api(
     logger.info(
         f"Preparing to start API server for Flock '{flock.name}' on {host}:{port} {'with UI' if create_ui else 'without UI'}"
     )
-    api_instance = FlockAPI(flock)  # Pass the Flock instance to the API
+    api_instance = FlockAPI(flock, custom_endpoints=custom_endpoints)  # Pass the Flock instance to the API
     api_instance.start(
-        host=host,
+        host=host,
+        port=port,
+        server_name=server_name,
+        create_ui=create_ui,
    )
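start_flock_api simply forwards the new keyword to FlockAPI, so custom routes can be served without constructing the API object directly. A sketch, assuming the leading flock and host parameters keep the shape implied by the log line above:

```python
from flock.core.api.custom_endpoint import FlockEndpoint
from flock.core.api.runner import start_flock_api


def health(flock):
    return {"ok": True, "flock": flock.name}


def serve(my_flock) -> None:
    # `my_flock` is an existing Flock instance (illustrative name).
    start_flock_api(
        my_flock,
        host="127.0.0.1",
        port=8344,
        create_ui=False,
        custom_endpoints=[FlockEndpoint(path="/health", callback=health)],
    )
```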
flock/core/execution/batch_executor.py
CHANGED
@@ -1,4 +1,5 @@
 import asyncio
+import concurrent.futures  # For real parallelism via threads
 from pathlib import Path
 from typing import TYPE_CHECKING, Any
 
@@ -184,52 +185,77 @@ class BatchProcessor:
        )
        progress.start()
 
-        results = [None] * len(
-        … 44 further removed lines are not shown in this view …
+        results = [None] * len(prepared_batch_inputs)  # Pre-allocate results list
+
+        # --- Worker Definitions ---
+        # We implement two flavours:
+        #   * async_worker: used for Temporal or sequential runs (keeps the original behaviour)
+        #   * thread_worker: executes the run in a dedicated thread via ThreadPoolExecutor for true parallelism.
+
+        async def async_worker(index: int, item_inputs: dict[str, Any]):
+            """Original coroutine worker used for non-threaded execution paths."""
+            full_input = {**(static_inputs or {}), **item_inputs}
+            context = FlockContext()
+            context.set_variable(FLOCK_BATCH_SILENT_MODE, silent_mode)
+
+            run_desc = f"Batch item {index + 1}"
+            logger.debug(f"{run_desc} started (async).")
+            try:
+                result = await self.flock.run_async(
+                    start_agent,
+                    full_input,
+                    box_result=box_results,
+                    context=context,
+                )
+                results[index] = result
+                logger.debug(f"{run_desc} finished successfully.")
+            except Exception as e:
+                logger.error(f"{run_desc} failed: {e}", exc_info=not return_errors)
+                if return_errors:
+                    results[index] = e
+                else:
+                    raise  # Propagate to calling gather
+            finally:
+                if progress_context:
+                    progress.update(progress_task_id, advance=1)
+
+        # ThreadPool worker for real parallelism (suitable for blocking I/O)
+        def _thread_worker(index: int, item_inputs: dict[str, Any]):
+            """Synchronous helper executed inside a worker thread."""
+            full_input = {**(static_inputs or {}), **item_inputs}
+            run_desc = f"Batch item {index + 1}"
+            logger.debug(f"{run_desc} started (thread).")
+            try:
+                # Use the synchronous wrapper to avoid nested event-loop issues inside threads
+                result = self.flock.run(
+                    start_agent=start_agent,
+                    input=full_input,
+                    box_result=box_results,
+                )
+                logger.debug(f"{run_desc} finished successfully.")
+                return index, result, None
+            except Exception as e:
+                logger.error(f"{run_desc} failed: {e}")
+                return index, None, e
 
+        async def thread_worker(executor, index: int, item_inputs: dict[str, Any]):
+            """Coroutine wrapper that submits _thread_worker to the specified executor."""
+            loop = asyncio.get_running_loop()
+            idx, res, err = await loop.run_in_executor(
+                executor, _thread_worker, index, item_inputs
+            )
+            # Handle result / error on the asyncio side
+            if err:
+                if return_errors:
+                    results[idx] = err
+                else:
+                    raise err
+            else:
+                results[idx] = res
+            if progress_context:
+                progress.update(progress_task_id, advance=1)
+
+        tasks = []
        try:
            if effective_use_temporal:
                # Temporal Batching (Simplified: sequential execution for this example)
@@ -238,25 +264,32 @@ class BatchProcessor:
                    "Running batch using Temporal (executing sequentially for now)..."
                )
                for i, item_data in enumerate(prepared_batch_inputs):
-                    await
+                    await async_worker(i, item_data)  # Run sequentially for demo
                # TODO: Implement true parallel Temporal workflow execution if needed
 
            elif parallel:
+                # --- Real parallelism using ThreadPoolExecutor ---
                logger.info(
-                    f"Running batch in parallel with max_workers={max_workers}..."
+                    f"Running batch in parallel (threads) with max_workers={max_workers}..."
                )
-        … 5 further removed lines are not shown in this view …
+                loop = asyncio.get_running_loop()
+                with concurrent.futures.ThreadPoolExecutor(
+                    max_workers=max_workers, thread_name_prefix="flock-batch"
+                ) as executor:
+                    for i, item_data in enumerate(prepared_batch_inputs):
+                        tasks.append(
+                            asyncio.create_task(
+                                thread_worker(executor, i, item_data)
+                            )
+                        )
+
+                    # Wait for all tasks allowing exceptions to propagate as needed
+                    await asyncio.gather(*tasks)
 
            else:  # Sequential Local
                logger.info("Running batch sequentially...")
                for i, item_data in enumerate(prepared_batch_inputs):
-                    await
-                        i, item_data
-                    )  # Already handles errors internally based on return_errors
+                    await async_worker(i, item_data)  # Already handles errors internally based on return_errors
 
            logger.info("Batch execution finished.")
 
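The parallel branch now pushes each item's blocking run onto a ThreadPoolExecutor and awaits it through run_in_executor, filling a pre-allocated results list. The standalone sketch below shows only that coordination pattern; do_blocking_run stands in for the synchronous self.flock.run(...) call:

```python
import asyncio
import concurrent.futures


def do_blocking_run(index: int, item: dict) -> dict:
    # Placeholder for blocking per-item work (e.g. a synchronous agent run).
    return {"index": index, **item}


async def run_batch(items: list[dict], max_workers: int = 4) -> list:
    results: list = [None] * len(items)          # pre-allocated, one slot per item
    loop = asyncio.get_running_loop()

    async def thread_worker(executor, index: int, item: dict) -> None:
        # Submit the blocking call to a worker thread and store the result
        # in the slot reserved for this index.
        results[index] = await loop.run_in_executor(
            executor, do_blocking_run, index, item
        )

    with concurrent.futures.ThreadPoolExecutor(
        max_workers=max_workers, thread_name_prefix="batch"
    ) as executor:
        await asyncio.gather(
            *(thread_worker(executor, i, item) for i, item in enumerate(items))
        )
    return results


if __name__ == "__main__":
    print(asyncio.run(run_batch([{"x": 1}, {"x": 2}, {"x": 3}])))
```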
flock/core/flock.py
CHANGED
@@ -6,7 +6,7 @@ from __future__ import annotations # Ensure forward references work
 import asyncio
 import os
 import uuid
-from collections.abc import Callable
+from collections.abc import Callable, Sequence
 from pathlib import Path
 from typing import (
     TYPE_CHECKING,
@@ -32,6 +32,7 @@ from pydantic import BaseModel, Field
 
 # Flock core components & utilities
 from flock.config import DEFAULT_MODEL, TELEMETRY
+from flock.core.api.custom_endpoint import FlockEndpoint
 from flock.core.context.context import FlockContext
 from flock.core.context.context_manager import initialize_context
 from flock.core.execution.temporal_executor import run_temporal_workflow
@@ -116,7 +117,7 @@ class Flock(BaseModel, Serializable):
     # Internal agent storage - not part of the Pydantic model for direct serialization
     _agents: dict[str, FlockAgent]
     _start_agent_name: str | None = None  # For potential pre-configuration
-    _start_input: dict = {}  #
+    _start_input: dict = {}  # Instance attribute overwritten in __init__; kept for typing clarity
 
     # Pydantic v2 model config
     model_config = {
@@ -276,7 +277,7 @@ class Flock(BaseModel, Serializable):
     def run(
         self,
         start_agent: FlockAgent | str | None = None,
-        input: dict =
+        input: dict | None = None,
         context: FlockContext | None = None,
         run_id: str = "",
         box_result: bool = True,
@@ -685,6 +686,7 @@ class Flock(BaseModel, Serializable):
         server_name: str = "Flock API",
         create_ui: bool = False,
         ui_theme: str | None = None,
+        custom_endpoints: Sequence[FlockEndpoint] | dict[tuple[str, list[str] | None], Callable[..., Any]] | None = None,
     ) -> None:
         """Starts a REST API server for this Flock instance.
         If create_ui is True, integrates the web UI, potentially with a specific theme.
@@ -734,6 +736,7 @@ class Flock(BaseModel, Serializable):
             port=port,
             server_name=server_name,
             create_ui=False,  # Explicitly false for API only runner
+            custom_endpoints=custom_endpoints,
         )
 
     # --- CLI Starter ---
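For callers, the visible change is the extra keyword on start_api, which accepts either FlockEndpoint objects or the tuple-keyed dict. A hypothetical sketch (my_flock is an existing Flock instance; host and port are left at their defaults):

```python
from flock.core.api.custom_endpoint import FlockEndpoint


def version_info():
    return {"package": "flock-core", "version": "0.4.0b40"}


def serve_with_extras(my_flock) -> None:
    my_flock.start_api(
        create_ui=False,
        custom_endpoints=[
            FlockEndpoint(path="/api/version", methods=["GET"], callback=version_info),
        ],
    )
```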
flock/core/flock_registry.py
CHANGED
@@ -169,17 +169,31 @@ class FlockRegistry:
         return None
 
     # --- Agent Registration ---
-    def register_agent(self, agent: FlockAgent) -> None:
-        """Registers a FlockAgent instance by its name.
+    def register_agent(self, agent: FlockAgent, *, force: bool = False) -> None:
+        """Registers a FlockAgent instance by its name.
+
+        Args:
+            agent: The agent instance to register.
+            force: If True, allow overwriting an existing **different** agent registered under the same name.
+                If False and a conflicting registration exists, a ValueError is raised.
+        """
         if not hasattr(agent, "name") or not agent.name:
             logger.error(
                 "Attempted to register an agent without a valid 'name' attribute."
             )
             return
-
+
+        if agent.name in self._agents and self._agents[agent.name] is not agent:
+            # Same agent already registered → silently ignore; different instance → error/force.
+            if not force:
+                raise ValueError(
+                    f"Agent '{agent.name}' already registered with a different instance. "
+                    "Pass force=True to overwrite the existing registration."
+                )
            logger.warning(
-                f"
+                f"Overwriting existing agent '{agent.name}' registration due to force=True."
            )
+
         self._agents[agent.name] = agent
         logger.debug(f"Registered agent: {agent.name}")
 
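The registration rules are now explicit: re-registering the same instance stays a silent no-op, while a different instance under an existing name raises ValueError unless force=True is passed. A sketch against an already-obtained registry; registry, original and replacement are illustrative names:

```python
def replace_agent(registry, original, replacement) -> None:
    # `original` and `replacement` are two FlockAgent instances sharing the same name.
    registry.register_agent(original)         # first registration
    registry.register_agent(original)         # same instance again -> no-op

    try:
        registry.register_agent(replacement)  # different instance, same name
    except ValueError:
        # Opt in to overwriting the existing registration.
        registry.register_agent(replacement, force=True)
```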
flock/core/logging/telemetry.py
CHANGED
@@ -42,6 +42,9 @@ class TelemetryConfig:
         enable_jaeger: bool = True,
         enable_file: bool = True,
         enable_sql: bool = True,
+        enable_otlp: bool = True,
+        otlp_protocol: str = "grpc",
+        otlp_endpoint: str = "http://localhost:4317",
         batch_processor_options: dict | None = None,
     ):
         """:param service_name: Name of your service.
@@ -61,6 +64,9 @@ class TelemetryConfig:
         self.enable_jaeger = enable_jaeger
         self.enable_file = enable_file
         self.enable_sql = enable_sql
+        self.enable_otlp = enable_otlp
+        self.otlp_protocol = otlp_protocol
+        self.otlp_endpoint = otlp_endpoint
         self.global_tracer = None
 
     def setup_tracing(self):
@@ -97,6 +103,32 @@ class TelemetryConfig:
 
             span_processors.append(SimpleSpanProcessor(jaeger_exporter))
 
+
+        if self.enable_otlp:
+            if self.otlp_protocol == "grpc":
+                from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
+                    OTLPSpanExporter,
+                )
+
+                otlp_exporter = OTLPSpanExporter(
+                    endpoint=self.otlp_endpoint,
+                    insecure=True,
+                )
+            elif self.otlp_protocol == "http":
+                from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
+                    OTLPSpanExporter,
+                )
+
+                otlp_exporter = OTLPSpanExporter(
+                    collector_endpoint=self.otlp_endpoint,
+                )
+            else:
+                raise ValueError(
+                    "Invalid OTEL_EXPORTER_OTLP_PROTOCOL specified. Use 'grpc' or 'http'."
+                )
+
+            span_processors.append(SimpleSpanProcessor(otlp_exporter))
+
         # If a file path is provided, add the custom file exporter.
         if self.file_export_name and self.enable_file:
             file_exporter = FileSpanExporter(
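Construction mirrors the wiring in flock/config.py: the three OTLP keywords select and point the exporter, and setup_tracing() attaches it as a SimpleSpanProcessor. A sketch, assuming the constructor parameters not visible in these hunks keep their defaults:

```python
from flock.core.logging.telemetry import TelemetryConfig

telemetry = TelemetryConfig(
    "my-service",                      # service name, passed positionally as in flock/config.py
    enable_jaeger=False,
    enable_file=False,
    enable_sql=False,
    enable_otlp=True,
    otlp_protocol="grpc",              # "http" selects the HTTP exporter branch instead
    otlp_endpoint="http://localhost:4317",
)
telemetry.setup_tracing()
```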
{flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 flock/__init__.py,sha256=1tMdEwpFvJXVso96jyGvGPXhD9P7EbZfkSV-WX1fXuE,5918
-flock/config.py,sha256=
+flock/config.py,sha256=9aUYglHavosdTS212WXW9qc_snZVimvxaXZ3Z5NDGWM,1923
 flock/cli/config.py,sha256=5DvFLObOx3ObisHnc9JfnUBnK83y0CBsUQzXfxPZve0,138
 flock/cli/constants.py,sha256=ZyXtTW91P1hUMkbMwmOwp_JEL5e9-YkcuM3vHM5glP4,978
 flock/cli/create_agent.py,sha256=DkeLUlrb7rGx3nZ04aADU9HXXu5mZTf_DBwT0xhzIv4,7
@@ -19,19 +19,20 @@ flock/cli/view_results.py,sha256=dOzK0O1FHSIDERnx48y-2Xke9BkOHS7pcOhs64AyIg0,781
 flock/cli/yaml_editor.py,sha256=K3N0bh61G1TSDAZDnurqW9e_-hO6CtSQKXQqlDhCjVo,12527
 flock/cli/assets/release_notes.md,sha256=bqnk50jxM3w5uY44Dc7MkdT8XmRREFxrVBAG9XCOSSU,4896
 flock/core/__init__.py,sha256=p7lmQULRu9ejIAELfanZiyMhW0CougIPvyFHW2nqBFQ,847
-flock/core/flock.py,sha256=
+flock/core/flock.py,sha256=WzLA7-xoAUq7Yn_ioieQIsk6CG_VvvDPeq_S6FWNgOY,30424
 flock/core/flock_agent.py,sha256=JTqaGD_OnZSd3bVU989WMsK1rAT6UGn-JYrPxFV15EE,39576
 flock/core/flock_evaluator.py,sha256=dOXZeDOGZcAmJ9ahqq_2bdGUU1VOXY4skmwTVpAjiVw,1685
 flock/core/flock_factory.py,sha256=_4zsjkEmJnCR7IvJ3SUHnDbX6c7Tt3E4P5ohxwKvE6w,3173
 flock/core/flock_module.py,sha256=UCK6TFe4viXs596zeng0GD3gln4ZNGu_gCWkXIIMREg,3090
-flock/core/flock_registry.py,sha256=
+flock/core/flock_registry.py,sha256=aC-RK0js676DQkjXmNuYHuD5t6GmFhpQoCKaO3i7xFg,24920
 flock/core/flock_router.py,sha256=1OAXDsdaIIFApEfo6SRfFEDoTuGt3Si7n2MXiySEfis,2644
 flock/core/api/__init__.py,sha256=OKlhzDWZJfA6ddBwxQUmATY0TSzESsH032u00iVGvdA,228
+flock/core/api/custom_endpoint.py,sha256=nCm8lhvq1OOVlHB5f1DD1Pgm5U-PgggPlYjlMRK4nPc,1090
 flock/core/api/endpoints.py,sha256=qQnJmtcYGkjdKtLllVpyJVjc-iZrvu5EEeVIryyt4tc,12987
-flock/core/api/main.py,sha256=
+flock/core/api/main.py,sha256=f7uZkl8wIOLSoaIztdRG40LqmmRQSdIe-WxXsZx4Q-U,28681
 flock/core/api/models.py,sha256=seqKuzhbN37nCNO7KrcJjI2mWuwiOKCLFcJcTPvTtag,3422
 flock/core/api/run_store.py,sha256=bFodJvVyWogzoezVy0cOoWWU3MdEBXf_6_5sBqCRWps,9227
-flock/core/api/runner.py,sha256=
+flock/core/api/runner.py,sha256=3izg6cVk1RoR1hDIDwMAO1gi3lnLcp8DPv7AnJBYx6A,1443
 flock/core/api/ui/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 flock/core/api/ui/routes.py,sha256=nS-wWO94mshE5ozWfOQZ-HOvtes_1qxDVcqpMZtU5JQ,8885
 flock/core/api/ui/utils.py,sha256=V7PqYHNK519hFJ8jvvwf7bGpbBXCRz_HQG3BDCCqlNA,4802
@@ -39,14 +40,14 @@ flock/core/context/context.py,sha256=GFqMwYXLheqECGvWcxar7sQ2-GuY3RVynZ7kjwd65R0
 flock/core/context/context_manager.py,sha256=FANSWa6DEhdhtZ7t_9Gza0v80UdpoDOhHbfVOccmjkA,1181
 flock/core/context/context_vars.py,sha256=ASPA29hpENWub4mgRoG62FtTVakCHQZfn6IhJQKe3C8,347
 flock/core/evaluation/utils.py,sha256=ZJkIMC9YT-HA2SPCZ4_bQ98isW1i6nbltVEYbjze-b0,12827
-flock/core/execution/batch_executor.py,sha256=
+flock/core/execution/batch_executor.py,sha256=mHwCI-DHqApCv_EVCN0ZOUd-LCQLjREpxKbAUPC0pcY,15266
 flock/core/execution/evaluation_executor.py,sha256=D9EO0sU-2qWj3vomjmUUi-DOtHNJNFRf30kGDHuzREE,17702
 flock/core/execution/local_executor.py,sha256=rnIQvaJOs6zZORUcR3vvyS6LPREDJTjaygl_Db0M8ao,952
 flock/core/execution/temporal_executor.py,sha256=dHcb0xuzPFWU_wbwTgI7glLNyyppei93Txs2sapjhaw,6283
 flock/core/interpreter/python_interpreter.py,sha256=RaUMZuufsKBNQ4FAeSaOgUuxzs8VYu5TgUUs-xwaxxM,26376
 flock/core/logging/__init__.py,sha256=Q8hp9-1ilPIUIV0jLgJ3_cP7COrea32cVwL7dicPnlM,82
 flock/core/logging/logging.py,sha256=JcgABQ8QJU1hhzhfF93eqnE0jhyXGZ2oObZst68sKR8,15409
-flock/core/logging/telemetry.py,sha256=
+flock/core/logging/telemetry.py,sha256=3pApCiKAHbznuCBpJhlDxOrDJWK8cLs3ENk8CpA6WKc,6571
 flock/core/logging/trace_and_logged.py,sha256=5vNrK1kxuPMoPJ0-QjQg-EDJL1oiEzvU6UNi6X8FiMs,2117
 flock/core/logging/formatters/enum_builder.py,sha256=LgEYXUv84wK5vwHflZ5h8HBGgvLH3sByvUQe8tZiyY0,981
 flock/core/logging/formatters/theme_builder.py,sha256=Wnaal3HuUDA4HFg9tdql1BxYwK83ACOZBBQy-DXnxcA,17342
@@ -494,8 +495,8 @@ flock/workflow/agent_execution_activity.py,sha256=Gy6FtuVAjf0NiUXmC3syS2eJpNQF4R
 flock/workflow/flock_workflow.py,sha256=iSUF_soFvWar0ffpkzE4irkDZRx0p4HnwmEBi_Ne2sY,9666
 flock/workflow/temporal_config.py,sha256=3_8O7SDEjMsSMXsWJBfnb6XTp0TFaz39uyzSlMTSF_I,3988
 flock/workflow/temporal_setup.py,sha256=YIHnSBntzOchHfMSh8hoLeNXrz3B1UbR14YrR6soM7A,1606
-flock_core-0.4.
-flock_core-0.4.
-flock_core-0.4.
-flock_core-0.4.
-flock_core-0.4.
+flock_core-0.4.0b40.dist-info/METADATA,sha256=W-nPONRJfGIBfsyt2hEPqtLqu6mxQto7fP1IYuw1oH8,17125
+flock_core-0.4.0b40.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+flock_core-0.4.0b40.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
+flock_core-0.4.0b40.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
+flock_core-0.4.0b40.dist-info/RECORD,,
{flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/WHEEL
File without changes
{flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/entry_points.txt
File without changes
{flock_core-0.4.0b38.dist-info → flock_core-0.4.0b40.dist-info}/licenses/LICENSE
File without changes