flock-core 0.4.0b39__py3-none-any.whl → 0.4.0b42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flock-core might be problematic. See the accompanying advisory for more details.

@@ -27,6 +27,8 @@ class FlockEndpoint(BaseModel):
27
27
  # Optional schema models
28
28
  request_model: type[BaseModel] | None = None
29
29
  response_model: type[BaseModel] | None = None
30
+ # Query-string parameters as a Pydantic model (treated as Depends())
31
+ query_model: type[BaseModel] | None = None
30
32
 
31
33
  # OpenAPI / Swagger metadata
32
34
  summary: str | None = None
@@ -34,6 +36,9 @@ class FlockEndpoint(BaseModel):
34
36
  name: str | None = None # Route name in FastAPI
35
37
  include_in_schema: bool = True
36
38
 
39
+ # FastAPI dependency injections (e.g. security)
40
+ dependencies: list[Any] | None = None
41
+
37
42
  model_config = {
38
43
  "arbitrary_types_allowed": True,
39
44
  "validate_default": True,
flock/core/api/main.py CHANGED
@@ -133,65 +133,82 @@ class FlockAPI:
133
133
  if self.custom_endpoints:
134
134
  import inspect
135
135
 
136
- from fastapi import Request
136
+ from fastapi import Body, Depends, Request
137
137
 
138
138
  # Register any endpoints collected during __init__ (self.custom_endpoints)
139
139
  if self.custom_endpoints:
140
- from fastapi import Body
141
-
142
- def _create_handler_factory(callback: Callable[..., Any], req_model: type[BaseModel] | None):
143
-
140
+ def _create_handler_factory(callback: Callable[..., Any], req_model: type[BaseModel] | None, query_model: type[BaseModel] | None):
141
+ async def _invoke(request: Request, body, query):
142
+ payload: dict[str, Any] = {"flock": self.flock}
143
+ if request:
144
+ payload.update(request.path_params)
145
+ if query is None:
146
+ payload["query"] = dict(request.query_params)
147
+ else:
148
+ payload["query"] = query
149
+ else:
150
+ payload["query"] = query or {}
151
+ if body is not None:
152
+ payload["body"] = body
153
+ elif request and request.method in {"POST", "PUT", "PATCH"} and req_model is None:
154
+ try:
155
+ payload["body"] = await request.json()
156
+ except Exception:
157
+ payload["body"] = await request.body()
158
+
159
+ sig = inspect.signature(callback)
160
+ filtered_kwargs = {k: v for k, v in payload.items() if k in sig.parameters}
161
+ if inspect.iscoroutinefunction(callback):
162
+ return await callback(**filtered_kwargs)
163
+ return callback(**filtered_kwargs)
164
+
165
+ # Dynamically build wrapper with appropriate signature so FastAPI can document it
166
+ params: list[str] = []
144
167
  if req_model is not None:
145
-
146
- async def _route_handler(body: req_model = Body(...), request: Request = None): # type: ignore[arg-type,valid-type]
147
- payload: dict[str, Any] = {
148
- "body": body,
149
- "query": dict(request.query_params) if request else {},
150
- "flock": self.flock,
151
- **(request.path_params if request else {}),
152
- }
153
-
154
- sig = inspect.signature(callback)
155
- filtered_kwargs = {k: v for k, v in payload.items() if k in sig.parameters}
156
-
157
- if inspect.iscoroutinefunction(callback):
158
- return await callback(**filtered_kwargs)
159
- return callback(**filtered_kwargs)
168
+ params.append("body")
169
+ if query_model is not None:
170
+ params.append("query")
171
+
172
+ # Build wrapper function based on which params are present
173
+ if req_model and query_model:
174
+ async def _route_handler(
175
+ request: Request,
176
+ body: req_model = Body(...), # type: ignore[arg-type,valid-type]
177
+ query: query_model = Depends(), # type: ignore[arg-type,valid-type]
178
+ ):
179
+ return await _invoke(request, body, query)
180
+
181
+ elif req_model and not query_model:
182
+ async def _route_handler(
183
+ request: Request,
184
+ body: req_model = Body(...), # type: ignore[arg-type,valid-type]
185
+ ):
186
+ return await _invoke(request, body, None)
187
+
188
+ elif query_model and not req_model:
189
+ async def _route_handler(
190
+ request: Request,
191
+ query: query_model = Depends(), # type: ignore[arg-type,valid-type]
192
+ ):
193
+ return await _invoke(request, None, query)
160
194
 
161
195
  else:
162
-
163
196
  async def _route_handler(request: Request):
164
- payload: dict[str, Any] = {
165
- "query": dict(request.query_params),
166
- "flock": self.flock,
167
- **request.path_params,
168
- }
169
-
170
- if request.method in {"POST", "PUT", "PATCH"}:
171
- try:
172
- payload["body"] = await request.json()
173
- except Exception:
174
- payload["body"] = await request.body()
175
-
176
- sig = inspect.signature(callback)
177
- filtered_kwargs = {k: v for k, v in payload.items() if k in sig.parameters}
178
-
179
- if inspect.iscoroutinefunction(callback):
180
- return await callback(**filtered_kwargs)
181
- return callback(**filtered_kwargs)
197
+ return await _invoke(request, None, None)
182
198
 
183
199
  return _route_handler
184
200
 
185
201
  for ep in self.custom_endpoints:
186
202
  self.app.add_api_route(
187
203
  ep.path,
188
- _create_handler_factory(ep.callback, ep.request_model),
204
+ _create_handler_factory(ep.callback, ep.request_model, ep.query_model),
189
205
  methods=ep.methods or ["GET"],
190
206
  name=ep.name or f"custom:{ep.path}",
191
207
  include_in_schema=ep.include_in_schema,
192
208
  response_model=ep.response_model,
193
209
  summary=ep.summary,
194
210
  description=ep.description,
211
+ dependencies=ep.dependencies,
195
212
  )
196
213
 
197
214
  # --- Core Execution Helper Methods ---
@@ -1,4 +1,5 @@
1
1
  import asyncio
2
+ import concurrent.futures # For real parallelism via threads
2
3
  from pathlib import Path
3
4
  from typing import TYPE_CHECKING, Any
4
5
 
@@ -184,52 +185,77 @@ class BatchProcessor:
184
185
  )
185
186
  progress.start()
186
187
 
187
- results = [None] * len(
188
- prepared_batch_inputs
189
- ) # Pre-allocate results list
190
- tasks = []
191
- semaphore = asyncio.Semaphore(
192
- max_workers if parallel and not effective_use_temporal else 1
193
- ) # Semaphore for parallel local
194
-
195
- async def worker(index, item_inputs):
196
- async with semaphore:
197
- full_input = {**(static_inputs or {}), **item_inputs}
198
- context = FlockContext()
199
- context.set_variable(FLOCK_BATCH_SILENT_MODE, silent_mode)
200
-
201
- run_desc = f"Batch item {index + 1}"
202
- logger.debug(f"{run_desc} started.")
203
- try:
204
- result = await self.flock.run_async(
205
- start_agent,
206
- full_input,
207
- box_result=box_results,
208
- context=context,
209
- )
210
- results[index] = result
211
- logger.debug(f"{run_desc} finished successfully.")
212
- except Exception as e:
213
- logger.error(
214
- f"{run_desc} failed: {e}", exc_info=not return_errors
215
- )
216
- if return_errors:
217
- results[index] = e
218
- else:
219
- # If not returning errors, ensure the exception propagates
220
- # to stop asyncio.gather if running in parallel.
221
- if parallel and not effective_use_temporal:
222
- raise # Re-raise to stop gather
223
- else:
224
- # For sequential, we just store None or the exception if return_errors=True
225
- # For Temporal, error handling happens within the workflow/activity usually
226
- results[index] = e if return_errors else None
227
- finally:
228
- if progress_context:
229
- progress.update(
230
- progress_task_id, advance=1
231
- ) # Update progress
188
+ results = [None] * len(prepared_batch_inputs) # Pre-allocate results list
189
+
190
+ # --- Worker Definitions ---
191
+ # We implement two flavours:
192
+ # * async_worker: used for Temporal or sequential runs (keeps the original behaviour)
193
+ # * thread_worker: executes the run in a dedicated thread via ThreadPoolExecutor for true parallelism.
194
+
195
+ async def async_worker(index: int, item_inputs: dict[str, Any]):
196
+ """Original coroutine worker used for non-threaded execution paths."""
197
+ full_input = {**(static_inputs or {}), **item_inputs}
198
+ context = FlockContext()
199
+ context.set_variable(FLOCK_BATCH_SILENT_MODE, silent_mode)
200
+
201
+ run_desc = f"Batch item {index + 1}"
202
+ logger.debug(f"{run_desc} started (async).")
203
+ try:
204
+ result = await self.flock.run_async(
205
+ start_agent,
206
+ full_input,
207
+ box_result=box_results,
208
+ context=context,
209
+ )
210
+ results[index] = result
211
+ logger.debug(f"{run_desc} finished successfully.")
212
+ except Exception as e:
213
+ logger.error(f"{run_desc} failed: {e}", exc_info=not return_errors)
214
+ if return_errors:
215
+ results[index] = e
216
+ else:
217
+ raise # Propagate to calling gather
218
+ finally:
219
+ if progress_context:
220
+ progress.update(progress_task_id, advance=1)
221
+
222
+ # ThreadPool worker for real parallelism (suitable for blocking I/O)
223
+ def _thread_worker(index: int, item_inputs: dict[str, Any]):
224
+ """Synchronous helper executed inside a worker thread."""
225
+ full_input = {**(static_inputs or {}), **item_inputs}
226
+ run_desc = f"Batch item {index + 1}"
227
+ logger.debug(f"{run_desc} started (thread).")
228
+ try:
229
+ # Use the synchronous wrapper to avoid nested event-loop issues inside threads
230
+ result = self.flock.run(
231
+ start_agent=start_agent,
232
+ input=full_input,
233
+ box_result=box_results,
234
+ )
235
+ logger.debug(f"{run_desc} finished successfully.")
236
+ return index, result, None
237
+ except Exception as e:
238
+ logger.error(f"{run_desc} failed: {e}")
239
+ return index, None, e
232
240
 
241
+ async def thread_worker(executor, index: int, item_inputs: dict[str, Any]):
242
+ """Coroutine wrapper that submits _thread_worker to the specified executor."""
243
+ loop = asyncio.get_running_loop()
244
+ idx, res, err = await loop.run_in_executor(
245
+ executor, _thread_worker, index, item_inputs
246
+ )
247
+ # Handle result / error on the asyncio side
248
+ if err:
249
+ if return_errors:
250
+ results[idx] = err
251
+ else:
252
+ raise err
253
+ else:
254
+ results[idx] = res
255
+ if progress_context:
256
+ progress.update(progress_task_id, advance=1)
257
+
258
+ tasks = []
233
259
  try:
234
260
  if effective_use_temporal:
235
261
  # Temporal Batching (Simplified: sequential execution for this example)
@@ -238,25 +264,32 @@ class BatchProcessor:
238
264
  "Running batch using Temporal (executing sequentially for now)..."
239
265
  )
240
266
  for i, item_data in enumerate(prepared_batch_inputs):
241
- await worker(i, item_data) # Run sequentially for demo
267
+ await async_worker(i, item_data) # Run sequentially for demo
242
268
  # TODO: Implement true parallel Temporal workflow execution if needed
243
269
 
244
270
  elif parallel:
271
+ # --- Real parallelism using ThreadPoolExecutor ---
245
272
  logger.info(
246
- f"Running batch in parallel with max_workers={max_workers}..."
273
+ f"Running batch in parallel (threads) with max_workers={max_workers}..."
247
274
  )
248
- for i, item_data in enumerate(prepared_batch_inputs):
249
- tasks.append(asyncio.create_task(worker(i, item_data)))
250
- await asyncio.gather(
251
- *tasks
252
- ) # gather handles exceptions based on return_errors logic in worker
275
+ loop = asyncio.get_running_loop()
276
+ with concurrent.futures.ThreadPoolExecutor(
277
+ max_workers=max_workers, thread_name_prefix="flock-batch"
278
+ ) as executor:
279
+ for i, item_data in enumerate(prepared_batch_inputs):
280
+ tasks.append(
281
+ asyncio.create_task(
282
+ thread_worker(executor, i, item_data)
283
+ )
284
+ )
285
+
286
+ # Wait for all tasks allowing exceptions to propagate as needed
287
+ await asyncio.gather(*tasks)
253
288
 
254
289
  else: # Sequential Local
255
290
  logger.info("Running batch sequentially...")
256
291
  for i, item_data in enumerate(prepared_batch_inputs):
257
- await worker(
258
- i, item_data
259
- ) # Already handles errors internally based on return_errors
292
+ await async_worker(i, item_data) # Already handles errors internally based on return_errors
260
293
 
261
294
  logger.info("Batch execution finished.")
262
295
 
flock/core/flock.py CHANGED
@@ -117,7 +117,7 @@ class Flock(BaseModel, Serializable):
117
117
  # Internal agent storage - not part of the Pydantic model for direct serialization
118
118
  _agents: dict[str, FlockAgent]
119
119
  _start_agent_name: str | None = None # For potential pre-configuration
120
- _start_input: dict = {} # For potential pre-configuration
120
+ _start_input: dict = {} # Instance attribute overwritten in __init__; kept for typing clarity
121
121
 
122
122
  # Pydantic v2 model config
123
123
  model_config = {
@@ -277,7 +277,7 @@ class Flock(BaseModel, Serializable):
277
277
  def run(
278
278
  self,
279
279
  start_agent: FlockAgent | str | None = None,
280
- input: dict = {},
280
+ input: dict | None = None,
281
281
  context: FlockContext | None = None,
282
282
  run_id: str = "",
283
283
  box_result: bool = True,
@@ -169,17 +169,31 @@ class FlockRegistry:
169
169
  return None
170
170
 
171
171
  # --- Agent Registration ---
172
- def register_agent(self, agent: FlockAgent) -> None:
173
- """Registers a FlockAgent instance by its name."""
172
+ def register_agent(self, agent: FlockAgent, *, force: bool = False) -> None:
173
+ """Registers a FlockAgent instance by its name.
174
+
175
+ Args:
176
+ agent: The agent instance to register.
177
+ force: If True, allow overwriting an existing **different** agent registered under the same name.
178
+ If False and a conflicting registration exists, a ValueError is raised.
179
+ """
174
180
  if not hasattr(agent, "name") or not agent.name:
175
181
  logger.error(
176
182
  "Attempted to register an agent without a valid 'name' attribute."
177
183
  )
178
184
  return
179
- if agent.name in self._agents and self._agents[agent.name] != agent:
185
+
186
+ if agent.name in self._agents and self._agents[agent.name] is not agent:
187
+ # Same agent already registered → silently ignore; different instance → error/force.
188
+ if not force:
189
+ raise ValueError(
190
+ f"Agent '{agent.name}' already registered with a different instance. "
191
+ "Pass force=True to overwrite the existing registration."
192
+ )
180
193
  logger.warning(
181
- f"Agent '{agent.name}' already registered. Overwriting."
194
+ f"Overwriting existing agent '{agent.name}' registration due to force=True."
182
195
  )
196
+
183
197
  self._agents[agent.name] = agent
184
198
  logger.debug(f"Registered agent: {agent.name}")
185
199
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: flock-core
3
- Version: 0.4.0b39
3
+ Version: 0.4.0b42
4
4
  Summary: Declarative LLM Orchestration at Scale
5
5
  Author-email: Andre Ratzenberger <andre.ratzenberger@whiteduck.de>
6
6
  License-File: LICENSE
@@ -19,17 +19,17 @@ flock/cli/view_results.py,sha256=dOzK0O1FHSIDERnx48y-2Xke9BkOHS7pcOhs64AyIg0,781
19
19
  flock/cli/yaml_editor.py,sha256=K3N0bh61G1TSDAZDnurqW9e_-hO6CtSQKXQqlDhCjVo,12527
20
20
  flock/cli/assets/release_notes.md,sha256=bqnk50jxM3w5uY44Dc7MkdT8XmRREFxrVBAG9XCOSSU,4896
21
21
  flock/core/__init__.py,sha256=p7lmQULRu9ejIAELfanZiyMhW0CougIPvyFHW2nqBFQ,847
22
- flock/core/flock.py,sha256=A75h8f_vwZHKNkSVoUie9AZlKbUKpbUX1wNA3VNsPyY,30379
22
+ flock/core/flock.py,sha256=WzLA7-xoAUq7Yn_ioieQIsk6CG_VvvDPeq_S6FWNgOY,30424
23
23
  flock/core/flock_agent.py,sha256=JTqaGD_OnZSd3bVU989WMsK1rAT6UGn-JYrPxFV15EE,39576
24
24
  flock/core/flock_evaluator.py,sha256=dOXZeDOGZcAmJ9ahqq_2bdGUU1VOXY4skmwTVpAjiVw,1685
25
25
  flock/core/flock_factory.py,sha256=_4zsjkEmJnCR7IvJ3SUHnDbX6c7Tt3E4P5ohxwKvE6w,3173
26
26
  flock/core/flock_module.py,sha256=UCK6TFe4viXs596zeng0GD3gln4ZNGu_gCWkXIIMREg,3090
27
- flock/core/flock_registry.py,sha256=Qcu9juUFNyDAOEsqVxauwVlWdfgKZrSzc8yT8JMiK-c,24246
27
+ flock/core/flock_registry.py,sha256=aC-RK0js676DQkjXmNuYHuD5t6GmFhpQoCKaO3i7xFg,24920
28
28
  flock/core/flock_router.py,sha256=1OAXDsdaIIFApEfo6SRfFEDoTuGt3Si7n2MXiySEfis,2644
29
29
  flock/core/api/__init__.py,sha256=OKlhzDWZJfA6ddBwxQUmATY0TSzESsH032u00iVGvdA,228
30
- flock/core/api/custom_endpoint.py,sha256=nCm8lhvq1OOVlHB5f1DD1Pgm5U-PgggPlYjlMRK4nPc,1090
30
+ flock/core/api/custom_endpoint.py,sha256=Mbk2owdcXVATaT5FtEWXFzllgursozcmqP8ouG5btc0,1305
31
31
  flock/core/api/endpoints.py,sha256=qQnJmtcYGkjdKtLllVpyJVjc-iZrvu5EEeVIryyt4tc,12987
32
- flock/core/api/main.py,sha256=f7uZkl8wIOLSoaIztdRG40LqmmRQSdIe-WxXsZx4Q-U,28681
32
+ flock/core/api/main.py,sha256=d7JQLsi2xMpT1yjuU26uU0apNh7YOdvwt04AuEmQSQM,29831
33
33
  flock/core/api/models.py,sha256=seqKuzhbN37nCNO7KrcJjI2mWuwiOKCLFcJcTPvTtag,3422
34
34
  flock/core/api/run_store.py,sha256=bFodJvVyWogzoezVy0cOoWWU3MdEBXf_6_5sBqCRWps,9227
35
35
  flock/core/api/runner.py,sha256=3izg6cVk1RoR1hDIDwMAO1gi3lnLcp8DPv7AnJBYx6A,1443
@@ -40,7 +40,7 @@ flock/core/context/context.py,sha256=GFqMwYXLheqECGvWcxar7sQ2-GuY3RVynZ7kjwd65R0
40
40
  flock/core/context/context_manager.py,sha256=FANSWa6DEhdhtZ7t_9Gza0v80UdpoDOhHbfVOccmjkA,1181
41
41
  flock/core/context/context_vars.py,sha256=ASPA29hpENWub4mgRoG62FtTVakCHQZfn6IhJQKe3C8,347
42
42
  flock/core/evaluation/utils.py,sha256=ZJkIMC9YT-HA2SPCZ4_bQ98isW1i6nbltVEYbjze-b0,12827
43
- flock/core/execution/batch_executor.py,sha256=nvsFOVaH4c4uPw_gwZ5jCIULpK59EL1kmcoPTja5kko,13745
43
+ flock/core/execution/batch_executor.py,sha256=mHwCI-DHqApCv_EVCN0ZOUd-LCQLjREpxKbAUPC0pcY,15266
44
44
  flock/core/execution/evaluation_executor.py,sha256=D9EO0sU-2qWj3vomjmUUi-DOtHNJNFRf30kGDHuzREE,17702
45
45
  flock/core/execution/local_executor.py,sha256=rnIQvaJOs6zZORUcR3vvyS6LPREDJTjaygl_Db0M8ao,952
46
46
  flock/core/execution/temporal_executor.py,sha256=dHcb0xuzPFWU_wbwTgI7glLNyyppei93Txs2sapjhaw,6283
@@ -495,8 +495,8 @@ flock/workflow/agent_execution_activity.py,sha256=Gy6FtuVAjf0NiUXmC3syS2eJpNQF4R
495
495
  flock/workflow/flock_workflow.py,sha256=iSUF_soFvWar0ffpkzE4irkDZRx0p4HnwmEBi_Ne2sY,9666
496
496
  flock/workflow/temporal_config.py,sha256=3_8O7SDEjMsSMXsWJBfnb6XTp0TFaz39uyzSlMTSF_I,3988
497
497
  flock/workflow/temporal_setup.py,sha256=YIHnSBntzOchHfMSh8hoLeNXrz3B1UbR14YrR6soM7A,1606
498
- flock_core-0.4.0b39.dist-info/METADATA,sha256=zZnZWqBTRaem-XCem8L92sWCVkLIf0qe4ZfuMYiz4_U,17125
499
- flock_core-0.4.0b39.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
500
- flock_core-0.4.0b39.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
501
- flock_core-0.4.0b39.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
502
- flock_core-0.4.0b39.dist-info/RECORD,,
498
+ flock_core-0.4.0b42.dist-info/METADATA,sha256=uKNHqJX2rGGs56oYa3iYkym2QpVCE0PT9D22am5OSa0,17125
499
+ flock_core-0.4.0b42.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
500
+ flock_core-0.4.0b42.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
501
+ flock_core-0.4.0b42.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
502
+ flock_core-0.4.0b42.dist-info/RECORD,,