flock-core 0.4.0b39-py3-none-any.whl → 0.4.0b40-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



flock/core/execution/batch_executor.py CHANGED
@@ -1,4 +1,5 @@
  import asyncio
+ import concurrent.futures  # For real parallelism via threads
  from pathlib import Path
  from typing import TYPE_CHECKING, Any
 
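The new import is the heart of this release: `asyncio` interleaves coroutines on a single thread, so a blocking call inside one batch item stalls every other item, and the `asyncio.Semaphore` used previously only capped that cooperative concurrency. Handing each item to a thread pool keeps the event loop responsive. A minimal, self-contained sketch of the difference (illustrative names only, not flock-core API):

```python
import asyncio
import concurrent.futures
import time


def blocking_work(i: int) -> str:
    time.sleep(1)  # Stand-in for a blocking SDK or network call
    return f"item {i}"


async def main() -> None:
    loop = asyncio.get_running_loop()
    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
        # Four blocking calls finish in ~1s instead of ~4s,
        # because each one occupies its own thread.
        results = await asyncio.gather(
            *(loop.run_in_executor(pool, blocking_work, i) for i in range(4))
        )
    print(results)


asyncio.run(main())
```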
@@ -184,52 +185,77 @@ class BatchProcessor:
          )
          progress.start()
 
-         results = [None] * len(
-             prepared_batch_inputs
-         )  # Pre-allocate results list
-         tasks = []
-         semaphore = asyncio.Semaphore(
-             max_workers if parallel and not effective_use_temporal else 1
-         )  # Semaphore for parallel local
-
-         async def worker(index, item_inputs):
-             async with semaphore:
-                 full_input = {**(static_inputs or {}), **item_inputs}
-                 context = FlockContext()
-                 context.set_variable(FLOCK_BATCH_SILENT_MODE, silent_mode)
-
-                 run_desc = f"Batch item {index + 1}"
-                 logger.debug(f"{run_desc} started.")
-                 try:
-                     result = await self.flock.run_async(
-                         start_agent,
-                         full_input,
-                         box_result=box_results,
-                         context=context,
-                     )
-                     results[index] = result
-                     logger.debug(f"{run_desc} finished successfully.")
-                 except Exception as e:
-                     logger.error(
-                         f"{run_desc} failed: {e}", exc_info=not return_errors
-                     )
-                     if return_errors:
-                         results[index] = e
-                     else:
-                         # If not returning errors, ensure the exception propagates
-                         # to stop asyncio.gather if running in parallel.
-                         if parallel and not effective_use_temporal:
-                             raise  # Re-raise to stop gather
-                         else:
-                             # For sequential, we just store None or the exception if return_errors=True
-                             # For Temporal, error handling happens within the workflow/activity usually
-                             results[index] = e if return_errors else None
-                 finally:
-                     if progress_context:
-                         progress.update(
-                             progress_task_id, advance=1
-                         )  # Update progress
+         results = [None] * len(prepared_batch_inputs)  # Pre-allocate results list
+
+         # --- Worker Definitions ---
+         # We implement two flavours:
+         #   * async_worker: used for Temporal or sequential runs (keeps the original behaviour)
+         #   * thread_worker: executes the run in a dedicated thread via ThreadPoolExecutor for true parallelism.
+
+         async def async_worker(index: int, item_inputs: dict[str, Any]):
+             """Original coroutine worker used for non-threaded execution paths."""
+             full_input = {**(static_inputs or {}), **item_inputs}
+             context = FlockContext()
+             context.set_variable(FLOCK_BATCH_SILENT_MODE, silent_mode)
+
+             run_desc = f"Batch item {index + 1}"
+             logger.debug(f"{run_desc} started (async).")
+             try:
+                 result = await self.flock.run_async(
+                     start_agent,
+                     full_input,
+                     box_result=box_results,
+                     context=context,
+                 )
+                 results[index] = result
+                 logger.debug(f"{run_desc} finished successfully.")
+             except Exception as e:
+                 logger.error(f"{run_desc} failed: {e}", exc_info=not return_errors)
+                 if return_errors:
+                     results[index] = e
+                 else:
+                     raise  # Propagate to calling gather
+             finally:
+                 if progress_context:
+                     progress.update(progress_task_id, advance=1)
+
+         # ThreadPool worker for real parallelism (suitable for blocking I/O)
+         def _thread_worker(index: int, item_inputs: dict[str, Any]):
+             """Synchronous helper executed inside a worker thread."""
+             full_input = {**(static_inputs or {}), **item_inputs}
+             run_desc = f"Batch item {index + 1}"
+             logger.debug(f"{run_desc} started (thread).")
+             try:
+                 # Use the synchronous wrapper to avoid nested event-loop issues inside threads
+                 result = self.flock.run(
+                     start_agent=start_agent,
+                     input=full_input,
+                     box_result=box_results,
+                 )
+                 logger.debug(f"{run_desc} finished successfully.")
+                 return index, result, None
+             except Exception as e:
+                 logger.error(f"{run_desc} failed: {e}")
+                 return index, None, e
 
+         async def thread_worker(executor, index: int, item_inputs: dict[str, Any]):
+             """Coroutine wrapper that submits _thread_worker to the specified executor."""
+             loop = asyncio.get_running_loop()
+             idx, res, err = await loop.run_in_executor(
+                 executor, _thread_worker, index, item_inputs
+             )
+             # Handle result / error on the asyncio side
+             if err:
+                 if return_errors:
+                     results[idx] = err
+                 else:
+                     raise err
+             else:
+                 results[idx] = res
+             if progress_context:
+                 progress.update(progress_task_id, advance=1)
+
+         tasks = []
          try:
              if effective_use_temporal:
                  # Temporal Batching (Simplified: sequential execution for this example)
@@ -238,25 +264,32 @@ class BatchProcessor:
                      "Running batch using Temporal (executing sequentially for now)..."
                  )
                  for i, item_data in enumerate(prepared_batch_inputs):
-                     await worker(i, item_data)  # Run sequentially for demo
+                     await async_worker(i, item_data)  # Run sequentially for demo
                  # TODO: Implement true parallel Temporal workflow execution if needed
 
             elif parallel:
+                 # --- Real parallelism using ThreadPoolExecutor ---
                  logger.info(
-                     f"Running batch in parallel with max_workers={max_workers}..."
+                     f"Running batch in parallel (threads) with max_workers={max_workers}..."
                  )
-                 for i, item_data in enumerate(prepared_batch_inputs):
-                     tasks.append(asyncio.create_task(worker(i, item_data)))
-                 await asyncio.gather(
-                     *tasks
-                 )  # gather handles exceptions based on return_errors logic in worker
+                 loop = asyncio.get_running_loop()
+                 with concurrent.futures.ThreadPoolExecutor(
+                     max_workers=max_workers, thread_name_prefix="flock-batch"
+                 ) as executor:
+                     for i, item_data in enumerate(prepared_batch_inputs):
+                         tasks.append(
+                             asyncio.create_task(
+                                 thread_worker(executor, i, item_data)
+                             )
+                         )
+
+                     # Wait for all tasks allowing exceptions to propagate as needed
+                     await asyncio.gather(*tasks)
 
              else:  # Sequential Local
                  logger.info("Running batch sequentially...")
                  for i, item_data in enumerate(prepared_batch_inputs):
-                     await worker(
-                         i, item_data
-                     )  # Already handles errors internally based on return_errors
+                     await async_worker(i, item_data)  # Already handles errors internally based on return_errors
 
              logger.info("Batch execution finished.")
 
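Stepping back, the rewritten section is a fan-out/fan-in pattern: a synchronous helper does the blocking work in a pool thread and reports `(index, result, error)` triples, a thin coroutine writes them back into a pre-allocated list, and `asyncio.gather` fans the items back in. A standalone sketch of the same pattern under assumed placeholder names (`process_item`, `run_all`, `collect`), not the flock-core API:

```python
import asyncio
import concurrent.futures
from typing import Any


def process_item(index: int, item: dict[str, Any]):
    """Runs in a worker thread; reports errors as data instead of raising."""
    try:
        return index, {"echo": item}, None  # Placeholder for real blocking work
    except Exception as e:
        return index, None, e


async def run_all(
    items: list[dict[str, Any]], max_workers: int = 4, return_errors: bool = True
) -> list[Any]:
    results: list[Any] = [None] * len(items)  # Pre-allocated and index-stable

    async def collect(executor, i: int, item: dict[str, Any]) -> None:
        loop = asyncio.get_running_loop()
        idx, res, err = await loop.run_in_executor(executor, process_item, i, item)
        if err and not return_errors:
            raise err  # Fails the gather below, mirroring the diff's contract
        results[idx] = err if err else res

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        # gather stays inside the with-block: leaving the block shuts the executor
        # down before the collect() coroutines have submitted any work.
        await asyncio.gather(*(collect(executor, i, it) for i, it in enumerate(items)))
    return results


print(asyncio.run(run_all([{"n": 1}, {"n": 2}])))
```

Note that `asyncio.gather(*tasks)` in the diff sits inside the `with` block for the same reason as in this sketch: `ThreadPoolExecutor.__exit__` calls `shutdown(wait=True)`, and a shut-down executor rejects later `run_in_executor` submissions.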
flock/core/flock.py CHANGED
@@ -117,7 +117,7 @@ class Flock(BaseModel, Serializable):
      # Internal agent storage - not part of the Pydantic model for direct serialization
      _agents: dict[str, FlockAgent]
      _start_agent_name: str | None = None  # For potential pre-configuration
-     _start_input: dict = {}  # For potential pre-configuration
+     _start_input: dict = {}  # Instance attribute overwritten in __init__; kept for typing clarity
 
      # Pydantic v2 model config
      model_config = {
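The revised comment on `_start_input` points at a classic Python hazard: a mutable class-level default is shared by every instance unless something (here, per the comment, the `__init__` path) replaces it per instance. Pydantic's private-attribute machinery typically copies defaults anyway, which may be why only the comment changed. The failure mode in plain Python, with toy classes rather than flock-core:

```python
class Shared:
    start_input: dict = {}  # One dict object attached to the class itself


a, b = Shared(), Shared()
a.start_input["key"] = "leaks"
print(b.start_input)  # {'key': 'leaks'} - b observes a's mutation


class Isolated:
    start_input: dict  # Annotation only; no shared default

    def __init__(self) -> None:
        self.start_input = {}  # Fresh dict per instance, as the comment describes


c, d = Isolated(), Isolated()
c.start_input["key"] = "isolated"
print(d.start_input)  # {}
```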
@@ -277,7 +277,7 @@ class Flock(BaseModel, Serializable):
      def run(
          self,
          start_agent: FlockAgent | str | None = None,
-         input: dict = {},
+         input: dict | None = None,
          context: FlockContext | None = None,
          run_id: str = "",
          box_result: bool = True,
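The signature change from `input: dict = {}` to `input: dict | None = None` removes Python's best-known footgun: default values are evaluated once, at function definition, so a mutable default is silently shared across calls. A self-contained demonstration of the bug and the idiomatic fix (the parameter name `input` mirrors the diff; shadowing the builtin is kept for fidelity):

```python
def run_bad(input: dict = {}) -> dict:
    input["runs"] = input.get("runs", 0) + 1
    return input


print(run_bad())  # {'runs': 1}
print(run_bad())  # {'runs': 2} - state leaks: the default dict was created once


def run_good(input: dict | None = None) -> dict:
    input = {} if input is None else input  # Fresh dict on each defaulted call
    input["runs"] = input.get("runs", 0) + 1
    return input


print(run_good())  # {'runs': 1}
print(run_good())  # {'runs': 1}
```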
flock/core/flock_registry.py CHANGED
@@ -169,17 +169,31 @@ class FlockRegistry:
          return None
 
      # --- Agent Registration ---
-     def register_agent(self, agent: FlockAgent) -> None:
-         """Registers a FlockAgent instance by its name."""
+     def register_agent(self, agent: FlockAgent, *, force: bool = False) -> None:
+         """Registers a FlockAgent instance by its name.
+
+         Args:
+             agent: The agent instance to register.
+             force: If True, allow overwriting an existing **different** agent registered under the same name.
+                 If False and a conflicting registration exists, a ValueError is raised.
+         """
          if not hasattr(agent, "name") or not agent.name:
              logger.error(
                  "Attempted to register an agent without a valid 'name' attribute."
              )
              return
-         if agent.name in self._agents and self._agents[agent.name] != agent:
+
+         if agent.name in self._agents and self._agents[agent.name] is not agent:
+             # Same agent already registered → silently ignore; different instance → error/force.
+             if not force:
+                 raise ValueError(
+                     f"Agent '{agent.name}' already registered with a different instance. "
+                     "Pass force=True to overwrite the existing registration."
+                 )
              logger.warning(
-                 f"Agent '{agent.name}' already registered. Overwriting."
+                 f"Overwriting existing agent '{agent.name}' registration due to force=True."
              )
+
          self._agents[agent.name] = agent
          logger.debug(f"Registered agent: {agent.name}")
 
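Two things change here: a silent overwrite becomes an explicit opt-in via `force=True`, and the duplicate check moves from `!=` to `is not`. The second is subtle: if `FlockAgent` defines value-based equality (as Pydantic models do), `!=` could treat two equal-valued instances as the same registration, whereas `is not` pins the check to object identity, so only re-registering the very same object is a no-op. A toy re-implementation of the new contract, not flock-core itself:

```python
import logging

logger = logging.getLogger("registry-sketch")


class MiniRegistry:
    """Mirrors the diff's register semantics with plain dicts."""

    def __init__(self) -> None:
        self._agents: dict[str, object] = {}

    def register(self, name: str, agent: object, *, force: bool = False) -> None:
        existing = self._agents.get(name)
        if existing is not None and existing is not agent:
            if not force:
                raise ValueError(
                    f"Agent '{name}' already registered with a different instance. "
                    "Pass force=True to overwrite."
                )
            logger.warning("Overwriting '%s' due to force=True.", name)
        # Same instance re-registered, new name, or forced: store it
        self._agents[name] = agent


reg = MiniRegistry()
a, b = object(), object()
reg.register("writer", a)
reg.register("writer", a)              # Same instance: no-op, no error
try:
    reg.register("writer", b)          # Different instance: refused
except ValueError as e:
    print(e)
reg.register("writer", b, force=True)  # Explicit overwrite succeeds
```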
flock_core-0.4.0b39.dist-info/METADATA → flock_core-0.4.0b40.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: flock-core
- Version: 0.4.0b39
+ Version: 0.4.0b40
  Summary: Declarative LLM Orchestration at Scale
  Author-email: Andre Ratzenberger <andre.ratzenberger@whiteduck.de>
  License-File: LICENSE
flock_core-0.4.0b39.dist-info/RECORD → flock_core-0.4.0b40.dist-info/RECORD CHANGED
@@ -19,12 +19,12 @@ flock/cli/view_results.py,sha256=dOzK0O1FHSIDERnx48y-2Xke9BkOHS7pcOhs64AyIg0,781
  flock/cli/yaml_editor.py,sha256=K3N0bh61G1TSDAZDnurqW9e_-hO6CtSQKXQqlDhCjVo,12527
  flock/cli/assets/release_notes.md,sha256=bqnk50jxM3w5uY44Dc7MkdT8XmRREFxrVBAG9XCOSSU,4896
  flock/core/__init__.py,sha256=p7lmQULRu9ejIAELfanZiyMhW0CougIPvyFHW2nqBFQ,847
- flock/core/flock.py,sha256=A75h8f_vwZHKNkSVoUie9AZlKbUKpbUX1wNA3VNsPyY,30379
+ flock/core/flock.py,sha256=WzLA7-xoAUq7Yn_ioieQIsk6CG_VvvDPeq_S6FWNgOY,30424
  flock/core/flock_agent.py,sha256=JTqaGD_OnZSd3bVU989WMsK1rAT6UGn-JYrPxFV15EE,39576
  flock/core/flock_evaluator.py,sha256=dOXZeDOGZcAmJ9ahqq_2bdGUU1VOXY4skmwTVpAjiVw,1685
  flock/core/flock_factory.py,sha256=_4zsjkEmJnCR7IvJ3SUHnDbX6c7Tt3E4P5ohxwKvE6w,3173
  flock/core/flock_module.py,sha256=UCK6TFe4viXs596zeng0GD3gln4ZNGu_gCWkXIIMREg,3090
- flock/core/flock_registry.py,sha256=Qcu9juUFNyDAOEsqVxauwVlWdfgKZrSzc8yT8JMiK-c,24246
+ flock/core/flock_registry.py,sha256=aC-RK0js676DQkjXmNuYHuD5t6GmFhpQoCKaO3i7xFg,24920
  flock/core/flock_router.py,sha256=1OAXDsdaIIFApEfo6SRfFEDoTuGt3Si7n2MXiySEfis,2644
  flock/core/api/__init__.py,sha256=OKlhzDWZJfA6ddBwxQUmATY0TSzESsH032u00iVGvdA,228
  flock/core/api/custom_endpoint.py,sha256=nCm8lhvq1OOVlHB5f1DD1Pgm5U-PgggPlYjlMRK4nPc,1090
@@ -40,7 +40,7 @@ flock/core/context/context.py,sha256=GFqMwYXLheqECGvWcxar7sQ2-GuY3RVynZ7kjwd65R0
  flock/core/context/context_manager.py,sha256=FANSWa6DEhdhtZ7t_9Gza0v80UdpoDOhHbfVOccmjkA,1181
  flock/core/context/context_vars.py,sha256=ASPA29hpENWub4mgRoG62FtTVakCHQZfn6IhJQKe3C8,347
  flock/core/evaluation/utils.py,sha256=ZJkIMC9YT-HA2SPCZ4_bQ98isW1i6nbltVEYbjze-b0,12827
- flock/core/execution/batch_executor.py,sha256=nvsFOVaH4c4uPw_gwZ5jCIULpK59EL1kmcoPTja5kko,13745
+ flock/core/execution/batch_executor.py,sha256=mHwCI-DHqApCv_EVCN0ZOUd-LCQLjREpxKbAUPC0pcY,15266
  flock/core/execution/evaluation_executor.py,sha256=D9EO0sU-2qWj3vomjmUUi-DOtHNJNFRf30kGDHuzREE,17702
  flock/core/execution/local_executor.py,sha256=rnIQvaJOs6zZORUcR3vvyS6LPREDJTjaygl_Db0M8ao,952
  flock/core/execution/temporal_executor.py,sha256=dHcb0xuzPFWU_wbwTgI7glLNyyppei93Txs2sapjhaw,6283
@@ -495,8 +495,8 @@ flock/workflow/agent_execution_activity.py,sha256=Gy6FtuVAjf0NiUXmC3syS2eJpNQF4R
  flock/workflow/flock_workflow.py,sha256=iSUF_soFvWar0ffpkzE4irkDZRx0p4HnwmEBi_Ne2sY,9666
  flock/workflow/temporal_config.py,sha256=3_8O7SDEjMsSMXsWJBfnb6XTp0TFaz39uyzSlMTSF_I,3988
  flock/workflow/temporal_setup.py,sha256=YIHnSBntzOchHfMSh8hoLeNXrz3B1UbR14YrR6soM7A,1606
- flock_core-0.4.0b39.dist-info/METADATA,sha256=zZnZWqBTRaem-XCem8L92sWCVkLIf0qe4ZfuMYiz4_U,17125
- flock_core-0.4.0b39.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- flock_core-0.4.0b39.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
- flock_core-0.4.0b39.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
- flock_core-0.4.0b39.dist-info/RECORD,,
+ flock_core-0.4.0b40.dist-info/METADATA,sha256=W-nPONRJfGIBfsyt2hEPqtLqu6mxQto7fP1IYuw1oH8,17125
+ flock_core-0.4.0b40.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ flock_core-0.4.0b40.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
+ flock_core-0.4.0b40.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
+ flock_core-0.4.0b40.dist-info/RECORD,,