flock-core 0.5.0b19__py3-none-any.whl → 0.5.0b21__py3-none-any.whl

This diff shows the content of publicly released package versions as published to their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions.

Potentially problematic release: this version of flock-core might be problematic.

@@ -1,7 +1,9 @@
 # src/flock/components/evaluation/declarative_evaluation_component.py
 """DeclarativeEvaluationComponent - DSPy-based evaluation using the unified component system."""

-from collections.abc import Generator
+from collections import OrderedDict
+from collections.abc import Callable, Generator
+from contextlib import nullcontext
 from typing import Any, Literal, override

 from temporalio import workflow
@@ -22,6 +24,73 @@ from flock.core.registry import flock_component
 logger = get_logger("components.evaluation.declarative")


+_live_patch_applied = False
+
+
+def _ensure_live_crop_above() -> None:
+    """Monkeypatch rich.live_render to support 'crop_above' overflow."""
+    global _live_patch_applied
+    if _live_patch_applied:
+        return
+    try:
+        from typing import Literal as _Literal
+
+        from rich import live_render as _lr
+    except Exception:
+        return
+
+    # Extend the accepted literal at runtime so type checks don't block the new option.
+    current_args = getattr(_lr.VerticalOverflowMethod, '__args__', ())
+    if 'crop_above' not in current_args:
+        _lr.VerticalOverflowMethod = _Literal['crop', 'crop_above', 'ellipsis', 'visible'] # type: ignore[assignment]
+
+    if getattr(_lr.LiveRender.__rich_console__, '_flock_crop_above', False):
+        _live_patch_applied = True
+        return
+
+    Segment = _lr.Segment
+    Text = _lr.Text
+    loop_last = _lr.loop_last
+
+    def _patched_rich_console(self, console, options):
+        renderable = self.renderable
+        style = console.get_style(self.style)
+        lines = console.render_lines(renderable, options, style=style, pad=False)
+        shape = Segment.get_shape(lines)
+
+        _, height = shape
+        max_height = options.size.height
+        if height > max_height:
+            if self.vertical_overflow == 'crop':
+                lines = lines[: max_height]
+                shape = Segment.get_shape(lines)
+            elif self.vertical_overflow == 'crop_above':
+                lines = lines[-max_height:]
+                shape = Segment.get_shape(lines)
+            elif self.vertical_overflow == 'ellipsis' and max_height > 0:
+                lines = lines[: (max_height - 1)]
+                overflow_text = Text(
+                    '...',
+                    overflow='crop',
+                    justify='center',
+                    end='',
+                    style='live.ellipsis',
+                )
+                lines.append(list(console.render(overflow_text)))
+                shape = Segment.get_shape(lines)
+        self._shape = shape
+
+        new_line = Segment.line()
+        for last, line in loop_last(lines):
+            yield from line
+            if not last:
+                yield new_line
+
+    _patched_rich_console._flock_crop_above = True # type: ignore[attr-defined]
+    _lr.LiveRender.__rich_console__ = _patched_rich_console
+    _live_patch_applied = True
+
+
 class DeclarativeEvaluationConfig(AgentComponentConfig):
     """Configuration for the DeclarativeEvaluationConfig."""

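Once `_ensure_live_crop_above()` has run, Rich's `Live` display accepts the extra overflow value. A minimal sketch under that assumption (the console and the placeholder renderables are illustrative, not part of the package):

    from rich.console import Console
    from rich.live import Live

    _ensure_live_crop_above()  # idempotent; patches rich.live_render once per process

    console = Console()
    # 'crop_above' drops rows from the top, so the newest streamed rows stay visible.
    with Live("waiting for tokens...", console=console, vertical_overflow="crop_above") as live:
        live.update("streamed content goes here")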
@@ -32,6 +101,10 @@ class DeclarativeEvaluationConfig(AgentComponentConfig):
     max_tokens: int = 32000
     max_retries: int = 3
     max_tool_calls: int = 10
+    no_output: bool = Field(
+        default=False,
+        description="Disable output from the underlying DSPy program.",
+    )
     stream: bool = Field(
         default=False,
         description="Enable streaming output from the underlying DSPy program.",
@@ -52,6 +125,11 @@ class DeclarativeEvaluationConfig(AgentComponentConfig):
         default=None,
         description="Extraction LM for TwoStepAdapter when adapter='two_step'",
     )
+    stream_callbacks: list[Callable[..., Any] | Any] | None = None
+    stream_vertical_overflow: Literal["crop", "ellipsis", "crop_above", "visible"] = Field(
+        default="crop_above",
+        description=("Rich Live vertical overflow strategy; select how tall output is handled; 'crop_above' keeps the most recent rows visible."),
+    )
     kwargs: dict[str, Any] = Field(default_factory=dict)


@@ -165,7 +243,7 @@ class DeclarativeEvaluationComponent(
         return await self._execute_standard(agent_task, inputs, agent)

     async def _execute_streaming(self, signature, agent_task, inputs: dict[str, Any], agent: Any, console) -> dict[str, Any]:
-        """Execute DSPy program in streaming mode (from original implementation)."""
+        """Execute DSPy program in streaming mode with rich table updates."""
         logger.info(f"Evaluating agent '{agent.name}' with async streaming.")

         if not callable(agent_task):
@@ -177,7 +255,9 @@ class DeclarativeEvaluationComponent(
         try:
             for name, field in signature.output_fields.items():
                 if field.annotation is str:
-                    listeners.append(dspy.streaming.StreamListener(signature_field_name=name))
+                    listeners.append(
+                        dspy.streaming.StreamListener(signature_field_name=name)
+                    )
         except Exception:
             listeners = []

@@ -188,56 +268,148 @@ class DeclarativeEvaluationComponent(
         )
         stream_generator: Generator = streaming_task(**inputs)

-        console.print("\n")
+        from collections import defaultdict
+
+        from rich.live import Live
+
+        signature_order = []
+        try:
+            signature_order = list(signature.output_fields.keys())
+        except Exception:
+            signature_order = []
+
+        display_data: OrderedDict[str, Any] = OrderedDict()
+        for key in inputs:
+            display_data[key] = inputs[key]
+
+        for field_name in signature_order:
+            if field_name not in display_data:
+                display_data[field_name] = ""
+
+        stream_buffers: defaultdict[str, list[str]] = defaultdict(list)
+
+        formatter = theme_dict = styles = agent_label = None
+        live_cm = nullcontext()
+        overflow_mode = self.config.stream_vertical_overflow
+        initial_panel = None
+        if not self.config.no_output:
+            _ensure_live_crop_above()
+            (
+                formatter,
+                theme_dict,
+                styles,
+                agent_label,
+            ) = self._prepare_stream_formatter(agent)
+            initial_panel = formatter.format_result(
+                display_data, agent_label, theme_dict, styles
+            )
+            live_cm = Live(
+                initial_panel,
+                console=console,
+                refresh_per_second=400,
+                transient=False,
+                vertical_overflow=overflow_mode,
+            )
+
         final_result: dict[str, Any] | None = None
-        async for value in stream_generator:
-            # Handle DSPy streaming artifacts
-            try:
-                from dspy.streaming import StatusMessage, StreamResponse
-                from litellm import ModelResponseStream
-                import dspy as _d
-            except Exception:
-                StatusMessage = object # type: ignore
-                StreamResponse = object # type: ignore
-                ModelResponseStream = object # type: ignore
-                _d = None
-
-            if isinstance(value, StatusMessage):
-                # Optionally surface status to console
-                console.print(f"[status] {getattr(value, 'message', '')}")
-                continue
-            if isinstance(value, StreamResponse):
-                token = getattr(value, "token", None)
-                if token:
-                    console.print(token, end="")
-                continue
-            if isinstance(value, ModelResponseStream):
-                # Raw model chunk; print minimal content if available for debug
+
+        with live_cm as live:
+            def _refresh_panel() -> None:
+                if formatter is None or live is None:
+                    return
+                live.update(
+                    formatter.format_result(
+                        display_data, agent_label, theme_dict, styles
+                    )
+                )
+
+            async for value in stream_generator:
                 try:
-                    chunk = value
-                    text = chunk.choices[0].delta.content or ""
-                    if text:
-                        console.print(text, end="")
+                    import dspy as _d
+                    from dspy.streaming import StatusMessage, StreamResponse
+                    from litellm import ModelResponseStream
                 except Exception:
-                    pass
-                continue
-            if _d and isinstance(value, _d.Prediction):
-                # Final prediction
-                result_dict, cost, lm_history = self._process_result(value, inputs)
-                self._cost = cost
-                self._lm_history = lm_history
-                final_result = result_dict
-
-        console.print("\n")
+                    StatusMessage = object # type: ignore
+                    StreamResponse = object # type: ignore
+                    ModelResponseStream = object # type: ignore
+                    _d = None
+
+                if isinstance(value, StatusMessage):
+                    message = getattr(value, "message", "")
+                    if message and live is not None:
+                        live.console.log(f"[status] {message}")
+                    continue
+
+                if isinstance(value, StreamResponse):
+                    for callback in self.config.stream_callbacks or []:
+                        try:
+                            callback(value)
+                        except Exception as e:
+                            logger.warning(f"Stream callback error: {e}")
+                    token = getattr(value, "chunk", None)
+                    signature_field = getattr(value, "signature_field_name", None)
+                    if signature_field:
+                        if signature_field not in display_data:
+                            display_data[signature_field] = ""
+                        if token:
+                            stream_buffers[signature_field].append(str(token))
+                            display_data[signature_field] = "".join(
+                                stream_buffers[signature_field]
+                            )
+                        if formatter is not None:
+                            _refresh_panel()
+                    continue
+
+                if isinstance(value, ModelResponseStream):
+                    try:
+                        chunk = value
+                        text = chunk.choices[0].delta.content or ""
+                        if text and live is not None:
+                            live.console.log(text)
+                    except Exception:
+                        pass
+                    continue
+
+                if _d and isinstance(value, _d.Prediction):
+                    result_dict, cost, lm_history = self._process_result(
+                        value, inputs
+                    )
+                    self._cost = cost
+                    self._lm_history = lm_history
+                    final_result = result_dict
+
+                    if formatter is not None:
+                        ordered_final = OrderedDict()
+                        for key in inputs:
+                            if key in final_result:
+                                ordered_final[key] = final_result[key]
+                        for field_name in signature_order:
+                            if field_name in final_result:
+                                ordered_final[field_name] = final_result[field_name]
+                        for key, val in final_result.items():
+                            if key not in ordered_final:
+                                ordered_final[key] = val
+                        display_data.clear()
+                        display_data.update(ordered_final)
+                        _refresh_panel()
+
         if final_result is None:
             raise RuntimeError("Streaming did not yield a final prediction.")
-        final_result = self.filter_reasoning(
+
+        filtered_result = self.filter_reasoning(
             final_result, self.config.include_reasoning
         )
-        return self.filter_thought_process(
-            final_result, self.config.include_thought_process
+        filtered_result = self.filter_thought_process(
+            filtered_result, self.config.include_thought_process
         )

+        if not self.config.no_output:
+            context = getattr(agent, "context", None)
+            if context is not None:
+                context.state["_flock_stream_live_active"] = True
+
+        return filtered_result
+
     async def _execute_standard(self, agent_task, inputs: dict[str, Any], agent: Any) -> dict[str, Any]:
         """Execute DSPy program in standard mode (from original implementation)."""
         logger.info(f"Evaluating agent '{agent.name}' without streaming.")
@@ -261,6 +433,62 @@ class DeclarativeEvaluationComponent(
             )
             raise RuntimeError(f"Evaluation failed: {e}") from e

+    def _prepare_stream_formatter(
+        self, agent: Any
+    ) -> tuple[Any, dict[str, Any], dict[str, Any], str]:
+        """Build formatter + theme metadata for streaming tables."""
+        import pathlib
+
+        from flock.core.logging.formatters.themed_formatter import (
+            ThemedAgentResultFormatter,
+            create_pygments_syntax_theme,
+            get_default_styles,
+            load_syntax_theme_from_file,
+            load_theme_from_file,
+        )
+        from flock.core.logging.formatters.themes import OutputTheme
+
+        stream_theme = OutputTheme.afterglow
+        output_component = None
+        try:
+            output_component = agent.get_component("output_formatter")
+        except Exception:
+            output_component = None
+        if output_component and getattr(output_component, "config", None):
+            stream_theme = getattr(
+                output_component.config, "theme", stream_theme
+            )
+
+        formatter = ThemedAgentResultFormatter(theme=stream_theme)
+
+        themes_dir = pathlib.Path(__file__).resolve().parents[2] / "themes"
+        theme_filename = stream_theme.value
+        if not theme_filename.endswith(".toml"):
+            theme_filename = f"{theme_filename}.toml"
+        theme_path = themes_dir / theme_filename
+
+        try:
+            theme_dict = load_theme_from_file(theme_path)
+        except Exception:
+            fallback_path = themes_dir / "afterglow.toml"
+            theme_dict = load_theme_from_file(fallback_path)
+            theme_path = fallback_path
+
+        styles = get_default_styles(theme_dict)
+        formatter.styles = styles
+        try:
+            syntax_theme = load_syntax_theme_from_file(theme_path)
+            formatter.syntax_style = create_pygments_syntax_theme(syntax_theme)
+        except Exception:
+            formatter.syntax_style = None
+
+        model_label = getattr(agent, "model", None) or self.config.model or ""
+        agent_label = (
+            agent.name if not model_label else f"{agent.name} - {model_label}"
+        )
+
+        return formatter, theme_dict, styles, agent_label
+
     def filter_thought_process(
         self, result_dict: dict[str, Any], include_thought_process: bool
     ) -> dict[str, Any]:
@@ -146,11 +146,23 @@ class OutputUtilityComponent(UtilityComponent):
         """Format and display the output."""
         logger.debug("Formatting and displaying output")

+        streaming_live_handled = False
+        if context:
+            streaming_live_handled = bool(
+                context.get_variable("_flock_stream_live_active", False)
+            )
+            if streaming_live_handled:
+                context.state.pop("_flock_stream_live_active", None)
+
         # Determine if output should be suppressed
         is_silent = self.config.no_output or (
             context and context.get_variable(FLOCK_BATCH_SILENT_MODE, False)
         )

+        if streaming_live_handled:
+            logger.debug("Skipping static table because streaming rendered live output.")
+            return result
+
         if is_silent:
             logger.debug("Output suppressed (config or batch silent mode).")
             return result # Skip console output
@@ -8,7 +8,7 @@ and composes the standard components under the hood.
 from __future__ import annotations

 from collections.abc import Callable
-from typing import Any
+from typing import Any, Literal

 from flock.components.utility.metrics_utility_component import (
     MetricsUtilityComponent,
@@ -46,7 +46,9 @@ class DefaultAgent(FlockAgent):
         max_tokens: int | None = None,
         max_tool_calls: int = 0,
         max_retries: int = 2,
-        stream: bool = False,
+        stream: bool = True,
+        stream_callbacks: list[Callable[..., Any] | Any] | None = None,
+        stream_vertical_overflow: Literal["crop", "ellipsis", "crop_above", "visible"] = "crop_above",
         include_thought_process: bool = False,
         include_reasoning: bool = False,
         # Output utility parameters
@@ -84,6 +86,8 @@ class DefaultAgent(FlockAgent):
             max_tool_calls: Maximum number of tool calls per evaluation
             max_retries: Maximum retries for failed LLM calls
             stream: Whether to enable streaming responses
+            stream_callbacks: Optional callbacks invoked with each streaming chunk
+            stream_vertical_overflow: Rich Live overflow handling ('ellipsis', 'crop', 'crop_above', 'visible')
             include_thought_process: Include reasoning in output
             include_reasoning: Include detailed reasoning steps
             enable_rich_tables: Enable rich table formatting for output
@@ -119,7 +123,10 @@ class DefaultAgent(FlockAgent):
             temperature=temperature,
             max_tool_calls=max_tool_calls,
             max_retries=max_retries,
+            no_output=no_output,
             stream=stream,
+            stream_callbacks=stream_callbacks,
+            stream_vertical_overflow=stream_vertical_overflow,
             include_thought_process=include_thought_process,
             include_reasoning=include_reasoning,
         )
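Based only on how the evaluation component invokes `stream_callbacks` above (each callback receives the raw `dspy.streaming.StreamResponse`, and exceptions are caught and logged as warnings), a hypothetical callback could look like the sketch below; every other `DefaultAgent` argument is elided here:

    def log_chunk(event) -> None:
        # Attribute names mirror those the component reads: 'signature_field_name' and 'chunk'.
        field = getattr(event, "signature_field_name", None)
        text = getattr(event, "chunk", "")
        if field and text:
            print(f"[{field}] {text}", end="", flush=True)

    # e.g. DefaultAgent(..., stream=True, stream_callbacks=[log_chunk],
    #                   stream_vertical_overflow="crop_above")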
@@ -51,7 +51,7 @@ def init_console(clear_screen: bool = True, show_banner: bool = True, model: str
 │ ▒█▀▀▀ █░░ █▀▀█ █▀▀ █░█ │
 │ ▒█▀▀▀ █░░ █░░█ █░░ █▀▄ │
 │ ▒█░░░ ▀▀▀ ▀▀▀▀ ▀▀▀ ▀░▀ │
-╰━━━━━━━━v{__version__}━━━━━━━━╯
+╰━━━━━━━━v{__version__}━━━━━━━╯
     🦆 🐤 🐧 🐓
 """,
         justify="center",
@@ -63,11 +63,11 @@ def init_console(clear_screen: bool = True, show_banner: bool = True, model: str
     if show_banner:
         console.print(banner_text)
         console.print(
-            "[italic]'Magpie'[/] milestone - [bold]white duck GmbH[/] - [cyan]https://whiteduck.de[/]\n"
+            "[italic]'Kea'[/] milestone - [bold]white duck GmbH[/] - [cyan]https://whiteduck.de[/]\n"
         )

     if model:
-        console.print(f"[italic]Global Model:[/] {model}")
+        console.print(f"[italic]Global Model:[/] {model}\n")


 def display_banner_no_version():
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: flock-core
-Version: 0.5.0b19
+Version: 0.5.0b21
 Summary: Declarative LLM Orchestration at Scale
 Author-email: Andre Ratzenberger <andre.ratzenberger@whiteduck.de>
 License-File: LICENSE
@@ -27,7 +27,7 @@ flock/cli/yaml_editor.py,sha256=K3N0bh61G1TSDAZDnurqW9e_-hO6CtSQKXQqlDhCjVo,1252
 flock/cli/assets/release_notes.md,sha256=bqnk50jxM3w5uY44Dc7MkdT8XmRREFxrVBAG9XCOSSU,4896
 flock/components/__init__.py,sha256=qDcaP0O7_b5RlUEXluqwskpKCkhM73kSMeNXReze63M,963
 flock/components/evaluation/__init__.py,sha256=_M3UlRFeNN90fEny6byt5VdLDE5o5khbd0EPT0o9S9k,303
-flock/components/evaluation/declarative_evaluation_component.py,sha256=OXuJlH7TTQAy3upg3K68oSr4cS89XlSty2GeM3l6fDs,11606
+flock/components/evaluation/declarative_evaluation_component.py,sha256=a-RPCcpdXq66XFSPYite2OYwC_5Cnckf4VQK1ie1E-w,20269
 flock/components/routing/__init__.py,sha256=BH_pFm9T6bUuf8HH4byDJ0dO0fzEVHv9m-ghUdDVdm0,542
 flock/components/routing/conditional_routing_component.py,sha256=WqZLMz-0Dhfb97xvttNrJCIVe6FNMLEQ2m4KQTDpIbI,21374
 flock/components/routing/default_routing_component.py,sha256=ZHt2Kjf-GHB5n7evU5NSGeQJ1Wuims5soeMswqaUb1E,3370
@@ -35,7 +35,7 @@ flock/components/routing/llm_routing_component.py,sha256=SAaOFjlnhnenM6QEBn3WIpj
 flock/components/utility/__init__.py,sha256=JRj932upddjzZMWs1avOupEFr_GZNu21ac66Rhw_XgY,532
 flock/components/utility/memory_utility_component.py,sha256=26Io61bbCGjD8UQ4BltMA5RLkMXp8tQoQmddXbQSrzA,20183
 flock/components/utility/metrics_utility_component.py,sha256=Mck_sFCkfXvNpoSgW2N_WOLnjxazzx8jh79tIx5zJhw,24635
-flock/components/utility/output_utility_component.py,sha256=c4K_PL3bGqdyy_v6dnOrmTqV-MkWKAB2w0HS8kzg82k,7613
+flock/components/utility/output_utility_component.py,sha256=TdHhY5qJJDUk-_LK54zAFMSG_Zafe-UiEkwiJwPjfh0,8063
 flock/core/__init__.py,sha256=OkjsVjRkAB-I6ibeTKVikZ3MxLIcTIzWKphHTbzbr7s,3231
 flock/core/flock.py,sha256=wRycQlGeaq-Vd75mFpPe02qyWTOEyXthT873iBhA3TI,23388
 flock/core/flock_agent.py,sha256=4Vdhyk-rdsPEuN3xYBsLBBsfpklad6bNj_it9r6XIDc,12868
@@ -43,7 +43,7 @@ flock/core/flock_factory.py,sha256=Z6GJpYXN9_DXuOqvBH9ir0SMoUw78DkWhrhkm90luAQ,2
 flock/core/flock_scheduler.py,sha256=ng_s7gyijmc-AmYvBn5rtg61CSUZiIkXPRSlA1xO6VQ,8766
 flock/core/flock_server_manager.py,sha256=tM_nOs37vAbEvxmhwy_DL2JPvgFViWroNxrRSu5MfUQ,4523
 flock/core/agent/__init__.py,sha256=l32KFMJnC_gidMXpAXK8-OX228bWOhNc8OY_NzXm59Q,515
-flock/core/agent/default_agent.py,sha256=W5ewr4l4adjZjstcCvr6S8r2EnrmH0wFOVEtA8OQprI,6962
+flock/core/agent/default_agent.py,sha256=924SWDx8axJ57JCWREZuLzV8039Wt_-5WIBNTvx479Y,7483
 flock/core/agent/flock_agent_components.py,sha256=LamOgpRC7wDKuU3d6enDG0UFlNxyKPErLpH7SQ_Pi74,4539
 flock/core/agent/flock_agent_execution.py,sha256=pdOddBGv8y1P89Ix8XFWa1eW9i3bWjOYiQQxeY2K0yo,4217
 flock/core/agent/flock_agent_integration.py,sha256=fnxzEA8-gIopHwD1de8QKt2A7Ilb1iH5Koxk1uiASas,10737
@@ -125,7 +125,7 @@ flock/core/serialization/json_encoder.py,sha256=gAKj2zU_8wQiNvdkby2hksSA4fbPNwTj
 flock/core/serialization/secure_serializer.py,sha256=n5-zRvvXddgJv1FFHsaQ2wuYdL3WUSGPvG_LGaffEJo,6144
 flock/core/serialization/serializable.py,sha256=qlv8TsTqRuklXiNuCMrvro5VKz764xC2i3FlgLJSkdk,12129
 flock/core/serialization/serialization_utils.py,sha256=kxsuWy-8kFBcihHQvSOSNYp96ZPKxBMnasyRTtvIktY,15532
-flock/core/util/cli_helper.py,sha256=w8N7UJZOdOFhkcUSSusnL22JDlmJGgWmH0DgO--j-5c,50057
+flock/core/util/cli_helper.py,sha256=upRcEvWdGTrZvaKhi701PtFyW-Wp_B8PY3Gt0QY9szY,50053
 flock/core/util/file_path_utils.py,sha256=Odf7uU32C-x1KNighbNERSiMtkzW4h8laABIoFK7A5M,6246
 flock/core/util/hydrator.py,sha256=qRfVTDBEwqv1-ET2D4s5NI25f-UA_tGsoAmt5jaJMDI,10693
 flock/core/util/input_resolver.py,sha256=t3C98xz_-LGnDH0YeWQyV8yKZrls-_ekOYR-IKrAXDs,6232
@@ -551,8 +551,8 @@ flock/workflow/agent_execution_activity.py,sha256=0exwmeWKYXXxdUqDf4YaUVpn0zl06S
 flock/workflow/flock_workflow.py,sha256=sKFsRIL_bDGonXSNhK1zwu6UechghC_PihJJMidI-VI,9139
 flock/workflow/temporal_config.py,sha256=3_8O7SDEjMsSMXsWJBfnb6XTp0TFaz39uyzSlMTSF_I,3988
 flock/workflow/temporal_setup.py,sha256=KR6MlWOrpMtv8NyhaIPAsfl4tjobt81OBByQvg8Kw-Y,1948
-flock_core-0.5.0b19.dist-info/METADATA,sha256=IIENs1thHIvHmhkyhR3G62jimiMS7_ImCmVLv3AKAlQ,9997
-flock_core-0.5.0b19.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-flock_core-0.5.0b19.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
-flock_core-0.5.0b19.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
-flock_core-0.5.0b19.dist-info/RECORD,,
+flock_core-0.5.0b21.dist-info/METADATA,sha256=UV0JZGY5epos-_zJXKP6msrOE9M21KBkIlDRVvbXzvk,9997
+flock_core-0.5.0b21.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+flock_core-0.5.0b21.dist-info/entry_points.txt,sha256=rWaS5KSpkTmWySURGFZk6PhbJ87TmvcFQDi2uzjlagQ,37
+flock_core-0.5.0b21.dist-info/licenses/LICENSE,sha256=iYEqWy0wjULzM9GAERaybP4LBiPeu7Z1NEliLUdJKSc,1072
+flock_core-0.5.0b21.dist-info/RECORD,,