flock-core 0.4.0b43__py3-none-any.whl → 0.4.0b44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of flock-core might be problematic; see the registry listing for more details.

Files changed (42)
  1. flock/core/api/__init__.py +1 -2
  2. flock/core/api/endpoints.py +149 -217
  3. flock/core/api/main.py +134 -653
  4. flock/core/api/service.py +214 -0
  5. flock/core/flock.py +192 -134
  6. flock/webapp/app/api/agent_management.py +135 -164
  7. flock/webapp/app/api/execution.py +76 -85
  8. flock/webapp/app/api/flock_management.py +60 -33
  9. flock/webapp/app/chat.py +233 -0
  10. flock/webapp/app/config.py +6 -3
  11. flock/webapp/app/dependencies.py +95 -0
  12. flock/webapp/app/main.py +320 -906
  13. flock/webapp/app/services/flock_service.py +183 -161
  14. flock/webapp/run.py +176 -100
  15. flock/webapp/static/css/chat.css +227 -0
  16. flock/webapp/static/css/components.css +167 -0
  17. flock/webapp/static/css/header.css +39 -0
  18. flock/webapp/static/css/layout.css +46 -0
  19. flock/webapp/static/css/sidebar.css +127 -0
  20. flock/webapp/templates/base.html +6 -1
  21. flock/webapp/templates/chat.html +60 -0
  22. flock/webapp/templates/chat_settings.html +20 -0
  23. flock/webapp/templates/flock_editor.html +1 -1
  24. flock/webapp/templates/partials/_agent_detail_form.html +4 -4
  25. flock/webapp/templates/partials/_agent_list.html +2 -2
  26. flock/webapp/templates/partials/_agent_manager_view.html +3 -4
  27. flock/webapp/templates/partials/_chat_container.html +9 -0
  28. flock/webapp/templates/partials/_chat_messages.html +13 -0
  29. flock/webapp/templates/partials/_chat_settings_form.html +65 -0
  30. flock/webapp/templates/partials/_execution_form.html +2 -2
  31. flock/webapp/templates/partials/_execution_view_container.html +1 -1
  32. flock/webapp/templates/partials/_flock_properties_form.html +2 -2
  33. flock/webapp/templates/partials/_registry_viewer_content.html +3 -3
  34. flock/webapp/templates/partials/_sidebar.html +17 -1
  35. flock/webapp/templates/registry_viewer.html +3 -3
  36. {flock_core-0.4.0b43.dist-info → flock_core-0.4.0b44.dist-info}/METADATA +1 -1
  37. {flock_core-0.4.0b43.dist-info → flock_core-0.4.0b44.dist-info}/RECORD +40 -29
  38. flock/webapp/static/css/custom.css +0 -612
  39. flock/webapp/templates/partials/_agent_manager_view_old.html +0 -19
  40. {flock_core-0.4.0b43.dist-info → flock_core-0.4.0b44.dist-info}/WHEEL +0 -0
  41. {flock_core-0.4.0b43.dist-info → flock_core-0.4.0b44.dist-info}/entry_points.txt +0 -0
  42. {flock_core-0.4.0b43.dist-info → flock_core-0.4.0b44.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,214 @@
1
+ # flock/core/api/service.py
2
+ from typing import TYPE_CHECKING, Any
3
+
4
+ if TYPE_CHECKING:
5
+ from flock.core.api.endpoints import FlockBatchRequest
6
+ from flock.core.api.run_store import RunStore
7
+ from flock.core.flock import Flock
8
+
9
+
10
+ from flock.core.logging.logging import get_logger
11
+
12
+ logger = get_logger("flock.api")
13
+
14
class FlockApiService:
    """Service layer that executes Flock runs and batches on behalf of the HTTP API.

    Holds a reference to the live ``Flock`` instance and the ``RunStore`` used to
    persist run/batch status, progress, and results. The ``_run_flock``,
    ``_run_batch``, and ``_type_convert_inputs`` helpers were moved here from the
    old ``FlockAPI`` class.
    """

    def __init__(self, flock_instance: "Flock", run_store_instance: "RunStore"):
        # The Flock whose agents this service executes.
        self.flock = flock_instance
        # Store tracking run/batch status, progress, and results.
        self.run_store = run_store_instance

    async def _run_flock(
        self, run_id: str, agent_name: str, inputs: dict[str, Any]
    ):
        """Execute a single flock workflow run (internal helper).

        Writes the result to ``run_store`` on success; on failure marks the run
        as ``"failed"`` and re-raises.

        Raises:
            ValueError: If ``agent_name`` is not a registered agent.
        """
        try:
            if agent_name not in self.flock.agents:
                raise ValueError(f"Starting agent '{agent_name}' not found")

            typed_inputs = self._type_convert_inputs(agent_name, inputs)

            logger.debug(
                f"Executing flock workflow starting with '{agent_name}' (run_id: {run_id})",
                inputs=typed_inputs,
            )
            # Flock.run_async handles context creation and execution.
            result = await self.flock.run_async(
                start_agent=agent_name, input=typed_inputs
            )
            self.run_store.update_run_result(run_id, result)

            # Result may not be a plain dict (e.g. a Box); guard the lookup.
            final_agent_name = (
                result.get("agent_name", "N/A") if isinstance(result, dict) else "N/A"
            )
            logger.info(
                f"Flock workflow completed (run_id: {run_id})",
                final_agent=final_agent_name,
            )
        except Exception as e:
            logger.error(
                f"Error in flock run {run_id} (started with '{agent_name}'): {e!s}",
                exc_info=True,
            )
            self.run_store.update_run_status(run_id, "failed", str(e))
            raise

    async def _run_batch(self, batch_id: str, request: "FlockBatchRequest"):
        """Execute a batch of runs (internal helper).

        The batch runs synchronously inside a dedicated thread with its own
        event loop so long batches do not block the API's main loop. Per-item
        progress and final results are written to ``run_store``.

        Raises:
            ValueError: If ``request.agent_name`` is not a registered agent.
        """
        try:
            if request.agent_name not in self.flock.agents:
                raise ValueError(f"Agent '{request.agent_name}' not found")

            logger.debug(
                f"Executing batch run starting with '{request.agent_name}' (batch_id: {batch_id})",
                batch_size=len(request.batch_inputs)
                if isinstance(request.batch_inputs, list)
                else "CSV/DataFrame",
            )

            # Local imports: only needed on the batch path.
            import asyncio
            import threading
            from concurrent.futures import ThreadPoolExecutor

            def run_batch_sync_in_thread():
                # Runs in a worker thread with its own event loop so the
                # (potentially long) batch never blocks the API event loop.
                loop = asyncio.new_event_loop()
                asyncio.set_event_loop(loop)
                try:
                    batch_size = (
                        len(request.batch_inputs)
                        if isinstance(request.batch_inputs, list)
                        else 0  # unknown until the DataFrame/CSV is loaded
                    )
                    if batch_size > 0:
                        self.run_store.set_batch_total_items(batch_id, batch_size)

                    class ProgressTracker:
                        """Thread-safe per-item counter pushed to the run store."""

                        def __init__(self, store, b_id, total_size):
                            self.store = store
                            self.batch_id = b_id
                            self.total_size = total_size
                            self.current_count = 0
                            self.partial_results = []
                            self._lock = threading.Lock()

                        def increment(self, res=None):
                            # Count one completed item and publish progress;
                            # store errors are logged, never propagated.
                            with self._lock:
                                self.current_count += 1
                                if res is not None:
                                    self.partial_results.append(res)
                                try:
                                    self.store.update_batch_progress(
                                        self.batch_id,
                                        self.current_count,
                                        self.partial_results,
                                    )
                                except Exception as e_prog:
                                    logger.error(f"Error updating progress: {e_prog}")
                                return self.current_count

                    progress_tracker = ProgressTracker(
                        self.run_store, batch_id, batch_size
                    )

                    async def progress_aware_worker(index, item_inputs):
                        # Run one batch item via Flock.run_async, recording progress.
                        try:
                            item_result = await self.flock.run_async(
                                start_agent=request.agent_name,
                                input=item_inputs,
                                box_result=request.box_results,
                            )
                            progress_tracker.increment(item_result)
                            return item_result
                        except Exception as item_err:
                            logger.error(f"Error processing batch item {index}: {item_err}")
                            progress_tracker.increment(
                                item_err if request.return_errors else None
                            )
                            if request.return_errors:
                                return item_err
                            return None

                    batch_inputs_list = request.batch_inputs
                    actual_results_list = []

                    if isinstance(batch_inputs_list, list):
                        # BUG FIX: the original built a `tasks` list of worker
                        # coroutines here that was never awaited in either
                        # sub-branch ("coroutine was never awaited" warnings).
                        # Merge static inputs once, then create coroutines only
                        # where they are actually awaited.
                        merged_inputs = [
                            {**(request.static_inputs or {}), **item}
                            for item in batch_inputs_list
                        ]
                        if request.parallel and request.max_workers > 1:
                            semaphore = asyncio.Semaphore(request.max_workers)

                            async def bounded_worker(idx, inputs_item):
                                # Cap concurrency at max_workers.
                                async with semaphore:
                                    return await progress_aware_worker(idx, inputs_item)

                            bounded_tasks = [
                                bounded_worker(i, item)
                                for i, item in enumerate(merged_inputs)
                            ]
                            actual_results_list = loop.run_until_complete(
                                asyncio.gather(
                                    *bounded_tasks,
                                    return_exceptions=request.return_errors,
                                )
                            )
                        else:
                            for i, full_inputs in enumerate(merged_inputs):
                                actual_results_list.append(
                                    loop.run_until_complete(
                                        progress_aware_worker(i, full_inputs)
                                    )
                                )
                    else:
                        # DataFrame/CSV path: delegate to Flock's batch processor.
                        # NOTE(review): it may be simpler to always convert
                        # DataFrame/CSV to a list of dicts before this point.
                        actual_results_list = loop.run_until_complete(
                            self.flock.run_batch_async(
                                start_agent=request.agent_name,
                                batch_inputs=request.batch_inputs,  # DataFrame or path
                                input_mapping=request.input_mapping,
                                static_inputs=request.static_inputs,
                                parallel=request.parallel,  # re-evaluated by internal BatchProcessor
                                max_workers=request.max_workers,
                                use_temporal=request.use_temporal,  # re-evaluated
                                box_results=request.box_results,
                                return_errors=request.return_errors,
                                silent_mode=True,  # internal batch runs silently for API
                                write_to_csv=None,  # API handles CSV output separately if needed
                            )
                        )
                        # No per-item tracking on this path; report totals once.
                        if actual_results_list:
                            self.run_store.set_batch_total_items(
                                batch_id, len(actual_results_list)
                            )
                            self.run_store.update_batch_progress(
                                batch_id, len(actual_results_list), actual_results_list
                            )

                    self.run_store.update_batch_result(batch_id, actual_results_list)
                    logger.info(
                        f"Batch run completed (batch_id: {batch_id})",
                        num_results=len(actual_results_list),
                    )
                    return actual_results_list
                except Exception as thread_err:
                    logger.error(
                        f"Error in batch run thread {batch_id}: {thread_err!s}",
                        exc_info=True,
                    )
                    self.run_store.update_batch_status(batch_id, "failed", str(thread_err))
                    return None
                finally:
                    loop.close()

            # Off-load the synchronous batch runner to a thread pool so the
            # caller's coroutine can await completion without blocking.
            main_loop = asyncio.get_running_loop()
            with ThreadPoolExecutor(thread_name_prefix="flock-api-batch") as pool:
                await main_loop.run_in_executor(pool, run_batch_sync_in_thread)

        except Exception as e:
            logger.error(
                f"Error setting up batch run {batch_id} (started with '{request.agent_name}'): {e!s}",
                exc_info=True,
            )
            self.run_store.update_batch_status(batch_id, "failed", str(e))
            raise

    def _type_convert_inputs(
        self, agent_name: str, inputs: dict[str, Any]
    ) -> dict[str, Any]:
        """Convert input values (esp. strings from HTML forms) to expected Python types.

        Returns ``inputs`` unchanged when the agent has no parseable string
        input spec or when no spec parser is available. Values that fail
        conversion are passed through unchanged, with a warning logged.
        """
        agent_def = self.flock.agents.get(agent_name)
        if not agent_def or not agent_def.input or not isinstance(agent_def.input, str):
            return inputs  # no spec, or spec is not a parseable string

        # BUG FIX: the original unconditionally called self._parse_input_spec,
        # but no such method is defined on this class (it lived on the old UI
        # helper) — a guaranteed AttributeError. Degrade gracefully instead.
        parse_spec = getattr(self, "_parse_input_spec", None)
        if parse_spec is None:
            return inputs
        parsed_fields = parse_spec(agent_def.input)
        field_types = {f["name"]: f["type"] for f in parsed_fields}

        typed_inputs: dict[str, Any] = {}
        for key, value in inputs.items():
            target_type_str = field_types.get(key)
            if target_type_str and target_type_str.startswith("bool"):
                # Form checkboxes arrive as strings like "on"/"true".
                typed_inputs[key] = (
                    str(value).lower() in ["true", "on", "1", "yes"]
                    if isinstance(value, str)
                    else bool(value)
                )
            elif target_type_str and target_type_str.startswith("int"):
                try:
                    typed_inputs[key] = int(value)
                except (ValueError, TypeError):
                    logger.warning(
                        f"Could not convert '{key}' value '{value}' to int for agent '{agent_name}'"
                    )
                    typed_inputs[key] = value
            elif target_type_str and target_type_str.startswith("float"):
                try:
                    typed_inputs[key] = float(value)
                except (ValueError, TypeError):
                    logger.warning(
                        f"Could not convert '{key}' value '{value}' to float for agent '{agent_name}'"
                    )
                    typed_inputs[key] = value
            else:
                # TODO: parse list/dict types (e.g. json.loads) for form inputs.
                typed_inputs[key] = value
        return typed_inputs