flock-core 0.5.0b7__py3-none-any.whl → 0.5.0b9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of flock-core might be problematic.
- flock/components/evaluation/declarative_evaluation_component.py +24 -0
- flock/core/flock_factory.py +3 -1
- flock/webapp/app/api/execution.py +13 -2
- flock/webapp/app/services/sharing_store.py +152 -104
- flock_core-0.5.0b9.dist-info/METADATA +271 -0
- {flock_core-0.5.0b7.dist-info → flock_core-0.5.0b9.dist-info}/RECORD +9 -9
- flock_core-0.5.0b7.dist-info/METADATA +0 -635
- {flock_core-0.5.0b7.dist-info → flock_core-0.5.0b9.dist-info}/WHEEL +0 -0
- {flock_core-0.5.0b7.dist-info → flock_core-0.5.0b9.dist-info}/entry_points.txt +0 -0
- {flock_core-0.5.0b7.dist-info → flock_core-0.5.0b9.dist-info}/licenses/LICENSE +0 -0
flock/components/evaluation/declarative_evaluation_component.py
CHANGED

@@ -40,6 +40,10 @@ class DeclarativeEvaluationConfig(AgentComponentConfig):
         default=False,
         description="Include the thought process in the output.",
     )
+    include_reasoning: bool = Field(
+        default=False,
+        description="Include the reasoning in the output.",
+    )
     kwargs: dict[str, Any] = Field(default_factory=dict)


@@ -160,6 +164,9 @@ class DeclarativeEvaluationComponent(
         self._lm_history = lm_history

         console.print("\n")
+        result_dict = self.filter_reasoning(
+            result_dict, self.config.include_reasoning
+        )
         return self.filter_thought_process(
             result_dict, self.config.include_thought_process
         )

@@ -174,6 +181,9 @@ class DeclarativeEvaluationComponent(
         result_dict, cost, lm_history = self._process_result(result_obj, inputs)
         self._cost = cost
         self._lm_history = lm_history
+        result_dict = self.filter_reasoning(
+            result_dict, self.config.include_reasoning
+        )
         return self.filter_thought_process(
             result_dict, self.config.include_thought_process
         )

@@ -196,3 +206,17 @@ class DeclarativeEvaluationComponent(
             for k, v in result_dict.items()
             if not (k.startswith("reasoning") or k.startswith("trajectory"))
         }
+
+    def filter_reasoning(
+        self, result_dict: dict[str, Any], include_reasoning: bool
+    ) -> dict[str, Any]:
+        """Filter out reasoning from the result dictionary."""
+        if include_reasoning:
+            return result_dict
+        else:
+            return {
+                k: v
+                for k, v in result_dict.items()
+                if not (k.startswith("reasoning"))
+            }
+
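Note on the change above: filter_reasoning mirrors the existing filter_thought_process helper, so with the default include_reasoning=False any output keys beginning with "reasoning" are dropped before the evaluator returns its result. A minimal sketch of that behavior (the sample keys below are hypothetical, not taken from the package):

# Hypothetical result dict; key names are illustrative only.
result = {"answer": "42", "reasoning_trace": "step 1 ...", "confidence": 0.9}

# include_reasoning=False (the default): keys starting with "reasoning" are filtered out.
filtered = {k: v for k, v in result.items() if not k.startswith("reasoning")}
assert filtered == {"answer": "42", "confidence": 0.9}

# include_reasoning=True: the result dict is returned unchanged.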
flock/core/flock_factory.py
CHANGED

@@ -370,7 +370,7 @@ class FlockFactory:

         if not server_config:
             raise ValueError(
-
+                "Unable to create server configuration for passed params."
             )

         server = concrete_server_cls(config=server_config)

@@ -408,6 +408,7 @@ class FlockFactory:
         write_to_file: bool = False,
         stream: bool = False,
         include_thought_process: bool = False,
+        include_reasoning: bool = False,
         next_agent: DynamicStr | None = None,
         temporal_activity_config: TemporalActivityConfig | None = None,
     ) -> FlockAgent:

@@ -444,6 +445,7 @@ class FlockFactory:
             max_retries=max_retries,
             stream=stream,
             include_thought_process=include_thought_process,
+            include_reasoning=include_reasoning,
         )
         evaluator = DeclarativeEvaluationComponent(
             name="default_evaluator", config=eval_config
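The factory change simply threads the new flag into DeclarativeEvaluationConfig. A hedged sketch of opting in via the config directly, assuming the module path matches the file shown above and that the remaining config fields have usable defaults:

# Sketch only; not taken from the package docs.
from flock.components.evaluation.declarative_evaluation_component import (
    DeclarativeEvaluationComponent,
    DeclarativeEvaluationConfig,
)

eval_config = DeclarativeEvaluationConfig(
    include_thought_process=False,
    include_reasoning=True,  # new in 0.5.0b9; keeps "reasoning*" keys in the output
)
evaluator = DeclarativeEvaluationComponent(name="default_evaluator", config=eval_config)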
flock/webapp/app/api/execution.py
CHANGED

@@ -10,6 +10,7 @@ from fastapi import ( # Ensure Form and HTTPException are imported
     Form,
     Request,
 )
+from fastapi.encoders import jsonable_encoder
 from fastapi.responses import HTMLResponse
 from fastapi.templating import Jinja2Templates


@@ -151,7 +152,13 @@ async def htmx_run_flock(
         return HTMLResponse(f"<p class='error'>Error processing inputs for {start_agent_name}: {e_parse}</p>")

     result_data = await run_current_flock_service(start_agent_name, inputs, request.app.state)
-
+
+
+    raw_json_for_template = json.dumps(
+        jsonable_encoder(result_data),  # ← converts every nested BaseModel, datetime, etc.
+        indent=2,
+        ensure_ascii=False
+    )
     # Unescape newlines for proper display in HTML <pre> tag
     result_data_raw_json_str = raw_json_for_template.replace('\\n', '\n')
     root_path = request.scope.get("root_path", "")

@@ -215,7 +222,11 @@ async def htmx_run_shared_flock(

     shared_logger.info(f"HTMX Run Shared: Executing agent '{start_agent_name}' in pre-loaded Flock '{temp_flock.name}'. Inputs: {list(inputs.keys())}")
     result_data = await temp_flock.run_async(start_agent=start_agent_name, input=inputs, box_result=False)
-    raw_json_for_template = json.dumps(
+    raw_json_for_template = json.dumps(
+        jsonable_encoder(result_data),  # ← converts every nested BaseModel, datetime, etc.
+        indent=2,
+        ensure_ascii=False
+    )
     # Unescape newlines for proper display in HTML <pre> tag
     result_data_raw_json_str = raw_json_for_template.replace('\\n', '\n')
     shared_logger.info(f"HTMX Run Shared: Agent '{start_agent_name}' executed. Result keys: {list(result_data.keys()) if isinstance(result_data, dict) else 'N/A'}")
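Why jsonable_encoder: run results can contain nested Pydantic models, datetimes, and similar objects that json.dumps rejects; encoding them first converts everything to plain JSON-compatible types. A small self-contained illustration (the Step model is a made-up stand-in for real run output):

import json
from datetime import datetime

from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel

class Step(BaseModel):
    name: str
    at: datetime

result_data = {"steps": [Step(name="parse", at=datetime(2024, 1, 1))]}

# json.dumps(result_data, indent=2) would raise:
#   TypeError: Object of type Step is not JSON serializable
raw_json_for_template = json.dumps(
    jsonable_encoder(result_data),
    indent=2,
    ensure_ascii=False,
)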
flock/webapp/app/services/sharing_store.py
CHANGED

@@ -4,6 +4,7 @@ import logging
 import sqlite3
 from abc import ABC, abstractmethod
 from pathlib import Path
+from typing import Any

 import aiosqlite


@@ -228,135 +229,182 @@ class SQLiteSharedLinkStore(SharedLinkStoreInterface):
             logger.error(f"SQLite error saving feedback {record.feedback_id}: {e}", exc_info=True)
             raise

+
+# ---------------------------------------------------------------------------
+# Azure Table + Blob implementation
+# ---------------------------------------------------------------------------
+
+try:
+    from azure.storage.blob.aio import BlobServiceClient
+    AZURE_BLOB_AVAILABLE = True
+except ImportError:  # blob SDK not installed
+    AZURE_BLOB_AVAILABLE = False
+    BlobServiceClient = None
+
 class AzureTableSharedLinkStore(SharedLinkStoreInterface):
-    """Azure Table
+    """Store configs in Azure Table; store large flock YAML in Blob Storage."""
+
+    _TABLE_NAME = "flocksharedlinks"
+    _FEEDBACK_TBL_NAME = "flockfeedback"
+    _CONTAINER_NAME = "flocksharedlinkdefs"  # blobs live here
+    _PARTITION_KEY = "shared_links"

     def __init__(self, connection_string: str):
-        """Initialize Azure Table Storage store with connection string."""
         if not AZURE_AVAILABLE:
-            raise ImportError("
+            raise ImportError("pip install azure-data-tables")
+        if not AZURE_BLOB_AVAILABLE:
+            raise ImportError("pip install azure-storage-blob")

         self.connection_string = connection_string
-        self.
-        self.
-        self.feedback_table_name = "flockfeedback"
-        logger.info("AzureTableSharedLinkStore initialized")
+        self.table_svc = TableServiceClient.from_connection_string(connection_string)
+        self.blob_svc = BlobServiceClient.from_connection_string(connection_string)

+    # ------------------------------------------------------------------ init
     async def initialize(self) -> None:
-
+        # 1. Azure Tables ----------------------------------------------------
         try:
-
-
-
-
-        except ResourceExistsError:
-            logger.debug(f"Azure Table already exists: {self.shared_links_table_name}")
-
-        # Create feedback table
-        try:
-            await self.table_service_client.create_table(self.feedback_table_name)
-            logger.info(f"Created Azure Table: {self.feedback_table_name}")
-        except ResourceExistsError:
-            logger.debug(f"Azure Table already exists: {self.feedback_table_name}")
-
-            logger.info("Azure Table Storage initialized successfully")
-        except Exception as e:
-            logger.error(f"Error initializing Azure Table Storage: {e}", exc_info=True)
-            raise
+            await self.table_svc.create_table(self._TABLE_NAME)
+            logger.info("Created Azure Table '%s'", self._TABLE_NAME)
+        except ResourceExistsError:
+            logger.debug("Azure Table '%s' already exists", self._TABLE_NAME)

-    async def save_config(self, config: SharedLinkConfig) -> SharedLinkConfig:
-        """Saves a shared link configuration to Azure Table Storage."""
         try:
-
-
-
-
-                "RowKey": config.share_id,
-                "share_id": config.share_id,
-                "agent_name": config.agent_name,
-                "flock_definition": config.flock_definition,
-                "created_at": config.created_at.isoformat(),
-                "share_type": config.share_type,
-                "chat_message_key": config.chat_message_key,
-                "chat_history_key": config.chat_history_key,
-                "chat_response_key": config.chat_response_key,
-            }
-
-            await table_client.upsert_entity(entity)
-            logger.info(f"Saved shared link config to Azure Table Storage for ID: {config.share_id} with type: {config.share_type}")
-            return config
-        except Exception as e:
-            logger.error(f"Error saving config to Azure Table Storage for ID {config.share_id}: {e}", exc_info=True)
-            raise
+            await self.table_svc.create_table(self._FEEDBACK_TBL_NAME)
+            logger.info("Created Azure Table '%s'", self._FEEDBACK_TBL_NAME)
+        except ResourceExistsError:
+            logger.debug("Azure Table '%s' already exists", self._FEEDBACK_TBL_NAME)

+        # 2. Blob container --------------------------------------------------
+        try:
+            await self.blob_svc.create_container(self._CONTAINER_NAME)
+            logger.info("Created Blob container '%s'", self._CONTAINER_NAME)
+        except ResourceExistsError:
+            logger.debug("Blob container '%s' already exists", self._CONTAINER_NAME)
+
+    # ------------------------------------------------------------- save_config
+    async def save_config(self, config: SharedLinkConfig) -> SharedLinkConfig:
+        """Upload YAML to Blob, then upsert table row containing the blob name."""
+        blob_name = f"{config.share_id}.yaml"
+        blob_client = self.blob_svc.get_blob_client(self._CONTAINER_NAME, blob_name)
+
+        # 1. Upload flock_definition (overwrite in case of retry)
+        await blob_client.upload_blob(config.flock_definition,
+                                      overwrite=True,
+                                      content_type="text/yaml")
+        logger.debug("Uploaded blob '%s' (%d bytes)",
+                     blob_name, len(config.flock_definition.encode()))
+
+        # 2. Persist lightweight record in the table
+        tbl_client = self.table_svc.get_table_client(self._TABLE_NAME)
+        entity = {
+            "PartitionKey": self._PARTITION_KEY,
+            "RowKey": config.share_id,
+            "agent_name": config.agent_name,
+            "created_at": config.created_at.isoformat(),
+            "share_type": config.share_type,
+            "chat_message_key": config.chat_message_key,
+            "chat_history_key": config.chat_history_key,
+            "chat_response_key": config.chat_response_key,
+            # NEW – just a few bytes, well under 64 KiB
+            "flock_blob_name": blob_name,
+        }
+        await tbl_client.upsert_entity(entity)
+        logger.info("Saved shared link %s → blob '%s'", config.share_id, blob_name)
+        return config
+
+    # -------------------------------------------------------------- get_config
     async def get_config(self, share_id: str) -> SharedLinkConfig | None:
-
+        tbl_client = self.table_svc.get_table_client(self._TABLE_NAME)
         try:
-
-
-            entity = await table_client.get_entity(partition_key="shared_links", row_key=share_id)
-
-            logger.debug(f"Retrieved shared link config from Azure Table Storage for ID: {share_id}")
-            return SharedLinkConfig(
-                share_id=entity["share_id"],
-                agent_name=entity["agent_name"],
-                created_at=entity["created_at"],  # Pydantic will parse from ISO format
-                flock_definition=entity["flock_definition"],
-                share_type=entity.get("share_type", "agent_run"),
-                chat_message_key=entity.get("chat_message_key"),
-                chat_history_key=entity.get("chat_history_key"),
-                chat_response_key=entity.get("chat_response_key"),
-            )
+            entity = await tbl_client.get_entity(self._PARTITION_KEY, share_id)
         except ResourceNotFoundError:
-            logger.debug(
+            logger.debug("No config entity for id '%s'", share_id)
             return None
+
+        blob_name = entity["flock_blob_name"]
+        blob_client = self.blob_svc.get_blob_client(self._CONTAINER_NAME, blob_name)
+        try:
+            blob_bytes = await (await blob_client.download_blob()).readall()
+            flock_yaml = blob_bytes.decode()
         except Exception as e:
-            logger.error(
-
+            logger.error("Cannot download blob '%s' for share_id=%s: %s",
+                         blob_name, share_id, e, exc_info=True)
+            raise

+        return SharedLinkConfig(
+            share_id = share_id,
+            agent_name = entity["agent_name"],
+            created_at = entity["created_at"],
+            flock_definition = flock_yaml,
+            share_type = entity.get("share_type", "agent_run"),
+            chat_message_key = entity.get("chat_message_key"),
+            chat_history_key = entity.get("chat_history_key"),
+            chat_response_key = entity.get("chat_response_key"),
+        )
+
+    # ----------------------------------------------------------- delete_config
     async def delete_config(self, share_id: str) -> bool:
-
+        tbl_client = self.table_svc.get_table_client(self._TABLE_NAME)
         try:
-
-
-            await table_client.delete_entity(partition_key="shared_links", row_key=share_id)
-            logger.info(f"Deleted shared link config from Azure Table Storage for ID: {share_id}")
-            return True
+            entity = await tbl_client.get_entity(self._PARTITION_KEY, share_id)
         except ResourceNotFoundError:
-            logger.info(
-            return False
-        except Exception as e:
-            logger.error(f"Error deleting config from Azure Table Storage for ID {share_id}: {e}", exc_info=True)
+            logger.info("Delete: entity %s not found", share_id)
             return False

-
+        # 1. Remove blob (ignore missing blob)
+        blob_name = entity["flock_blob_name"]
+        blob_client = self.blob_svc.get_blob_client(self._CONTAINER_NAME, blob_name)
+        try:
+            await blob_client.delete_blob(delete_snapshots="include")
+            logger.debug("Deleted blob '%s'", blob_name)
+        except ResourceNotFoundError:
+            logger.warning("Blob '%s' already gone", blob_name)
+
+        # 2. Remove table row
+        await tbl_client.delete_entity(self._PARTITION_KEY, share_id)
+        logger.info("Deleted shared link %s and its blob", share_id)
+        return True

+    # -------------------------------------------------------- save_feedback --
     async def save_feedback(self, record: FeedbackRecord) -> FeedbackRecord:
-        """Persist a feedback record
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        """Persist a feedback record. If a flock_definition is present, upload it as a blob and
+        store only a reference in the table row to avoid oversized entities (64 KiB limit).
+        """
+        tbl_client = self.table_svc.get_table_client(self._FEEDBACK_TBL_NAME)
+
+        # Core entity fields (avoid dumping the full Pydantic model – too many columns / large value)
+        entity: dict[str, Any] = {
+            "PartitionKey": "feedback",
+            "RowKey": record.feedback_id,
+            "share_id": record.share_id,
+            "context_type": record.context_type,
+            "reason": record.reason,
+            "expected_response": record.expected_response,
+            "actual_response": record.actual_response,
+            "created_at": record.created_at.isoformat(),
+        }
+        if record.flock_name is not None:
+            entity["flock_name"] = record.flock_name
+        if record.agent_name is not None:
+            entity["agent_name"] = record.agent_name
+
+        # ------------------------------------------------------------------ YAML → Blob
+        if record.flock_definition:
+            blob_name = f"{record.feedback_id}.yaml"
+            blob_client = self.blob_svc.get_blob_client(self._CONTAINER_NAME, blob_name)
+            # Overwrite=true so repeated feedback_id uploads (shouldn't happen) won't error
+            await blob_client.upload_blob(record.flock_definition,
+                                          overwrite=True,
+                                          content_type="text/yaml")
+            entity["flock_blob_name"] = blob_name  # lightweight reference only
+
+        # ------------------------------------------------------------------ Table upsert
+        await tbl_client.upsert_entity(entity)
+        logger.info("Saved feedback %s%s",
+                    record.feedback_id,
+                    f" → blob '{entity['flock_blob_name']}'" if "flock_blob_name" in entity else "")
+        return record
+


 # ----------------------- Factory Function -----------------------
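The store rewrite moves the flock YAML out of the table entity and into Blob Storage because, as the new comments note, Azure Table properties are capped at 64 KiB, which a large flock definition can exceed; the table row now keeps only a flock_blob_name reference. A rough usage sketch of the new store follows; the import path is inferred from the file path above, and which SharedLinkConfig fields are required is an assumption:

import asyncio
import os
from datetime import datetime, timezone

# Assumed import locations; adjust if SharedLinkConfig lives in a different module.
from flock.webapp.app.services.sharing_store import (
    AzureTableSharedLinkStore,
    SharedLinkConfig,
)

async def main() -> None:
    store = AzureTableSharedLinkStore(os.environ["AZURE_STORAGE_CONNECTION_STRING"])
    await store.initialize()  # creates both tables and the blob container if missing

    config = SharedLinkConfig(
        share_id="demo-123",
        agent_name="my_agent",
        created_at=datetime.now(timezone.utc),
        flock_definition="agents:\n  - name: my_agent\n" * 5000,  # large YAML is fine: it lives in a blob
    )
    await store.save_config(config)              # YAML → blob; the table row stores flock_blob_name
    loaded = await store.get_config("demo-123")  # table row + downloaded blob stitched back together
    assert loaded is not None and loaded.flock_definition == config.flock_definition
    await store.delete_config("demo-123")        # deletes the blob, then the table row

asyncio.run(main())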