flock-core 0.4.0b50-py3-none-any.whl → 0.4.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of flock-core might be problematic.
- flock/adapter/__init__.py +14 -0
- flock/adapter/azure_adapter.py +68 -0
- flock/adapter/chroma_adapter.py +73 -0
- flock/adapter/faiss_adapter.py +97 -0
- flock/adapter/pinecone_adapter.py +51 -0
- flock/adapter/vector_base.py +47 -0
- flock/cli/constants.py +1 -1
- flock/config.py +1 -1
- flock/core/context/context.py +20 -0
- flock/core/flock.py +71 -91
- flock/core/flock_agent.py +58 -3
- flock/core/flock_module.py +5 -0
- flock/core/util/cli_helper.py +1 -1
- flock/di.py +41 -0
- flock/modules/enterprise_memory/README.md +99 -0
- flock/modules/enterprise_memory/enterprise_memory_module.py +526 -0
- flock/modules/mem0/mem0_module.py +79 -16
- flock/modules/mem0_async/async_mem0_module.py +126 -0
- flock/modules/memory/memory_module.py +28 -8
- flock/modules/performance/metrics_module.py +24 -1
- flock/modules/zep/__init__.py +1 -0
- flock/modules/zep/zep_module.py +192 -0
- flock/webapp/app/api/execution.py +79 -2
- flock/webapp/app/chat.py +83 -3
- flock/webapp/app/services/sharing_models.py +38 -0
- flock/webapp/app/services/sharing_store.py +60 -1
- flock/webapp/static/css/chat.css +2 -0
- flock/webapp/templates/partials/_chat_messages.html +50 -4
- flock/webapp/templates/partials/_results_display.html +39 -0
- {flock_core-0.4.0b50.dist-info → flock_core-0.4.2.dist-info}/METADATA +5 -7
- {flock_core-0.4.0b50.dist-info → flock_core-0.4.2.dist-info}/RECORD +35 -24
- flock/modules/mem0graph/mem0_graph_module.py +0 -63
- /flock/modules/{mem0graph → mem0_async}/__init__.py +0 -0
- {flock_core-0.4.0b50.dist-info → flock_core-0.4.2.dist-info}/WHEEL +0 -0
- {flock_core-0.4.0b50.dist-info → flock_core-0.4.2.dist-info}/entry_points.txt +0 -0
- {flock_core-0.4.0b50.dist-info → flock_core-0.4.2.dist-info}/licenses/LICENSE +0 -0
flock/adapter/__init__.py
ADDED
@@ -0,0 +1,14 @@
+from __future__ import annotations
+
+"""Adapter package for pluggable vector-store back-ends.
+
+Importing the package will NOT import heavy third-party clients by default –
+individual adapters are only loaded when referenced explicitly.
+"""
+
+from .vector_base import VectorAdapter, VectorHit
+
+__all__ = [
+    "VectorAdapter",
+    "VectorHit",
+]
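
Because the package only re-exports the abstract base, importing flock.adapter stays cheap even when none of the optional back-end clients (chromadb, faiss, pinecone-client, azure-search-documents) are installed. A minimal sketch of what this enables:

    from flock.adapter import VectorAdapter, VectorHit  # no heavy third-party imports triggered

    hit = VectorHit(id="doc-1", content="hello", metadata={}, score=0.92)
    print(hit.id, hit.score)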
flock/adapter/azure_adapter.py
ADDED
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from typing import Any
+
+from .vector_base import VectorAdapter, VectorHit
+
+
+class AzureSearchAdapter(VectorAdapter):
+    """Adapter for Azure Cognitive Search vector capabilities."""
+
+    def __init__(
+        self,
+        *,
+        endpoint: str,
+        key: str,
+        index_name: str,
+        embedding_field: str = "embedding",
+    ) -> None:
+        super().__init__()
+        try:
+            from azure.core.credentials import AzureKeyCredential
+            from azure.search.documents import SearchClient
+        except ImportError as e:
+            raise RuntimeError("azure-search-documents package is required for AzureSearchAdapter") from e
+
+        self._client = SearchClient(
+            endpoint=endpoint,
+            index_name=index_name,
+            credential=AzureKeyCredential(key),
+        )
+        self._embedding_field = embedding_field
+
+    # -----------------------------
+    def add(
+        self,
+        *,
+        id: str,
+        content: str,
+        embedding: list[float],
+        metadata: dict[str, Any] | None = None,
+    ) -> None:
+        document = {
+            "id": id,
+            "content": content,
+            self._embedding_field: embedding,
+            **(metadata or {}),
+        }
+        # Upload is sync but returns iterator; consume to check errors
+        list(self._client.upload_documents(documents=[document]))
+
+    def query(self, *, embedding: list[float], k: int) -> list[VectorHit]:
+        results = self._client.search(
+            search_text=None,
+            vector=embedding,
+            k=k,
+            vector_fields=self._embedding_field,
+        )
+        hits: list[VectorHit] = []
+        for doc in results:
+            hits.append(
+                VectorHit(
+                    id=doc["id"],
+                    content=doc.get("content"),
+                    metadata={k: v for k, v in doc.items() if k not in ("id", "content", self._embedding_field, "@search.score")},
+                    score=doc["@search.score"],
+                )
+            )
+        return hits
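
For orientation, a minimal usage sketch of the new adapter; the environment-variable names and the index name are illustrative assumptions, not taken from the diff:

    import os

    from flock.adapter.azure_adapter import AzureSearchAdapter

    adapter = AzureSearchAdapter(
        endpoint=os.environ["AZURE_SEARCH_ENDPOINT"],  # hypothetical env var
        key=os.environ["AZURE_SEARCH_KEY"],            # hypothetical env var
        index_name="flock-memories",                   # hypothetical index
    )
    # The embedding length must match the vector field configured on the index.
    adapter.add(id="doc-1", content="hello world", embedding=[0.1] * 1536)
    for hit in adapter.query(embedding=[0.1] * 1536, k=3):
        print(hit.id, hit.score)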
flock/adapter/chroma_adapter.py
ADDED
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any
+
+from .vector_base import VectorAdapter, VectorHit
+
+
+class ChromaAdapter(VectorAdapter):
+    """Adapter for Chroma vector DB (local or HTTP)."""
+
+    def __init__(
+        self,
+        *,
+        collection: str = "flock_memories",
+        host: str | None = None,
+        port: int = 8000,
+        path: str | None = "./vector_store",
+    ) -> None:
+        super().__init__()
+        try:
+            import chromadb
+            from chromadb.config import Settings
+        except ImportError as e:
+            raise RuntimeError("chromadb is required for ChromaAdapter") from e
+
+        if host:
+            client = chromadb.HttpClient(host=host, port=port)
+        else:
+            p = Path(path or "./vector_store")
+            p.mkdir(parents=True, exist_ok=True)
+            client = chromadb.PersistentClient(settings=Settings(path=str(p)))
+
+        self._collection = client.get_or_create_collection(collection)
+
+    # -------------------------------
+    # VectorAdapter implementation
+    # -------------------------------
+    def add(
+        self,
+        *,
+        id: str,
+        content: str,
+        embedding: list[float],
+        metadata: dict[str, Any] | None = None,
+    ) -> None:
+        self._collection.add(
+            ids=[id],
+            documents=[content],
+            embeddings=[embedding],
+            metadatas=[metadata or {}],
+        )
+
+    def query(self, *, embedding: list[float], k: int) -> list[VectorHit]:
+        res = self._collection.query(
+            query_embeddings=[embedding],
+            n_results=k,
+            include=["documents", "metadatas", "distances", "ids"],
+        )
+        hits: list[VectorHit] = []
+        if res and res["ids"]:
+            for idx in range(len(res["ids"][0])):
+                dist = res["distances"][0][idx]
+                score = 1 - dist  # Convert L2 → similarity
+                hits.append(
+                    VectorHit(
+                        id=res["ids"][0][idx],
+                        content=res["documents"][0][idx],
+                        metadata=res["metadatas"][0][idx],
+                        score=score,
+                    )
+                )
+        return hits
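
A minimal usage sketch; the collection name and server address are illustrative:

    from flock.adapter.chroma_adapter import ChromaAdapter

    # Local mode, persisted under ./vector_store by default:
    local = ChromaAdapter(collection="notes")

    # Or against a running Chroma server:
    remote = ChromaAdapter(collection="notes", host="localhost", port=8000)

    local.add(id="n1", content="first note", embedding=[0.0, 1.0, 0.0])
    print(local.query(embedding=[0.0, 1.0, 0.0], k=1)[0].content)

One caveat worth noting: the 1 - dist conversion can go negative for large L2 distances, so the resulting score is best treated as a relative ranking rather than a calibrated similarity.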
flock/adapter/faiss_adapter.py
ADDED
@@ -0,0 +1,97 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any
+
+import numpy as np
+
+from .vector_base import VectorAdapter, VectorHit
+
+
+class FAISSAdapter(VectorAdapter):
+    """Simple on-disk FAISS vector store.
+
+    Index is stored in `index_path` (flat L2). Metadata & content are kept in a
+    parallel JSONL file for quick prototyping; not optimised for massive scale.
+    """
+
+    def __init__(self, *, index_path: str = "./faiss.index") -> None:
+        super().__init__()
+        try:
+            import faiss  # type: ignore
+        except ImportError as e:
+            raise RuntimeError("faiss library is required for FAISSAdapter") from e
+
+        self._faiss = __import__("faiss")  # lazy alias
+        self._index_path = Path(index_path)
+        self._meta_path = self._index_path.with_suffix(".meta.jsonl")
+        self._metadata: dict[int, dict[str, Any]] = {}
+
+        if self._index_path.exists():
+            self._index = self._faiss.read_index(str(self._index_path))
+            # Load metadata
+            if self._meta_path.exists():
+                import json
+
+                with open(self._meta_path) as f:
+                    for line_no, line in enumerate(f):
+                        self._metadata[line_no] = json.loads(line)
+        else:
+            self._index = None  # created on first add
+
+    # -----------------------------
+    def _ensure_index(self, dim: int):
+        if self._index is None:
+            self._index = self._faiss.IndexFlatL2(dim)
+
+    def add(
+        self,
+        *,
+        id: str,
+        content: str,
+        embedding: list[float],
+        metadata: dict[str, Any] | None = None,
+    ) -> None:
+        import json
+
+        vec = np.array([embedding], dtype="float32")
+        self._ensure_index(vec.shape[1])
+        self._index.add(vec)
+        # Row id is current size - 1
+        row_id = self._index.ntotal - 1
+        self._metadata[row_id] = {
+            "id": id,
+            "content": content,
+            "metadata": metadata or {},
+        }
+        # Append metadata to file for persistence
+        self._meta_path.parent.mkdir(parents=True, exist_ok=True)
+        with open(self._meta_path, "a") as f:
+            f.write(json.dumps(self._metadata[row_id]) + "\n")
+        # Persist index lazily every 100 inserts
+        if row_id % 100 == 0:
+            self._faiss.write_index(self._index, str(self._index_path))
+
+    def query(self, *, embedding: list[float], k: int) -> list[VectorHit]:
+        if self._index is None or self._index.ntotal == 0:
+            return []
+        vec = np.array([embedding], dtype="float32")
+        distances, indices = self._index.search(vec, k)
+        hits: list[VectorHit] = []
+        for dist, idx in zip(distances[0], indices[0]):
+            if idx == -1:
+                continue
+            meta = self._metadata.get(idx, {})
+            hits.append(
+                VectorHit(
+                    id=meta.get("id", str(idx)),
+                    content=meta.get("content"),
+                    metadata=meta.get("metadata", {}),
+                    score=1 - float(dist),  # approximate similarity
+                )
+            )
+        return hits
+
+    def close(self) -> None:
+        if self._index is not None:
+            self._faiss.write_index(self._index, str(self._index_path))
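
A minimal usage sketch (paths illustrative). Since the index is only written to disk every 100 inserts, calling close() before shutdown matters for durability:

    from flock.adapter.faiss_adapter import FAISSAdapter

    adapter = FAISSAdapter(index_path="/tmp/demo.index")
    adapter.add(id="a", content="alpha", embedding=[1.0, 0.0])
    adapter.add(id="b", content="beta", embedding=[0.0, 1.0])
    print(adapter.query(embedding=[1.0, 0.0], k=1)[0].id)  # "a"
    adapter.close()  # flush the FAISS index to disk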
flock/adapter/pinecone_adapter.py
ADDED
@@ -0,0 +1,51 @@
+from __future__ import annotations
+
+from typing import Any
+
+from .vector_base import VectorAdapter, VectorHit
+
+
+class PineconeAdapter(VectorAdapter):
+    """Adapter for Pinecone vector DB."""
+
+    def __init__(
+        self,
+        *,
+        api_key: str,
+        environment: str,
+        index: str,
+    ) -> None:
+        super().__init__()
+        try:
+            import pinecone
+        except ImportError as e:
+            raise RuntimeError("pinecone-client is required for PineconeAdapter") from e
+
+        pinecone.init(api_key=api_key, environment=environment)
+        self._index = pinecone.Index(index)
+
+    # -------------------------------
+    def add(
+        self,
+        *,
+        id: str,
+        content: str,
+        embedding: list[float],
+        metadata: dict[str, Any] | None = None,
+    ) -> None:
+        meta = {"content": content, **(metadata or {})}
+        self._index.upsert(vectors=[(id, embedding, meta)])
+
+    def query(self, *, embedding: list[float], k: int) -> list[VectorHit]:
+        res = self._index.query(vector=embedding, top_k=k, include_values=False, include_metadata=True)
+        hits: list[VectorHit] = []
+        for match in res.matches or []:
+            hits.append(
+                VectorHit(
+                    id=match.id,
+                    content=match.metadata.get("content") if match.metadata else None,
+                    metadata={k: v for k, v in (match.metadata or {}).items() if k != "content"},
+                    score=match.score,
+                )
+            )
+        return hits
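
A minimal usage sketch; the environment name, index name, and env var are illustrative. Note that pinecone.init()/pinecone.Index() is the classic pinecone-client interface, so this adapter assumes a pre-v3 client:

    import os

    from flock.adapter.pinecone_adapter import PineconeAdapter

    adapter = PineconeAdapter(
        api_key=os.environ["PINECONE_API_KEY"],  # hypothetical env var
        environment="us-east-1-aws",             # illustrative environment
        index="flock-memories",                  # illustrative index name
    )
    adapter.add(id="doc-1", content="hello", embedding=[0.1] * 1536, metadata={"lang": "en"})
    for hit in adapter.query(embedding=[0.1] * 1536, k=5):
        print(hit.id, round(hit.score, 3), hit.metadata)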
flock/adapter/vector_base.py
ADDED
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from typing import Any
+
+
+@dataclass
+class VectorHit:
+    """Result object returned from vector search."""
+
+    id: str
+    content: str | None
+    metadata: dict[str, Any]
+    score: float  # similarity score (higher = more similar)
+
+
+class VectorAdapter(ABC):
+    """Protocol for vector-store adapters."""
+
+    def __init__(self, **kwargs):
+        """Store-specific kwargs are passed through subclass constructor."""
+        super().__init__()
+
+    # ----------------------
+    # CRUD operations
+    # ----------------------
+    @abstractmethod
+    def add(
+        self,
+        *,
+        id: str,
+        content: str,
+        embedding: list[float],
+        metadata: dict[str, Any] | None = None,
+    ) -> None:  # pragma: no cover – interface
+        """Insert or upsert a single document."""
+
+    @abstractmethod
+    def query(
+        self, *, embedding: list[float], k: int
+    ) -> list[VectorHit]:  # pragma: no cover – interface
+        """Return top-k most similar hits."""
+
+    def close(self) -> None:  # Optional override
+        """Free resources / flush buffers."""
+        return
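
The ABC keeps custom back-ends small: implement add() and query(), optionally override close(). An entirely illustrative in-memory adapter, handy for tests (not part of the package):

    import math

    from flock.adapter.vector_base import VectorAdapter, VectorHit


    class InMemoryAdapter(VectorAdapter):
        def __init__(self) -> None:
            super().__init__()
            self._rows: list[tuple[str, str, list[float], dict]] = []

        def add(self, *, id, content, embedding, metadata=None) -> None:
            self._rows.append((id, content, embedding, metadata or {}))

        def query(self, *, embedding, k):
            def cosine(a, b):
                dot = sum(x * y for x, y in zip(a, b))
                norm = math.sqrt(sum(x * x for x in a)) * math.sqrt(sum(y * y for y in b))
                return dot / norm if norm else 0.0

            hits = [
                VectorHit(id=i, content=c, metadata=m, score=cosine(embedding, e))
                for i, c, e, m in self._rows
            ]
            return sorted(hits, key=lambda h: h.score, reverse=True)[:k]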
flock/cli/constants.py
CHANGED
@@ -18,7 +18,7 @@ CLI_LOAD_FLOCK = "Load a *.flock file"
 CLI_THEME_BUILDER = "Theme builder"
 CLI_LOAD_EXAMPLE = "Load a example"
 CLI_SETTINGS = "Settings"
-CLI_NOTES = "'
+CLI_NOTES = "'Magpie' release notes"
 CLI_START_WEB_SERVER = "Start web server"
 CLI_REGISTRY_MANAGEMENT = "Registry management"
 CLI_EXIT = "Exit"
flock/config.py
CHANGED
@@ -22,7 +22,7 @@ GITHUB_USERNAME = config("GITHUB_USERNAME", "")
 # -- Debugging and Logging Configurations --
 LOCAL_DEBUG = config("LOCAL_DEBUG", True)
 LOG_LEVEL = config("LOG_LEVEL", "DEBUG")
-LOGGING_DIR = config("LOGGING_DIR", "logs")
+LOGGING_DIR = config("LOGGING_DIR", ".flock/logs")
 
 OTEL_SERVICE_NAME = config("OTL_SERVICE_NAME", "otel-flock")
 JAEGER_ENDPOINT = config(
flock/core/context/context.py
CHANGED
@@ -192,3 +192,23 @@ class FlockContext(Serializable, BaseModel):
 
         converted = convert(data)
         return cls(**converted)
+
+    def resolve(self, svc_type):
+        """Resolve a service from the request-scoped DI container if present.
+
+        The bootstrap code is expected to store the active `ServiceProvider` from
+        `wd.di` in the context variable key ``di.container``. This helper
+        provides a convenient façade so that Flock components can simply call
+        ``context.resolve(SomeType)`` regardless of whether a container is
+        available. When the container is missing or the service cannot be
+        resolved, ``None`` is returned instead of raising to keep backward
+        compatibility.
+        """
+        container = self.get_variable("di.container")
+        if container is None:
+            return None
+        try:
+            return container.get_service(svc_type)
+        except Exception:
+            # Service not registered or other resolution error – fall back to None
+            return None
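
In practice a component can then degrade gracefully when no container was bootstrapped; the EmbeddingService type and its default() fallback below are hypothetical, purely to show the shape:

    service = context.resolve(EmbeddingService)  # hypothetical service type
    if service is None:
        # No container stored under "di.container", or service not registered:
        # fall back to the legacy construction path.
        service = EmbeddingService.default()     # hypothetical fallback
    vector = service.embed("some text")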
flock/core/flock.py
CHANGED
@@ -4,10 +4,11 @@
 from __future__ import annotations  # Ensure forward references work
 
 import asyncio
-import
+import contextvars
 import os
 import uuid
-from collections.abc import Callable, Sequence
+from collections.abc import Awaitable, Callable, Sequence
+from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
 from typing import (
     TYPE_CHECKING,
@@ -16,6 +17,7 @@ from typing import (
     TypeVar,
 )
 
+_R = TypeVar("_R")
 # Third-party imports
 from box import Box
 from temporalio import workflow
@@ -142,6 +144,28 @@ class Flock(BaseModel, Serializable):
         "ignored_types": (type(FlockRegistry),),
     }
 
+    def _run_sync(self, coro: Awaitable[_R]) -> _R:
+        """Execute *coro* synchronously.
+
+        * If no loop is running → ``asyncio.run``.
+        * Otherwise run ``asyncio.run`` inside a fresh thread **with**
+          context-vars propagation.
+        """
+        try:
+            asyncio.get_running_loop()
+        except RuntimeError:  # no loop → simple
+            return asyncio.run(coro)
+
+        # A loop is already running – Jupyter / ASGI / etc.
+        ctx = contextvars.copy_context()  # propagate baggage
+        with ThreadPoolExecutor(max_workers=1) as pool:
+            future = pool.submit(ctx.run, asyncio.run, coro)
+            try:
+                return future.result()
+            finally:
+                if not future.done():
+                    future.cancel()
+
     def __init__(
         self,
         name: str | None = None,
@@ -356,39 +380,19 @@
         run_id: str = "",
         box_result: bool = True,
         agents: list[FlockAgent] | None = None,
+        memo: dict[str, Any] | None = None
     ) -> Box | dict:
-
-
-
-
-
-
-
-
-
-        )
-
-        try:
-            # Check if an event loop is already running in the current thread
-            loop = asyncio.get_running_loop()
-        except RuntimeError:
-            # No event loop is running in the current thread.
-            # We can safely use asyncio.run() to create a new loop,
-            # run the coroutine, and close the loop.
-            return asyncio.run(coro)
-        else:
-            # An event loop is already running in the current thread.
-            # Calling loop.run_until_complete() or asyncio.run() here would raise an error.
-            # To run the async code and wait for its result synchronously,
-            # we execute it in a separate thread with its own event loop.
-            logger.debug(
-                "Flock.run called in a context with an existing event loop. "
-                "Running async task in a separate thread to avoid event loop conflict."
+        return self._run_sync(
+            self.run_async(
+                start_agent=start_agent,
+                input=input,
+                context=context,
+                run_id=run_id,
+                box_result=box_result,
+                agents=agents,
+                memo=memo,
             )
-
-            future = executor.submit(asyncio.run, coro)
-            # Block and wait for the result from the other thread
-            return future.result()
+        )
 
 
     async def run_async(
@@ -602,35 +606,24 @@
         hide_columns: list[str] | None = None,
         delimiter: str = ",",
     ) -> list[Box | dict | None | Exception]:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return self._run_sync(
+            self.run_batch_async(
+                start_agent=start_agent,
+                batch_inputs=batch_inputs,
+                input_mapping=input_mapping,
+                static_inputs=static_inputs,
+                parallel=parallel,
+                max_workers=max_workers,
+                use_temporal=use_temporal,
+                box_results=box_results,
+                return_errors=return_errors,
+                silent_mode=silent_mode,
+                write_to_csv=write_to_csv,
+                hide_columns=hide_columns,
+                delimiter=delimiter,
+            )
         )
 
-        try:
-            loop = asyncio.get_running_loop()
-        except RuntimeError:
-            return asyncio.run(coro)
-        else:
-            logger.debug(
-                "Flock.run_batch called in a context with an existing event loop. "
-                "Running async task in a separate thread."
-            )
-            with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
-                future = executor.submit(asyncio.run, coro)
-                return future.result()
 
     # --- Evaluation (Delegation) ---
     async def evaluate_async(
@@ -704,38 +697,25 @@
         silent_mode: bool = False,
         metadata_columns: list[str] | None = None,
     ) -> DataFrame | list[dict[str, Any]]:  # type: ignore
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        )
-
-        try:
-            loop = asyncio.get_running_loop()
-        except RuntimeError:
-            return asyncio.run(coro)
-        else:
-            logger.debug(
-                "Flock.evaluate called in a context with an existing event loop. "
-                "Running async task in a separate thread."
+        return self._run_sync(
+            self.evaluate_async(
+                dataset=dataset,
+                start_agent=start_agent,
+                input_mapping=input_mapping,
+                answer_mapping=answer_mapping,
+                metrics=metrics,
+                metric_configs=metric_configs,
+                static_inputs=static_inputs,
+                parallel=parallel,
+                max_workers=max_workers,
+                use_temporal=use_temporal,
+                error_handling=error_handling,
+                output_file=output_file,
+                return_dataframe=return_dataframe,
+                silent_mode=silent_mode,
+                metadata_columns=metadata_columns,
             )
-
-            future = executor.submit(asyncio.run, coro)
-            return future.result()
-
+        )
     # --- Server & CLI Starters (Delegation) ---
     def start_api(
         self,
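
The upshot of _run_sync is that run(), run_batch() and evaluate() now share one sync-over-async code path that works both in plain scripts and inside environments that already own an event loop. A sketch, assuming an existing Flock instance with an agent named "idea_agent":

    # Plain script: no loop is running, so _run_sync falls through to asyncio.run().
    result = flock.run(start_agent="idea_agent", input={"topic": "robots"})

    # Jupyter cell / ASGI handler: a loop is already running, so the coroutine is
    # executed via asyncio.run() on a worker thread, with contextvars copied over
    # (calling asyncio.run() directly here would raise RuntimeError).
    result = flock.run(start_agent="idea_agent", input={"topic": "robots"})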
flock/core/flock_agent.py
CHANGED
@@ -335,9 +335,64 @@ class FlockAgent(BaseModel, Serializable, DSPyIntegrationMixin, ABC):
             # For now, assume evaluator handles tool resolution if necessary
             registered_tools = self.tools
 
-
-
-
+            # --------------------------------------------------
+            # Optional DI middleware pipeline
+            # --------------------------------------------------
+            container = None
+            if self.context is not None:
+                container = self.context.get_variable("di.container")
+
+            # If a MiddlewarePipeline is registered in DI, wrap the evaluator
+            result: dict[str, Any] | None = None
+
+            if container is not None:
+                try:
+                    from wd.di.middleware import (
+                        MiddlewarePipeline,
+                    )
+
+                    pipeline: MiddlewarePipeline | None = None
+                    try:
+                        pipeline = container.get_service(MiddlewarePipeline)
+                    except Exception:
+                        pipeline = None
+
+                    if pipeline is not None:
+                        # Build execution chain where the evaluator is the terminal handler
+
+                        async def _final_handler():
+                            return await self.evaluator.evaluate(
+                                self, current_inputs, registered_tools
+                            )
+
+                        idx = 0
+
+                        async def _invoke_next():
+                            nonlocal idx
+
+                            if idx < len(pipeline._middleware):
+                                mw = pipeline._middleware[idx]
+                                idx += 1
+                                return await mw(self.context, _invoke_next)  # type: ignore[arg-type]
+                            return await _final_handler()
+
+                        # Execute pipeline
+                        result = await _invoke_next()
+                    else:
+                        # No pipeline registered, direct evaluation
+                        result = await self.evaluator.evaluate(
+                            self, current_inputs, registered_tools
+                        )
+                except ImportError:
+                    # wd.di not installed – fall back
+                    result = await self.evaluator.evaluate(
+                        self, current_inputs, registered_tools
+                    )
+            else:
+                # No DI container – standard execution
+                result = await self.evaluator.evaluate(
+                    self, current_inputs, registered_tools
+                )
         except Exception as eval_error:
             logger.error(
                 "Error during evaluate",
|