flowllm 0.1.2__py3-none-any.whl → 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. flowllm/__init__.py +8 -3
  2. flowllm/app.py +1 -1
  3. flowllm/config/base.yaml +75 -0
  4. flowllm/config/fin_supply.yaml +39 -0
  5. flowllm/config/pydantic_config_parser.py +16 -1
  6. flowllm/context/__init__.py +2 -0
  7. flowllm/context/base_context.py +10 -20
  8. flowllm/context/flow_context.py +45 -2
  9. flowllm/context/service_context.py +73 -12
  10. flowllm/embedding_model/openai_compatible_embedding_model.py +1 -2
  11. flowllm/enumeration/chunk_enum.py +1 -0
  12. flowllm/flow/__init__.py +9 -0
  13. flowllm/flow/base_flow.py +44 -11
  14. flowllm/flow/expression/__init__.py +1 -0
  15. flowllm/flow/{parser → expression}/expression_parser.py +5 -2
  16. flowllm/flow/expression/expression_tool_flow.py +25 -0
  17. flowllm/flow/gallery/__init__.py +1 -8
  18. flowllm/flow/gallery/mock_tool_flow.py +46 -33
  19. flowllm/flow/tool_op_flow.py +97 -0
  20. flowllm/llm/base_llm.py +0 -2
  21. flowllm/llm/litellm_llm.py +2 -1
  22. flowllm/op/__init__.py +3 -3
  23. flowllm/op/akshare/get_ak_a_code_op.py +1 -1
  24. flowllm/op/akshare/get_ak_a_info_op.py +1 -1
  25. flowllm/op/base_llm_op.py +3 -2
  26. flowllm/op/base_op.py +258 -25
  27. flowllm/op/base_tool_op.py +47 -0
  28. flowllm/op/gallery/__init__.py +0 -1
  29. flowllm/op/gallery/mock_op.py +13 -7
  30. flowllm/op/llm/__init__.py +3 -0
  31. flowllm/op/llm/react_llm_op.py +105 -0
  32. flowllm/op/{agent/react_prompt.yaml → llm/react_llm_prompt.yaml} +17 -10
  33. flowllm/op/llm/simple_llm_op.py +48 -0
  34. flowllm/op/llm/stream_llm_op.py +61 -0
  35. flowllm/op/mcp/__init__.py +2 -0
  36. flowllm/op/mcp/ant_op.py +42 -0
  37. flowllm/op/mcp/base_sse_mcp_op.py +28 -0
  38. flowllm/op/parallel_op.py +5 -1
  39. flowllm/op/search/__init__.py +1 -2
  40. flowllm/op/search/dashscope_search_op.py +73 -121
  41. flowllm/op/search/tavily_search_op.py +69 -80
  42. flowllm/op/sequential_op.py +4 -0
  43. flowllm/schema/flow_stream_chunk.py +11 -0
  44. flowllm/schema/message.py +2 -0
  45. flowllm/schema/service_config.py +8 -3
  46. flowllm/schema/tool_call.py +53 -4
  47. flowllm/service/__init__.py +0 -1
  48. flowllm/service/base_service.py +31 -14
  49. flowllm/service/http_service.py +46 -37
  50. flowllm/service/mcp_service.py +17 -23
  51. flowllm/storage/vector_store/__init__.py +1 -0
  52. flowllm/storage/vector_store/base_vector_store.py +99 -12
  53. flowllm/storage/vector_store/chroma_vector_store.py +250 -8
  54. flowllm/storage/vector_store/es_vector_store.py +291 -35
  55. flowllm/storage/vector_store/local_vector_store.py +206 -9
  56. flowllm/storage/vector_store/memory_vector_store.py +509 -0
  57. flowllm/utils/common_utils.py +54 -0
  58. flowllm/utils/logger_utils.py +28 -0
  59. flowllm/utils/miner_u_pdf_processor.py +726 -0
  60. {flowllm-0.1.2.dist-info → flowllm-0.1.5.dist-info}/METADATA +7 -6
  61. flowllm-0.1.5.dist-info/RECORD +98 -0
  62. flowllm/config/default.yaml +0 -77
  63. flowllm/config/empty.yaml +0 -37
  64. flowllm/flow/gallery/cmd_flow.py +0 -11
  65. flowllm/flow/gallery/code_tool_flow.py +0 -30
  66. flowllm/flow/gallery/dashscope_search_tool_flow.py +0 -34
  67. flowllm/flow/gallery/deepsearch_tool_flow.py +0 -39
  68. flowllm/flow/gallery/expression_tool_flow.py +0 -18
  69. flowllm/flow/gallery/tavily_search_tool_flow.py +0 -30
  70. flowllm/flow/gallery/terminate_tool_flow.py +0 -30
  71. flowllm/flow/parser/__init__.py +0 -0
  72. flowllm/op/agent/__init__.py +0 -0
  73. flowllm/op/agent/react_op.py +0 -83
  74. flowllm/op/base_ray_op.py +0 -313
  75. flowllm/op/code/__init__.py +0 -1
  76. flowllm/op/code/execute_code_op.py +0 -42
  77. flowllm/op/gallery/terminate_op.py +0 -29
  78. flowllm/op/search/dashscope_deep_research_op.py +0 -260
  79. flowllm/service/cmd_service.py +0 -15
  80. flowllm-0.1.2.dist-info/RECORD +0 -99
  81. {flowllm-0.1.2.dist-info → flowllm-0.1.5.dist-info}/WHEEL +0 -0
  82. {flowllm-0.1.2.dist-info → flowllm-0.1.5.dist-info}/entry_points.txt +0 -0
  83. {flowllm-0.1.2.dist-info → flowllm-0.1.5.dist-info}/licenses/LICENSE +0 -0
  84. {flowllm-0.1.2.dist-info → flowllm-0.1.5.dist-info}/top_level.txt +0 -0
flowllm/service/base_service.py
@@ -1,4 +1,4 @@
-from abc import abstractmethod, ABC
+from abc import ABC
 from typing import Dict, Optional

 from loguru import logger
@@ -6,16 +6,19 @@ from pydantic import create_model, Field

 from flowllm.config.pydantic_config_parser import PydanticConfigParser
 from flowllm.context.service_context import C
+from flowllm.flow.base_tool_flow import BaseToolFlow
 from flowllm.schema.flow_request import FlowRequest
 from flowllm.schema.service_config import ServiceConfig
 from flowllm.schema.tool_call import ParamAttrs
-from flowllm.utils.common_utils import snake_to_camel
+from flowllm.utils.common_utils import snake_to_camel, print_banner


 class BaseService(ABC):
     TYPE_MAPPING = {
         "str": str,
+        "string": str,
         "int": int,
+        "integer": int,
         "float": float,
         "bool": bool,
         "list": list,
@@ -24,10 +27,16 @@ class BaseService(ABC):

     def __init__(self, service_config: ServiceConfig):
         self.service_config = service_config
-
         self.mcp_config = self.service_config.mcp
         self.http_config = self.service_config.http
+
+    def __enter__(self):
+        C.prepare_sse_mcp()
         C.init_by_service_config(self.service_config)
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        C.stop_by_service_config()

     @classmethod
     def get_service(cls, *args, parser: type[PydanticConfigParser] = PydanticConfigParser) -> "BaseService":
@@ -49,20 +58,28 @@ class BaseService(ABC):

         return create_model(f"{snake_to_camel(flow_name)}Model", __base__=FlowRequest, **fields)

-    def integrate_tool_flow(self, tool_flow_name: str):
+    def integrate_flow(self, tool_flow: BaseToolFlow):
+        ...
+
+    def integrate_stream_flow(self, tool_flow: BaseToolFlow):
         ...

-    def integrate_tool_flows(self):
+    def integrate_flows(self):
         for tool_flow_name in C.tool_flow_names:
-            self.integrate_tool_flow(tool_flow_name)
-            logger.info(f"integrate flow_endpoint={tool_flow_name}")
+            tool_flow: BaseToolFlow = C.get_tool_flow(tool_flow_name)
+            if tool_flow.stream:
+                self.integrate_stream_flow(tool_flow)
+                logger.info(f"integrate stream_endpoint={tool_flow_name}")

-    def __enter__(self):
-        return self
+            else:
+                self.integrate_flow(tool_flow)
+                logger.info(f"integrate endpoint={tool_flow_name}")

-    def __exit__(self, exc_type, exc_val, exc_tb):
-        ...
+    def __call__(self, logo: str = ""):
+        self.integrate_flows()
+        if logo:
+            print_banner(name=logo, service_config=self.service_config, width=400)
+        self.execute()

-    @abstractmethod
-    def __call__(self):
-        ...
+    def execute(self):
+        ...
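
Note: with this change BaseService owns its lifecycle through the context-manager protocol (__enter__ prepares SSE MCP connections and initializes the service context, __exit__ tears it down), while __call__ becomes a small template method: integrate flows, optionally print a banner, then run execute(). A minimal launch sketch under those assumptions; the get_service() argument shown here is illustrative and not taken from this diff:

    from flowllm.service.http_service import HttpService

    # Illustrative CLI-style argument; get_service() feeds *args to PydanticConfigParser.
    service = HttpService.get_service("config=base")

    # __enter__: C.prepare_sse_mcp() + C.init_by_service_config(...)
    # __exit__:  C.stop_by_service_config(), even if execute() raises.
    with service:
        # integrate_flows() registers stream and non-stream endpoints,
        # print_banner() runs only when a logo name is passed,
        # and execute() blocks (uvicorn for HTTP, FastMCP for MCP).
        service(logo="FlowLLM")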
flowllm/service/http_service.py
@@ -1,14 +1,16 @@
 import asyncio
-from functools import partial
+from typing import AsyncGenerator

 import uvicorn
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import StreamingResponse
 from loguru import logger

 from flowllm.context.service_context import C
 from flowllm.flow.base_tool_flow import BaseToolFlow
 from flowllm.schema.flow_response import FlowResponse
+from flowllm.schema.flow_stream_chunk import FlowStreamChunk
 from flowllm.service.base_service import BaseService


@@ -18,60 +20,67 @@ class HttpService(BaseService):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         self.app = FastAPI(title="FlowLLM", description="HTTP API for FlowLLM")
+        self.app.add_middleware(CORSMiddleware,
+                                allow_origins=["*"],
+                                allow_credentials=True,
+                                allow_methods=["*"],
+                                allow_headers=["*"])

-        # Add CORS middleware
-        self.app.add_middleware(
-            CORSMiddleware,
-            allow_origins=["*"],
-            allow_credentials=True,
-            allow_methods=["*"],
-            allow_headers=["*"],
-        )
-
-        # Add health check endpoint
         self.app.get("/health")(self.health_check)

     @staticmethod
     def health_check():
         return {"status": "healthy"}

-    def integrate_tool_flow(self, tool_flow_name: str):
-        tool_flow: BaseToolFlow = C.get_tool_flow(tool_flow_name)
-        request_model = self._create_pydantic_model(tool_flow_name, tool_flow.tool_call.input_schema)
+    def integrate_flow(self, tool_flow: BaseToolFlow):
+        if "http" not in tool_flow.service_type:
+            return
+
+        request_model = self._create_pydantic_model(tool_flow.name, tool_flow.tool_call.input_schema)

         async def execute_endpoint(request: request_model) -> FlowResponse:
-            loop = asyncio.get_event_loop()
-            response: FlowResponse = await loop.run_in_executor(
-                executor=C.thread_pool,
-                func=partial(tool_flow.__call__, **request.model_dump()))  # noqa
+            return await tool_flow(**request.model_dump())

-            return response
+        self.app.post(f"/{tool_flow.name}", response_model=FlowResponse)(execute_endpoint)

-        endpoint_path = f"/{tool_flow.name}"
-        self.app.post(endpoint_path, response_model=FlowResponse)(execute_endpoint)
+    @staticmethod
+    def gen_stream_response(queue: asyncio.Queue):
+        async def generate_stream() -> AsyncGenerator[bytes, None]:
+            while True:
+                stream_chunk: FlowStreamChunk = await queue.get()
+                if stream_chunk.done:
+                    yield f"data:[DONE]\n\n".encode('utf-8')
+                    break
+                else:
+                    yield f"data:{stream_chunk.model_dump_json()}\n\n".encode("utf-8")

-    def integrate_tool_flows(self):
-        super().integrate_tool_flows()
+        return StreamingResponse(generate_stream(), media_type="text/event-stream")

-        async def execute_endpoint() -> list:
-            loop = asyncio.get_event_loop()
+    def integrate_stream_flow(self, tool_flow: BaseToolFlow):
+        if "http" not in tool_flow.service_type:
+            return

-            def list_tool_flows() -> list:
-                tool_flow_schemas = []
-                for name, tool_flow in C.tool_flow_dict.items():
-                    assert isinstance(tool_flow, BaseToolFlow)
-                    tool_flow_schemas.append(tool_flow.tool_call.simple_input_dump())
-                return tool_flow_schemas
+        request_model = self._create_pydantic_model(tool_flow.name, tool_flow.tool_call.input_schema)

-            return await loop.run_in_executor(executor=C.thread_pool, func=list_tool_flows)  # noqa
+        async def execute_stream_endpoint(request: request_model) -> StreamingResponse:
+            stream_queue = asyncio.Queue()
+            asyncio.create_task(tool_flow(stream_queue=stream_queue, **request.model_dump()))
+            return self.gen_stream_response(stream_queue)

-        endpoint_path = "/list"
-        self.app.get(endpoint_path, response_model=list)(execute_endpoint)
-        logger.info(f"integrate endpoint={endpoint_path}")
+        self.app.post(f"/{tool_flow.name}")(execute_stream_endpoint)
+
+    def integrate_flows(self):
+        super().integrate_flows()
+
+        async def execute_endpoint() -> list:
+            loop = asyncio.get_event_loop()
+            return await loop.run_in_executor(executor=C.thread_pool, func=C.list_flow_schemas)  # noqa

-    def __call__(self):
-        self.integrate_tool_flows()
+        endpoint_path = "list"
+        self.app.get(f"/{endpoint_path}", response_model=list)(execute_endpoint)
+        logger.info(f"integrate endpoint={endpoint_path}")

+    def execute(self):
         uvicorn.run(self.app,
                     host=self.http_config.host,
                     port=self.http_config.port,
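
Note: streaming flows are now exposed as POST endpoints returning text/event-stream; each FlowStreamChunk is serialized into a data: frame and the stream ends with a literal data:[DONE] frame. A minimal client sketch, assuming a stream-enabled flow mounted at /mock_stream_flow on the configured host/port (endpoint name and payload are illustrative), using httpx:

    import json

    import httpx

    url = "http://127.0.0.1:8001/mock_stream_flow"  # host/port come from the http config
    payload = {"query": "hello"}                    # fields depend on the flow's input schema

    with httpx.stream("POST", url, json=payload, timeout=None) as response:
        for line in response.iter_lines():
            if not line.startswith("data:"):
                continue                  # skip blank separator lines between SSE frames
            data = line[len("data:"):]
            if data == "[DONE]":
                break                     # server-side completion marker
            chunk = json.loads(data)      # FlowStreamChunk as JSON
            print(chunk)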
flowllm/service/mcp_service.py
@@ -1,12 +1,9 @@
-import asyncio
-from functools import partial
-
 from fastmcp import FastMCP
 from fastmcp.tools import FunctionTool

-from flowllm.context.service_context import C
-from flowllm.flow.base_tool_flow import BaseToolFlow
-from flowllm.service.base_service import BaseService
+from .base_service import BaseService
+from ..context.service_context import C
+from ..flow.base_tool_flow import BaseToolFlow


 @C.register_service("mcp")
@@ -16,32 +13,29 @@ class MCPService(BaseService):
         super().__init__(*args, **kwargs)
         self.mcp = FastMCP(name="FlowLLM")

-    def integrate_tool_flow(self, tool_flow_name: str):
-        tool_flow: BaseToolFlow = C.get_tool_flow(tool_flow_name)
-        request_model = self._create_pydantic_model(tool_flow_name, tool_flow.tool_call.input_schema)
+    def integrate_flow(self, tool_flow: BaseToolFlow):
+        if "mcp" not in tool_flow.service_type:
+            return
+
+        request_model = self._create_pydantic_model(tool_flow.name, tool_flow.tool_call.input_schema)

-        async def execute_flow_async(**kwargs) -> str:
-            request: request_model = request_model(**kwargs)
-            loop = asyncio.get_event_loop()
-            response = await loop.run_in_executor(
-                executor=C.thread_pool,
-                func=partial(tool_flow.__call__, **request.model_dump()))  # noqa
+        async def execute_tool(**kwargs) -> str:
+            response = await tool_flow(**request_model(**kwargs).model_dump())
             return response.answer

+        # tool_flow.tool_call.name
         tool = FunctionTool(name=tool_flow.name,  # noqa
                             description=tool_flow.tool_call.description,  # noqa
-                            fn=execute_flow_async,
+                            fn=execute_tool,
                             parameters=tool_flow.tool_call.input_schema)
         self.mcp.add_tool(tool)

-    def __call__(self):
-        self.integrate_tool_flows()
-
+    def execute(self):
         if self.mcp_config.transport == "sse":
-            self.mcp.run(transport="sse", host=self.mcp_config.host, port=self.mcp_config.port)
-        if self.mcp_config.transport == "http":
-            self.mcp.run(transport="http", host=self.mcp_config.host, port=self.mcp_config.port)
+            self.mcp.run(transport="sse", host=self.mcp_config.host, port=self.mcp_config.port, show_banner=False)
+        elif self.mcp_config.transport == "http":
+            self.mcp.run(transport="http", host=self.mcp_config.host, port=self.mcp_config.port, show_banner=False)
         elif self.mcp_config.transport == "stdio":
-            self.mcp.run(transport="stdio")
+            self.mcp.run(transport="stdio", show_banner=False)
         else:
             raise ValueError(f"unsupported mcp transport: {self.mcp_config.transport}")
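
Note: execute() now selects the transport from the MCP config and suppresses the FastMCP banner. A client-side sketch for the SSE transport, assuming a FastMCP 2.x client and a registered tool flow named mock_tool_flow (tool name, URL, and arguments are illustrative):

    import asyncio

    from fastmcp import Client


    async def main():
        # URL follows the configured mcp host/port; the /sse suffix applies to the sse transport.
        async with Client("http://127.0.0.1:8001/sse") as client:
            tools = await client.list_tools()      # tools registered via integrate_flow()
            print([tool.name for tool in tools])
            result = await client.call_tool("mock_tool_flow", {"query": "hello"})
            print(result)


    asyncio.run(main())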
flowllm/storage/vector_store/__init__.py
@@ -1,3 +1,4 @@
 from .chroma_vector_store import ChromaVectorStore
 from .es_vector_store import EsVectorStore
 from .local_vector_store import LocalVectorStore
+from .memory_vector_store import MemoryVectorStore
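
Note: the new in-memory backend is exported next to the existing stores, so all four can be imported from the subpackage root:

    from flowllm.storage.vector_store import (
        ChromaVectorStore,
        EsVectorStore,
        LocalVectorStore,
        MemoryVectorStore,  # new in 0.1.5
    )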
flowllm/storage/vector_store/base_vector_store.py
@@ -1,9 +1,12 @@
-from abc import ABC
+import asyncio
+from abc import ABC, abstractmethod
+from functools import partial
 from pathlib import Path
-from typing import List, Iterable
+from typing import List, Iterable, Dict, Any, Optional

 from pydantic import BaseModel, Field

+from flowllm.context.service_context import C
 from flowllm.embedding_model.base_embedding_model import BaseEmbeddingModel
 from flowllm.schema.vector_node import VectorNode

@@ -12,33 +15,117 @@ class BaseVectorStore(BaseModel, ABC):
     embedding_model: BaseEmbeddingModel | None = Field(default=None)
     batch_size: int = Field(default=1024)

+    @abstractmethod
     def exist_workspace(self, workspace_id: str, **kwargs) -> bool:
+        """Check if a workspace exists in the vector store."""
         raise NotImplementedError

-    def delete_workspace(self, workspace_id: str, **kwargs):
+    @abstractmethod
+    def delete_workspace(self, workspace_id: str, **kwargs) -> None:
+        """Delete a workspace from the vector store."""
         raise NotImplementedError

-    def create_workspace(self, workspace_id: str, **kwargs):
+    @abstractmethod
+    def create_workspace(self, workspace_id: str, **kwargs) -> None:
+        """Create a new workspace in the vector store."""
         raise NotImplementedError

-    def _iter_workspace_nodes(self, workspace_id: str, **kwargs) -> Iterable[VectorNode]:
+    @abstractmethod
+    def iter_workspace_nodes(self, workspace_id: str, callback_fn=None, **kwargs) -> Iterable[VectorNode]:
+        """Iterate over all nodes in a workspace."""
         raise NotImplementedError

-    def dump_workspace(self, workspace_id: str, path: str | Path = "", callback_fn=None, **kwargs):
+    @abstractmethod
+    def dump_workspace(self, workspace_id: str, path: str | Path = "", callback_fn=None, **kwargs) -> None:
+        """Dump workspace data to a file or path."""
         raise NotImplementedError

-    def load_workspace(self, workspace_id: str, path: str | Path = "", nodes: List[VectorNode] = None, callback_fn=None,
-                       **kwargs):
+    @abstractmethod
+    def load_workspace(self, workspace_id: str, path: str | Path = "", nodes: Optional[List[VectorNode]] = None,
+                       callback_fn=None, **kwargs) -> None:
+        """Load workspace data from a file or path, or from provided nodes."""
         raise NotImplementedError

-    def copy_workspace(self, src_workspace_id: str, dest_workspace_id: str, **kwargs):
+    @abstractmethod
+    def copy_workspace(self, src_workspace_id: str, dest_workspace_id: str, **kwargs) -> None:
+        """Copy one workspace to another."""
         raise NotImplementedError

-    def search(self, query: str, workspace_id: str, top_k: int = 1, **kwargs) -> List[VectorNode]:
+    @abstractmethod
+    def search(self, query: str, workspace_id: str, top_k: int = 1, filter_dict: Optional[Dict[str, Any]] = None,
+               **kwargs) -> List[VectorNode]:
+        """Search for similar vectors in the workspace."""
         raise NotImplementedError

-    def insert(self, nodes: VectorNode | List[VectorNode], workspace_id: str, **kwargs):
+    @abstractmethod
+    def insert(self, nodes: VectorNode | List[VectorNode], workspace_id: str, **kwargs) -> None:
+        """Insert nodes into the workspace."""
         raise NotImplementedError

-    def delete(self, node_ids: str | List[str], workspace_id: str, **kwargs):
+    @abstractmethod
+    def delete(self, node_ids: str | List[str], workspace_id: str, **kwargs) -> None:
+        """Delete nodes from the workspace by their IDs."""
         raise NotImplementedError
+
+    def close(self) -> None:
+        """Close the vector store and clean up resources. Default implementation does nothing."""
+        pass
+
+    """
+    Async versions of all methods
+    """
+
+    async def async_exist_workspace(self, workspace_id: str, **kwargs) -> bool:
+        """Async version of exist_workspace."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.exist_workspace, workspace_id, **kwargs))
+
+    async def async_delete_workspace(self, workspace_id: str, **kwargs) -> None:
+        """Async version of delete_workspace."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.delete_workspace, workspace_id, **kwargs))
+
+    async def async_create_workspace(self, workspace_id: str, **kwargs) -> None:
+        """Async version of create_workspace."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.create_workspace, workspace_id, **kwargs))
+
+    async def async_iter_workspace_nodes(self, workspace_id: str, callback_fn=None, **kwargs) -> Iterable[VectorNode]:
+        """Async version of iter_workspace_nodes. Returns an iterable, not an async iterator."""
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.iter_workspace_nodes, workspace_id,
+                                                                 callback_fn, **kwargs))
+
+    async def async_dump_workspace(self, workspace_id: str, path: str | Path = "", callback_fn=None, **kwargs):
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.dump_workspace, workspace_id, path,
+                                                                 callback_fn, **kwargs))
+
+    async def async_load_workspace(self, workspace_id: str, path: str | Path = "", nodes: List[VectorNode] = None,
+                                   callback_fn=None, **kwargs):
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.load_workspace, workspace_id, path, nodes,
+                                                                 callback_fn, **kwargs))
+
+    async def async_copy_workspace(self, src_workspace_id: str, dest_workspace_id: str, **kwargs):
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.copy_workspace, src_workspace_id,
+                                                                 dest_workspace_id, **kwargs))
+
+    async def async_search(self, query: str, workspace_id: str, top_k: int = 1, filter_dict: dict = None,
+                           **kwargs) -> List[VectorNode]:
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.search, query, workspace_id, top_k,
+                                                                 filter_dict, **kwargs))
+
+    async def async_insert(self, nodes: VectorNode | List[VectorNode], workspace_id: str, **kwargs):
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.insert, nodes, workspace_id, **kwargs))
+
+    async def async_delete(self, node_ids: str | List[str], workspace_id: str, **kwargs):
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, partial(self.delete, node_ids, workspace_id, **kwargs))
+
+    async def async_close(self):
+        loop = asyncio.get_event_loop()
+        return await loop.run_in_executor(C.thread_pool, self.close)
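
Note: every async_* helper follows the same pattern: wrap the synchronous method in functools.partial and submit it to the shared C.thread_pool via loop.run_in_executor, so concrete stores only implement the sync interface. A usage sketch under stated assumptions: the store constructor arguments are not shown in this diff (none are passed here, embedding_model wiring is omitted), and the async wrappers require the service context, and thus C.thread_pool, to be initialized, e.g. inside a running FlowLLM service:

    import asyncio

    from flowllm.storage.vector_store import LocalVectorStore


    async def demo():
        store = LocalVectorStore()  # illustrative; real configuration omitted

        # Each call below runs the matching sync method in C.thread_pool.
        if not await store.async_exist_workspace("demo_ws"):
            await store.async_create_workspace("demo_ws")

        nodes = await store.async_search("hello world", workspace_id="demo_ws", top_k=3)
        print(len(nodes))

        await store.async_close()


    # Typically awaited from service code where the context is already set up.
    asyncio.run(demo())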