vectorvein 0.2.24__py3-none-any.whl → 0.2.26__py3-none-any.whl
This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- vectorvein/chat_clients/anthropic_client.py +62 -48
- vectorvein/workflow/graph/edge.py +6 -6
- vectorvein/workflow/graph/workflow.py +115 -2
- vectorvein/workflow/nodes/llms.py +97 -8
- {vectorvein-0.2.24.dist-info → vectorvein-0.2.26.dist-info}/METADATA +1 -1
- {vectorvein-0.2.24.dist-info → vectorvein-0.2.26.dist-info}/RECORD +8 -8
- {vectorvein-0.2.24.dist-info → vectorvein-0.2.26.dist-info}/WHEEL +0 -0
- {vectorvein-0.2.24.dist-info → vectorvein-0.2.26.dist-info}/entry_points.txt +0 -0
--- a/vectorvein/chat_clients/anthropic_client.py
+++ b/vectorvein/chat_clients/anthropic_client.py
@@ -35,6 +35,7 @@ from anthropic import (
     AsyncAnthropicBedrock,
 )
 from anthropic._types import NOT_GIVEN
+from anthropic._exceptions import APIStatusError as AnthropicAPIStatusError
 from anthropic.types import (
     TextBlock,
     ThinkingBlock,
@@ -52,6 +53,7 @@ from ..types import defaults as defs
 from .utils import cutoff_messages, get_message_token_counts
 from .base_client import BaseChatClient, BaseAsyncChatClient
 from .openai_compatible_client import OpenAICompatibleChatClient, AsyncOpenAICompatibleChatClient
+from ..types.exception import APIStatusError
 from ..types.enums import ContextLengthControlType, BackendType
 from ..types.llm_parameters import (
     Usage,
@@ -601,18 +603,21 @@ class AnthropicChatClient(BaseChatClient):
         self._acquire_rate_limit(self.endpoint, self.model, messages)
 
         if self.stream:
-            stream_response = raw_client.messages.create(
-                model=self.model_id,
-                messages=messages,
-                system=system_prompt,
-                stream=True,
-                temperature=self.temperature,
-                max_tokens=max_tokens,
-                tools=tools_params,
-                tool_choice=tool_choice_param,
-                top_p=top_p,
-                thinking=thinking,
-            )
+            try:
+                stream_response = raw_client.messages.create(
+                    model=self.model_id,
+                    messages=messages,
+                    system=system_prompt,
+                    stream=True,
+                    temperature=self.temperature,
+                    max_tokens=max_tokens,
+                    tools=tools_params,
+                    tool_choice=tool_choice_param,
+                    top_p=top_p,
+                    thinking=thinking,
+                )
+            except AnthropicAPIStatusError as e:
+                raise APIStatusError(message=e.message, response=e.response, body=e.body)
 
             def generator():
                 result = {"content": "", "reasoning_content": "", "usage": {}, "tool_calls": []}
@@ -675,18 +680,21 @@ class AnthropicChatClient(BaseChatClient):
 
             return generator()
         else:
-            response = raw_client.messages.create(
-                model=self.model_id,
-                messages=messages,
-                system=system_prompt,
-                stream=False,
-                temperature=self.temperature,
-                max_tokens=max_tokens,
-                tools=tools_params,
-                tool_choice=tool_choice_param,
-                top_p=top_p,
-                thinking=thinking,
-            )
+            try:
+                response = raw_client.messages.create(
+                    model=self.model_id,
+                    messages=messages,
+                    system=system_prompt,
+                    stream=False,
+                    temperature=self.temperature,
+                    max_tokens=max_tokens,
+                    tools=tools_params,
+                    tool_choice=tool_choice_param,
+                    top_p=top_p,
+                    thinking=thinking,
+                )
+            except AnthropicAPIStatusError as e:
+                raise APIStatusError(message=e.message, response=e.response, body=e.body)
 
             result = {
                 "content": "",
@@ -1140,18 +1148,21 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
         await self._acquire_rate_limit(self.endpoint, self.model, messages)
 
         if self.stream:
-            stream_response = await raw_client.messages.create(
-                model=self.model_id,
-                messages=messages,
-                system=system_prompt,
-                stream=True,
-                temperature=self.temperature,
-                max_tokens=max_tokens,
-                tools=tools_params,
-                tool_choice=tool_choice_param,
-                top_p=top_p,
-                thinking=thinking,
-            )
+            try:
+                stream_response = await raw_client.messages.create(
+                    model=self.model_id,
+                    messages=messages,
+                    system=system_prompt,
+                    stream=True,
+                    temperature=self.temperature,
+                    max_tokens=max_tokens,
+                    tools=tools_params,
+                    tool_choice=tool_choice_param,
+                    top_p=top_p,
+                    thinking=thinking,
+                )
+            except AnthropicAPIStatusError as e:
+                raise APIStatusError(message=e.message, response=e.response, body=e.body)
 
             async def generator():
                 result = {"content": "", "reasoning_content": "", "usage": {}, "tool_calls": []}
@@ -1214,18 +1225,21 @@ class AsyncAnthropicChatClient(BaseAsyncChatClient):
 
             return generator()
         else:
-            response = await raw_client.messages.create(
-                model=self.model_id,
-                messages=messages,
-                system=system_prompt,
-                stream=False,
-                temperature=self.temperature,
-                max_tokens=max_tokens,
-                tools=tools_params,
-                tool_choice=tool_choice_param,
-                top_p=top_p,
-                thinking=thinking,
-            )
+            try:
+                response = await raw_client.messages.create(
+                    model=self.model_id,
+                    messages=messages,
+                    system=system_prompt,
+                    stream=False,
+                    temperature=self.temperature,
+                    max_tokens=max_tokens,
+                    tools=tools_params,
+                    tool_choice=tool_choice_param,
+                    top_p=top_p,
+                    thinking=thinking,
+                )
+            except AnthropicAPIStatusError as e:
+                raise APIStatusError(message=e.message, response=e.response, body=e.body)
 
             result = {
                 "content": "",
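Both the synchronous and asynchronous clients now convert the Anthropic SDK's `APIStatusError` into vectorvein's own `APIStatusError`, so callers can handle provider HTTP failures with a single exception type instead of importing `anthropic` directly. A minimal sketch of what that looks like from the caller's side; the helper below and the `create_completion` method name are illustrative assumptions, since the client's public entry point is not shown in this diff:

```python
from typing import Any

from vectorvein.types.exception import APIStatusError


def safe_completion(client: Any, messages: list) -> Any:
    """Call a chat client and tolerate provider-side HTTP errors.

    `client` is assumed to be an already-configured AnthropicChatClient and
    `create_completion` an assumed method name; neither is defined by this diff.
    """
    try:
        return client.create_completion(messages=messages)
    except APIStatusError as exc:
        # As of 0.2.26 the Anthropic client re-raises anthropic.APIStatusError
        # as this unified type on all four paths (sync/async, stream/non-stream).
        print(f"LLM request failed: {exc}")
        return None
```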
--- a/vectorvein/workflow/graph/edge.py
+++ b/vectorvein/workflow/graph/edge.py
@@ -6,17 +6,17 @@ class Edge:
         self,
         id: str,
         source: str,
-
+        source_handle: str,
         target: str,
-
+        target_handle: str,
         animated: bool = True,
         type: str = "default",
     ) -> None:
         self.id: str = id
         self.source: str = source
-        self.
+        self.source_handle: str = source_handle
         self.target: str = target
-        self.
+        self.target_handle: str = target_handle
         self.animated: bool = animated
         self.type: str = type
         self.style: Dict[str, Union[str, int]] = {"stroke": "#28c5e5", "strokeWidth": 3}
@@ -25,9 +25,9 @@ class Edge:
         return {
             "id": self.id,
             "source": self.source,
-            "sourceHandle": self.
+            "sourceHandle": self.source_handle,
             "target": self.target,
-            "targetHandle": self.
+            "targetHandle": self.target_handle,
             "animated": self.animated,
             "type": self.type,
             "style": self.style,
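The move to `source_handle` / `target_handle` is internal to the Python API; `to_dict()` still emits the camelCase keys the vueflow front end expects. A small sketch based only on the constructor and `to_dict()` shown above:

```python
from vectorvein.workflow.graph.edge import Edge

# Positional order follows the new signature:
# id, source, source_handle, target, target_handle
edge = Edge("edge-1", "node_a", "output", "node_b", "input")

assert edge.source_handle == "output"              # snake_case attribute
assert edge.to_dict()["sourceHandle"] == "output"  # camelCase key in the dict
print(edge.to_dict())
```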
--- a/vectorvein/workflow/graph/workflow.py
+++ b/vectorvein/workflow/graph/workflow.py
@@ -1,10 +1,17 @@
 import json
-from typing import List, Union
+from typing import List, Union, TypedDict
 
 from .node import Node
 from .edge import Edge
 
 
+class WorkflowCheckResult(TypedDict):
+    """Workflow check result type."""
+
+    no_cycle: bool  # whether the workflow is free of cycles
+    no_isolated_nodes: bool  # whether the workflow is free of isolated nodes
+
+
 class Workflow:
     def __init__(self) -> None:
         self.nodes: List[Node] = []
@@ -58,6 +65,13 @@ class Workflow:
         if not target_node_obj.has_input_port(target_port):
             raise ValueError(f"Target node {target_node_id} has no input port: {target_port}")
 
+        # Check whether the target port already has an incoming connection
+        for edge in self.edges:
+            if edge.target == target_node_id and edge.target_handle == target_port:
+                raise ValueError(
+                    f"Input port {target_port} of target node {target_node_id} is already connected: {edge.source}({edge.source_handle}) → {edge.target}({edge.target_handle})"
+                )
+
         # Create and add the edge
         edge_id = f"vueflow__edge-{source_node_id}{source_port}-{target_node_id}{target_port}"
         edge = Edge(edge_id, source_node_id, source_port, target_node_id, target_port)
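The connection method shown above (its name and full signature are not included in this hunk) now raises `ValueError` when a second edge targets an input port that already has an incoming edge. A hedged sketch of how calling code might absorb that, assuming the usual `connect(source_node_id, source_port, target_node_id, target_port)` calling convention:

```python
from vectorvein.workflow.graph.workflow import Workflow


def connect_once(workflow: Workflow, source_id: str, source_port: str,
                 target_id: str, target_port: str) -> bool:
    """Add an edge, tolerating the new "port already connected" guard."""
    try:
        # `connect` and its argument order are assumptions based on the hunk above.
        workflow.connect(source_id, source_port, target_id, target_port)
        return True
    except ValueError as exc:
        # Raised for a missing port or, as of this version, for a target input
        # port that is already occupied by another edge.
        print(f"connect skipped: {exc}")
        return False
```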
@@ -105,7 +119,106 @@ class Workflow:
         for edge in self.edges:
             source_label = node_id_to_label[edge.source]
             target_label = node_id_to_label[edge.target]
-            label = f"{edge.
+            label = f"{edge.source_handle} → {edge.target_handle}"
             lines.append(f"    {source_label} -->|{label}| {target_label}")
 
         return "\n".join(lines)
+
+    def _check_dag(self) -> WorkflowCheckResult:
+        """Check that the graph is a directed acyclic graph and detect isolated nodes.
+
+        Returns:
+            WorkflowCheckResult: a dict with the check results
+                - no_cycle (bool): True if the graph is a directed acyclic graph, otherwise False
+                - no_isolated_nodes (bool): True if there are no isolated nodes, otherwise False
+        """
+        result: WorkflowCheckResult = {"no_cycle": True, "no_isolated_nodes": True}
+
+        # Filter out trigger nodes and assisted nodes
+        trigger_nodes = [
+            node.id
+            for node in self.nodes
+            if hasattr(node, "category") and (node.category == "triggers" or node.category == "assistedNodes")
+        ]
+
+        # Collect the nodes and edges that need to be checked
+        regular_nodes = [node.id for node in self.nodes if node.id not in trigger_nodes]
+        regular_edges = [
+            edge for edge in self.edges if edge.source not in trigger_nodes and edge.target not in trigger_nodes
+        ]
+
+        # ---------- Check whether the directed graph contains a cycle ----------
+        # Build the adjacency list
+        adjacency = {node_id: [] for node_id in regular_nodes}
+        for edge in regular_edges:
+            if edge.source in adjacency:  # make sure the node is in the dict
+                adjacency[edge.source].append(edge.target)
+
+        # Three states: 0 = unvisited, 1 = being visited, 2 = fully visited
+        visited = {node_id: 0 for node_id in regular_nodes}
+
+        def dfs_cycle_detection(node_id):
+            # If the node is currently being visited, a cycle has been found
+            if visited[node_id] == 1:
+                return False
+
+            # If the node has already been fully visited, there is no need to visit it again
+            if visited[node_id] == 2:
+                return True
+
+            # Mark as being visited
+            visited[node_id] = 1
+
+            # Visit all neighbors
+            for neighbor in adjacency[node_id]:
+                if neighbor in visited and not dfs_cycle_detection(neighbor):
+                    return False
+
+            # Mark as fully visited
+            visited[node_id] = 2
+            return True
+
+        # Run DFS cycle detection from every unvisited node
+        for node_id in regular_nodes:
+            if visited[node_id] == 0:
+                if not dfs_cycle_detection(node_id):
+                    result["no_cycle"] = False
+                    break
+
+        # ---------- Check whether there are isolated nodes ----------
+        # Build an undirected adjacency list
+        undirected_adjacency = {node_id: [] for node_id in regular_nodes}
+        for edge in regular_edges:
+            if edge.source in undirected_adjacency and edge.target in undirected_adjacency:
+                undirected_adjacency[edge.source].append(edge.target)
+                undirected_adjacency[edge.target].append(edge.source)
+
+        # Depth-first search to detect connected components
+        undirected_visited = set()
+
+        def dfs_connected_components(node_id):
+            undirected_visited.add(node_id)
+            for neighbor in undirected_adjacency[node_id]:
+                if neighbor not in undirected_visited:
+                    dfs_connected_components(neighbor)
+
+        # Count the connected components
+        connected_components_count = 0
+        for node_id in regular_nodes:
+            if node_id not in undirected_visited:
+                connected_components_count += 1
+                dfs_connected_components(node_id)
+
+        # More than one connected component means there are isolated nodes
+        if connected_components_count > 1 and len(regular_nodes) > 0:
+            result["no_isolated_nodes"] = False
+
+        return result
+
+    def check(self) -> WorkflowCheckResult:
+        """Check the validity of the workflow graph.
+
+        Returns:
+            WorkflowCheckResult: a dict with the various check results
+        """
+        return self._check_dag()
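`check()` returns the new `WorkflowCheckResult` dict, so callers can gate execution on both flags. A small usage sketch (building the nodes themselves is outside this diff):

```python
from vectorvein.workflow.graph.workflow import Workflow, WorkflowCheckResult


def assert_workflow_is_valid(workflow: Workflow) -> WorkflowCheckResult:
    """Run the structural checks added in this version and fail loudly."""
    result = workflow.check()  # {"no_cycle": bool, "no_isolated_nodes": bool}
    if not result["no_cycle"]:
        raise ValueError("workflow contains a cycle")
    if not result["no_isolated_nodes"]:
        raise ValueError("workflow has isolated nodes (disconnected components)")
    return result
```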
--- a/vectorvein/workflow/nodes/llms.py
+++ b/vectorvein/workflow/nodes/llms.py
@@ -292,6 +292,8 @@ class Claude(Node):
                     port_type=PortType.SELECT,
                     value="claude-3-5-haiku",
                     options=[
+                        {"value": "claude-3-7-sonnet-thinking", "label": "claude-3-7-sonnet-thinking"},
+                        {"value": "claude-3-7-sonnet", "label": "claude-3-7-sonnet"},
                         {"value": "claude-3-5-sonnet", "label": "claude-3-5-sonnet"},
                         {"value": "claude-3-5-haiku", "label": "claude-3-5-haiku"},
                         {"value": "claude-3-opus", "label": "claude-3-opus"},
@@ -338,8 +340,8 @@ class Deepseek(Node):
                     value="deepseek-chat",
                     options=[
                         {"value": "deepseek-chat", "label": "deepseek-chat"},
-                        {"value": "deepseek-reasoner", "label": "deepseek-
-                        {"value": "deepseek-
+                        {"value": "deepseek-reasoner", "label": "deepseek-r1"},
+                        {"value": "deepseek-r1-distill-qwen-32b", "label": "deepseek-r1-distill-qwen-32b"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -523,12 +525,10 @@ class LingYiWanWu(Node):
                     port_type=PortType.SELECT,
                     value="yi-lightning",
                     options=[
-                        {
-
-
-
-                        {"value": "yi-medium-200k", "label": "yi-medium-200k"},
-                        {"value": "yi-spark", "label": "yi-spark"},
+                        {
+                            "value": "yi-lightning",
+                            "label": "yi-lightning",
+                        },
                     ],
                 ),
                 "temperature": InputPort(
@@ -746,6 +746,7 @@ class OpenAI(Node):
                         {"value": "gpt-4o-mini", "label": "gpt-4o-mini"},
                         {"value": "o1-mini", "label": "o1-mini"},
                         {"value": "o1-preview", "label": "o1-preview"},
+                        {"value": "o3-mini", "label": "o3-mini"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -893,3 +894,91 @@ class XAi(Node):
                 ),
             },
         )
+
+
+class CustomModel(Node):
+    def __init__(self, id: Optional[str] = None):
+        super().__init__(
+            node_type="CustomModel",
+            category="llms",
+            task_name="llms.custom_model",
+            node_id=id,
+            ports={
+                "prompt": InputPort(
+                    name="prompt",
+                    port_type=PortType.TEXTAREA,
+                    value="",
+                ),
+                "model_family": InputPort(
+                    name="model_family",
+                    port_type=PortType.SELECT,
+                    value="",
+                    options=[],
+                ),
+                "llm_model": InputPort(
+                    name="llm_model",
+                    port_type=PortType.SELECT,
+                    value="",
+                    options=[],
+                ),
+                "temperature": InputPort(
+                    name="temperature",
+                    port_type=PortType.TEMPERATURE,
+                    value=0.7,
+                ),
+                "top_p": InputPort(
+                    name="top_p",
+                    port_type=PortType.NUMBER,
+                    value=0.95,
+                ),
+                "stream": InputPort(
+                    name="stream",
+                    port_type=PortType.CHECKBOX,
+                    value=False,
+                ),
+                "system_prompt": InputPort(
+                    name="system_prompt",
+                    port_type=PortType.TEXTAREA,
+                    value="",
+                ),
+                "response_format": InputPort(
+                    name="response_format",
+                    port_type=PortType.SELECT,
+                    value="text",
+                    options=[
+                        {"value": "text", "label": "Text"},
+                        {"value": "json_object", "label": "JSON"},
+                    ],
+                ),
+                "use_function_call": InputPort(
+                    name="use_function_call",
+                    port_type=PortType.CHECKBOX,
+                    value=False,
+                ),
+                "functions": InputPort(
+                    name="functions",
+                    port_type=PortType.SELECT,
+                    value=[],
+                ),
+                "function_call_mode": InputPort(
+                    name="function_call_mode",
+                    port_type=PortType.SELECT,
+                    value="auto",
+                    options=[
+                        {"value": "auto", "label": "auto"},
+                        {"value": "none", "label": "none"},
+                    ],
+                ),
+                "output": OutputPort(
+                    name="output",
+                ),
+                "function_call_output": OutputPort(
+                    name="function_call_output",
+                    condition="return fieldsData.use_function_call.value",
+                ),
+                "function_call_arguments": OutputPort(
+                    name="function_call_arguments",
+                    condition="return fieldsData.use_function_call.value",
+                ),
+            },
+        )
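The new `CustomModel` node follows the same shape as the other LLM nodes: prompt and system prompt text areas, model selection ports, sampling parameters, and function-call outputs that are only exposed when `use_function_call` is enabled. A minimal instantiation sketch; accessing the port mapping as `node.ports` is an assumption about the `Node` base class, which is not part of this diff:

```python
from vectorvein.workflow.nodes.llms import CustomModel

node = CustomModel()

# The keys below mirror the ports passed to Node.__init__ in the diff above;
# `node.ports` is an assumed attribute name on the Node base class.
print(sorted(node.ports.keys()))
```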
--- a/vectorvein-0.2.24.dist-info/RECORD
+++ b/vectorvein-0.2.26.dist-info/RECORD
@@ -1,13 +1,13 @@
-vectorvein-0.2.
-vectorvein-0.2.
-vectorvein-0.2.
+vectorvein-0.2.26.dist-info/METADATA,sha256=FuIheW9ox95gDN74fCkhXrE9d0VUYdp5VuoJ4xvPUgY,4570
+vectorvein-0.2.26.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+vectorvein-0.2.26.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
 vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/api/__init__.py,sha256=lfY-XA46fgD2iIZTU0VYP8i07AwA03Egj4Qua0vUKrQ,738
 vectorvein/api/client.py,sha256=xF-leKDQzVyyy9FnIRaz0k4eElYW1XbbzeRLcpnyk90,33047
 vectorvein/api/exceptions.py,sha256=uS_PAdx0ksC0r3dgfSGWdbLMZm4qdLeWSSqCv1g3_Gc,772
 vectorvein/api/models.py,sha256=xtPWMsB0yIJI7i-gY4B6MtvXv0ZIXnoeKspmeInH6fU,1449
 vectorvein/chat_clients/__init__.py,sha256=UIytpIgwo8qkZpIyrHVxLYTyliUOTp4J7C4iHRjbtWE,23850
-vectorvein/chat_clients/anthropic_client.py,sha256=
+vectorvein/chat_clients/anthropic_client.py,sha256=PVUP9UddiyFG7MXFXyon_PkQZxOgUc4GrsAmNkSu2co,59915
 vectorvein/chat_clients/baichuan_client.py,sha256=CVMvpgjdrZGv0BWnTOBD-f2ufZ3wq3496wqukumsAr4,526
 vectorvein/chat_clients/base_client.py,sha256=p7s-G4Wh9MSpDKEfG8wuFAeWy5DGvj5Go31hqrpQPhM,38817
 vectorvein/chat_clients/deepseek_client.py,sha256=3qWu01NlJAP2N-Ff62d5-CZXZitlizE1fzb20LNetig,526
@@ -41,16 +41,16 @@ vectorvein/types/settings.py,sha256=82RsherFSCc8s9-v0E7F_FK6avVh_XvC1b7EkNlFHbc,
 vectorvein/utilities/media_processing.py,sha256=7KtbLFzOYIn1e9QTN9G6C76NH8CBlV9kfAgiRKEIeXY,6263
 vectorvein/utilities/rate_limiter.py,sha256=dwolIUVw2wP83Odqpx0AAaE77de1GzxkYDGH4tM_u_4,10300
 vectorvein/utilities/retry.py,sha256=6KFS9R2HdhqM3_9jkjD4F36ZSpEx2YNFGOVlpOsUetM,2208
-vectorvein/workflow/graph/edge.py,sha256=
+vectorvein/workflow/graph/edge.py,sha256=1ckyyjCue_PLm7P1ItUfKOy6AKkemOpZ9m1WJ8UXIHQ,1072
 vectorvein/workflow/graph/node.py,sha256=A3M_GghrSju1D3xc_HtPdGyr-7XSkplGPKJveOUiIF4,3256
 vectorvein/workflow/graph/port.py,sha256=Q6HmI2cUi6viJ98ec6-MmMPMRtKS1-OgaudP3LMwVLA,6054
-vectorvein/workflow/graph/workflow.py,sha256=
+vectorvein/workflow/graph/workflow.py,sha256=l0rSDZeSd6OtHMwxzhIENfOKS0fJKKB4JUow-dV-LUI,8610
 vectorvein/workflow/nodes/__init__.py,sha256=jd4O27kIJdOtkij1FYZ6aJnJy2OQa7xtL1r-Yv8ylO0,3103
 vectorvein/workflow/nodes/audio_generation.py,sha256=ht2S0vnd0mIAt6FBaSWlADGbb7f_1DAySYrgYnvZT1Q,5726
 vectorvein/workflow/nodes/control_flows.py,sha256=Zc_uWuroYznLrU-BZCncyzvejC-zFl6EuN_VP8oq5mY,6573
 vectorvein/workflow/nodes/file_processing.py,sha256=Rsjc8al0z-2KuweO0nIybWvceqxbqOPQyTs0-pgy5m4,3980
 vectorvein/workflow/nodes/image_generation.py,sha256=fXOhLGodJ3OdKBPXO5a3rq4wN2GMJ0jwqKO_gJFdocU,32852
-vectorvein/workflow/nodes/llms.py,sha256=
+vectorvein/workflow/nodes/llms.py,sha256=rXN5Vgn6EvoglNb_BEzVUIrc4dCDxlinEYqznSE-Bek,38121
 vectorvein/workflow/nodes/media_editing.py,sha256=Od0X0SdcyRhcJckWpDM4WvgWEKxaIsgMXpMifN8Sc5M,29405
 vectorvein/workflow/nodes/media_processing.py,sha256=t-azYDphXmLRdOyHDfXFTS1tsEOyKqKskDyD0y232j8,19043
 vectorvein/workflow/nodes/output.py,sha256=_UQxiddHtGv2rkjhUFE-KDgrjnh0AGJQJyq9-4Aji5A,12567
@@ -62,4 +62,4 @@ vectorvein/workflow/nodes/vector_db.py,sha256=t6I17q6iR3yQreiDHpRrksMdWDPIvgqJs0
 vectorvein/workflow/nodes/video_generation.py,sha256=qmdg-t_idpxq1veukd-jv_ChICMOoInKxprV9Z4Vi2w,4118
 vectorvein/workflow/nodes/web_crawlers.py,sha256=LsqomfXfqrXfHJDO1cl0Ox48f4St7X_SL12DSbAMSOw,5415
 vectorvein/workflow/utils/json_to_code.py,sha256=F7dhDy8kGc8ndOeihGLRLGFGlquoxVlb02ENtxnQ0C8,5914
-vectorvein-0.2.
+vectorvein-0.2.26.dist-info/RECORD,,
- {vectorvein-0.2.24.dist-info → vectorvein-0.2.26.dist-info}/WHEEL: File without changes
- {vectorvein-0.2.24.dist-info → vectorvein-0.2.26.dist-info}/entry_points.txt: File without changes