vectorvein 0.2.97__py3-none-any.whl → 0.2.99__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- vectorvein/types/defaults.py +24 -0
- vectorvein/workflow/nodes/image_generation.py +122 -14
- vectorvein/workflow/nodes/llms.py +43 -18
- vectorvein/workflow/nodes/media_processing.py +16 -7
- vectorvein/workflow/nodes/output.py +2 -4
- vectorvein/workflow/nodes/tools.py +31 -0
- {vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/METADATA +1 -1
- {vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/RECORD +10 -10
- {vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/WHEEL +0 -0
- {vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/entry_points.txt +0 -0
vectorvein/types/defaults.py
CHANGED
@@ -996,6 +996,30 @@ GEMINI_MODELS: Final[Dict[str, ModelSettingDict]] = {
         "response_format_available": True,
         "native_multimodal": True,
     },
+    "gemini-2.5-pro-preview-06-05": {
+        "id": "gemini-2.5-pro-preview-06-05",
+        "context_length": 1048576,
+        "max_output_tokens": 65536,
+        "function_call_available": True,
+        "response_format_available": True,
+        "native_multimodal": True,
+    },
+    "gemini-2.5-pro": {
+        "id": "gemini-2.5-pro",
+        "context_length": 1048576,
+        "max_output_tokens": 65536,
+        "function_call_available": True,
+        "response_format_available": True,
+        "native_multimodal": True,
+    },
+    "gemini-2.5-flash": {
+        "id": "gemini-2.5-flash",
+        "context_length": 1048576,
+        "max_output_tokens": 65536,
+        "function_call_available": True,
+        "response_format_available": True,
+        "native_multimodal": True,
+    },
 }
 
 # 百度文心一言 ERNIE 模型
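The three new Gemini 2.5 entries reuse the same ModelSettingDict shape as the existing models, so lookup code does not need to change. A minimal sketch of such a lookup, assuming only the GEMINI_MODELS constant shown above and the import path implied by vectorvein/types/defaults.py:

```python
# Minimal sketch: read one of the newly added entries from the GEMINI_MODELS
# table in vectorvein/types/defaults.py (key names and values per the diff above).
from vectorvein.types.defaults import GEMINI_MODELS

settings = GEMINI_MODELS["gemini-2.5-pro"]
print(settings["context_length"])     # 1048576
print(settings["max_output_tokens"])  # 65536
print(settings["native_multimodal"])  # True
```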
vectorvein/workflow/nodes/image_generation.py
CHANGED
@@ -182,6 +182,16 @@ class Flux1(Node):
                     value="",
                     multiple=True,
                 ),
+                "input_image": InputPort(
+                    name="input_image",
+                    port_type=PortType.FILE,
+                    value=list(),
+                    support_file_types=[".jpg", ".jpeg", ".png", ".webp"],
+                    multiple=True,
+                    show=True,
+                    condition="return fieldsData.model.value.startsWith('FLUX.1 Kontext')",
+                    condition_python=lambda ports: ports["model"].value.startswith("FLUX.1 Kontext"),
+                ),
                 "model": InputPort(
                     name="model",
                     port_type=PortType.SELECT,
@@ -191,6 +201,9 @@ class Flux1(Node):
                         {"value": "FLUX.1 [dev]", "label": "FLUX.1 [dev]"},
                         {"value": "FLUX.1 [pro]", "label": "FLUX.1 [pro]"},
                         {"value": "FLUX.1 [pro] ultra", "label": "FLUX.1 [pro] ultra"},
+                        {"value": "FLUX.1 Kontext [pro]", "label": "FLUX.1 Kontext [pro]"},
+                        {"value": "FLUX.1 Kontext [max]", "label": "FLUX.1 Kontext [max]"},
+                        {"value": "FLUX.1 Kontext [max] Multi", "label": "FLUX.1 Kontext [max] Multi"},
                     ],
                     multiple=True,
                 ),
@@ -199,16 +212,16 @@ class Flux1(Node):
                     port_type=PortType.NUMBER,
                     value=1024,
                     max=1536,
-                    condition="return fieldsData.model.value !== 'FLUX.1 [pro] ultra'",
-                    condition_python=lambda ports: ports["model"].value != "FLUX.1 [pro] ultra",
+                    condition="return fieldsData.model.value !== 'FLUX.1 [pro] ultra' && !fieldsData.model.value.startsWith('FLUX.1 Kontext')",
+                    condition_python=lambda ports: ports["model"].value != "FLUX.1 [pro] ultra" and not ports["model"].value.startswith("FLUX.1 Kontext"),
                 ),
                 "height": InputPort(
                     name="height",
                     port_type=PortType.NUMBER,
                     value=1024,
                     max=1536,
-                    condition="return fieldsData.model.value !== 'FLUX.1 [pro] ultra'",
-                    condition_python=lambda ports: ports["model"].value != "FLUX.1 [pro] ultra",
+                    condition="return fieldsData.model.value !== 'FLUX.1 [pro] ultra' && !fieldsData.model.value.startsWith('FLUX.1 Kontext')",
+                    condition_python=lambda ports: ports["model"].value != "FLUX.1 [pro] ultra" and not ports["model"].value.startswith("FLUX.1 Kontext"),
                 ),
                 "aspect_ratio": InputPort(
                     name="aspect_ratio",
@@ -374,10 +387,7 @@ class Kolors(Node):
                         {"value": "EulerDiscreteScheduler", "label": "EulerDiscreteScheduler"},
                         {"value": "EulerAncestralDiscreteScheduler", "label": "EulerAncestralDiscreteScheduler"},
                         {"value": "DPMSolverMultistepScheduler", "label": "DPMSolverMultistepScheduler"},
-                        {
-                            "value": "DPMSolverMultistepScheduler_SDE_karras",
-                            "label": "DPMSolverMultistepScheduler_SDE_karras",
-                        },
+                        {"value": "DPMSolverMultistepScheduler_SDE_karras", "label": "DPMSolverMultistepScheduler_SDE_karras"},
                         {"value": "UniPCMultistepScheduler", "label": "UniPCMultistepScheduler"},
                         {"value": "DEISMultistepScheduler", "label": "DEISMultistepScheduler"},
                     ],
@@ -566,8 +576,7 @@ class Recraft(Node):
                         {"value": "motion_blur", "label": "motion_blur"},
                     ],
                     condition="return fieldsData.generation_type.value === 'text_to_image' && fieldsData.base_style.value === 'realistic_image'",
-                    condition_python=lambda ports: ports["generation_type"].value == "text_to_image"
-                    and ports["base_style"].value == "realistic_image",
+                    condition_python=lambda ports: ports["generation_type"].value == "text_to_image" and ports["base_style"].value == "realistic_image",
                     multiple=True,
                 ),
                 "substyle_digital_illustration": InputPort(
@@ -587,8 +596,7 @@ class Recraft(Node):
                         {"value": "2d_art_poster_2", "label": "2d_art_poster_2"},
                     ],
                     condition="return fieldsData.generation_type.value === 'text_to_image' && fieldsData.base_style.value === 'digital_illustration'",
-                    condition_python=lambda ports: ports["generation_type"].value == "text_to_image"
-                    and ports["base_style"].value == "digital_illustration",
+                    condition_python=lambda ports: ports["generation_type"].value == "text_to_image" and ports["base_style"].value == "digital_illustration",
                     multiple=True,
                 ),
                 "substyle_vector_illustration": InputPort(
@@ -603,8 +611,7 @@ class Recraft(Node):
                         {"value": "linocut", "label": "linocut"},
                     ],
                     condition="return fieldsData.generation_type.value === 'text_to_image' && fieldsData.base_style.value === 'vector_illustration'",
-                    condition_python=lambda ports: ports["generation_type"].value == "text_to_image"
-                    and ports["base_style"].value == "vector_illustration",
+                    condition_python=lambda ports: ports["generation_type"].value == "text_to_image" and ports["base_style"].value == "vector_illustration",
                     multiple=True,
                 ),
                 "size": InputPort(
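The Kontext-related changes above rely on paired `condition` (JavaScript, evaluated by the frontend) and `condition_python` fields to show or hide ports. A small self-contained sketch of what the Python-side predicate does; the `_Port` class below is a purely illustrative stand-in with a `.value` attribute, not the vectorvein port class:

```python
# Illustrative only: a stand-in object mimicking what the condition_python
# lambdas above receive through the ports mapping.
class _Port:
    def __init__(self, value):
        self.value = value


def width_visible(ports) -> bool:
    # Same predicate as the condition_python added to the "width"/"height" ports above.
    return ports["model"].value != "FLUX.1 [pro] ultra" and not ports["model"].value.startswith("FLUX.1 Kontext")


print(width_visible({"model": _Port("FLUX.1 [dev]")}))          # True
print(width_visible({"model": _Port("FLUX.1 Kontext [max]")}))  # False: Kontext models hide width/height
```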
@@ -663,6 +670,107 @@ class Recraft(Node):
         )
 
 
+class GptImage(Node):
+    def __init__(self, id: Optional[str] = None):
+        super().__init__(
+            node_type="GptImage",
+            category="image_generation",
+            task_name="image_generation.gpt_image",
+            node_id=id,
+            ports={
+                "action": InputPort(
+                    name="action",
+                    port_type=PortType.SELECT,
+                    value="generation",
+                    options=[
+                        {"value": "generation", "label": "generation"},
+                        {"value": "edit", "label": "edit"},
+                    ],
+                ),
+                "prompt": InputPort(
+                    name="prompt",
+                    port_type=PortType.TEXTAREA,
+                    value="",
+                    multiple=True,
+                    show=True,
+                ),
+                "individual_images": InputPort(
+                    name="individual_images",
+                    port_type=PortType.CHECKBOX,
+                    value=False,
+                    condition="return fieldsData.action.value === 'edit'",
+                    condition_python=lambda ports: ports["action"].value == "edit",
+                ),
+                "image": InputPort(
+                    name="image",
+                    port_type=PortType.FILE,
+                    value=list(),
+                    support_file_types=[".jpg", ".jpeg", ".png", ".webp"],
+                    multiple=False,
+                    condition="return fieldsData.action.value === 'edit'",
+                    condition_python=lambda ports: ports["action"].value == "edit",
+                ),
+                "mask": InputPort(
+                    name="mask",
+                    port_type=PortType.FILE,
+                    value=list(),
+                    support_file_types=[".png"],
+                    condition="return fieldsData.action.value === 'edit'",
+                    condition_python=lambda ports: ports["action"].value == "edit",
+                ),
+                "model": InputPort(
+                    name="model",
+                    port_type=PortType.SELECT,
+                    value="gpt-image-1",
+                    options=[
+                        {"value": "gpt-image-1", "label": "gpt-image-1"},
+                    ],
+                    multiple=True,
+                ),
+                "size": InputPort(
+                    name="size",
+                    port_type=PortType.SELECT,
+                    value="1024x1024",
+                    options=[
+                        {"value": "1024x1024", "label": "1024x1024"},
+                        {"value": "1024x1536", "label": "1024x1536"},
+                        {"value": "1536x1024", "label": "1536x1024"},
+                    ],
+                    multiple=True,
+                ),
+                "n": InputPort(
+                    name="n",
+                    port_type=PortType.NUMBER,
+                    value=1,
+                    min=1,
+                    max=10,
+                ),
+                "quality": InputPort(
+                    name="quality",
+                    port_type=PortType.SELECT,
+                    value="high",
+                    options=[
+                        {"value": "low", "label": "low"},
+                        {"value": "medium", "label": "medium"},
+                        {"value": "high", "label": "high"},
+                    ],
+                    multiple=True,
+                ),
+                "output_type": InputPort(
+                    name="output_type",
+                    port_type=PortType.SELECT,
+                    value="markdown",
+                    options=[
+                        {"value": "only_link", "label": "only_link"},
+                        {"value": "markdown", "label": "markdown"},
+                        {"value": "html", "label": "html"},
+                    ],
+                ),
+                "output": OutputPort(),
+            },
+        )
+
+
 class StableDiffusion(Node):
     def __init__(self, id: Optional[str] = None):
         special_width_height_models = [
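For reference, a hedged usage sketch of the new GptImage node. The import path follows the file layout in this diff; how port values are read or written at runtime is not shown in this release, so the attribute access below is an assumption and is probed defensively:

```python
# Hedged sketch: construct the GptImage node added above. node_id is optional
# per the __init__ signature; the "ports" attribute name is an assumption, so
# it is accessed with getattr rather than directly.
from vectorvein.workflow.nodes.image_generation import GptImage

node = GptImage()
ports = getattr(node, "ports", {})
if "action" in ports:
    print(ports["action"].value)  # "generation" by default, per the diff
```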
vectorvein/workflow/nodes/llms.py
CHANGED
@@ -21,15 +21,31 @@ class AliyunQwen(Node):
                 "llm_model": InputPort(
                     name="llm_model",
                     port_type=PortType.SELECT,
-                    value="
+                    value="qwen3-32b",
                     options=[
                         {"value": "qwen2.5-72b-instruct", "label": "qwen2.5-72b-instruct"},
                         {"value": "qwen2.5-32b-instruct", "label": "qwen2.5-32b-instruct"},
                         {"value": "qwen2.5-coder-32b-instruct", "label": "qwen2.5-coder-32b-instruct"},
-                        {"value": "qwq-32b
+                        {"value": "qwq-32b", "label": "qwq-32b"},
                         {"value": "qwen2.5-14b-instruct", "label": "qwen2.5-14b-instruct"},
                         {"value": "qwen2.5-7b-instruct", "label": "qwen2.5-7b-instruct"},
                         {"value": "qwen2.5-coder-7b-instruct", "label": "qwen2.5-coder-7b-instruct"},
+                        {"value": "qwen3-235b-a22b", "label": "qwen3-235b-a22b"},
+                        {"value": "qwen3-235b-a22b-thinking", "label": "qwen3-235b-a22b-thinking"},
+                        {"value": "qwen3-32b", "label": "qwen3-32b"},
+                        {"value": "qwen3-32b-thinking", "label": "qwen3-32b-thinking"},
+                        {"value": "qwen3-30b-a3b", "label": "qwen3-30b-a3b"},
+                        {"value": "qwen3-30b-a3b-thinking", "label": "qwen3-30b-a3b-thinking"},
+                        {"value": "qwen3-14b", "label": "qwen3-14b"},
+                        {"value": "qwen3-14b-thinking", "label": "qwen3-14b-thinking"},
+                        {"value": "qwen3-8b", "label": "qwen3-8b"},
+                        {"value": "qwen3-8b-thinking", "label": "qwen3-8b-thinking"},
+                        {"value": "qwen3-4b", "label": "qwen3-4b"},
+                        {"value": "qwen3-4b-thinking", "label": "qwen3-4b-thinking"},
+                        {"value": "qwen3-1.7b", "label": "qwen3-1.7b"},
+                        {"value": "qwen3-1.7b-thinking", "label": "qwen3-1.7b-thinking"},
+                        {"value": "qwen3-0.6b", "label": "qwen3-0.6b"},
+                        {"value": "qwen3-0.6b-thinking", "label": "qwen3-0.6b-thinking"},
                     ],
                 ),
                 "top_p": InputPort(
@@ -180,6 +196,7 @@ class BaiduWenxin(Node):
                         {"value": "ernie-speed", "label": "ernie-speed"},
                         {"value": "ernie-3.5", "label": "ernie-3.5"},
                         {"value": "ernie-4.0", "label": "ernie-4.0"},
+                        {"value": "ernie-4.5", "label": "ernie-4.5"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -224,6 +241,9 @@ class ChatGLM(Node):
                         {"value": "glm-4-flash", "label": "glm-4-flash"},
                         {"value": "glm-4-long", "label": "glm-4-long"},
                         {"value": "glm-zero-preview", "label": "glm-zero-preview"},
+                        {"value": "glm-z1-air", "label": "glm-z1-air"},
+                        {"value": "glm-z1-airx", "label": "glm-z1-airx"},
+                        {"value": "glm-z1-flash", "label": "glm-z1-flash"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -299,8 +319,12 @@ class Claude(Node):
                 "llm_model": InputPort(
                     name="llm_model",
                     port_type=PortType.SELECT,
-                    value="claude-
+                    value="claude-sonnet-4-20250514",
                     options=[
+                        {"value": "claude-opus-4-20250514-thinking", "label": "claude-opus-4-20250514-thinking"},
+                        {"value": "claude-opus-4-20250514", "label": "claude-opus-4-20250514"},
+                        {"value": "claude-sonnet-4-20250514-thinking", "label": "claude-sonnet-4-20250514-thinking"},
+                        {"value": "claude-sonnet-4-20250514", "label": "claude-sonnet-4-20250514"},
                         {"value": "claude-3-7-sonnet-thinking", "label": "claude-3-7-sonnet-thinking"},
                         {"value": "claude-3-7-sonnet", "label": "claude-3-7-sonnet"},
                         {"value": "claude-3-5-sonnet", "label": "claude-3-5-sonnet"},
@@ -441,21 +465,13 @@ class Gemini(Node):
                 "llm_model": InputPort(
                     name="llm_model",
                     port_type=PortType.SELECT,
-                    value="gemini-
+                    value="gemini-2.5-pro-preview-06-05",
                     options=[
-                        {"
-                        {"
-                        {"value": "gemini-2.0-flash", "label": "gemini-2.0-flash"},
-                        {
-
-                            "label": "gemini-2.0-flash-thinking-exp-01-21",
-                        },
-                        {"value": "gemini-2.0-pro-exp-02-05", "label": "gemini-2.0-pro-exp-02-05"},
-                        {
-                            "value": "gemini-2.0-flash-lite-preview-02-05",
-                            "label": "gemini-2.0-flash-lite-preview-02-05",
-                        },
-                        {"value": "gemini-exp-1206", "label": "gemini-exp-1206"},
+                        {"label": "gemini-2.0-flash", "value": "gemini-2.0-flash"},
+                        {"label": "gemini-2.0-flash-lite-preview-02-05", "value": "gemini-2.0-flash-lite-preview-02-05"},
+                        {"value": "gemini-2.0-flash-thinking-exp-01-21", "label": "gemini-2.0-flash-thinking-exp-01-21"},
+                        {"label": "gemini-2.5-pro-preview-06-05", "value": "gemini-2.5-pro-preview-06-05"},
+                        {"label": "gemini-2.5-flash-preview-05-20", "value": "gemini-2.5-flash-preview-05-20"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -678,6 +694,7 @@ class Moonshot(Node):
                         {"value": "moonshot-v1-8k", "label": "moonshot-v1-8k"},
                         {"value": "moonshot-v1-32k", "label": "moonshot-v1-32k"},
                         {"value": "moonshot-v1-128k", "label": "moonshot-v1-128k"},
+                        {"value": "kimi-latest", "label": "kimi-latest"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -771,6 +788,10 @@ class OpenAI(Node):
                         {"value": "o1-mini", "label": "o1-mini"},
                         {"value": "o1-preview", "label": "o1-preview"},
                         {"value": "o3-mini", "label": "o3-mini"},
+                        {"value": "o3-mini-high", "label": "o3-mini-high"},
+                        {"value": "gpt-4.1", "label": "gpt-4.1"},
+                        {"value": "o4-mini", "label": "o4-mini"},
+                        {"value": "o4-mini-high", "label": "o4-mini-high"},
                     ],
                 ),
                 "temperature": InputPort(
@@ -855,9 +876,13 @@ class XAi(Node):
                 "llm_model": InputPort(
                     name="llm_model",
                     port_type=PortType.SELECT,
-                    value="grok-beta",
+                    value="grok-3-beta",
                     options=[
                         {"value": "grok-beta", "label": "grok-beta"},
+                        {"value": "grok-3-beta", "label": "grok-3-beta"},
+                        {"value": "grok-3-fast-beta", "label": "grok-3-fast-beta"},
+                        {"value": "grok-3-mini-beta", "label": "grok-3-mini-beta"},
+                        {"value": "grok-3-mini-fast-beta", "label": "grok-3-mini-fast-beta"},
                     ],
                 ),
                 "temperature": InputPort(
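The Gemini node's option list in llms.py and the GEMINI_MODELS additions in defaults.py are maintained separately. A small sketch that cross-checks them; the option values are copied from the hunk above, and any value without a defaults.py entry is simply reported rather than treated as an error:

```python
# Sketch: report Gemini options from llms.py (copied from the diff above) that
# have no corresponding entry in the GEMINI_MODELS table from defaults.py.
from vectorvein.types.defaults import GEMINI_MODELS

gemini_node_options = [
    "gemini-2.0-flash",
    "gemini-2.0-flash-lite-preview-02-05",
    "gemini-2.0-flash-thinking-exp-01-21",
    "gemini-2.5-pro-preview-06-05",
    "gemini-2.5-flash-preview-05-20",
]
missing = [m for m in gemini_node_options if m not in GEMINI_MODELS]
print("options without a defaults.py entry:", missing or "none")
```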
vectorvein/workflow/nodes/media_processing.py
CHANGED
@@ -21,8 +21,14 @@ class ClaudeVision(Node):
                 "llm_model": InputPort(
                     name="llm_model",
                     port_type=PortType.SELECT,
-                    value="claude-
+                    value="claude-sonnet-4-20250514",
                     options=[
+                        {"value": "claude-opus-4-20250514-thinking", "label": "claude-opus-4-20250514-thinking"},
+                        {"value": "claude-opus-4-20250514", "label": "claude-opus-4-20250514"},
+                        {"value": "claude-sonnet-4-20250514-thinking", "label": "claude-sonnet-4-20250514-thinking"},
+                        {"value": "claude-sonnet-4-20250514", "label": "claude-sonnet-4-20250514"},
+                        {"value": "claude-3-7-sonnet-thinking", "label": "claude-3-7-sonnet-thinking"},
+                        {"value": "claude-3-7-sonnet", "label": "claude-3-7-sonnet"},
                         {"value": "claude-3-5-sonnet", "label": "claude-3-5-sonnet"},
                         {"value": "claude-3-opus", "label": "claude-3-opus"},
                         {"value": "claude-3-sonnet", "label": "claude-3-sonnet"},
@@ -134,13 +140,13 @@ class GeminiVision(Node):
                 "llm_model": InputPort(
                     name="llm_model",
                     port_type=PortType.SELECT,
-                    value="gemini-
+                    value="gemini-2.5-pro-preview-06-05",
                     options=[
-                        {"
-                        {"
-                        {"value": "gemini-2.0-flash-exp", "label": "gemini-2.0-flash-exp"},
-                        {"
-                        {"
+                        {"label": "gemini-2.0-flash", "value": "gemini-2.0-flash"},
+                        {"label": "gemini-2.0-flash-lite-preview-02-05", "value": "gemini-2.0-flash-lite-preview-02-05"},
+                        {"value": "gemini-2.0-flash-thinking-exp-01-21", "label": "gemini-2.0-flash-thinking-exp-01-21"},
+                        {"label": "gemini-2.5-pro-preview-06-05", "value": "gemini-2.5-pro-preview-06-05"},
+                        {"label": "gemini-2.5-flash-preview-05-20", "value": "gemini-2.5-flash-preview-05-20"},
                     ],
                 ),
                 "multiple_input": InputPort(
@@ -254,6 +260,9 @@ class GptVision(Node):
                     options=[
                         {"value": "gpt-4o", "label": "gpt-4o"},
                         {"value": "gpt-4o-mini", "label": "gpt-4o-mini"},
+                        {"value": "o4-mini", "label": "o4-mini"},
+                        {"value": "o4-mini-high", "label": "o4-mini-high"},
+                        {"value": "gpt-4.1", "label": "gpt-4.1"},
                     ],
                 ),
                 "images_or_urls": InputPort(
vectorvein/workflow/nodes/output.py
CHANGED
@@ -332,16 +332,14 @@ class PictureRender(Node):
                     port_type=PortType.NUMBER,
                     value=1200,
                     condition="return ['url', 'html_code', 'markdown', 'mindmap', 'mermaid'].includes(fieldsData.render_type.value)",
-                    condition_python=lambda ports: ports["render_type"].value
-                    in ["url", "html_code", "markdown", "mindmap", "mermaid"],
+                    condition_python=lambda ports: ports["render_type"].value in ["url", "html_code", "markdown", "mindmap", "mermaid"],
                 ),
                 "height": InputPort(
                     name="height",
                     port_type=PortType.NUMBER,
                     value=800,
                     condition="return ['url', 'html_code', 'markdown', 'mindmap', 'mermaid'].includes(fieldsData.render_type.value)",
-                    condition_python=lambda ports: ports["render_type"].value
-                    in ["url", "html_code", "markdown", "mindmap", "mermaid"],
+                    condition_python=lambda ports: ports["render_type"].value in ["url", "html_code", "markdown", "mindmap", "mermaid"],
                 ),
                 "base64_encode": InputPort(
                     name="base64_encode",
vectorvein/workflow/nodes/tools.py
CHANGED
@@ -161,11 +161,30 @@ class TextSearch(Node):
                     options=[
                         {"value": "bing", "label": "bing"},
                         {"value": "bochaai", "label": "bochaai"},
+                        {"value": "exa.ai", "label": "exa.ai"},
                         {"value": "jina.ai", "label": "jina.ai"},
                         {"value": "zhipuai", "label": "zhipuai"},
                         {"value": "duckduckgo", "label": "duckduckgo"},
                     ],
                 ),
+                "result_category": InputPort(
+                    name="result_category",
+                    port_type=PortType.SELECT,
+                    value="all",
+                    options=[
+                        {"value": "all", "label": "all"},
+                        {"value": "company", "label": "company"},
+                        {"value": "research_paper", "label": "research_paper"},
+                        {"value": "news", "label": "news"},
+                        {"value": "pdf", "label": "pdf"},
+                        {"value": "github", "label": "github"},
+                        {"value": "personal_site", "label": "personal_site"},
+                        {"value": "linkedin_profile", "label": "linkedin_profile"},
+                        {"value": "financial_report", "label": "financial_report"},
+                    ],
+                    condition="return fieldsData.search_engine.value === 'exa.ai'",
+                    condition_python=lambda ports: ports["search_engine"].value == "exa.ai",
+                ),
                 "count": InputPort(
                     name="count",
                     port_type=PortType.NUMBER,
@@ -219,14 +238,26 @@ class TextSearch(Node):
                 "output_page_title": OutputPort(
                     name="output_page_title",
                     port_type=PortType.LIST,
+                    condition="!fieldsData.combine_result_in_text.value",
+                    condition_python=lambda ports: not ports["combine_result_in_text"].value,
                 ),
                 "output_page_url": OutputPort(
                     name="output_page_url",
                     port_type=PortType.LIST,
+                    condition="!fieldsData.combine_result_in_text.value",
+                    condition_python=lambda ports: not ports["combine_result_in_text"].value,
                 ),
                 "output_page_snippet": OutputPort(
                     name="output_page_snippet",
                     port_type=PortType.LIST,
+                    condition="!fieldsData.combine_result_in_text.value",
+                    condition_python=lambda ports: not ports["combine_result_in_text"].value,
+                ),
+                "output_combined": OutputPort(
+                    name="output_combined",
+                    port_type=PortType.LIST,
+                    condition="!fieldsData.combine_result_in_text.value",
+                    condition_python=lambda ports: not ports["combine_result_in_text"].value,
                 ),
             },
         )
{vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/RECORD
CHANGED
@@ -1,6 +1,6 @@
-vectorvein-0.2.
-vectorvein-0.2.
-vectorvein-0.2.
+vectorvein-0.2.99.dist-info/METADATA,sha256=JR9coEk5Bbq_tkM-3KSTe25uAqw5rEEY_zYtmRfCfhk,4567
+vectorvein-0.2.99.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+vectorvein-0.2.99.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
 vectorvein/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/api/__init__.py,sha256=lfY-XA46fgD2iIZTU0VYP8i07AwA03Egj4Qua0vUKrQ,738
 vectorvein/api/client.py,sha256=xF-leKDQzVyyy9FnIRaz0k4eElYW1XbbzeRLcpnyk90,33047
@@ -32,7 +32,7 @@ vectorvein/server/token_server.py,sha256=36F9PKSNOX8ZtYBXY_l-76GQTpUSmQ2Y8EMy1H7
 vectorvein/settings/__init__.py,sha256=j8BNRqJ23GWI83vFzOQJZvZuy-WtKMeOTJRghG4cG5I,11471
 vectorvein/settings/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vectorvein/types/__init__.py,sha256=0XNY7FGPklSk0eKPR0ZgwG2kNqyZ0z3Z3G7oLP0ep8Y,3838
-vectorvein/types/defaults.py,sha256=
+vectorvein/types/defaults.py,sha256=VoT-lUn8k2Gw8AtswuL2dkoUpD8QVQOGYjSeZ6xsTrI,39259
 vectorvein/types/enums.py,sha256=LplSVkXLBK-t8TWtJKj_f7ktWTd6CSHWRLb67XKMm54,1716
 vectorvein/types/exception.py,sha256=KtnqZ-1DstHm95SZAyZdHhkGq1bJ4A9Aw3Zfdu-VIFo,130
 vectorvein/types/llm_parameters.py,sha256=q2Ilrh0mjERnI8qRDJ-2exQlHiMb-HEXVFTDiAVk6Dk,9452
@@ -49,14 +49,14 @@ vectorvein/workflow/nodes/__init__.py,sha256=dWrWtL3q0Vsn-MLgJ7gNgLCrwZ5BrqjrN2Q
 vectorvein/workflow/nodes/audio_generation.py,sha256=ZRFZ_ycMTSJ2LKmekctagQdJYTl-3q4TNOIKETpS9AM,5870
 vectorvein/workflow/nodes/control_flows.py,sha256=fDySWek8Isbfznwn0thmbTwTP4c99w68Up9dlASAtIo,6805
 vectorvein/workflow/nodes/file_processing.py,sha256=f4PlfgSAVFhwuqcEAvcLarNIkHUFP4FJucxnb3kekTU,4498
-vectorvein/workflow/nodes/image_generation.py,sha256=
-vectorvein/workflow/nodes/llms.py,sha256=
+vectorvein/workflow/nodes/image_generation.py,sha256=aH5TUc3Cjs07OcHJAA_fIsexwI9Jy5J2eX6wSgUl0B8,40708
+vectorvein/workflow/nodes/llms.py,sha256=iceW_AA0eyq701pcs5_pvNaDG9yR-zZoW2TJd7jMiCI,42684
 vectorvein/workflow/nodes/media_editing.py,sha256=ut4NN9_VUqnsqT2rlv0JrLhyxRLNUkvHb0c4QZDiKz8,34320
-vectorvein/workflow/nodes/media_processing.py,sha256=
-vectorvein/workflow/nodes/output.py,sha256=
+vectorvein/workflow/nodes/media_processing.py,sha256=zfFMgKtggADJ1mbs9TAWKZK49rvpMHD3U7J0WOWgF4g,23013
+vectorvein/workflow/nodes/output.py,sha256=60Eef45OhyvSHhzbiotjBPYD1eIlJZqnUckJWQqPmvo,13132
 vectorvein/workflow/nodes/relational_db.py,sha256=Zg4G3xIQ94uoWE-Z4YER1bBhWgBQ6mYbJVQDeAN895I,5498
 vectorvein/workflow/nodes/text_processing.py,sha256=BRmFSyLPADFplbUqUNjoJdmHzQvrPknJvBvvgtzaklk,8744
-vectorvein/workflow/nodes/tools.py,sha256=
+vectorvein/workflow/nodes/tools.py,sha256=GDJnxv4fzlATlP5zACs_1CwMsNcssKLLHPgQEsVLnZA,15795
 vectorvein/workflow/nodes/triggers.py,sha256=BolH4X6S8HSuU2kwHmYKr-ozHbgKBmdZRcnXpK5EfGA,597
 vectorvein/workflow/nodes/vector_db.py,sha256=p9AT_E8ASbcYHZqHYTCIGvqkIqzxaFM4UxaUELJEe-c,6112
 vectorvein/workflow/nodes/video_generation.py,sha256=qmdg-t_idpxq1veukd-jv_ChICMOoInKxprV9Z4Vi2w,4118
@@ -65,4 +65,4 @@ vectorvein/workflow/utils/analyse.py,sha256=msmvyz35UTYTwqQR5sg9H0sm1vxmGDSmep9X
 vectorvein/workflow/utils/check.py,sha256=B_NdwqIqnc7Ko2HHqFpfOmWVaAu21tPITe0szKfiZKc,11414
 vectorvein/workflow/utils/json_to_code.py,sha256=P8dhhSNgKhTnW17qXNjLO2aLdb0rA8qMAWxhObol2TU,7295
 vectorvein/workflow/utils/layout.py,sha256=j0bRD3uaXu40xCS6U6BGahBI8FrHa5MiF55GbTrZ1LM,4565
-vectorvein-0.2.
+vectorvein-0.2.99.dist-info/RECORD,,

{vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/WHEEL
File without changes

{vectorvein-0.2.97.dist-info → vectorvein-0.2.99.dist-info}/entry_points.txt
File without changes