ibm-watsonx-orchestrate 1.10.0b0__py3-none-any.whl → 1.10.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. ibm_watsonx_orchestrate/__init__.py +1 -2
  2. ibm_watsonx_orchestrate/agent_builder/connections/__init__.py +1 -1
  3. ibm_watsonx_orchestrate/agent_builder/connections/connections.py +6 -3
  4. ibm_watsonx_orchestrate/agent_builder/connections/types.py +68 -17
  5. ibm_watsonx_orchestrate/agent_builder/knowledge_bases/types.py +47 -3
  6. ibm_watsonx_orchestrate/agent_builder/toolkits/types.py +18 -15
  7. ibm_watsonx_orchestrate/agent_builder/tools/types.py +1 -1
  8. ibm_watsonx_orchestrate/cli/commands/connections/connections_command.py +40 -11
  9. ibm_watsonx_orchestrate/cli/commands/connections/connections_controller.py +96 -30
  10. ibm_watsonx_orchestrate/cli/commands/knowledge_bases/knowledge_bases_controller.py +32 -10
  11. ibm_watsonx_orchestrate/cli/commands/server/server_command.py +95 -17
  12. ibm_watsonx_orchestrate/cli/commands/server/types.py +14 -6
  13. ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_command.py +43 -10
  14. ibm_watsonx_orchestrate/cli/commands/toolkit/toolkit_controller.py +52 -25
  15. ibm_watsonx_orchestrate/client/connections/connections_client.py +4 -3
  16. ibm_watsonx_orchestrate/client/knowledge_bases/knowledge_base_client.py +4 -4
  17. ibm_watsonx_orchestrate/docker/compose-lite.yml +52 -13
  18. ibm_watsonx_orchestrate/docker/default.env +21 -14
  19. ibm_watsonx_orchestrate/flow_builder/data_map.py +4 -1
  20. ibm_watsonx_orchestrate/flow_builder/flows/__init__.py +2 -0
  21. ibm_watsonx_orchestrate/flow_builder/flows/flow.py +204 -17
  22. ibm_watsonx_orchestrate/flow_builder/node.py +114 -19
  23. ibm_watsonx_orchestrate/flow_builder/types.py +206 -34
  24. ibm_watsonx_orchestrate/run/connections.py +2 -2
  25. {ibm_watsonx_orchestrate-1.10.0b0.dist-info → ibm_watsonx_orchestrate-1.10.1.dist-info}/METADATA +1 -1
  26. {ibm_watsonx_orchestrate-1.10.0b0.dist-info → ibm_watsonx_orchestrate-1.10.1.dist-info}/RECORD +29 -29
  27. {ibm_watsonx_orchestrate-1.10.0b0.dist-info → ibm_watsonx_orchestrate-1.10.1.dist-info}/WHEEL +0 -0
  28. {ibm_watsonx_orchestrate-1.10.0b0.dist-info → ibm_watsonx_orchestrate-1.10.1.dist-info}/entry_points.txt +0 -0
  29. {ibm_watsonx_orchestrate-1.10.0b0.dist-info → ibm_watsonx_orchestrate-1.10.1.dist-info}/licenses/LICENSE +0 -0
ibm_watsonx_orchestrate/flow_builder/flows/flow.py
@@ -25,13 +25,13 @@ from ibm_watsonx_orchestrate.client.tools.tool_client import ToolClient
  from ibm_watsonx_orchestrate.client.tools.tempus_client import TempusClient
  from ibm_watsonx_orchestrate.client.utils import instantiate_client
  from ..types import (
-     EndNodeSpec, Expression, ForeachPolicy, ForeachSpec, LoopSpec, BranchNodeSpec, MatchPolicy, PromptLLMParameters, PromptNodeSpec,
+     DocProcKVPSchema, Assignment, Conditions, EndNodeSpec, Expression, ForeachPolicy, ForeachSpec, LoopSpec, BranchNodeSpec, MatchPolicy, NodeIdCondition, PlainTextReadingOrder, PromptExample, PromptLLMParameters, PromptNodeSpec, TimerNodeSpec,
      StartNodeSpec, ToolSpec, JsonSchemaObject, ToolRequestBody, ToolResponseBody, UserFieldKind, UserFieldOption, UserFlowSpec, UserNodeSpec, WaitPolicy,
-     DocProcSpec, TextExtractionResponse, DocProcInput, DecisionsNodeSpec, DecisionsRule, DocExtSpec, File
+     DocProcSpec, TextExtractionResponse, DocProcInput, DecisionsNodeSpec, DecisionsRule, DocExtSpec, File, DocumentClassificationResponse, DocClassifierSpec, DocumentProcessingCommonInput
  )
  from .constants import CURRENT_USER, START, END, ANY_USER
  from ..node import (
-     EndNode, Node, PromptNode, StartNode, UserNode, AgentNode, DataMap, ToolNode, DocProcNode, DecisionsNode, DocExtNode
+     EndNode, Node, PromptNode, StartNode, UserNode, AgentNode, DataMap, ToolNode, DocProcNode, DecisionsNode, DocExtNode, DocClassifierNode
  )
  from ..types import (
      AgentNodeSpec, extract_node_spec, FlowContext, FlowEventType, FlowEvent, FlowSpec,
@@ -64,7 +64,7 @@ class FlowEdge(BaseModel):

  class Flow(Node):
      '''Flow represents a flow that will be run by wxO Flow engine.'''
-     output_map: DataMap | None = None
+     output_map: dict[str, DataMap] | None = None
      nodes: dict[str, SerializeAsAny[Node]] = {}
      edges: List[FlowEdge] = []
      schemas: dict[str, JsonSchemaObject] = {}
@@ -401,6 +401,7 @@ class Flow(Node):
                 display_name: str|None=None,
                 system_prompt: str | list[str] | None = None,
                 user_prompt: str | list[str] | None = None,
+                prompt_examples: list[PromptExample] | None = None,
                 llm: str | None = None,
                 llm_parameters: PromptLLMParameters | None = None,
                 description: str | None = None,
@@ -422,6 +423,7 @@ class Flow(Node):
              description=description,
              system_prompt=system_prompt,
              user_prompt=user_prompt,
+             prompt_examples=prompt_examples,
              llm=llm,
              llm_parameters=llm_parameters,
              input_schema=_get_tool_request_body(input_schema_obj),
@@ -438,23 +440,95 @@ class Flow(Node):
          node = self._add_node(node)
          return cast(PromptNode, node)

+     def docclassfier(self,
+                      name: str,
+                      llm : str = "watsonx/meta-llama/llama-3-2-90b-vision-instruct",
+                      version: str = "TIP",
+                      display_name: str| None = None,
+                      classes: type[BaseModel]| None = None,
+                      description: str | None = None,
+                      min_confidence: float = 0.0,
+                      input_map: DataMap = None) -> DocClassifierNode:
+
+         if name is None :
+             raise ValueError("name must be provided.")
+
+         doc_classifier_config = DocClassifierNode.generate_config(llm=llm, min_confidence=min_confidence,input_classes=classes)
+
+         input_schema_obj = _get_json_schema_obj(parameter_name = "input", type_def = DocumentProcessingCommonInput)
+         output_schema_obj = _get_json_schema_obj(parameter_name = "output", type_def = DocumentClassificationResponse)
+
+         if "$defs" in output_schema_obj.model_extra:
+             output_schema_obj.model_extra.pop("$defs")
+         # Create the docclassifier spec
+         task_spec = DocClassifierSpec(
+             name=name,
+             display_name=display_name if display_name is not None else name,
+             description=description,
+             input_schema=_get_tool_request_body(input_schema_obj),
+             output_schema=_get_tool_response_body(output_schema_obj),
+             output_schema_object = output_schema_obj,
+             config=doc_classifier_config,
+             version=version
+         )
+         node = DocClassifierNode(spec=task_spec)
+
+         # setup input map
+         if input_map:
+             node.input_map = self._get_data_map(input_map)
+
+         # add the node to the list of node
+
+         node = self._add_node(node)
+         return cast(DocClassifierNode, node)
+
+
+     def timer(self,
+               name: str,
+               delay: int,
+               display_name: str | None = None,
+               description: str | None = None,
+               input_map: DataMap = None) -> Node:
+
+         if name is None:
+             raise ValueError("name must be provided.")
+         if delay < 0:
+             raise ValueError("delay must be non-negative.")
+
+         timer_spec = TimerNodeSpec(
+             name=name,
+             display_name=display_name if display_name is not None else name,
+             description=description,
+             delay=delay
+         )
+
+         node = Node(spec=timer_spec)
+
+         if input_map:
+             node.input_map = self._get_data_map(input_map)
+
+         node = self._add_node(node)
+         return node
+
+
      def docext(self,
                 name: str,
-                llm : str = "meta-llama/llama-3-2-11b-vision-instruct",
+                llm : str = "watsonx/meta-llama/llama-3-2-90b-vision-instruct",
                 version: str = "TIP",
                 display_name: str| None = None,
-                input_entities: type[BaseModel]| None = None,
+                fields: type[BaseModel]| None = None,
                 description: str | None = None,
-                input_map: DataMap = None) -> tuple[DocExtNode, type[BaseModel]]:
+                input_map: DataMap = None,
+                enable_hw: bool = False) -> tuple[DocExtNode, type[BaseModel]]:

          if name is None :
              raise ValueError("name must be provided.")

-         doc_ext_config = DocExtNode.generate_config(llm=llm, input_entites=input_entities)
+         doc_ext_config = DocExtNode.generate_config(llm=llm, fields=fields)

-         DocExtFieldValue = DocExtNode.generate_docext_field_value_model(input_entities=input_entities)
+         DocExtFieldValue = DocExtNode.generate_docext_field_value_model(fields=fields)

-         input_schema_obj = _get_json_schema_obj(parameter_name = "input", type_def = File)
+         input_schema_obj = _get_json_schema_obj(parameter_name = "input", type_def = DocumentProcessingCommonInput)
          output_schema_obj = _get_json_schema_obj("output", DocExtFieldValue)

          if "$defs" in output_schema_obj.model_extra:
@@ -469,7 +543,8 @@ class Flow(Node):
              output_schema=_get_tool_response_body(output_schema_obj),
              output_schema_object = output_schema_obj,
              config=doc_ext_config,
-             version=version
+             version=version,
+             enable_hw=enable_hw
          )
          node = DocExtNode(spec=task_spec)

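docext() now takes a `fields` model (renamed from `input_entities`), defaults to a fully qualified watsonx model id, accepts an `enable_hw` flag, and uses the shared DocumentProcessingCommonInput input schema. A sketch, assuming `aflow` is an existing Flow instance and `invoice_fields` is a Pydantic model instance whose values carry the per-field `name`/`type` metadata read by generate_docext_field_value_model():

    # Hypothetical usage sketch; `aflow` and `invoice_fields` are assumptions, not part of this diff.
    extract_node, ExtractedFields = aflow.docext(
        name="extract_invoice_fields",
        fields=invoice_fields,
        enable_hw=True,             # handwriting support is an assumption; the diff only shows the flag
    )
    # ExtractedFields is the generated DocExtFieldValue model used as the node's output schema.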
@@ -528,9 +603,12 @@ class Flow(Node):
      def docproc(self,
                  name: str,
                  task: str,
+                 plain_text_reading_order : PlainTextReadingOrder = PlainTextReadingOrder.block_structure,
                  display_name: str|None=None,
                  description: str | None = None,
-                 input_map: DataMap = None) -> DocProcNode:
+                 input_map: DataMap = None,
+                 kvp_schemas: list[DocProcKVPSchema] = None,
+                 enable_hw: bool = False) -> DocProcNode:

          if name is None :
              raise ValueError("name must be provided.")
@@ -552,7 +630,10 @@ class Flow(Node):
              input_schema=_get_tool_request_body(input_schema_obj),
              output_schema=_get_tool_response_body(output_schema_obj),
              output_schema_object = output_schema_obj,
-             task=task
+             task=task,
+             plain_text_reading_order=plain_text_reading_order,
+             enable_hw=enable_hw,
+             kvp_schemas=kvp_schemas
          )

          node = DocProcNode(spec=task_spec)
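docproc() gains a plain-text reading-order option, optional key-value-pair schemas, and the same enable_hw flag, all forwarded into DocProcSpec. A sketch, assuming `aflow` is an existing Flow instance and that "text_extraction" is an accepted task value (the valid task names are not shown in this diff):

    from ibm_watsonx_orchestrate.flow_builder.types import PlainTextReadingOrder

    # Hypothetical usage sketch; `aflow` and the task value are assumptions.
    text_node = aflow.docproc(
        name="extract_text",
        task="text_extraction",
        plain_text_reading_order=PlainTextReadingOrder.block_structure,
        enable_hw=True,
        kvp_schemas=None,           # optionally a list of DocProcKVPSchema objects
    )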
@@ -642,7 +723,7 @@ class Flow(Node):
          '''Create a single node flow with an automatic START and END node.'''
          return self.sequence(START, node, END)

-     def branch(self, evaluator: Union[Callable, Expression]) -> "Branch":
+     def branch(self, evaluator: Union[Callable, Expression, Conditions]) -> 'Branch':
          '''Create a BRANCH node'''
          e = evaluator
          if isinstance(evaluator, Callable):
@@ -656,11 +737,19 @@ class Flow(Node):
              # e = new_script_spec
          elif isinstance(evaluator, str):
              e = Expression(expression=evaluator)
+         elif isinstance(evaluator, list):
+             e = Conditions(conditions=evaluator)

          spec = BranchNodeSpec(name = "branch_" + str(self._next_sequence_id()), evaluator=e)
          branch_node = Branch(spec = spec, containing_flow=self)
          return cast(Branch, self._add_node(branch_node))

+     def conditions(self) -> 'Branch':
+         '''Create a Branch node with empty Conditions evaluator (if-else)'''
+         spec = BranchNodeSpec(name = "branch_" + str(self._next_sequence_id()), evaluator=Conditions(conditions=[]))
+         branch_conditions_node = Branch(spec = spec, containing_flow=self)
+         return cast(Branch, self._add_node(branch_conditions_node))
+
      def wait_for(self, *args) -> "Wait":
          '''Wait for all incoming nodes to complete.'''
          raise ValueError("Not implemented yet.")
@@ -675,6 +764,77 @@ class Flow(Node):

          # return cast(Wait, self.node(wait_node))

+     def map_flow_output_with_variable(self, target_output_variable: str, variable: str, default_value: str = None) -> Self:
+         if self.output_map and "spec" in self.output_map:
+             maps = self.output_map["spec"].maps or []
+         else:
+             maps = []
+
+         curr_map_metadata = {
+             "assignmentType": "variable"
+         }
+
+         target_variable = "flow.output." + target_output_variable
+         value_expression = "flow." + variable
+
+         if default_value:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, default_value=default_value, metadata=curr_map_metadata))
+         else:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, metadata=curr_map_metadata))
+
+         flow_output_map_spec = DataMap(maps=maps)
+
+         if self.output_map and "spec" in self.output_map:
+             self.output_map["spec"] = flow_output_map_spec
+         else:
+             self.output_map = {"spec": flow_output_map_spec}
+         return self
+
+     def map_output(self, output_variable: str, expression: str, default_value: str = None) -> Self:
+         if self.output_map and "spec" in self.output_map:
+             maps = self.output_map["spec"].maps or []
+         else:
+             maps = []
+
+         curr_map_metadata = {
+             "assignmentType": "pyExpression"
+         }
+
+         target_variable = "flow.output." + output_variable
+         value_expression = expression
+
+         if default_value:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, default_value=default_value, metadata=curr_map_metadata))
+         else:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, metadata=curr_map_metadata))
+
+         flow_output_map_spec = DataMap(maps=maps)
+
+         if self.output_map and "spec" in self.output_map:
+             self.output_map["spec"] = flow_output_map_spec
+         else:
+             self.output_map = {"spec": flow_output_map_spec}
+         return self
+
+     def map_flow_output_with_none(self, target_output_variable: str) -> Self:
+         if self.output_map and "spec" in self.output_map:
+             maps = self.output_map["spec"].maps or []
+         else:
+             maps = []
+
+
+         target_variable = "flow.output." + target_output_variable
+
+         maps.append(Assignment(target_variable=target_variable, value_expression=None))
+
+         flow_output_map_spec = DataMap(maps=maps)
+
+         if self.output_map and "spec" in self.output_map:
+             self.output_map["spec"] = flow_output_map_spec
+         else:
+             self.output_map = {"spec": flow_output_map_spec}
+         return self
+

      def foreach(self, item_schema: type[BaseModel],
                  input_schema: type[BaseModel] |None=None,
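The three new mapping helpers accumulate Assignment entries under flow.output: map_flow_output_with_variable() copies a flow variable ("variable" assignment), map_output() evaluates a Python expression ("pyExpression" assignment), and map_flow_output_with_none() clears a target. Each returns self, so calls chain. A sketch, assuming `aflow` is an existing Flow instance and the referenced variables and expressions exist in it:

    # Hypothetical variable names and expressions; only the method signatures come from this diff.
    aflow.map_flow_output_with_variable("status", "approval.output.status", default_value="pending") \
         .map_output("summary", "flow.extract_text.output.text") \
         .map_flow_output_with_none("debug_info")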
@@ -735,8 +895,6 @@ class Flow(Node):
                   input_schema: type[BaseModel] |None=None,
                   output_schema: type[BaseModel] |None=None) -> "UserFlow": # return a UserFlow object

-         raise ValueError("userflow is NOT supported yet and it's interface will change.")
-
          output_schema_obj = _get_json_schema_obj("output", output_schema)
          input_schema_obj = _get_json_schema_obj("input", input_schema)

@@ -838,6 +996,11 @@ class Flow(Node):
              for key, value in self.metadata.items():
                  metadata_dict[key] = value
              flow_dict["metadata"] = metadata_dict
+
+         if self.output_map and "spec" in self.output_map:
+             flow_dict["output_map"] = {
+                 "spec": self.output_map["spec"].to_json()
+             }
          return flow_dict

      def _get_node_id(self, node: Union[str, Node]) -> str:
@@ -1147,6 +1310,27 @@ class Branch(FlowControl):
              raise ValueError("Cannot have custom label __default__. Use default() instead.")

          return self._add_case(label, node)
+
+     def condition(self, to_node: Node, expression: str="", default: bool=False) -> Self:
+         '''
+         Add a condition to this branch node.
+
+         Parameters:
+             expression (str): The expression of this condition.
+             to_node (Node): The node to go to when expression is evaluated to true.
+             default (bool): The condition is the default (else) case.
+         '''
+
+         node_id = self.containing_flow._get_node_id(to_node)
+         if default:
+             condition = NodeIdCondition(node_id=node_id, default=default)
+         else:
+             condition = NodeIdCondition(expression=expression, node_id=node_id, default=default)
+
+         self.spec.evaluator.conditions.append(condition)
+         self.containing_flow.edge(self, to_node)
+
+         return self

      def default(self, node: Node) -> Self:
          '''
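condition() appends a NodeIdCondition to the branch's Conditions evaluator and wires an edge to the target node; default=True marks the else case. A sketch continuing the earlier conditions() example, assuming `approve_node` and `review_node` are nodes already added to `aflow`:

    # Hypothetical nodes and expression; only condition()'s signature comes from this diff.
    decision = aflow.conditions()
    decision.condition(to_node=approve_node, expression="flow.input.amount < 1000")
    decision.condition(to_node=review_node, default=True)      # else case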
@@ -1366,13 +1550,14 @@ class UserFlow(Flow):
                kind: UserFieldKind = UserFieldKind.Text,
                display_name: str | None = None,
                description: str | None = None,
+               direction: str | None = None,
                default: Any | None = None,
                text: str = None, # The text used to ask question to the user, e.g. 'what is your name?'
                option: UserFieldOption | None = None,
                is_list: bool = False,
                min: Any | None = None,
                max: Any | None = None,
-               input_map: DataMap = None,
+               input_map: DataMap | None= None,
                custom: dict[str, Any] = {}) -> UserNode:
          '''create a node in the flow'''
          # create a json schema object based on the single field
@@ -1404,6 +1589,8 @@ class UserFlow(Flow):
              description = description,
              default = default,
              text = text,
+             direction = direction,
+             input_map = input_map,
              option = option,
              is_list = is_list,
              min = min,
ibm_watsonx_orchestrate/flow_builder/node.py
@@ -6,14 +6,14 @@ import yaml
  from pydantic import BaseModel, Field, SerializeAsAny, create_model
  from enum import Enum

- from .types import EndNodeSpec, NodeSpec, AgentNodeSpec, PromptNodeSpec, StartNodeSpec, ToolNodeSpec, UserFieldKind, UserFieldOption, UserNodeSpec, DocProcSpec, \
-     DocExtSpec, DocExtConfig, LanguageCode, DecisionsNodeSpec
+ from .types import Assignment, DocExtConfigField, EndNodeSpec, NodeSpec, AgentNodeSpec, PromptNodeSpec, TimerNodeSpec, StartNodeSpec, ToolNodeSpec, UserFieldKind, UserFieldOption, UserNodeSpec, DocProcSpec, \
+     DocExtSpec, DocExtConfig, DocClassifierSpec, DecisionsNodeSpec, DocClassifierConfig

  from .data_map import DataMap

  class Node(BaseModel):
      spec: SerializeAsAny[NodeSpec]
-     input_map: DataMap | None = None
+     input_map: dict[str, DataMap] | None = None

      def __call__(self, **kwargs):
          pass
@@ -40,10 +40,77 @@ class Node(BaseModel):
      def to_json(self) -> dict[str, Any]:
          model_spec = {}
          model_spec["spec"] = self.spec.to_json()
-         if self.input_map is not None:
-             model_spec['input_map'] = self.input_map.to_json()
+         if self.input_map is not None and "spec" in self.input_map:
+             model_spec['input_map'] = {
+                 "spec": self.input_map["spec"].to_json()
+             }

          return model_spec
+
+     def map_node_input_with_variable(self, target_input_variable: str, variable: str, default_value: str = None) -> None:
+         if self.input_map and "spec" in self.input_map:
+             maps = self.input_map["spec"].maps or []
+         else:
+             maps = []
+
+         curr_map_metadata = {
+             "assignmentType": "variable"
+         }
+
+         target_variable = "self.input." + target_input_variable
+         value_expression = "flow." + variable
+
+         if default_value:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, default_value=default_value, metadata=curr_map_metadata))
+         else:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, metadata=curr_map_metadata))
+
+         node_input_map_spec = DataMap(maps=maps)
+         if self.input_map and "spec" in self.input_map:
+             self.input_map["spec"] = node_input_map_spec
+         else:
+             self.input_map = {"spec": node_input_map_spec}
+
+     def map_input(self, input_variable: str, expression: str, default_value: str = None) -> None:
+         if self.input_map and "spec" in self.input_map:
+             maps = self.input_map["spec"].maps or []
+         else:
+             maps = []
+
+         curr_map_metadata = {
+             "assignmentType": "pyExpression"
+         }
+
+         target_variable = "self.input." + input_variable
+         value_expression = expression
+
+         if default_value:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, default_value=default_value, metadata=curr_map_metadata))
+         else:
+             maps.append(Assignment(target_variable=target_variable, value_expression=value_expression, metadata=curr_map_metadata))
+
+         node_input_map_spec = DataMap(maps=maps)
+         if self.input_map and "spec" in self.input_map:
+             self.input_map["spec"] = node_input_map_spec
+         else:
+             self.input_map = {"spec": node_input_map_spec}
+
+     def map_node_input_with_none(self, target_input_variable: str) -> None:
+         if self.input_map and "spec" in self.input_map:
+             maps = self.input_map["spec"].maps or []
+         else:
+             maps = []
+
+
+         target_variable = "self.input." + target_input_variable
+
+         maps.append(Assignment(target_variable=target_variable, value_expression=None))
+
+         node_input_map_spec = DataMap(maps=maps)
+         if self.input_map and "spec" in self.input_map:
+             self.input_map["spec"] = node_input_map_spec
+         else:
+             self.input_map = {"spec": node_input_map_spec}

  class StartNode(Node):
      def __repr__(self):
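Node-level input maps follow the same pattern on the consuming side: targets are addressed as "self.input.<field>", sources are either a flow variable (map_node_input_with_variable() prefixes the name with "flow.") or a Python expression (map_input()), and input_map is now stored as a {"spec": DataMap} dict that to_json() serializes. A sketch, assuming `text_node` is a node already added to a flow and the referenced variables exist:

    # Hypothetical variable and expression values; only the method signatures come from this diff.
    text_node.map_node_input_with_variable("document_ref", "input.file")        # becomes flow.input.file
    text_node.map_input("language", "flow.detect_lang.output.language")
    text_node.map_node_input_with_none("page_range")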
@@ -83,6 +150,8 @@ class UserNode(Node):
                option: UserFieldOption | None = None,
                min: Any | None = None,
                max: Any | None = None,
+               direction: str | None = None,
+               input_map: DataMap | None = None,
                is_list: bool = False,
                custom: dict[str, Any] | None = None,
                widget: str | None = None):
@@ -97,7 +166,9 @@ class UserNode(Node):
              max=max,
              is_list=is_list,
              custom=custom,
-             widget=widget)
+             widget=widget,
+             direction=direction,
+             input_map=input_map)

  class AgentNode(Node):
      def __repr__(self):
@@ -120,6 +191,24 @@ class DocProcNode(Node):
      def get_spec(self) -> DocProcSpec:
          return cast(DocProcSpec, self.spec)

+ class DocClassifierNode(Node):
+     def __repr__(self):
+         return f"DocClassifierNode(name='{self.spec.name}', description='{self.spec.description}')"
+
+     def get_spec(self) -> DocClassifierSpec:
+         return cast(DocClassifierSpec, self.spec)
+
+     @staticmethod
+     def generate_config(llm: str, input_classes: type[BaseModel], min_confidence: float) -> DocClassifierConfig:
+         return DocClassifierConfig(llm=llm, classes=input_classes.__dict__.values(), min_confidence=min_confidence)
+
+ class TimerNode(Node):
+     def __repr__(self):
+         return f"TimerNode(name='{self.spec.name}', description='{self.spec.description}')"
+
+     def get_spec(self) -> TimerNodeSpec:
+         return cast(TimerNodeSpec, self.spec)
+
  class DocExtNode(Node):
      def __repr__(self):
          return f"DocExtNode(name='{self.spec.name}', description='{self.spec.description}')"
@@ -128,23 +217,29 @@ class DocExtNode(Node):
          return cast(DocExtSpec, self.spec)

      @staticmethod
-     def generate_config(llm: str, input_entites: type[BaseModel]) -> DocExtConfig:
-         entities = input_entites.__dict__.values()
-         return DocExtConfig(llm=llm, entities=entities)
+     def generate_config(llm: str, fields: type[BaseModel]) -> DocExtConfig:
+         return DocExtConfig(llm=llm, fields=fields.__dict__.values())

      @staticmethod
-     def generate_docext_field_value_model(input_entities: type[BaseModel]) -> type[BaseModel]:
+     def generate_docext_field_value_model(fields: type[BaseModel]) -> type[BaseModel]:
          create_field_value_description = lambda field_name: "Extracted value for " + field_name
+         field_definitions = {}
+
+         for name, value in fields.model_dump().items():
+             field_type = str
+             field_kwargs = {
+                 "title": value['name'],
+                 "description": create_field_value_description(value['name']),
+                 "type": value["type"] if value["type"] != "date" else "string"
+             }
+
+             # Add json_schema_extra if type is 'date'
+             if value["type"] == "date":
+                 field_kwargs["json_schema_extra"] = {"format": "date"}
+
+             field_definitions[name] = (field_type, Field(**field_kwargs))

-         DocExtFieldValue = create_model(
-             "DocExtFieldValue",
-             **{
-                 name: (str, Field(
-                     title=value['name'],
-                     description=create_field_value_description(value['name']),
-                     )
-                 )
-                 for name, value in input_entities.model_dump().items()})
+         DocExtFieldValue = create_model("DocExtFieldValue", **field_definitions)
          return DocExtFieldValue

  class DecisionsNode(Node):
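For fields of type 'date' the rewritten generate_docext_field_value_model() keeps the Python type as str but tags the JSON schema with format: "date". A standalone Pydantic illustration of that effect (not package code):

    from pydantic import Field, create_model

    DocExtFieldValue = create_model(
        "DocExtFieldValue",
        invoice_date=(str, Field(title="invoice_date",
                                 description="Extracted value for invoice_date",
                                 json_schema_extra={"format": "date"})),
    )
    # The generated JSON schema property carries type "string" plus format "date".
    print(DocExtFieldValue.model_json_schema()["properties"]["invoice_date"])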