waldiez 0.3.10__py3-none-any.whl → 0.3.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of waldiez might be problematic.

Files changed (45)
  1. waldiez/_version.py +1 -1
  2. waldiez/exporter.py +6 -69
  3. waldiez/exporting/agent/agent_exporter.py +22 -15
  4. waldiez/exporting/agent/utils/__init__.py +2 -4
  5. waldiez/exporting/agent/utils/captain_agent.py +254 -0
  6. waldiez/exporting/chats/chats_exporter.py +3 -3
  7. waldiez/exporting/chats/utils/sequential.py +1 -0
  8. waldiez/exporting/chats/utils/single_chat.py +3 -0
  9. waldiez/exporting/flow/flow_exporter.py +11 -3
  10. waldiez/exporting/flow/utils/def_main.py +15 -6
  11. waldiez/exporting/flow/utils/flow_content.py +11 -10
  12. waldiez/exporting/flow/utils/importing_utils.py +1 -0
  13. waldiez/exporting/models/models_exporter.py +7 -0
  14. waldiez/exporting/models/utils.py +4 -0
  15. waldiez/models/__init__.py +6 -0
  16. waldiez/models/agents/__init__.py +14 -0
  17. waldiez/models/agents/agent/agent.py +71 -8
  18. waldiez/models/agents/agents.py +13 -3
  19. waldiez/models/agents/captain_agent/__init__.py +15 -0
  20. waldiez/models/agents/captain_agent/captain_agent.py +45 -0
  21. waldiez/models/agents/captain_agent/captain_agent_data.py +62 -0
  22. waldiez/models/agents/captain_agent/captain_agent_lib_entry.py +38 -0
  23. waldiez/models/agents/extra_requirements.py +88 -0
  24. waldiez/models/common/__init__.py +2 -0
  25. waldiez/models/common/ag2_version.py +30 -0
  26. waldiez/models/common/base.py +4 -0
  27. waldiez/models/common/date_utils.py +2 -0
  28. waldiez/models/common/dict_utils.py +2 -0
  29. waldiez/models/flow/__init__.py +2 -0
  30. waldiez/models/flow/flow.py +88 -10
  31. waldiez/models/flow/flow_data.py +15 -1
  32. waldiez/models/flow/utils.py +61 -1
  33. waldiez/models/model/__init__.py +2 -0
  34. waldiez/models/model/extra_requirements.py +55 -0
  35. waldiez/models/model/model.py +8 -2
  36. waldiez/models/model/model_data.py +2 -1
  37. waldiez/models/waldiez.py +26 -75
  38. {waldiez-0.3.10.dist-info → waldiez-0.3.12.dist-info}/METADATA +38 -29
  39. {waldiez-0.3.10.dist-info → waldiez-0.3.12.dist-info}/RECORD +43 -37
  40. waldiez/exporting/agent/utils/agent_class_name.py +0 -36
  41. waldiez/exporting/agent/utils/agent_imports.py +0 -55
  42. {waldiez-0.3.10.dist-info → waldiez-0.3.12.dist-info}/WHEEL +0 -0
  43. {waldiez-0.3.10.dist-info → waldiez-0.3.12.dist-info}/entry_points.txt +0 -0
  44. {waldiez-0.3.10.dist-info → waldiez-0.3.12.dist-info}/licenses/LICENSE +0 -0
  45. {waldiez-0.3.10.dist-info → waldiez-0.3.12.dist-info}/licenses/NOTICE.md +0 -0
waldiez/_version.py CHANGED
@@ -2,4 +2,4 @@
  # Copyright (c) 2024 - 2025 Waldiez and contributors.
  """Version information for Waldiez."""
 
- __version__ = "0.3.10"
+ __version__ = "0.3.12"
waldiez/exporter.py CHANGED
@@ -6,21 +6,12 @@ to an autogen's flow with one or more chats.
 
  The resulting file(s): a `flow.py` file with one `main()` function
  to trigger the chat(s).
- If additional tools/skills are used,
- they are exported as their `skill_name` in the same directory with
- the `flow.py` file. So the `flow.py` could have entries like:
- `form {flow_name}_{skill1_name} import {skill1_name}`
- `form {flow_name}_{skill2_name} import {skill2_name}`
  """
 
- # pylint: disable=inconsistent-quotes
-
- import os
- import shutil
- import subprocess
- import sys
  from pathlib import Path
- from typing import List, Optional, Union
+ from typing import Union
+
+ import jupytext # type: ignore[import-untyped]
 
  from .exporting import FlowExporter
  from .models import Waldiez
@@ -125,25 +116,10 @@ class WaldiezExporter:
  py_path = path.with_suffix(".tmp.py")
  with open(py_path, "w", encoding="utf-8", newline="\n") as f:
  f.write(content)
- if not shutil.which("jupytext"): # pragma: no cover
- run_command(
- [sys.executable, "-m", "pip", "install", "jupytext"],
- allow_error=False,
- )
- run_command(
- [
- sys.executable,
- "-m",
- "jupytext",
- "--to",
- "notebook",
- str(py_path),
- ],
- allow_error=False,
- )
+ with open(py_path, "r", encoding="utf-8") as py_out:
+ content = jupytext.read(py_out, fmt="py:light")
  ipynb_path = str(py_path).replace(".tmp.py", ".tmp.ipynb")
- if not os.path.exists(ipynb_path): # pragma: no cover
- raise RuntimeError("Could not generate notebook")
+ jupytext.write(content, ipynb_path, fmt="ipynb")
  Path(ipynb_path).rename(ipynb_path.replace(".tmp.ipynb", ".ipynb"))
  py_path.unlink(missing_ok=True)
 
@@ -182,42 +158,3 @@ class WaldiezExporter:
  """
  with open(file_path, "w", encoding="utf-8", newline="\n") as file:
  file.write(self.waldiez.model_dump_json())
-
-
- def run_command(
- cmd: List[str],
- cwd: Optional[Path] = None,
- allow_error: bool = True,
- ) -> None:
- """Run a command.
-
- Parameters
- ----------
- cmd : List[str]
- The command to run.
- cwd : Path, optional
- The working directory, by default None (current working directory).
- allow_error : bool, optional
- Whether to allow errors, by default True.
-
- Raises
- ------
- RuntimeError
- If the command fails and allow_error is False.
- """
- if not cwd:
- cwd = Path.cwd()
- # pylint: disable=broad-except
- try:
- subprocess.run(
- cmd,
- check=True,
- cwd=cwd,
- env=os.environ,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- ) # nosemgrep # nosec
- except BaseException as error: # pragma: no cover
- if allow_error:
- return
- raise RuntimeError(f"Error running command: {error}") from error
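
The notebook conversion above now goes through the jupytext library API instead of installing and shelling out to the jupytext CLI. A minimal standalone sketch of the same round-trip (file names are illustrative):

# Sketch only: convert a light-format Python script to a notebook with jupytext.
import jupytext  # same dependency the exporter now imports directly

with open("flow.tmp.py", "r", encoding="utf-8") as py_out:
    notebook = jupytext.read(py_out, fmt="py:light")      # parse the script into a notebook object
jupytext.write(notebook, "flow.tmp.ipynb", fmt="ipynb")   # serialize it as a .ipynb file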
waldiez/exporting/agent/agent_exporter.py CHANGED
@@ -6,7 +6,7 @@
  from pathlib import Path
  from typing import Callable, Dict, List, Optional, Tuple, Union
 
- from waldiez.models import WaldiezAgent, WaldiezChat
+ from waldiez.models import WaldiezAgent, WaldiezChat, WaldiezModel
 
  from ..base import (
  AgentPosition,
@@ -18,9 +18,8 @@ from ..base import (
  ImportPosition,
  )
  from .utils import (
- get_agent_class_name,
  get_agent_code_execution_config,
- get_agent_imports,
+ get_captain_agent_extras,
  get_group_manager_extras,
  get_is_termination_message,
  get_rag_user_extras,
@@ -36,9 +35,9 @@ class AgentExporter(BaseExporter, ExporterMixin):
  self,
  agent: WaldiezAgent,
  agent_names: Dict[str, str],
- model_names: Dict[str, str],
- skill_names: Dict[str, str],
+ models: Tuple[List[WaldiezModel], Dict[str, str]],
  chats: Tuple[List[WaldiezChat], Dict[str, str]],
+ skill_names: Dict[str, str],
  is_async: bool,
  group_chat_members: List[WaldiezAgent],
  for_notebook: bool,
@@ -53,12 +52,12 @@ class AgentExporter(BaseExporter, ExporterMixin):
  The agent to export.
  agent_names : Dict[str, str]
  The agent ids to names mapping.
- model_names : Dict[str, str]
- The model ids to names mapping.
- skill_names : Dict[str, str]
- The skill ids to names mapping.
+ models : Tuple[List[WaldiezModel], Dict[str, str]]
+ All the models and the model ids to names mapping.
  chats : Tuple[List[WaldiezChat], Dict[str, str]]
  All the chats and the chat ids to names mapping.
+ skill_names : Dict[str, str]
+ The skill ids to names mapping.
  is_async : bool
  Whether the whole flow is async.
  for_notebook : bool
@@ -72,14 +71,14 @@ class AgentExporter(BaseExporter, ExporterMixin):
  if output_dir is not None and not isinstance(output_dir, Path):
  output_dir = Path(output_dir)
  self.output_dir = output_dir
- self.model_names = model_names
+ self.models = models[0]
+ self.model_names = models[1]
  self.skill_names = skill_names
  self.arguments_resolver = arguments_resolver
  self.group_chat_members = group_chat_members
  self.chats = chats
  self.is_async = is_async
  self._agent_name = agent_names[agent.id]
- self._agent_class = get_agent_class_name(self.agent)
  # content, argument, import
  self._code_execution = get_agent_code_execution_config(
  agent=self.agent,
@@ -119,6 +118,13 @@
  agent=self.agent,
  serializer=self.serializer,
  )
+ self._captain = get_captain_agent_extras(
+ agent=self.agent,
+ agent_names=self.agent_names,
+ all_models=self.models,
+ serializer=self.serializer,
+ output_dir=self.output_dir,
+ )
 
  def get_imports(self) -> Optional[List[Tuple[str, ImportPosition]]]:
  """Get the imports.
@@ -130,7 +136,7 @@
  """
  position = ImportPosition.THIRD_PARTY
  # default imports based on the agent class.
- agent_imports = get_agent_imports(self._agent_class)
+ agent_imports = self.agent.ag2_imports
  # if code execution is enabled, update the imports.
  if self._code_execution[2]:
  agent_imports.add(self._code_execution[2])
@@ -220,7 +226,6 @@
  """
  agent = self.agent
  agent_name = self._agent_name
- agent_class = self._agent_class
  retrieve_arg = self._rag[1]
  group_chat_arg = self._group_chat[1]
  is_termination = self._termination[0]
@@ -231,8 +236,10 @@
  default_auto_reply = (
  f'"{self.string_escape(agent.data.agent_default_auto_reply)}"'
  )
- extras = f"{group_chat_arg}{retrieve_arg}{self._reasoning}"
- agent_str = f"""{agent_name} = {agent_class}(
+ extras = (
+ f"{group_chat_arg}{retrieve_arg}{self._reasoning}{self._captain}"
+ )
+ agent_str = f"""{agent_name} = {self.agent.ag2_class}(
  name="{agent_name}",
  description="{agent.description}"{system_message_arg},
  human_input_mode="{agent.data.human_input_mode}",
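
With the captain fragment appended to extras, the constructor call emitted for a captain agent ends up shaped roughly like the sketch below. The class name comes from agent.ag2_class and the keyword arguments from get_captain_agent_extras; the agent name, file paths, and values shown here are illustrative assumptions, not output copied from the exporter.

# Rough shape of the emitted instantiation for a captain agent (illustrative values):
captain = CaptainAgent(
    name="captain",
    description="A captain agent.",
    human_input_mode="NEVER",
    agent_config_save_path=r".",
    agent_lib=r"captain_agent_lib.json",
    tool_lib="default",
    nested_config={...},  # built by generate_nested_config in captain_agent.py below
)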
waldiez/exporting/agent/utils/__init__.py CHANGED
@@ -2,8 +2,7 @@
  # Copyright (c) 2024 - 2025 Waldiez and contributors.
  """Utility functions for generating agent related strings."""
 
- from .agent_class_name import get_agent_class_name
- from .agent_imports import get_agent_imports
+ from .captain_agent import get_captain_agent_extras
  from .code_execution import get_agent_code_execution_config
  from .group_manager import get_group_manager_extras
  from .rag_user import get_rag_user_extras
@@ -13,10 +12,9 @@ from .teachability import get_agent_teachability_string
  from .termination_message import get_is_termination_message
 
  __all__ = [
- "get_agent_class_name",
- "get_agent_imports",
  "get_agent_code_execution_config",
  "get_agent_teachability_string",
+ "get_captain_agent_extras",
  "get_group_manager_extras",
  "get_is_termination_message",
  "get_rag_user_extras",
waldiez/exporting/agent/utils/captain_agent.py ADDED
@@ -0,0 +1,254 @@
+ # SPDX-License-Identifier: Apache-2.0.
+ # Copyright (c) 2024 - 2025 Waldiez and contributors.
+ """ "Extras for exporting a captain agent."""
+
+ import json
+ import os
+ from datetime import datetime, timezone
+ from pathlib import Path
+ from typing import Any, Callable, Dict, List, Optional, Union
+
+ from waldiez.models import (
+ WaldiezAgent,
+ WaldiezCaptainAgent,
+ WaldiezModel,
+ WaldiezModelData,
+ )
+
+
+ def get_captain_agent_extras(
+ agent: WaldiezAgent,
+ agent_names: Dict[str, str],
+ all_models: List[WaldiezModel],
+ serializer: Callable[..., str],
+ output_dir: Optional[Union[str, Path]],
+ ) -> str:
+ """Get the extra args for the captain agent.
+
+ Parameters
+ ----------
+ agent : WaldiezAgent
+ The agent.
+ agent_names : Dict[str, str]
+ A mapping of agent ids to agent names.
+ all_models : List[WaldiezModel]
+ All the models in the flow.
+ serializer : Callable[..., str]
+ The serializer to use.
+ output_dir : Optional[Union[str, Path]]
+ The output directory to save the agent lib and nested config.
+ Returns
+ -------
+ str
+ The extra args to use in the captain agent.
+ """
+ # extra args: nested_config, agent_lib, tool_lib
+ if not isinstance(agent, WaldiezCaptainAgent):
+ return ""
+ agent_name = agent_names[agent.id]
+ save_path = str(output_dir) if output_dir else "."
+ extra_args_content = "\n" + f' agent_config_save_path=r"{save_path}",'
+ if agent.data.agent_lib:
+ lib_dict = [
+ lib.model_dump(by_alias=False) for lib in agent.data.agent_lib
+ ]
+ lib_json_name = f"{agent_name}_agent_lib.json"
+ agent_lib_path = os.path.join(save_path, lib_json_name)
+ with open(agent_lib_path, "w", encoding="utf-8", newline="\n") as f:
+ json.dump(lib_dict, f, ensure_ascii=False, indent=4)
+ extra_args_content += "\n" + f' agent_lib=r"{agent_lib_path}",'
+ if agent.data.tool_lib:
+ extra_args_content += "\n" + f' tool_lib="{agent.data.tool_lib}",'
+ nested_config = generate_nested_config(
+ agent,
+ agent_name,
+ all_models,
+ save_path,
+ )
+ serialized_nested_config = serializer(nested_config)
+ extra_args_content += (
+ "\n" + f" nested_config={serialized_nested_config},"
+ )
+ return extra_args_content
+
+
+ def generate_nested_config(
+ agent: WaldiezCaptainAgent,
+ agent_name: str,
+ all_models: List[WaldiezModel],
+ save_path: str,
+ ) -> Dict[str, Any]:
+ """Generate the nested config for the captain agent.
+
+ Parameters
+ ----------
+ agent : WaldiezCaptainAgent
+ The captain agent.
+ agent_name : str
+ The agent name.
+ all_models : List[WaldiezModel]
+ All the models in the flow.
+ save_path : str
+ The path to save the nested config.
+ Returns
+ -------
+ Dict[str, Any]
+ The nested config.
+ """
+ config_file_or_env_name = f"{agent_name}_llm_config.json"
+ llm_config = get_llm_config(agent, all_models)
+ to_serialize = {
+ "config_list": [llm_config],
+ }
+ os.makedirs(save_path, exist_ok=True)
+ config_file_or_env_path = os.path.join(save_path, config_file_or_env_name)
+ with open(
+ config_file_or_env_path, "w", encoding="utf-8", newline="\n"
+ ) as f:
+ json.dump(to_serialize, f, ensure_ascii=False, indent=4)
+ config_file_or_env = f'r"{config_file_or_env_path}"'
+ nested_config = {
+ "autobuild_init_config": {
+ "config_file_or_env": config_file_or_env,
+ "builder_model": llm_config["model"],
+ "agent_model": llm_config["model"],
+ },
+ "autobuild_build_config": get_auto_build_build_config(
+ agent, llm_config
+ ),
+ "group_chat_config": {"max_round": agent.data.max_round},
+ "group_chat_llm_config": None,
+ "max_turns": agent.data.max_turns,
+ }
+ return nested_config
+
+
+ def get_llm_config(
+ agent: WaldiezAgent,
+ all_models: List[WaldiezModel],
+ ) -> Dict[str, Any]:
+ """Get the config list environment variable name and its dict value.
+
+ Parameters
+ ----------
+ agent : WaldiezAgent
+ The agent.
+ all_models : List[WaldiezModel]
+ All the models in the flow.
+ Returns
+ -------
+ Dict[str, str]
+ The llm config dict.
+ """
+ model_name = "gpt-4o"
+ temperature: Optional[float] = 1
+ top_p: Optional[float] = 0.95
+ max_tokens: Optional[int] = 2048
+ if agent.data.model_ids:
+ waldiez_model = get_waldiez_model(agent.data.model_ids[0], all_models)
+ model_name = waldiez_model.name
+ temperature = waldiez_model.data.temperature
+ top_p = waldiez_model.data.top_p
+ max_tokens = waldiez_model.data.max_tokens
+ config_dict = {
+ "model": model_name,
+ "temperature": temperature,
+ "top_p": top_p,
+ "max_tokens": max_tokens,
+ }
+ return config_dict
+
+
+ def get_auto_build_build_config(
+ agent: WaldiezAgent,
+ llm_config: Dict[str, Any],
+ ) -> Dict[str, Any]:
+ """Get the auto build build config.
+
+ Parameters
+ ----------
+ agent : WaldiezAgent
+ The agent.
+ llm_config : Dict[str, Any]
+ The llm config.
+
+ Returns
+ -------
+ Dict[str, Any]
+ The auto build build config.
+ """
+ coding = False
+ code_execution_config = {
+ "timeout": 300,
+ "work_dir": "groupchat",
+ "last_n_messages": 1,
+ "use_docker": False,
+ }
+ if agent.data.code_execution_config is not False:
+ coding = True
+ code_execution_config["work_dir"] = (
+ agent.data.code_execution_config.work_dir or "groupchat"
+ )
+ code_execution_config["last_n_messages"] = (
+ agent.data.code_execution_config.last_n_messages or 1
+ )
+ code_execution_config["timeout"] = (
+ agent.data.code_execution_config.timeout or 300
+ )
+ return {
+ "default_llm_config": {
+ "temperature": llm_config["temperature"],
+ "top_p": llm_config["top_p"],
+ "max_tokens": llm_config["max_tokens"],
+ },
+ "code_execution_config": code_execution_config,
+ "coding": coding,
+ }
+
+
+ def get_waldiez_model(
+ model_id: str, all_models: List[WaldiezModel]
+ ) -> WaldiezModel:
+ """Get the model name from the model id.
+
+ Parameters
+ ----------
+ model_id : str
+ The model id.
+ all_models : List[WaldiezModel]
+ All the models in the flow.
+
+ Returns
+ -------
+ str
+ The model name.
+ """
+ for model in all_models:
+ if model.id == model_id:
+ return model
+ now = (
+ datetime.now(tz=timezone.utc)
+ .isoformat(timespec="milliseconds")
+ .replace("+00:00", "Z")
+ )
+ return WaldiezModel(
+ id=model_id,
+ type="model",
+ name="gpt-4o",
+ description="The GPT-4o model.",
+ tags=["gpt-4o"],
+ requirements=[],
+ created_at=now,
+ updated_at=now,
+ data=WaldiezModelData(
+ api_type="openai",
+ temperature=1,
+ top_p=0.95,
+ max_tokens=2048,
+ base_url=None,
+ api_key=os.environ.get("OPENAI_API_KEY", "REPLACE_ME"),
+ api_version=None,
+ default_headers={},
+ price=None,
+ ),
+ )
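
For reference, the nested_config that generate_nested_config produces has roughly the following shape. The llm values are the fallbacks from get_llm_config; the agent name behind the JSON path, max_round, and max_turns are illustrative, since they come from the flow being exported.

# Approximate shape of the captain agent's nested_config (illustrative values):
{
    "autobuild_init_config": {
        "config_file_or_env": 'r"./captain_llm_config.json"',
        "builder_model": "gpt-4o",
        "agent_model": "gpt-4o",
    },
    "autobuild_build_config": {
        "default_llm_config": {"temperature": 1, "top_p": 0.95, "max_tokens": 2048},
        "code_execution_config": {
            "timeout": 300,
            "work_dir": "groupchat",
            "last_n_messages": 1,
            "use_docker": False,
        },
        "coding": False,
    },
    "group_chat_config": {"max_round": 10},
    "group_chat_llm_config": None,
    "max_turns": 5,
}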
waldiez/exporting/chats/chats_exporter.py CHANGED
@@ -97,7 +97,7 @@ class ChatsExporter(BaseExporter, ExporterMixin):
  recipient=recipient,
  serializer=self.serializer,
  string_escape=self.string_escape,
- tabs=0 if self.for_notebook else 1,
+ tabs=1 if self.for_notebook else 2,
  is_async=self.is_async,
  )
  return
@@ -109,7 +109,7 @@
  chat_names=self.chat_names,
  serializer=self.serializer,
  string_escape=self.string_escape,
- tabs=0 if self.for_notebook else 1,
+ tabs=1 if self.for_notebook else 2,
  is_async=self.is_async,
  )
  return
@@ -119,7 +119,7 @@
  chat_names=self.chat_names,
  serializer=self.serializer,
  string_escape=self.string_escape,
- tabs=0 if self.for_notebook else 1,
+ tabs=1 if self.for_notebook else 2,
  is_async=self.is_async,
  )
 
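
The bump from tabs=0/1 to tabs=1/2 in the three calls above reflects that the generated chat code now sits one indentation level deeper, inside the with Cache.disk(...) as cache: block added by the flow exporter and get_def_main (see the flow_exporter.py and def_main.py changes below). Illustratively, with an assumed seed of 42:

# One extra indentation level per chat call (illustrative agents and seed):
with Cache.disk(cache_seed=42) as cache:
    results = user.initiate_chat(assistant, cache=cache, message="...")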
waldiez/exporting/chats/utils/sequential.py CHANGED
@@ -181,6 +181,7 @@ def _get_chat_dict_string(
  chat_string = "{"
  chat_string += "\n" + f'{tab} "sender": {agent_names[sender.id]},'
  chat_string += "\n" + f'{tab} "recipient": {agent_names[recipient.id]},'
+ chat_string += "\n" + f'{tab} "cache": cache,'
  additional_methods_string = ""
  for key, value in chat_args.items():
  if isinstance(value, str):
waldiez/exporting/chats/utils/single_chat.py CHANGED
@@ -86,6 +86,7 @@ def export_single_chat(
  agent1.initiate_chat(
  agent2,
  message="Hello, how are you?",
+ cache=cache,
  )
  ```
  """
@@ -167,6 +168,7 @@ def get_simple_chat_string(
  recipient_name = agent_names[recipient.id]
  chat_string = "\n" + f"{tab}results = {sender_name}.{initiate}(" + "\n"
  chat_string += f"{tab} {recipient_name},"
+ chat_string += "\n" + f"{tab} cache=cache,"
  for key, value in chat_args.items():
  if isinstance(value, str):
  chat_string += "\n" + f'{tab} {key}="{value}",'
@@ -228,6 +230,7 @@ def get_empty_simple_chat_string(
  initiate = "a_initiate_chat" if is_async else "initiate_chat"
  content = "\n" + f"{tab}results = {sender_name}.{initiate}(" + "\n"
  content += f"{tab} {recipient_name}," + "\n"
+ content += f"{tab} cache=cache," + "\n"
  message_arg, _ = get_chat_message(
  tab=tab,
  chat=chat,
waldiez/exporting/flow/flow_exporter.py CHANGED
@@ -202,6 +202,7 @@ class FlowExporter(BaseExporter, ExporterMixin):
  The merged export contents.
  """
  is_async = self.waldiez.is_async
+ cache_seed = self.waldiez.cache_seed
  content = (
  get_py_content_start(self.waldiez)
  if not self.for_notebook
@@ -236,9 +237,15 @@
  chats_content,
  after_run=after_run,
  is_async=self.waldiez.is_async,
+ cache_seed=cache_seed,
  )
  else:
- content += "\n" + chats_content + "\n"
+ if chats_content.startswith("\n"):
+ chats_content = chats_content[1:]
+ content += (
+ "\n" + f"with Cache.disk(cache_seed={cache_seed}) as cache:"
+ "\n" + chats_content + "\n"
+ )
  if is_async:
  content += "await stop_logging()"
  else:
@@ -332,6 +339,7 @@ class FlowExporter(BaseExporter, ExporterMixin):
  model_names=self.model_names,
  for_notebook=self.for_notebook,
  output_dir=self.output_dir,
+ cache_seed=self.waldiez.cache_seed,
  )
  return exporter.export()
 
@@ -420,9 +428,9 @@
  exporter = AgentExporter(
  agent=agent,
  agent_names=self.agent_names,
- model_names=self.model_names,
- skill_names=self.skill_names,
+ models=(self.models, self.model_names),
  chats=(self.chats, self.chat_names),
+ skill_names=self.skill_names,
  is_async=self.waldiez.is_async,
  for_notebook=self.for_notebook,
  output_dir=self.output_dir,
waldiez/exporting/flow/utils/def_main.py CHANGED
@@ -4,8 +4,12 @@
  # pylint: disable=inconsistent-quotes, line-too-long
  """Get the main function."""
 
+ from typing import Optional
 
- def get_def_main(flow_chats: str, after_run: str, is_async: bool) -> str:
+
+ def get_def_main(
+ flow_chats: str, after_run: str, is_async: bool, cache_seed: Optional[int]
+ ) -> str:
  """Get the main function.
 
  When exporting to python, waldiez_chats string will be the
@@ -22,17 +26,21 @@ def get_def_main(flow_chats: str, after_run: str, is_async: bool) -> str:
  The content after the run of the flow.
  is_async : bool
  Whether the main function is asynchronous.
+ cache_seed : Optional[int]
+ The seed for the cache. If None, cache should be disabled.
  Returns
  -------
  str
  The main function.
  """
+ if flow_chats.startswith("\n"):
+ flow_chats = flow_chats[1:]
  content = ""
  if is_async:
  content += "async "
- content += "def main():\n"
- content += " # type: () -> Union[ChatResult, List[ChatResult], Dict[int, ChatResult]]\n"
+ content += "def main() -> Union[ChatResult, List[ChatResult], Dict[int, ChatResult]]:\n"
  content += ' """Start chatting."""\n'
+ content += f" with Cache.disk(cache_seed={cache_seed}" + ") as cache:\n"
  content += f"{flow_chats}" + "\n"
  if is_async:
  content += " await stop_logging()"
@@ -54,11 +62,12 @@ def get_def_main(flow_chats: str, after_run: str, is_async: bool) -> str:
  content += " ordered_results = dict(sorted(results.items()))\n"
  content += " for _, result in ordered_results.items():\n"
  content += " pprint(asdict(result))\n"
- content += " elif isinstance(results, list):\n"
+ content += " else:\n"
+ content += " if not isinstance(results, list):\n"
+ content += " results = [results]\n"
  content += " for result in results:\n"
  content += " pprint(asdict(result))\n"
- content += " else:\n"
- content += " pprint(asdict(results))\n"
+ content += "\n\n"
  content += 'if __name__ == "__main__":\n'
  if is_async:
  content += " anyio.run(call_main)\n"
waldiez/exporting/flow/utils/flow_content.py CHANGED
@@ -136,25 +136,26 @@ def get_after_run_content(
  # if th eflow has reasoning agents, we add
  # visualize_tree(agent._root) for each agent
  content = ""
- space = " " * tabs
+ tab = " "
+ space = tab * tabs
  for agent in waldiez.agents:
  if agent.agent_type == "reasoning":
  agent_name = agent_names[agent.id]
  content += f"""
  {space}# pylint: disable=broad-except,too-many-try-statements
  {space}try:
- {space}{space}visualize_tree({agent_name}._root) # pylint: disable=protected-access
- {space}{space}if os.path.exists("tree_of_thoughts.png"):
- {space}{space}{space}new_name = "{agent_name}_tree_of_thoughts.png"
- {space}{space}{space}os.rename("tree_of_thoughts.png", new_name)
+ {space}{tab}visualize_tree({agent_name}._root) # pylint: disable=protected-access
+ {space}{tab}if os.path.exists("tree_of_thoughts.png"):
+ {space}{tab}{tab}new_name = "{agent_name}_tree_of_thoughts.png"
+ {space}{tab}{tab}os.rename("tree_of_thoughts.png", new_name)
  {space}except BaseException:
- {space}{space}pass
+ {space}{tab}pass
  {space}# save the tree to json
  {space}try:
- {space}{space}data = {agent_name}._root.to_dict() # pylint: disable=protected-access
- {space}{space}with open("{agent_name}_reasoning_tree.json", "w", encoding="utf-8") as f:
- {space}{space}{space}json.dump(data, f)
+ {space}{tab}data = {agent_name}._root.to_dict() # pylint: disable=protected-access
+ {space}{tab}with open("{agent_name}_reasoning_tree.json", "w", encoding="utf-8") as f:
+ {space}{tab}{tab}json.dump(data, f)
  {space}except BaseException:
- {space}{space}pass
+ {space}{tab}pass
  """
  return content