camel-ai 0.2.23__py3-none-any.whl → 0.2.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of camel-ai might be problematic.

camel/__init__.py CHANGED
@@ -14,7 +14,7 @@

  from camel.logger import disable_logging, enable_logging, set_log_level

- __version__ = '0.2.23'
+ __version__ = '0.2.24'

  __all__ = [
      '__version__',

@@ -23,23 +23,24 @@ class AnthropicConfig(BaseConfig):
      r"""Defines the parameters for generating chat completions using the
      Anthropic API.

-     See: https://docs.anthropic.com/claude/reference/complete_post
+     See: https://docs.anthropic.com/en/api/messages
      Args:
          max_tokens (int, optional): The maximum number of tokens to
              generate before stopping. Note that Anthropic models may stop
              before reaching this maximum. This parameter only specifies the
              absolute maximum number of tokens to generate.
              (default: :obj:`8192`)
-         stop_sequences (List[str], optional): Sequences that will cause the
-             model to stop generating completion text. Anthropic models stop
-             on "\n\nHuman:", and may include additional built-in stop sequences
-             in the future. By providing the stop_sequences parameter, you may
-             include additional strings that will cause the model to stop
-             generating. (default: :obj:`[]`)
+         stop_sequences (List[str], optional): Custom text sequences that will
+             cause the model to stop generating. The models will normally stop
+             when they have naturally completed their turn. If the model
+             encounters one of these custom sequences, the response will be
+             terminated and the stop_reason will be "stop_sequence".
+             (default: :obj:`[]`)
          temperature (float, optional): Amount of randomness injected into the
              response. Defaults to 1. Ranges from 0 to 1. Use temp closer to 0
              for analytical / multiple choice, and closer to 1 for creative
-             and generative tasks. (default: :obj:`1`)
+             and generative tasks. Note that even with temperature of 0.0, the
+             results will not be fully deterministic. (default: :obj:`1`)
          top_p (float, optional): Use nucleus sampling. In nucleus sampling, we
              compute the cumulative distribution over all the options for each
              subsequent token in decreasing probability order and cut it off

@@ -49,9 +50,20 @@ class AnthropicConfig(BaseConfig):
          top_k (int, optional): Only sample from the top K options for each
              subsequent token. Used to remove "long tail" low probability
              responses. (default: :obj:`5`)
-         metadata: An object describing metadata about the request.
          stream (bool, optional): Whether to incrementally stream the response
              using server-sent events. (default: :obj:`False`)
+         metadata (Union[dict, NotGiven], optional): An object describing
+             metadata about the request. Can include user_id as an external
+             identifier for the user associated with the request.
+             (default: :obj:`NotGiven()`)
+         thinking (Union[dict, NotGiven], optional): Configuration for enabling
+             Claude's extended thinking. When enabled, responses include
+             thinking content blocks showing Claude's thinking process.
+             (default: :obj:`NotGiven()`)
+         tool_choice (Union[dict, NotGiven], optional): How the model should
+             use the provided tools. The model can use a specific tool, any
+             available tool, decide by itself, or not use tools at all.
+             (default: :obj:`NotGiven()`)
      """

      max_tokens: int = 8192

@@ -60,11 +72,33 @@ class AnthropicConfig(BaseConfig):
      top_p: Union[float, NotGiven] = 0.7
      top_k: Union[int, NotGiven] = 5
      stream: bool = False
+     metadata: Union[dict, NotGiven] = NotGiven()
+     thinking: Union[dict, NotGiven] = NotGiven()
+     tool_choice: Union[dict, NotGiven] = NotGiven()

      def as_dict(self) -> dict[str, Any]:
          config_dict = super().as_dict()
-         if "tools" in config_dict:
-             del config_dict["tools"] # TODO: Support tool calling.
+         # Create a list of keys to remove to avoid modifying dict
+         keys_to_remove = [
+             key
+             for key, value in config_dict.items()
+             if isinstance(value, NotGiven)
+         ]
+
+         for key in keys_to_remove:
+             del config_dict[key]
+
+         # remove some keys if thinking is enabled
+         thinking_enabled = (
+             not isinstance(self.thinking, NotGiven)
+             and self.thinking["type"] == "enabled"
+         )
+         if thinking_enabled:
+             # `top_p`, `top_k`, `temperature` must be unset when thinking is
+             # enabled.
+             config_dict.pop("top_k", None)
+             config_dict.pop("top_p", None)
+             config_dict.pop("temperature", None)
          return config_dict
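
The new fields and the reworked as_dict() interact: any field left as NotGiven() is dropped from the request payload, and enabling extended thinking also strips the sampling parameters. A minimal sketch of that behavior, assuming AnthropicConfig is importable from camel.configs and using Anthropic's budget_tokens key, neither of which is defined by this diff:

    from camel.configs import AnthropicConfig

    config = AnthropicConfig(
        max_tokens=2048,
        # "type": "enabled" matches the check in as_dict(); "budget_tokens"
        # is an Anthropic API field, assumed here for illustration.
        thinking={"type": "enabled", "budget_tokens": 1024},
    )

    kwargs = config.as_dict()
    # metadata and tool_choice stay NotGiven() and are stripped; temperature,
    # top_p and top_k are popped because thinking is enabled.
    assert "metadata" not in kwargs
    assert "temperature" not in kwargs and "top_k" not in kwargs
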
@@ -84,7 +84,11 @@ class AnthropicModel(BaseModelBackend):
              index=0,
              message={
                  "role": "assistant",
-                 "content": response.content[0].text,
+                 "content": next(
+                     content.text
+                     for content in response.content
+                     if content.type == "text"
+                 ),
              },
              finish_reason=response.stop_reason,
          )
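
With extended thinking enabled, Anthropic responses can contain thinking blocks before the text block, so indexing content[0] is no longer safe; the change above selects the first block whose type is "text". An illustrative sketch with stand-in objects (not the Anthropic SDK types):

    from types import SimpleNamespace

    response = SimpleNamespace(
        content=[
            SimpleNamespace(type="thinking", thinking="..."),  # may come first
            SimpleNamespace(type="text", text="Final answer"),
        ]
    )

    # Same pattern as the diff: take the first text-typed block.
    text = next(c.text for c in response.content if c.type == "text")
    assert text == "Final answer"
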
@@ -468,6 +468,42 @@ class RolePlaying:

          return init_msg

+     async def ainit_chat(
+         self, init_msg_content: Optional[str] = None
+     ) -> BaseMessage:
+         r"""Asynchronously initializes the chat by resetting both of the
+         assistant and user agents. Returns an initial message for the
+         role-playing session.
+
+         Args:
+             init_msg_content (str, optional): A user-specified initial message.
+                 Will be sent to the role-playing session as the initial
+                 message. (default: :obj:`None`)
+
+         Returns:
+             BaseMessage: A single `BaseMessage` representing the initial
+                 message.
+         """
+         # Currently, reset() is synchronous, but if it becomes async in the
+         # future, we can await it here
+         self.assistant_agent.reset()
+         self.user_agent.reset()
+         default_init_msg_content = (
+             "Now start to give me instructions one by one. "
+             "Only reply with Instruction and Input."
+         )
+         if init_msg_content is None:
+             init_msg_content = default_init_msg_content
+
+         # Initialize a message sent by the assistant
+         init_msg = BaseMessage.make_assistant_message(
+             role_name=getattr(self.assistant_sys_msg, 'role_name', None)
+             or "assistant",
+             content=init_msg_content,
+         )
+
+         return init_msg
+
      def step(
          self,
          assistant_msg: BaseMessage,

@@ -549,3 +585,86 @@ class RolePlaying:
                  info=user_response.info,
              ),
          )
+
+     async def astep(
+         self,
+         assistant_msg: BaseMessage,
+     ) -> Tuple[ChatAgentResponse, ChatAgentResponse]:
+         r"""Asynchronously advances the conversation by taking a message from
+         the assistant, processing it using the user agent, and then processing
+         the resulting message using the assistant agent. Returns a tuple
+         containing the resulting assistant message, whether the assistant
+         agent terminated the conversation, and any additional assistant
+         information, as well as a tuple containing the resulting user message,
+         whether the user agent terminated the conversation, and any additional
+         user information.
+
+         Args:
+             assistant_msg: A `BaseMessage` representing the message from the
+                 assistant.
+
+         Returns:
+             Tuple[ChatAgentResponse, ChatAgentResponse]: A tuple containing two
+                 ChatAgentResponse: the first struct contains the resulting
+                 assistant message, whether the assistant agent terminated the
+                 conversation, and any additional assistant information; the
+                 second struct contains the resulting user message, whether the
+                 user agent terminated the conversation, and any additional user
+                 information.
+         """
+         user_response = await self.user_agent.astep(assistant_msg)
+         if user_response.terminated or user_response.msgs is None:
+             return (
+                 ChatAgentResponse(msgs=[], terminated=False, info={}),
+                 ChatAgentResponse(
+                     msgs=[],
+                     terminated=user_response.terminated,
+                     info=user_response.info,
+                 ),
+             )
+         user_msg = self._reduce_message_options(user_response.msgs)
+
+         # To prevent recording the same memory more than once (once in chat
+         # step and once in role play), and the model generates only one
+         # response when multi-response support is enabled.
+         if (
+             'n' in self.user_agent.model_backend.model_config_dict.keys()
+             and self.user_agent.model_backend.model_config_dict['n'] > 1
+         ):
+             self.user_agent.record_message(user_msg)
+
+         assistant_response = await self.assistant_agent.astep(user_msg)
+         if assistant_response.terminated or assistant_response.msgs is None:
+             return (
+                 ChatAgentResponse(
+                     msgs=[],
+                     terminated=assistant_response.terminated,
+                     info=assistant_response.info,
+                 ),
+                 ChatAgentResponse(
+                     msgs=[user_msg], terminated=False, info=user_response.info
+                 ),
+             )
+         assistant_msg = self._reduce_message_options(assistant_response.msgs)
+
+         # To prevent recording the same memory more than once (once in chat
+         # step and once in role play), and the model generates only one
+         # response when multi-response support is enabled.
+         if (
+             'n' in self.assistant_agent.model_backend.model_config_dict.keys()
+             and self.assistant_agent.model_backend.model_config_dict['n'] > 1
+         ):
+             self.assistant_agent.record_message(assistant_msg)
+
+         return (
+             ChatAgentResponse(
+                 msgs=[assistant_msg],
+                 terminated=assistant_response.terminated,
+                 info=assistant_response.info,
+             ),
+             ChatAgentResponse(
+                 msgs=[user_msg],
+                 terminated=user_response.terminated,
+                 info=user_response.info,
+             ),
+         )
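
ainit_chat() and astep() mirror the existing init_chat()/step() API, so a session can be driven from an event loop. A hedged sketch of such a loop; the RolePlaying constructor arguments shown here follow the existing synchronous examples and are not part of this diff:

    import asyncio

    from camel.societies import RolePlaying

    async def run_session() -> None:
        session = RolePlaying(
            assistant_role_name="Python Programmer",
            user_role_name="Product Manager",
            task_prompt="Prototype a command-line todo app.",
        )
        input_msg = await session.ainit_chat()
        for _ in range(3):
            assistant_response, user_response = await session.astep(input_msg)
            if assistant_response.terminated or user_response.terminated:
                break
            # Feed the assistant's reply back in as the next message.
            input_msg = assistant_response.msgs[0]

    asyncio.run(run_session())
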
@@ -54,7 +54,10 @@ from .audio_analysis_toolkit import AudioAnalysisToolkit
  from .excel_toolkit import ExcelToolkit
  from .video_analysis_toolkit import VideoAnalysisToolkit
  from .image_analysis_toolkit import ImageAnalysisToolkit
+ from .mcp_toolkit import MCPToolkit
  from .web_toolkit import WebToolkit
+ from .file_write_toolkit import FileWriteToolkit
+ from .terminal_toolkit import TerminalToolkit


  __all__ = [

@@ -93,9 +96,12 @@ __all__ = [
      'ZapierToolkit',
      'SymPyToolkit',
      'MinerUToolkit',
+     'MCPToolkit',
      'AudioAnalysisToolkit',
      'ExcelToolkit',
      'VideoAnalysisToolkit',
      'ImageAnalysisToolkit',
      'WebToolkit',
+     'FileWriteToolkit',
+     'TerminalToolkit',
  ]

@@ -0,0 +1,371 @@
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+
+
+ from datetime import datetime
+ from pathlib import Path
+ from typing import List, Optional, Union
+
+ from camel.logger import get_logger
+ from camel.toolkits.base import BaseToolkit
+ from camel.toolkits.function_tool import FunctionTool
+
+ logger = get_logger(__name__)
+
+ # Default format when no extension is provided
+ DEFAULT_FORMAT = '.md'
+
+
+ class FileWriteToolkit(BaseToolkit):
+     r"""A toolkit for creating, writing, and modifying text in files.
+
+     This class provides cross-platform (macOS, Linux, Windows) support for
+     writing to various file formats (Markdown, DOCX, PDF, and plaintext),
+     replacing text in existing files, automatic backups, custom encoding,
+     and enhanced formatting options for specialized formats.
+     """
+
+     def __init__(
+         self,
+         output_dir: str = "./",
+         timeout: Optional[float] = None,
+         default_encoding: str = "utf-8",
+         backup_enabled: bool = True,
+     ) -> None:
+         r"""Initialize the FileWriteToolkit.
+
+         Args:
+             output_dir (str): The default directory for output files.
+                 Defaults to the current working directory.
+             timeout (Optional[float]): The timeout for the toolkit.
+                 (default: :obj: `None`)
+             default_encoding (str): Default character encoding for text
+                 operations. (default: :obj: `utf-8`)
+             backup_enabled (bool): Whether to create backups of existing files
+                 before overwriting. (default: :obj: `True`)
+         """
+         super().__init__(timeout=timeout)
+         self.output_dir = Path(output_dir).resolve()
+         self.output_dir.mkdir(parents=True, exist_ok=True)
+         self.default_encoding = default_encoding
+         self.backup_enabled = backup_enabled
+         logger.info(
+             f"FileWriteToolkit initialized with output directory"
+             f": {self.output_dir}, encoding: {default_encoding}"
+         )
+
+     def _resolve_filepath(self, file_path: str) -> Path:
+         r"""Convert the given string path to a Path object.
+
+         If the provided path is not absolute, it is made relative to the
+         default output directory.
+
+         Args:
+             file_path (str): The file path to resolve.
+
+         Returns:
+             Path: A fully resolved (absolute) Path object.
+         """
+         path_obj = Path(file_path)
+         if not path_obj.is_absolute():
+             path_obj = self.output_dir / path_obj
+         return path_obj.resolve()
+
+     def _write_text_file(
+         self, file_path: Path, content: str, encoding: str = "utf-8"
+     ) -> None:
+         r"""Write text content to a plaintext file.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The text content to write.
+             encoding (str): Character encoding to use. (default: :obj: `utf-8`)
+         """
+         with file_path.open("w", encoding=encoding) as f:
+             f.write(content)
+         logger.debug(f"Wrote text to {file_path} with {encoding} encoding")
+
+     def _create_backup(self, file_path: Path) -> None:
+         r"""Create a backup of the file if it exists and backup is enabled.
+
+         Args:
+             file_path (Path): Path to the file to backup.
+         """
+         import shutil
+
+         if not self.backup_enabled or not file_path.exists():
+             return
+
+         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+         backup_path = file_path.parent / f"{file_path.name}.{timestamp}.bak"
+         shutil.copy2(file_path, backup_path)
+         logger.info(f"Created backup at {backup_path}")
+
+     def _write_docx_file(self, file_path: Path, content: str) -> None:
+         r"""Write text content to a DOCX file with default formatting.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The text content to write.
+         """
+         import docx
+
+         # Use default formatting values
+         font_name = 'Calibri'
+         font_size = 11
+         line_spacing = 1.0
+
+         document = docx.Document()
+         style = document.styles['Normal']
+         style.font.name = font_name
+         style.font.size = docx.shared.Pt(font_size)
+         style.paragraph_format.line_spacing = line_spacing
+
+         # Split content into paragraphs and add them
+         for para_text in content.split('\n'):
+             para = document.add_paragraph(para_text)
+             para.style = style
+
+         document.save(str(file_path))
+         logger.debug(f"Wrote DOCX to {file_path} with default formatting")
+
+     def _write_pdf_file(self, file_path: Path, content: str, **kwargs) -> None:
+         r"""Write text content to a PDF file with default formatting.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The text content to write.
+
+         Raises:
+             RuntimeError: If the 'fpdf' library is not installed.
+         """
+         from fpdf import FPDF
+
+         # Use default formatting values
+         font_family = 'Arial'
+         font_size = 12
+         font_style = ''
+         line_height = 10
+         margin = 10
+
+         pdf = FPDF()
+         pdf.set_margins(margin, margin, margin)
+
+         pdf.add_page()
+         pdf.set_font(font_family, style=font_style, size=font_size)
+
+         # Split content into paragraphs and add them
+         for para in content.split('\n'):
+             if para.strip(): # Skip empty paragraphs
+                 pdf.multi_cell(0, line_height, para)
+             else:
+                 pdf.ln(line_height) # Add empty line
+
+         pdf.output(str(file_path))
+         logger.debug(f"Wrote PDF to {file_path} with custom formatting")
+
+     def _write_csv_file(
+         self,
+         file_path: Path,
+         content: Union[str, List[List]],
+         encoding: str = "utf-8",
+     ) -> None:
+         r"""Write CSV content to a file.
+
+         Args:
+             file_path (Path): The target file path.
+             content (Union[str, List[List]]): The CSV content as a string or
+                 list of lists.
+             encoding (str): Character encoding to use. (default: :obj: `utf-8`)
+         """
+         import csv
+
+         with file_path.open("w", encoding=encoding, newline='') as f:
+             if isinstance(content, str):
+                 f.write(content)
+             else:
+                 writer = csv.writer(f)
+                 writer.writerows(content)
+         logger.debug(f"Wrote CSV to {file_path} with {encoding} encoding")
+
+     def _write_json_file(
+         self,
+         file_path: Path,
+         content: str,
+         encoding: str = "utf-8",
+     ) -> None:
+         r"""Write JSON content to a file.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The JSON content as a string.
+             encoding (str): Character encoding to use. (default: :obj: `utf-8`)
+         """
+         import json
+
+         with file_path.open("w", encoding=encoding) as f:
+             if isinstance(content, str):
+                 try:
+                     # Try parsing as JSON string first
+                     data = json.loads(content)
+                     json.dump(data, f)
+                 except json.JSONDecodeError:
+                     # If not valid JSON string, write as is
+                     f.write(content)
+             else:
+                 # If not string, dump as JSON
+                 json.dump(content, f)
+         logger.debug(f"Wrote JSON to {file_path} with {encoding} encoding")
+
+     def _write_yaml_file(
+         self,
+         file_path: Path,
+         content: str,
+         encoding: str = "utf-8",
+     ) -> None:
+         r"""Write YAML content to a file.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The YAML content as a string.
+             encoding (str): Character encoding to use. (default: :obj: `utf-8`)
+         """
+         with file_path.open("w", encoding=encoding) as f:
+             f.write(content)
+         logger.debug(f"Wrote YAML to {file_path} with {encoding} encoding")
+
+     def _write_html_file(
+         self, file_path: Path, content: str, encoding: str = "utf-8"
+     ) -> None:
+         r"""Write text content to an HTML file.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The HTML content to write.
+             encoding (str): Character encoding to use. (default: :obj: `utf-8`)
+         """
+         with file_path.open("w", encoding=encoding) as f:
+             f.write(content)
+         logger.debug(f"Wrote HTML to {file_path} with {encoding} encoding")
+
+     def _write_markdown_file(
+         self, file_path: Path, content: str, encoding: str = "utf-8"
+     ) -> None:
+         r"""Write text content to a Markdown file.
+
+         Args:
+             file_path (Path): The target file path.
+             content (str): The Markdown content to write.
+             encoding (str): Character encoding to use. (default: :obj: `utf-8`)
+         """
+         with file_path.open("w", encoding=encoding) as f:
+             f.write(content)
+         logger.debug(f"Wrote Markdown to {file_path} with {encoding} encoding")
+
+     def write_to_file(
+         self,
+         content: Union[str, List[List[str]]],
+         filename: str,
+         encoding: Optional[str] = None,
+     ) -> str:
+         r"""Write the given content to a file.
+
+         If the file exists, it will be overwritten. Supports multiple formats:
+         Markdown (.md, .markdown, default), Plaintext (.txt), CSV (.csv),
+         DOC/DOCX (.doc, .docx), PDF (.pdf), JSON (.json), YAML (.yml, .yaml),
+         and HTML (.html, .htm).
+
+         Args:
+             content (Union[str, List[List[str]]]): The content to write to the
+                 file. For all formats, content must be a string or list in the
+                 appropriate format.
+             filename (str): The name or path of the file. If a relative path is
+                 supplied, it is resolved to self.output_dir.
+             encoding (Optional[str]): The character encoding to use. (default:
+                 :obj: `None`)
+
+         Returns:
+             str: A message indicating success or error details.
+         """
+         file_path = self._resolve_filepath(filename)
+         file_path.parent.mkdir(parents=True, exist_ok=True)
+
+         # Create backup if file exists
+         self._create_backup(file_path)
+
+         extension = file_path.suffix.lower()
+
+         # If no extension is provided, use the default format
+         if extension == "":
+             file_path = file_path.with_suffix(DEFAULT_FORMAT)
+             extension = DEFAULT_FORMAT
+
+         try:
+             # Get encoding or use default
+             file_encoding = encoding or self.default_encoding
+
+             if extension in [".doc", ".docx"]:
+                 self._write_docx_file(file_path, str(content))
+             elif extension == ".pdf":
+                 self._write_pdf_file(file_path, str(content))
+             elif extension == ".csv":
+                 self._write_csv_file(
+                     file_path, content, encoding=file_encoding
+                 )
+             elif extension == ".json":
+                 self._write_json_file(
+                     file_path,
+                     content, # type: ignore[arg-type]
+                     encoding=file_encoding,
+                 )
+             elif extension in [".yml", ".yaml"]:
+                 self._write_yaml_file(
+                     file_path, str(content), encoding=file_encoding
+                 )
+             elif extension in [".html", ".htm"]:
+                 self._write_html_file(
+                     file_path, str(content), encoding=file_encoding
+                 )
+             elif extension in [".md", ".markdown"]:
+                 self._write_markdown_file(
+                     file_path, str(content), encoding=file_encoding
+                 )
+             else:
+                 # Fallback to simple text writing for unknown or .txt
+                 # extensions
+                 self._write_text_file(
+                     file_path, str(content), encoding=file_encoding
+                 )
+
+             msg = f"Content successfully written to file: {file_path}"
+             logger.info(msg)
+             return msg
+         except Exception as e:
+             error_msg = (
+                 f"Error occurred while writing to file {file_path}: {e}"
+             )
+             logger.error(error_msg)
+             return error_msg
+
+     def get_tools(self) -> List[FunctionTool]:
+         r"""Return a list of FunctionTool objects representing the functions
+         in the toolkit.
+
+         Returns:
+             List[FunctionTool]: A list of FunctionTool objects representing
+                 the available functions in this toolkit.
+         """
+         return [
+             FunctionTool(self.write_to_file),
+         ]
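
Since FileWriteToolkit is now exported from camel.toolkits (see the __init__ changes above), it can be used directly or handed to an agent via get_tools(). A brief usage sketch; the file names and content are illustrative only:

    from camel.toolkits import FileWriteToolkit

    toolkit = FileWriteToolkit(output_dir="./outputs", backup_enabled=True)

    # The extension picks the writer; a missing extension falls back to
    # DEFAULT_FORMAT (.md).
    print(toolkit.write_to_file("# Report\n\nHello.", "report.md"))
    print(toolkit.write_to_file([["name", "score"], ["alice", "9"]], "scores.csv"))
    print(toolkit.write_to_file("plain notes", "notes"))  # written as notes.md

    # Expose write_to_file to an agent as a FunctionTool.
    tools = toolkit.get_tools()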