aiecs 1.0.8__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of aiecs might be problematic. Click here for more details.

Files changed (81)
  1. aiecs/__init__.py +1 -1
  2. aiecs/aiecs_client.py +159 -1
  3. aiecs/config/config.py +6 -0
  4. aiecs/domain/__init__.py +95 -0
  5. aiecs/domain/community/__init__.py +159 -0
  6. aiecs/domain/community/agent_adapter.py +516 -0
  7. aiecs/domain/community/analytics.py +465 -0
  8. aiecs/domain/community/collaborative_workflow.py +99 -7
  9. aiecs/domain/community/communication_hub.py +649 -0
  10. aiecs/domain/community/community_builder.py +322 -0
  11. aiecs/domain/community/community_integration.py +365 -12
  12. aiecs/domain/community/community_manager.py +481 -5
  13. aiecs/domain/community/decision_engine.py +459 -13
  14. aiecs/domain/community/exceptions.py +238 -0
  15. aiecs/domain/community/models/__init__.py +36 -0
  16. aiecs/domain/community/resource_manager.py +1 -1
  17. aiecs/domain/community/shared_context_manager.py +621 -0
  18. aiecs/domain/context/__init__.py +24 -0
  19. aiecs/domain/context/context_engine.py +37 -33
  20. aiecs/main.py +20 -2
  21. aiecs/scripts/aid/VERSION_MANAGEMENT.md +97 -0
  22. aiecs/scripts/aid/__init__.py +15 -0
  23. aiecs/scripts/aid/version_manager.py +224 -0
  24. aiecs/scripts/dependance_check/__init__.py +18 -0
  25. aiecs/scripts/{download_nlp_data.py → dependance_check/download_nlp_data.py} +51 -8
  26. aiecs/scripts/dependance_patch/__init__.py +8 -0
  27. aiecs/scripts/dependance_patch/fix_weasel/__init__.py +12 -0
  28. aiecs/scripts/tools_develop/README.md +340 -0
  29. aiecs/scripts/tools_develop/__init__.py +16 -0
  30. aiecs/scripts/tools_develop/check_type_annotations.py +263 -0
  31. aiecs/scripts/tools_develop/validate_tool_schemas.py +346 -0
  32. aiecs/tools/__init__.py +53 -34
  33. aiecs/tools/docs/__init__.py +106 -0
  34. aiecs/tools/docs/ai_document_orchestrator.py +556 -0
  35. aiecs/tools/docs/ai_document_writer_orchestrator.py +2222 -0
  36. aiecs/tools/docs/content_insertion_tool.py +1234 -0
  37. aiecs/tools/docs/document_creator_tool.py +1179 -0
  38. aiecs/tools/docs/document_layout_tool.py +1105 -0
  39. aiecs/tools/docs/document_parser_tool.py +924 -0
  40. aiecs/tools/docs/document_writer_tool.py +1636 -0
  41. aiecs/tools/langchain_adapter.py +102 -51
  42. aiecs/tools/schema_generator.py +265 -0
  43. aiecs/tools/statistics/__init__.py +82 -0
  44. aiecs/tools/statistics/ai_data_analysis_orchestrator.py +581 -0
  45. aiecs/tools/statistics/ai_insight_generator_tool.py +473 -0
  46. aiecs/tools/statistics/ai_report_orchestrator_tool.py +629 -0
  47. aiecs/tools/statistics/data_loader_tool.py +518 -0
  48. aiecs/tools/statistics/data_profiler_tool.py +599 -0
  49. aiecs/tools/statistics/data_transformer_tool.py +531 -0
  50. aiecs/tools/statistics/data_visualizer_tool.py +460 -0
  51. aiecs/tools/statistics/model_trainer_tool.py +470 -0
  52. aiecs/tools/statistics/statistical_analyzer_tool.py +426 -0
  53. aiecs/tools/task_tools/chart_tool.py +2 -1
  54. aiecs/tools/task_tools/image_tool.py +43 -43
  55. aiecs/tools/task_tools/office_tool.py +48 -36
  56. aiecs/tools/task_tools/pandas_tool.py +37 -33
  57. aiecs/tools/task_tools/report_tool.py +67 -56
  58. aiecs/tools/task_tools/research_tool.py +32 -31
  59. aiecs/tools/task_tools/scraper_tool.py +53 -46
  60. aiecs/tools/task_tools/search_tool.py +1123 -0
  61. aiecs/tools/task_tools/stats_tool.py +20 -15
  62. {aiecs-1.0.8.dist-info → aiecs-1.2.0.dist-info}/METADATA +5 -1
  63. aiecs-1.2.0.dist-info/RECORD +135 -0
  64. aiecs-1.2.0.dist-info/entry_points.txt +10 -0
  65. aiecs/tools/task_tools/search_api.py +0 -7
  66. aiecs-1.0.8.dist-info/RECORD +0 -98
  67. aiecs-1.0.8.dist-info/entry_points.txt +0 -7
  68. /aiecs/scripts/{DEPENDENCY_SYSTEM_SUMMARY.md → dependance_check/DEPENDENCY_SYSTEM_SUMMARY.md} +0 -0
  69. /aiecs/scripts/{README_DEPENDENCY_CHECKER.md → dependance_check/README_DEPENDENCY_CHECKER.md} +0 -0
  70. /aiecs/scripts/{dependency_checker.py → dependance_check/dependency_checker.py} +0 -0
  71. /aiecs/scripts/{dependency_fixer.py → dependance_check/dependency_fixer.py} +0 -0
  72. /aiecs/scripts/{quick_dependency_check.py → dependance_check/quick_dependency_check.py} +0 -0
  73. /aiecs/scripts/{setup_nlp_data.sh → dependance_check/setup_nlp_data.sh} +0 -0
  74. /aiecs/scripts/{README_WEASEL_PATCH.md → dependance_patch/fix_weasel/README_WEASEL_PATCH.md} +0 -0
  75. /aiecs/scripts/{fix_weasel_validator.py → dependance_patch/fix_weasel/fix_weasel_validator.py} +0 -0
  76. /aiecs/scripts/{fix_weasel_validator.sh → dependance_patch/fix_weasel/fix_weasel_validator.sh} +0 -0
  77. /aiecs/scripts/{patch_weasel_library.sh → dependance_patch/fix_weasel/patch_weasel_library.sh} +0 -0
  78. /aiecs/scripts/{run_weasel_patch.sh → dependance_patch/fix_weasel/run_weasel_patch.sh} +0 -0
  79. {aiecs-1.0.8.dist-info → aiecs-1.2.0.dist-info}/WHEEL +0 -0
  80. {aiecs-1.0.8.dist-info → aiecs-1.2.0.dist-info}/licenses/LICENSE +0 -0
  81. {aiecs-1.0.8.dist-info → aiecs-1.2.0.dist-info}/top_level.txt +0 -0
@@ -13,6 +13,9 @@ import logging
13
13
  from typing import Any, Dict, List, Optional, Type, Union, get_type_hints
14
14
  from pydantic import BaseModel, Field
15
15
 
16
+ # Import schema generator
17
+ from aiecs.tools.schema_generator import generate_schema_from_method
18
+
16
19
  try:
17
20
  from langchain.tools import BaseTool as LangchainBaseTool
18
21
  from langchain.callbacks.manager import CallbackManagerForToolRun, AsyncCallbackManagerForToolRun
@@ -33,24 +36,27 @@ logger = logging.getLogger(__name__)
33
36
  class LangchainToolAdapter(LangchainBaseTool):
34
37
  """
35
38
  Langchain tool adapter for single operation
36
-
39
+
37
40
  Wraps one operation method of BaseTool as an independent Langchain tool
38
41
  """
39
-
42
+
40
43
  # Define class attributes
41
44
  name: str = ""
42
45
  description: str = ""
43
-
46
+ base_tool_name: str = ""
47
+ operation_name: str = ""
48
+ operation_schema: Optional[Type[BaseModel]] = None
49
+
44
50
  def __init__(
45
- self,
51
+ self,
46
52
  base_tool_name: str,
47
- operation_name: str,
53
+ operation_name: str,
48
54
  operation_schema: Optional[Type[BaseModel]] = None,
49
55
  description: Optional[str] = None
50
56
  ):
51
57
  """
52
58
  Initialize adapter
53
-
59
+
54
60
  Args:
55
61
  base_tool_name: Original tool name
56
62
  operation_name: Operation name
@@ -58,56 +64,55 @@ class LangchainToolAdapter(LangchainBaseTool):
58
64
  description: Tool description
59
65
  """
60
66
  # Construct tool name and description
61
- self.name = f"{base_tool_name}_{operation_name}"
62
- self.description = description or f"Execute {operation_name} operation from {base_tool_name} tool"
63
-
64
- # Store tool information (use self.__dict__ to set directly to avoid pydantic validation)
65
- self.__dict__['base_tool_name'] = base_tool_name
66
- self.__dict__['operation_name'] = operation_name
67
- self.__dict__['operation_schema'] = operation_schema
68
-
69
- # Set parameter Schema
70
- if operation_schema:
71
- self.args_schema = operation_schema
72
-
73
- super().__init__()
67
+ tool_name = f"{base_tool_name}_{operation_name}"
68
+ tool_description = description or f"Execute {operation_name} operation from {base_tool_name} tool"
69
+
70
+ # Initialize parent class with all required fields
71
+ super().__init__(
72
+ name=tool_name,
73
+ description=tool_description,
74
+ base_tool_name=base_tool_name,
75
+ operation_name=operation_name,
76
+ operation_schema=operation_schema,
77
+ args_schema=operation_schema
78
+ )
74
79
 
75
80
  def _run(
76
- self,
77
- run_manager: Optional[CallbackManagerForToolRun] = None,
81
+ self,
82
+ run_manager: Optional[CallbackManagerForToolRun] = None,
78
83
  **kwargs: Any
79
84
  ) -> Any:
80
85
  """Execute operation synchronously"""
81
86
  try:
82
87
  # Get original tool instance
83
- base_tool = get_tool(self.__dict__['base_tool_name'])
84
-
88
+ base_tool = get_tool(self.base_tool_name)
89
+
85
90
  # Execute operation
86
- result = base_tool.run(self.__dict__['operation_name'], **kwargs)
87
-
91
+ result = base_tool.run(self.operation_name, **kwargs)
92
+
88
93
  logger.info(f"Successfully executed {self.name} with result type: {type(result)}")
89
94
  return result
90
-
95
+
91
96
  except Exception as e:
92
97
  logger.error(f"Error executing {self.name}: {str(e)}")
93
98
  raise
94
-
99
+
95
100
  async def _arun(
96
- self,
101
+ self,
97
102
  run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
98
103
  **kwargs: Any
99
104
  ) -> Any:
100
105
  """Execute operation asynchronously"""
101
106
  try:
102
107
  # Get original tool instance
103
- base_tool = get_tool(self.__dict__['base_tool_name'])
104
-
108
+ base_tool = get_tool(self.base_tool_name)
109
+
105
110
  # Execute asynchronous operation
106
- result = await base_tool.run_async(self.__dict__['operation_name'], **kwargs)
107
-
111
+ result = await base_tool.run_async(self.operation_name, **kwargs)
112
+
108
113
  logger.info(f"Successfully executed {self.name} async with result type: {type(result)}")
109
114
  return result
110
-
115
+
111
116
  except Exception as e:
112
117
  logger.error(f"Error executing {self.name} async: {str(e)}")
113
118
  raise
@@ -121,47 +126,90 @@ class ToolRegistry:
121
126
  def discover_operations(self, base_tool_class: Type[BaseTool]) -> List[Dict[str, Any]]:
122
127
  """
123
128
  Discover all operation methods and Schemas of BaseTool class
124
-
129
+
125
130
  Args:
126
131
  base_tool_class: BaseTool subclass
127
-
132
+
128
133
  Returns:
129
134
  List of operation information, including method names, Schemas, descriptions, etc.
130
135
  """
131
136
  operations = []
132
-
137
+
133
138
  # Get all Schema classes
139
+ # Build a mapping from normalized names to Schema classes
140
+ # Check both class-level and module-level schemas
134
141
  schemas = {}
142
+
143
+ # 1. Check class-level schemas (e.g., ChartTool)
135
144
  for attr_name in dir(base_tool_class):
136
145
  attr = getattr(base_tool_class, attr_name)
137
146
  if isinstance(attr, type) and issubclass(attr, BaseModel) and attr.__name__.endswith('Schema'):
138
- op_name = attr.__name__.replace('Schema', '').lower()
139
- schemas[op_name] = attr
140
-
147
+ # Normalize: remove 'Schema' suffix, convert to lowercase, remove underscores
148
+ schema_base_name = attr.__name__.replace('Schema', '')
149
+ normalized_name = schema_base_name.replace('_', '').lower()
150
+ schemas[normalized_name] = attr
151
+ logger.debug(f"Found class-level schema {attr.__name__} -> normalized: {normalized_name}")
152
+
153
+ # 2. Check module-level schemas (e.g., ImageTool)
154
+ tool_module = inspect.getmodule(base_tool_class)
155
+ if tool_module:
156
+ for attr_name in dir(tool_module):
157
+ if attr_name.startswith('_'):
158
+ continue
159
+ attr = getattr(tool_module, attr_name)
160
+ if isinstance(attr, type) and issubclass(attr, BaseModel) and attr.__name__.endswith('Schema'):
161
+ # Skip if already found at class level
162
+ schema_base_name = attr.__name__.replace('Schema', '')
163
+ normalized_name = schema_base_name.replace('_', '').lower()
164
+ if normalized_name not in schemas:
165
+ schemas[normalized_name] = attr
166
+ logger.debug(f"Found module-level schema {attr.__name__} -> normalized: {normalized_name}")
167
+
141
168
  # Get all public methods
142
169
  for method_name in dir(base_tool_class):
143
170
  if method_name.startswith('_'):
144
171
  continue
145
-
172
+
146
173
  method = getattr(base_tool_class, method_name)
147
174
  if not callable(method):
148
175
  continue
149
-
150
- # Skip base class methods
176
+
177
+ # Skip base class methods and Schema classes themselves
151
178
  if method_name in ['run', 'run_async', 'run_batch']:
152
179
  continue
153
-
180
+
181
+ # Skip if it's a class (like Config or Schema classes)
182
+ if isinstance(method, type):
183
+ continue
184
+
185
+ # Normalize method name: remove underscores and convert to lowercase
186
+ normalized_method_name = method_name.replace('_', '').lower()
187
+
188
+ # Try to find matching schema
189
+ matching_schema = schemas.get(normalized_method_name)
190
+
191
+ if matching_schema:
192
+ logger.debug(f"Matched method {method_name} with manual schema {matching_schema.__name__}")
193
+ else:
194
+ # Auto-generate schema if not found
195
+ auto_schema = generate_schema_from_method(method, method_name)
196
+ if auto_schema:
197
+ matching_schema = auto_schema
198
+ logger.debug(f"Auto-generated schema for method {method_name}: {auto_schema.__name__}")
199
+ else:
200
+ logger.debug(f"No schema found or generated for method {method_name}")
201
+
154
202
  # Get method information
155
203
  operation_info = {
156
204
  'name': method_name,
157
205
  'method': method,
158
- 'schema': schemas.get(method_name),
206
+ 'schema': matching_schema,
159
207
  'description': inspect.getdoc(method) or f"Execute {method_name} operation",
160
208
  'is_async': inspect.iscoroutinefunction(method)
161
209
  }
162
-
210
+
163
211
  operations.append(operation_info)
164
-
212
+
165
213
  return operations
166
214
 
167
215
  def _extract_description(self, method, base_tool_name: str, operation_name: str, schema: Optional[Type[BaseModel]] = None) -> str:
@@ -255,20 +303,23 @@ class ToolRegistry:
255
303
  def create_all_langchain_tools(self) -> List[LangchainToolAdapter]:
256
304
  """
257
305
  Create Langchain adapters for all registered BaseTools
258
-
306
+
259
307
  Returns:
260
308
  List of all Langchain tool adapters
261
309
  """
262
310
  all_tools = []
263
-
264
- for tool_name in list_tools():
311
+
312
+ # list_tools() returns a list of dicts, extract tool names
313
+ tool_infos = list_tools()
314
+ for tool_info in tool_infos:
315
+ tool_name = tool_info['name']
265
316
  try:
266
317
  tools = self.create_langchain_tools(tool_name)
267
318
  all_tools.extend(tools)
268
319
  except Exception as e:
269
320
  logger.error(f"Failed to create Langchain tools for {tool_name}: {e}")
270
-
271
- logger.info(f"Created total {len(all_tools)} Langchain tools from {len(list_tools())} base tools")
321
+
322
+ logger.info(f"Created total {len(all_tools)} Langchain tools from {len(tool_infos)} base tools")
272
323
  return all_tools
273
324
 
274
325
  def get_tool(self, name: str) -> Optional[LangchainToolAdapter]:
@@ -0,0 +1,265 @@
1
+ """
2
+ Automatic Schema Generation Tool
3
+
4
+ Automatically generate Pydantic Schema from method signatures and type annotations
5
+ """
6
+
7
+ import inspect
8
+ import logging
9
+ from typing import Any, Dict, List, Optional, Type, get_type_hints, Union
10
+ from pydantic import BaseModel, Field, create_model, ConfigDict
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
def _normalize_type(param_type: Type) -> Type:
    """
    Normalize a parameter type, mapping unsupported types to ``Any``.

    pandas ``DataFrame``/``Series`` (anything whose type name mentions them)
    cannot be modelled as plain pydantic fields here, so they collapse to Any.
    """
    label = getattr(param_type, '__name__', str(param_type))
    if any(marker in label for marker in ('DataFrame', 'Series')):
        return Any
    return param_type
29
+
30
+
31
def _extract_param_description_from_docstring(docstring: str, param_name: str) -> Optional[str]:
    """
    Extract one parameter's description from a docstring.

    Supported formats:
    - Google style: Args: param_name: description
    - NumPy style: Parameters: param_name : type description
    Continuation lines without a colon are appended to the current
    parameter's description.
    """
    if not docstring:
        return None

    SECTION_STARTS = ('Args:', 'Arguments:', 'Parameters:')
    SECTION_ENDS = ('Returns:', 'Raises:', 'Yields:', 'Examples:', 'Note:', 'Notes:')

    inside_args = False
    active_param = None
    collected = []

    def finish():
        # Join the accumulated lines when they belong to the wanted parameter.
        if active_param == param_name and collected:
            return ' '.join(collected).strip()
        return None

    for raw_line in docstring.split('\n'):
        text = raw_line.strip()

        # Enter the Args/Parameters section.
        if text in SECTION_STARTS:
            inside_args = True
            continue

        # A following section header ends the scan.
        if inside_args and text in SECTION_ENDS:
            break

        if not inside_args:
            continue

        # A colon marks a new "param: description" entry (a stripped line
        # can never start with a space, so the colon test alone suffices).
        if ':' in text:
            found = finish()
            if found is not None:
                return found

            head, _, tail = text.partition(':')
            head = head.strip()
            # Drop an optional "(type)" annotation after the name.
            if '(' in head:
                head = head.split('(')[0].strip()
            active_param = head
            collected = [tail.strip()]
        elif active_param and text:
            # Continuation of the current parameter's description.
            collected.append(text)

    # The wanted parameter may be the last one in the section.
    return finish()
85
+
86
+
87
def generate_schema_from_method(
    method: callable,
    method_name: str,
    base_class: Type[BaseModel] = BaseModel
) -> Optional[Type[BaseModel]]:
    """
    Automatically generate a Pydantic Schema from a method signature.

    Args:
        method: Method to generate Schema for
        method_name: Method name
        base_class: Schema base class

    Returns:
        Generated Pydantic Schema class, or None when the method has no
        parameters besides ``self`` or generation fails.
    """
    try:
        sig = inspect.signature(method)

        # Resolve annotations; fall back to Any when hints cannot be
        # evaluated (e.g. unresolvable forward references).
        try:
            type_hints = get_type_hints(method)
        except Exception as e:
            logger.debug(f"Failed to get type hints for {method_name}: {e}")
            type_hints = {}

        docstring = inspect.getdoc(method) or f"Execute {method_name} operation"

        # First docstring line doubles as the schema description.
        first_line = docstring.split('\n')[0].strip()
        schema_description = first_line if first_line else f"Execute {method_name} operation"

        # Build field definitions from the signature.
        field_definitions = {}
        for param_name, param in sig.parameters.items():
            if param_name == 'self':
                continue

            # Map unsupported types (e.g. pandas objects) to Any.
            param_type = _normalize_type(type_hints.get(param_name, Any))

            field_description = (
                _extract_param_description_from_docstring(docstring, param_name)
                or f"Parameter {param_name}"
            )

            if param.default != inspect.Parameter.empty:
                # Optional parameter: keep the declared default (None included).
                field_definitions[param_name] = (
                    param_type,
                    Field(default=param.default, description=field_description)
                )
            else:
                # Required parameter.
                field_definitions[param_name] = (
                    param_type,
                    Field(description=field_description)
                )

        if not field_definitions:
            logger.debug(f"No parameters found for {method_name}, skipping schema generation")
            return None

        schema_name = f"{method_name.title().replace('_', '')}Schema"

        # BUGFIX: create_model() raises when __base__ and __config__ are
        # passed together, so the original call always failed, was swallowed
        # by the except below, and every auto-generated schema came back as
        # None.  Fold the config into an intermediate base class instead so
        # arbitrary types (mapped to Any above or kept as-is) are allowed.
        config_base = type(
            f"_{schema_name}Base",
            (base_class,),
            {'model_config': ConfigDict(arbitrary_types_allowed=True)},
        )

        schema_class = create_model(
            schema_name,
            __base__=config_base,
            __doc__=schema_description,
            **field_definitions
        )

        logger.debug(f"Generated schema {schema_name} for method {method_name}")
        return schema_class

    except Exception as e:
        logger.warning(f"Failed to generate schema for {method_name}: {e}")
        return None
185
+
186
+
187
def generate_schemas_for_tool(tool_class: Type) -> Dict[str, Type[BaseModel]]:
    """
    Generate a Schema for every public operation method of a tool class.

    Args:
        tool_class: Tool class

    Returns:
        Mapping from normalized method names (lowercase, underscores
        removed) to generated Schema classes.
    """
    EXCLUDED = ('run', 'run_async', 'run_batch')
    generated: Dict[str, Type[BaseModel]] = {}

    for attr_name in dir(tool_class):
        # Skip private/special names and base-class dispatch methods.
        if attr_name.startswith('_') or attr_name in EXCLUDED:
            continue

        candidate = getattr(tool_class, attr_name)

        # Only plain callables qualify: skip data attributes and nested
        # classes such as Config or manually written Schema classes.
        if not callable(candidate) or isinstance(candidate, type):
            continue

        schema = generate_schema_from_method(candidate, attr_name)
        if schema:
            # Normalized key: underscores removed, lowercased.
            generated[attr_name.replace('_', '').lower()] = schema
            logger.info(f"Generated schema for {attr_name}")

    return generated
228
+
229
+
230
+
231
+
232
+
233
# Usage example: generate and preview schemas for the registered pandas tool.
if __name__ == '__main__':
    import sys

    # Make the project importable when run straight from a checkout.
    sys.path.insert(0, '/home/coder1/python-middleware-dev')

    from aiecs.tools import discover_tools, TOOL_CLASSES

    logging.basicConfig(level=logging.INFO)

    # Register all tools, then generate schemas for PandasTool.
    discover_tools()

    print("Generating Schema for PandasTool:")
    print("=" * 80)

    schemas = generate_schemas_for_tool(TOOL_CLASSES['pandas'])

    print(f"\nGenerated {len(schemas)} Schemas:\n")

    # Preview the first three generated schemas.
    for _name, schema in list(schemas.items())[:3]:
        print(f"{schema.__name__}:")
        print(f" Description: {schema.__doc__}")
        print(f" Fields:")
        for field_name, field_info in schema.model_fields.items():
            required = "Required" if field_info.is_required() else "Optional"
            default = f" (default: {field_info.default})" if not field_info.is_required() and field_info.default is not None else ""
            print(f" - {field_name}: {field_info.description} [{required}]{default}")
        print()
265
+
@@ -0,0 +1,82 @@
1
+ """
2
+ Statistics and Data Analysis Tools Module
3
+
4
+ This module contains specialized tools for data analysis and statistical operations:
5
+ - data_loader: Universal data loading from multiple file formats
6
+ - data_profiler: Comprehensive data profiling and quality assessment
7
+ - data_transformer: Data cleaning, transformation, and feature engineering
8
+ - data_visualizer: Smart data visualization and chart generation
9
+ - statistical_analyzer: Advanced statistical analysis and hypothesis testing
10
+ - model_trainer: AutoML and machine learning model training
11
+ - ai_data_analysis_orchestrator: AI-powered end-to-end analysis orchestration
12
+ - ai_insight_generator: AI-driven insight discovery and pattern detection
13
+ - ai_report_orchestrator: AI-powered comprehensive report generation
14
+ """
15
+
16
+ # Lazy import strategy to avoid heavy dependencies at import time
17
+ import os
18
+ import logging
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
# Define available tools for lazy loading: each entry names a submodule of
# this package whose import registers its tool(s) with the tool registry.
_AVAILABLE_STATISTICS_TOOLS = [
    'data_loader_tool',
    'data_profiler_tool',
    'data_transformer_tool',
    'data_visualizer_tool',
    'statistical_analyzer_tool',
    'model_trainer_tool',
    'ai_data_analysis_orchestrator',
    'ai_insight_generator_tool',
    'ai_report_orchestrator_tool'
]

# Track which tools have been loaded (names are added only after a
# successful import, so a failed load can be retried later).
_LOADED_STATISTICS_TOOLS = set()
37
+
38
def _lazy_load_statistics_tool(tool_name: str):
    """
    Lazily import a single statistics tool module by name.

    Importing the module triggers its tool-registration side effects.
    Failures are logged as warnings and the name is left unloaded, so a
    broken optional dependency does not break the whole package.

    Args:
        tool_name: A module name from ``_AVAILABLE_STATISTICS_TOOLS``.
    """
    import importlib  # local import keeps package import time minimal

    if tool_name in _LOADED_STATISTICS_TOOLS:
        return

    # BUGFIX: the original if/elif chain fell through for unknown names and
    # still marked them as loaded / logged success.  Importing by name
    # removes the duplication and makes an unknown or broken module surface
    # as a warning instead.
    try:
        # Equivalent of "from . import <tool_name>".
        importlib.import_module(f'.{tool_name}', __package__)
        _LOADED_STATISTICS_TOOLS.add(tool_name)
        logger.info(f"Successfully loaded statistics tool: {tool_name}")
    except Exception as e:
        logger.warning(f"Failed to load statistics tool {tool_name}: {e}")
68
+
69
def load_all_statistics_tools():
    """Import every statistics tool module listed in _AVAILABLE_STATISTICS_TOOLS."""
    for module_name in tuple(_AVAILABLE_STATISTICS_TOOLS):
        _lazy_load_statistics_tool(module_name)
73
+
74
# Auto-load all tools when module is imported
# This ensures all tools are registered
load_all_statistics_tools()

# Public API; the leading-underscore loader is exported deliberately so
# callers can retry loading a single tool on demand.
__all__ = [
    'load_all_statistics_tools',
    '_lazy_load_statistics_tool'
]
82
+