ai-data-science-team 0.0.0.9008__py3-none-any.whl → 0.0.0.9010__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (29) hide show
  1. ai_data_science_team/_version.py +1 -1
  2. ai_data_science_team/agents/__init__.py +0 -1
  3. ai_data_science_team/agents/data_cleaning_agent.py +50 -39
  4. ai_data_science_team/agents/data_loader_tools_agent.py +69 -0
  5. ai_data_science_team/agents/data_visualization_agent.py +45 -50
  6. ai_data_science_team/agents/data_wrangling_agent.py +50 -49
  7. ai_data_science_team/agents/feature_engineering_agent.py +48 -67
  8. ai_data_science_team/agents/sql_database_agent.py +130 -76
  9. ai_data_science_team/ml_agents/__init__.py +2 -0
  10. ai_data_science_team/ml_agents/h2o_ml_agent.py +852 -0
  11. ai_data_science_team/ml_agents/mlflow_tools_agent.py +327 -0
  12. ai_data_science_team/multiagents/sql_data_analyst.py +120 -9
  13. ai_data_science_team/parsers/__init__.py +0 -0
  14. ai_data_science_team/{tools → parsers}/parsers.py +0 -1
  15. ai_data_science_team/templates/__init__.py +1 -0
  16. ai_data_science_team/templates/agent_templates.py +78 -7
  17. ai_data_science_team/tools/data_loader.py +378 -0
  18. ai_data_science_team/tools/{metadata.py → dataframe.py} +0 -91
  19. ai_data_science_team/tools/h2o.py +643 -0
  20. ai_data_science_team/tools/mlflow.py +961 -0
  21. ai_data_science_team/tools/sql.py +126 -0
  22. ai_data_science_team/{tools → utils}/regex.py +59 -1
  23. {ai_data_science_team-0.0.0.9008.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/METADATA +56 -24
  24. ai_data_science_team-0.0.0.9010.dist-info/RECORD +35 -0
  25. ai_data_science_team-0.0.0.9008.dist-info/RECORD +0 -26
  26. /ai_data_science_team/{tools → utils}/logging.py +0 -0
  27. {ai_data_science_team-0.0.0.9008.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/LICENSE +0 -0
  28. {ai_data_science_team-0.0.0.9008.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/WHEEL +0 -0
  29. {ai_data_science_team-0.0.0.9008.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,126 @@
1
+
2
+ import pandas as pd
3
+ import sqlalchemy as sql
4
+ from sqlalchemy import inspect
5
+
6
+
7
def get_database_metadata(connection, n_samples=10) -> dict:
    """
    Collect metadata and sample data from a database, with safe identifier
    quoting and basic dialect-aware row limiting. Quoting prevents issues
    with spaces/reserved words in schema, table, and column identifiers.

    Parameters
    ----------
    connection : Union[sql.engine.base.Connection, sql.engine.base.Engine]
        An active SQLAlchemy connection or engine. If an engine is passed,
        a connection is opened and closed by this function.
    n_samples : int
        Number of sample values to retrieve for each column.

    Returns
    -------
    dict
        A dictionary with keys "dialect", "driver", "connection_url", and
        "schemas"; each schema entry lists its tables with columns
        (name/type/sample_values), primary key, foreign keys, and indexes.
    """
    # Only close the connection on exit if we opened it ourselves.
    is_engine = isinstance(connection, sql.engine.base.Engine)
    conn = connection.connect() if is_engine else connection

    metadata = {
        "dialect": None,
        "driver": None,
        "connection_url": None,
        "schemas": [],
    }

    try:
        sql_engine = conn.engine
        dialect_name = sql_engine.dialect.name.lower()

        metadata["dialect"] = sql_engine.dialect.name
        metadata["driver"] = sql_engine.driver
        metadata["connection_url"] = str(sql_engine.url)

        inspector = inspect(sql_engine)
        # Take the preparer from the dialect directly: `Inspector.bind` was
        # deprecated in SQLAlchemy 1.4 and removed in 2.0, so the previous
        # `inspector.bind.dialect.identifier_preparer` breaks on current
        # SQLAlchemy releases.
        preparer = sql_engine.dialect.identifier_preparer

        # Walk every schema visible to the inspector.
        for schema_name in inspector.get_schema_names():
            schema_obj = {
                "schema_name": schema_name,
                "tables": []
            }

            tables = inspector.get_table_names(schema=schema_name)
            for table_name in tables:
                table_info = {
                    "table_name": table_name,
                    "columns": [],
                    "primary_key": [],
                    "foreign_keys": [],
                    "indexes": []
                }
                # Columns, with a small sample of values for each one.
                columns = inspector.get_columns(table_name, schema=schema_name)
                for col in columns:
                    col_name = col["name"]
                    col_type = str(col["type"])
                    # Quote schema and table separately so each identifier
                    # is escaped per the active dialect's rules.
                    table_name_quoted = f"{preparer.quote_identifier(schema_name)}.{preparer.quote_identifier(table_name)}"
                    col_name_quoted = preparer.quote_identifier(col_name)

                    # Build a dialect-appropriate sampling query.
                    query = build_query(col_name_quoted, table_name_quoted, n_samples, dialect_name)

                    # Sampling is best-effort: a failure for one column is
                    # recorded inline rather than aborting the whole scan.
                    try:
                        df = pd.read_sql(query, conn)
                        samples = df[col_name].head(n_samples).tolist()
                    except Exception as e:
                        samples = [f"Error retrieving data: {str(e)}"]

                    table_info["columns"].append({
                        "name": col_name,
                        "type": col_type,
                        "sample_values": samples
                    })

                # Primary keys
                pk_constraint = inspector.get_pk_constraint(table_name, schema=schema_name)
                table_info["primary_key"] = pk_constraint.get("constrained_columns", [])

                # Foreign keys
                fks = inspector.get_foreign_keys(table_name, schema=schema_name)
                table_info["foreign_keys"] = [
                    {
                        "local_cols": fk["constrained_columns"],
                        "referred_table": fk["referred_table"],
                        "referred_cols": fk["referred_columns"]
                    }
                    for fk in fks
                ]

                # Indexes
                idxs = inspector.get_indexes(table_name, schema=schema_name)
                table_info["indexes"] = idxs

                schema_obj["tables"].append(table_info)

            metadata["schemas"].append(schema_obj)

    finally:
        if is_engine:
            conn.close()

    return metadata
113
+
114
def build_query(col_name_quoted: str, table_name_quoted: str, n: int, dialect_name: str) -> str:
    """
    Build a SQL query that samples up to ``n`` values from one column,
    using the row-limiting (and, where supported, random-ordering) syntax
    of the given dialect.

    Parameters
    ----------
    col_name_quoted : str
        Column identifier, already quoted for the target dialect.
    table_name_quoted : str
        Schema-qualified table identifier, already quoted.
    n : int
        Maximum number of rows to return.
    dialect_name : str
        Lowercased SQLAlchemy dialect name (substring-matched below).

    Returns
    -------
    str
        A SQL query string for the target dialect.
    """
    if "postgres" in dialect_name:
        return f"SELECT {col_name_quoted} FROM {table_name_quoted} ORDER BY RANDOM() LIMIT {n}"
    if "mysql" in dialect_name:
        return f"SELECT {col_name_quoted} FROM {table_name_quoted} ORDER BY RAND() LIMIT {n}"
    if "sqlite" in dialect_name:
        return f"SELECT {col_name_quoted} FROM {table_name_quoted} ORDER BY RANDOM() LIMIT {n}"
    if "mssql" in dialect_name:
        return f"SELECT TOP {n} {col_name_quoted} FROM {table_name_quoted} ORDER BY NEWID()"
    if "oracle" in dialect_name:
        # Oracle (pre-12c compatible): ROWNUM caps the row count; no
        # portable random ordering is attempted here.
        return f"SELECT {col_name_quoted} FROM {table_name_quoted} WHERE ROWNUM <= {n}"
    # Generic fallback: LIMIT is far more widely supported than Oracle's
    # ROWNUM (which the previous fallback used and which is a syntax error
    # on most other dialects, e.g. DuckDB or Snowflake).
    return f"SELECT {col_name_quoted} FROM {table_name_quoted} LIMIT {n}"
126
+
@@ -103,4 +103,62 @@ def format_recommended_steps(raw_text: str, heading: str = "# Recommended Steps:
103
103
  if not seen_heading:
104
104
  new_lines.insert(0, heading)
105
105
 
106
- return "\n".join(new_lines)
106
+ return "\n".join(new_lines)
107
+
108
def get_generic_summary(report_dict: dict, code_lang="python") -> str:
    """
    Render a dictionary of unknown structure (e.g., from json.loads(...))
    as a textual summary.

    Rules applied:
    1) 'report_title' (if present) is displayed first as the top heading.
    2) Any key whose name contains 'code' or 'function' has its value
       rendered inside a fenced code block.
    3) All other key-value pairs are displayed as plain text sections.

    Parameters
    ----------
    report_dict : dict
        The dictionary holding the agent output or user report.
    code_lang : str
        Language tag used for fenced code blocks (default "python").

    Returns
    -------
    str
        A formatted summary string.
    """
    # Title line first; fall back to a default when no title key exists.
    lines = [f"# {report_dict.get('report_title', 'Untitled Report')}"]

    for key, value in report_dict.items():
        # Already rendered as the top heading — skip it here.
        if key == "report_title":
            continue

        section_heading = f"\n## {format_agent_name(key).upper()}"
        lowered = key.lower()

        # Heuristic: keys mentioning code/function hold source text, so
        # wrap their values in a fenced code block.
        if "code" in lowered or "function" in lowered:
            lines.extend([section_heading, f"```{code_lang}\n" + str(value) + "\n```"])
        else:
            lines.extend([section_heading, str(value)])

    return "\n".join(lines)
152
+
153
def remove_consecutive_duplicates(messages):
    """
    Return the messages with consecutive content-duplicates removed.

    A message is dropped when its ``content`` equals the content of the
    message immediately before it; non-adjacent repeats are kept.

    Parameters
    ----------
    messages : list
        Message objects exposing a ``content`` attribute.

    Returns
    -------
    list
        A new list preserving order, with adjacent duplicates collapsed.
    """
    filtered = []
    last_content = None

    for message in messages:
        # Keep the message only when its content differs from the one
        # just seen; always advance the tracker to the current content.
        if message.content != last_content:
            filtered.append(message)
        last_content = message.content

    return filtered
163
+
164
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: ai-data-science-team
3
- Version: 0.0.0.9008
3
+ Version: 0.0.0.9010
4
4
  Summary: Build and run an AI-powered data science team.
5
5
  Home-page: https://github.com/business-science/ai-data-science-team
6
6
  Author: Matt Dancho
@@ -21,17 +21,26 @@ Requires-Dist: langchain_experimental
21
21
  Requires-Dist: langgraph>=0.2.57
22
22
  Requires-Dist: openai
23
23
  Requires-Dist: pandas
24
+ Requires-Dist: sqlalchemy
24
25
  Requires-Dist: numpy
25
26
  Requires-Dist: plotly
26
27
  Requires-Dist: streamlit
27
28
  Requires-Dist: scikit-learn
28
29
  Requires-Dist: xgboost
30
+ Requires-Dist: psutil
31
+ Provides-Extra: machine-learning
32
+ Requires-Dist: h2o; extra == "machine-learning"
33
+ Requires-Dist: mlflow; extra == "machine-learning"
34
+ Provides-Extra: all
35
+ Requires-Dist: h2o; extra == "all"
36
+ Requires-Dist: mlflow; extra == "all"
29
37
  Dynamic: author
30
38
  Dynamic: author-email
31
39
  Dynamic: classifier
32
40
  Dynamic: description
33
41
  Dynamic: description-content-type
34
42
  Dynamic: home-page
43
+ Dynamic: provides-extra
35
44
  Dynamic: requires-dist
36
45
  Dynamic: requires-python
37
46
  Dynamic: summary
@@ -39,7 +48,7 @@ Dynamic: summary
39
48
  <div align="center">
40
49
  <a href="https://github.com/business-science/ai-data-science-team">
41
50
  <picture>
42
- <img src="/img/ai_data_science_team_logo.jpg" alt="AI Data Science Team" width="400">
51
+ <img src="/img/ai_data_science_team_logo_small.jpg" alt="AI Data Science Team" width="400">
43
52
  </picture>
44
53
  </a>
45
54
  </div>
@@ -47,13 +56,13 @@ Dynamic: summary
47
56
  <em>An AI-powered data science team of agents to help you perform common data science tasks 10X faster</em>
48
57
  </div>
49
58
  <div align="center">
50
- <a href="https://pypi.python.org/pypi/ai-data-science-team"><img src="https://img.shields.io/pypi/v/ai-data-science-team.svg" alt="PyPI"></a>
51
- <a href="https://github.com/business-science/ai-data-science-team"><img src="https://img.shields.io/pypi/pyversions/ai-data-science-team.svg" alt="versions"></a>
52
- <a href="https://github.com/business-science/ai-data-science-team/blob/main/LICENSE"><img src="https://img.shields.io/github/license/business-science/ai-data-science-team.svg?v" alt="license"></a>
59
+ <a href="https://pypi.python.org/pypi/ai-data-science-team"><img src="https://img.shields.io/pypi/v/ai-data-science-team.svg?style=for-the-badge" alt="PyPI"></a>
60
+ <a href="https://github.com/business-science/ai-data-science-team"><img src="https://img.shields.io/pypi/pyversions/ai-data-science-team.svg?style=for-the-badge" alt="versions"></a>
61
+ <a href="https://github.com/business-science/ai-data-science-team/blob/main/LICENSE"><img src="https://img.shields.io/github/license/business-science/ai-data-science-team.svg?style=for-the-badge" alt="license"></a>
53
62
  </div>
54
63
 
55
64
 
56
- # Your AI Data Science Team (An Army Of Agents)
65
+ # Your AI Data Science Team (🪖 An Army Of Agents)
57
66
 
58
67
  **An AI-powered data science team of agents to help you perform common data science tasks 10X faster**.
59
68
 
@@ -74,14 +83,19 @@ The AI Data Science Team of Copilots includes Agents that specialize data cleani
74
83
 
75
84
  ## Table of Contents
76
85
 
77
- - [Your AI Data Science Team (An Army Of Agents)](#your-ai-data-science-team-an-army-of-agents)
86
+ - [Your AI Data Science Team (🪖 An Army Of Agents)](#your-ai-data-science-team--an-army-of-agents)
78
87
  - [Table of Contents](#table-of-contents)
79
88
  - [Companies That Want A Custom AI Data Science Team (And AI Apps)](#companies-that-want-a-custom-ai-data-science-team-and-ai-apps)
80
- - [Free How To Build AI Agents for Data Scientists Workshop](#free-how-to-build-ai-agents-for-data-scientists-workshop)
89
+ - [Generative AI for Data Scientists Workshop](#generative-ai-for-data-scientists-workshop)
81
90
  - [Data Science Agents](#data-science-agents)
82
- - [Coming Soon: Multi-Agents](#coming-soon-multi-agents)
83
- - [...And after that, the Multi-Agent Data Science Apps](#and-after-that-the-multi-agent-data-science-apps)
91
+ - [NEW: Multi-Agents](#new-multi-agents)
92
+ - [Data Science Apps](#data-science-apps)
93
+ - [Apps Available Now](#apps-available-now)
94
+ - [🔥 Agentic Applications](#-agentic-applications)
84
95
  - [Agents Available Now](#agents-available-now)
96
+ - [🔥🔥 NEW! Machine Learning Agents](#-new-machine-learning-agents)
97
+ - [Data Science Agents](#data-science-agents-1)
98
+ - [Multi-Agents](#multi-agents)
85
99
  - [Agents Coming Soon](#agents-coming-soon)
86
100
  - [Disclaimer](#disclaimer)
87
101
  - [Installation](#installation)
@@ -94,11 +108,11 @@ The AI Data Science Team of Copilots includes Agents that specialize data cleani
94
108
 
95
109
  ## Companies That Want A Custom AI Data Science Team (And AI Apps)
96
110
 
97
- Want to have your own _customized_ enterprise-grade AI Data Science Team and domain-specifici AI-powered Apps?
111
+ Want to have your own _customized_ enterprise-grade AI Data Science Team and *domain-specific* AI-powered Apps?
98
112
 
99
113
  **Send inquiries here:** [https://www.business-science.io/contact.html](https://www.business-science.io/contact.html)
100
114
 
101
- ## Free How To Build AI Agents for Data Scientists Workshop
115
+ ## Generative AI for Data Scientists Workshop
102
116
 
103
117
  If you're an aspiring data scientist who wants to learn how to build AI Agents and AI Apps for your company that performs Data Science, Business Intelligence, Churn Modeling, Time Series Forecasting, and more, then I'd love to help you.
104
118
 
@@ -110,32 +124,50 @@ This project is a work in progress. New data science agents will be released soo
110
124
 
111
125
  ![Data Science Team](/img/ai_data_science_team.jpg)
112
126
 
113
- ### Coming Soon: Multi-Agents
127
+ ### NEW: Multi-Agents
114
128
 
115
- This is the internals of the Business Intelligence SQL Agent I'm working on:
129
+ This is the internals of the SQL Data Analyst Agent that connects to SQL databases to pull data into the data science environment. It creates pipelines to automate data extraction, performs Joins, Aggregations, and other SQL Query operations. And it includes a Data Visualization Agent that creates visualizations to help you understand your data:
116
130
 
117
131
  ![Business Intelligence SQL Agent](/img/multi_agent_sql_data_visualization.jpg)
118
132
 
119
- ### ...And after that, the Multi-Agent Data Science Apps
133
+ ### Data Science Apps
120
134
 
121
135
  This is a top secret project I'm working on. It's a multi-agent data science app that performs time series forecasting.
122
136
 
123
- ![Multi-Agent Data Science App](/img/ai_powered_apps.jpg)
137
+ ![Multi-Agent Data Science App](/img/ai_powered_apps.jpg)
138
+
139
+ ### Apps Available Now
140
+
141
+ [See all available apps here](/apps)
142
+
143
+ #### 🔥 Agentic Applications
144
+
145
+ 1. **SQL Database Agent App:** Connects any SQL Database, generates SQL queries from natural language, and returns data as a downloadable table. [See Application](/apps/sql-database-agent-app/)
124
146
 
125
147
  ### Agents Available Now
126
148
 
127
- 1. **Data Wrangling Agent:** Merges, Joins, Preps and Wrangles data into a format that is ready for data analysis.
128
- 2. **Data Visualization Agent:** Creates visualizations to help you understand your data. Returns JSON serializable plotly visualizations.
129
- 3. **Data Cleaning Agent:** Performs Data Preparation steps including handling missing values, outliers, and data type conversions.
130
- 4. **Feature Engineering Agent:** Converts the prepared data into ML-ready data. Adds features to increase predictive accuracy of ML models.
131
- 5. **SQL Database Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations.
149
+ #### 🔥🔥 NEW! Machine Learning Agents
150
+
151
+ 1. **🔥 H2O Machine Learning Agent:** Builds and logs 100's of high-performance machine learning models. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/ml_agents/h2o_machine_learning_agent.ipynb)
152
+ 2. **🔥 MLflow Tools Agent (MLOps):** This agent has 11+ tools for managing models, ML projects, and making production ML predictions with MLflow. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/ml_agents/mlflow_tools_agent.ipynb)
153
+
154
+ #### Data Science Agents
155
+
156
+ 1. **Data Wrangling Agent:** Merges, Joins, Preps and Wrangles data into a format that is ready for data analysis. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/data_wrangling_agent.ipynb)
157
+ 2. **Data Visualization Agent:** Creates visualizations to help you understand your data. Returns JSON serializable plotly visualizations. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/data_visualization_agent.ipynb)
158
+ 3. **Data Cleaning Agent:** Performs Data Preparation steps including handling missing values, outliers, and data type conversions. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/data_cleaning_agent.ipynb)
159
+ 4. **Feature Engineering Agent:** Converts the prepared data into ML-ready data. Adds features to increase predictive accuracy of ML models. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/feature_engineering_agent.ipynb)
160
+ 5. **SQL Database Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/sql_database_agent.ipynb)
161
+
162
+ #### Multi-Agents
163
+
164
+ 1. **SQL Data Analyst Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations. Includes a Data Visualization Agent that creates visualizations to help you understand your data. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/multiagents/sql_data_analyst.ipynb)
132
165
 
133
166
  ### Agents Coming Soon
134
167
 
135
168
  1. **Data Analyst:** Analyzes data structure, creates exploratory visualizations, and performs correlation analysis to identify relationships.
136
- 2. **Machine Learning Agent:** Builds and logs the machine learning models.
137
- 3. **Interpretability Agent:** Performs Interpretable ML to explain why the model returned predictions including which features were the most important to the model.
138
- 4. **Supervisor:** Forms task list. Moderates sub-agents. Returns completed assignment.
169
+ 2. **Interpretability Agent:** Performs Interpretable ML to explain why the model returned predictions including which features were the most important to the model.
170
+ 3. **Supervisor:** Forms task list. Moderates sub-agents. Returns completed assignment.
139
171
 
140
172
  ## Disclaimer
141
173
 
@@ -0,0 +1,35 @@
1
+ ai_data_science_team/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ ai_data_science_team/_version.py,sha256=Jp-j7mSYzOstBlPFLGcnMvasT0m3q0YN3vk4cJAm2XM,26
3
+ ai_data_science_team/orchestration.py,sha256=xiIFOsrLwPdkSmtme7wNCCGv8XopnMTNElNzlZokL-4,303
4
+ ai_data_science_team/agents/__init__.py,sha256=KSwxfciazWyaDG-xM93SadiIyT6X4d3uJLTdvHvVKq0,553
5
+ ai_data_science_team/agents/data_cleaning_agent.py,sha256=V5tJMwGJK0JwrF_H-7r3S0E8UkAY6ci4BGxqjhZiGBI,27352
6
+ ai_data_science_team/agents/data_loader_tools_agent.py,sha256=N1PuepOaP0ocV3bDDJOj_DYf997c82k2dg-YCV4rG2E,1668
7
+ ai_data_science_team/agents/data_visualization_agent.py,sha256=tJy9Ehnh9mvAu6H--TXI8esSHmK1RW_L1RDAdn7Xek4,28821
8
+ ai_data_science_team/agents/data_wrangling_agent.py,sha256=LxzphH-TmrFG0GjejGOjulhPq4SsWFo5Y9tk4WEuN4M,32347
9
+ ai_data_science_team/agents/feature_engineering_agent.py,sha256=KmPBkj7WUBz6LFUlDDfQHMi7ujXwsH5P9LWRS-F4tdM,31026
10
+ ai_data_science_team/agents/sql_database_agent.py,sha256=1K2o3NiuKgGKdbMz_Tq9IeQ8xhXjpfGOxx9lArZh1yE,31173
11
+ ai_data_science_team/ml_agents/__init__.py,sha256=qq3UlDCRV_z4FHQ1jj3YR6zPbA6kuCvYCisj_bHYfO4,190
12
+ ai_data_science_team/ml_agents/h2o_ml_agent.py,sha256=1e0ozjBXHgZCSRf-k705gcmujVQFnsmVSqsm71LP1z4,33208
13
+ ai_data_science_team/ml_agents/mlflow_tools_agent.py,sha256=IFc0oP9LO1EoIOq2iR8osrua2QoqhaL0cSd8koX0S2Q,10049
14
+ ai_data_science_team/multiagents/__init__.py,sha256=aI4GztEwmkexZKT5XHcH3cAjO-xYUhncb3yfPJQDqTA,99
15
+ ai_data_science_team/multiagents/sql_data_analyst.py,sha256=kmmED3gLf5STWWY6ZVJYd7_Pt8NMl6SHyBocuQzRDGk,14193
16
+ ai_data_science_team/multiagents/supervised_data_analyst.py,sha256=uduCYpicga-UCf9nPQktQggW96-HDlqvioYmEdWejtI,158
17
+ ai_data_science_team/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
18
+ ai_data_science_team/parsers/parsers.py,sha256=hIsMZXRHz9hqs8R1ebymKA7D6NxOf5UVMpDAr_gGhE8,2027
19
+ ai_data_science_team/templates/__init__.py,sha256=_IcyFUu_mM8dFtttz95h0csJZ-XWDP3cEFuf22-R5RM,330
20
+ ai_data_science_team/templates/agent_templates.py,sha256=Lezp0ugtIP3m5WUOmjLwghNnjjyQVQecysONeIHWwi0,29133
21
+ ai_data_science_team/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
+ ai_data_science_team/tools/data_loader.py,sha256=qrDZGzkQ2pmDHVw2Ld-W1lKvuJx3ANPRFGeYy_Fw6o4,12640
23
+ ai_data_science_team/tools/dataframe.py,sha256=qSflGDByqqCXv4TjuvOFvGPZmegzeOesb0Y4i4Y0gdQ,4551
24
+ ai_data_science_team/tools/h2o.py,sha256=gSK0f2FULfAfipFTTjDMUS6DjHwFFvvl4jxshr6QpS0,38997
25
+ ai_data_science_team/tools/mlflow.py,sha256=8NTkSOvbTk01GOmwFaMkLBRse80w9Kk7Ypi6Fv4kTII,29475
26
+ ai_data_science_team/tools/sql.py,sha256=vvz_CiOg6GqXo2_mlF4kq5IS6if79dpaizAgLR9sRyg,4784
27
+ ai_data_science_team/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
28
+ ai_data_science_team/utils/logging.py,sha256=7wFOv6GGhXR_RPbh-8p0GyrS608XOnZtiaGK2IbDl_s,2081
29
+ ai_data_science_team/utils/plotly.py,sha256=nST-NG0oizKVHhH6HsjHUpTUumq9bCccBdxjuaJWnVQ,504
30
+ ai_data_science_team/utils/regex.py,sha256=lwarbLqTA2VfNQSyqKCl-PBlH_0WH3zXZvYGBYGUiu4,5144
31
+ ai_data_science_team-0.0.0.9010.dist-info/LICENSE,sha256=Xif0IRLdd2HGLATxV2EVp91aSY6KOuacRr_6BorKGzA,1084
32
+ ai_data_science_team-0.0.0.9010.dist-info/METADATA,sha256=EqD39-xaFz_EA_F92aGBKwogUL42wi74reGGKJLeoYs,11642
33
+ ai_data_science_team-0.0.0.9010.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
34
+ ai_data_science_team-0.0.0.9010.dist-info/top_level.txt,sha256=CnoMgOphCoAdGTLueWdCVByVyjwOubaGiTB1lchdy4M,21
35
+ ai_data_science_team-0.0.0.9010.dist-info/RECORD,,
@@ -1,26 +0,0 @@
1
- ai_data_science_team/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
- ai_data_science_team/_version.py,sha256=P58HXrtvcvSlic1oJw_w9WwHrQ3kBtvlqYwnMEbOL6g,26
3
- ai_data_science_team/orchestration.py,sha256=xiIFOsrLwPdkSmtme7wNCCGv8XopnMTNElNzlZokL-4,303
4
- ai_data_science_team/agents/__init__.py,sha256=6qGE7p8X291aiw5CFwTNot00_LF3_1fboLbjVf_TlHo,554
5
- ai_data_science_team/agents/data_cleaning_agent.py,sha256=sMyyWvJ3NK6bEqdkttqRZU03pN6Q2gcR7d39eA0wj-w,27225
6
- ai_data_science_team/agents/data_visualization_agent.py,sha256=S0gvUepJBVveMyTFaU0xcNCuOgLLkuDCZbwTGpyjNNQ,29186
7
- ai_data_science_team/agents/data_wrangling_agent.py,sha256=s2w9ub92mHFl9oj3jUxlIfEq4Yg8uwGOcwyX3rIgAxk,32477
8
- ai_data_science_team/agents/feature_engineering_agent.py,sha256=nB5KBcPzrxtN82sWAXFVZgkezEBG2uscSxb12njLux0,31596
9
- ai_data_science_team/agents/sql_database_agent.py,sha256=GbqMh-ImoKaoDMtvv3IZOQT82WGewCubZKyDU4iYIG4,28796
10
- ai_data_science_team/multiagents/__init__.py,sha256=aI4GztEwmkexZKT5XHcH3cAjO-xYUhncb3yfPJQDqTA,99
11
- ai_data_science_team/multiagents/sql_data_analyst.py,sha256=cFAqCKnLKKJ0zKxmRWSZupbRrVZLI-ugxLAgasWhjVc,9974
12
- ai_data_science_team/multiagents/supervised_data_analyst.py,sha256=uduCYpicga-UCf9nPQktQggW96-HDlqvioYmEdWejtI,158
13
- ai_data_science_team/templates/__init__.py,sha256=Dt3K5sdhEEQSc1hLasjXPkhmPn-JpPndSFc85ANIAyo,294
14
- ai_data_science_team/templates/agent_templates.py,sha256=mlsWxfmLRu9ocgR0l5UQxwki0rnoCoksRyx87WGvbeI,26804
15
- ai_data_science_team/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
- ai_data_science_team/tools/logging.py,sha256=7wFOv6GGhXR_RPbh-8p0GyrS608XOnZtiaGK2IbDl_s,2081
17
- ai_data_science_team/tools/metadata.py,sha256=tbnca_tDp67oBA6qD29AKVooJG10VqGr4vwzj4rPUas,8348
18
- ai_data_science_team/tools/parsers.py,sha256=BAi-fJT7BBt9nRS3w5n9LDTsu7JAJsH8CAI9-Qf7jCs,2086
19
- ai_data_science_team/tools/regex.py,sha256=dDHzeGkHU0fGQ5qbfuOR9SXdypjeekvSUn1nQztXuvo,3296
20
- ai_data_science_team/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
21
- ai_data_science_team/utils/plotly.py,sha256=nST-NG0oizKVHhH6HsjHUpTUumq9bCccBdxjuaJWnVQ,504
22
- ai_data_science_team-0.0.0.9008.dist-info/LICENSE,sha256=Xif0IRLdd2HGLATxV2EVp91aSY6KOuacRr_6BorKGzA,1084
23
- ai_data_science_team-0.0.0.9008.dist-info/METADATA,sha256=MLWo_wXkAnJP0YcddIDpE3NDhSQViALw_Dai9l3WSS0,9014
24
- ai_data_science_team-0.0.0.9008.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
25
- ai_data_science_team-0.0.0.9008.dist-info/top_level.txt,sha256=CnoMgOphCoAdGTLueWdCVByVyjwOubaGiTB1lchdy4M,21
26
- ai_data_science_team-0.0.0.9008.dist-info/RECORD,,
File without changes