ai-data-science-team 0.0.0.9009__py3-none-any.whl → 0.0.0.9010__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. ai_data_science_team/_version.py +1 -1
  2. ai_data_science_team/agents/data_cleaning_agent.py +6 -6
  3. ai_data_science_team/agents/data_loader_tools_agent.py +69 -0
  4. ai_data_science_team/agents/data_visualization_agent.py +6 -7
  5. ai_data_science_team/agents/data_wrangling_agent.py +6 -6
  6. ai_data_science_team/agents/feature_engineering_agent.py +6 -6
  7. ai_data_science_team/agents/sql_database_agent.py +6 -6
  8. ai_data_science_team/ml_agents/__init__.py +1 -0
  9. ai_data_science_team/ml_agents/h2o_ml_agent.py +205 -385
  10. ai_data_science_team/ml_agents/mlflow_tools_agent.py +327 -0
  11. ai_data_science_team/multiagents/sql_data_analyst.py +3 -4
  12. ai_data_science_team/parsers/__init__.py +0 -0
  13. ai_data_science_team/{tools → parsers}/parsers.py +0 -1
  14. ai_data_science_team/templates/agent_templates.py +6 -6
  15. ai_data_science_team/tools/data_loader.py +378 -0
  16. ai_data_science_team/tools/dataframe.py +139 -0
  17. ai_data_science_team/tools/h2o.py +643 -0
  18. ai_data_science_team/tools/mlflow.py +961 -0
  19. ai_data_science_team/tools/{metadata.py → sql.py} +1 -137
  20. {ai_data_science_team-0.0.0.9009.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/METADATA +34 -16
  21. ai_data_science_team-0.0.0.9010.dist-info/RECORD +35 -0
  22. ai_data_science_team-0.0.0.9009.dist-info/RECORD +0 -28
  23. /ai_data_science_team/{tools → utils}/logging.py +0 -0
  24. /ai_data_science_team/{tools → utils}/regex.py +0 -0
  25. {ai_data_science_team-0.0.0.9009.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/LICENSE +0 -0
  26. {ai_data_science_team-0.0.0.9009.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/WHEEL +0 -0
  27. {ai_data_science_team-0.0.0.9009.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/top_level.txt +0 -0
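The file moves in this release imply new import paths: `parsers.py` leaves `tools/` for a new `parsers/` package, `logging.py` and `regex.py` move to `utils/`, `tools/metadata.py` is renamed to `tools/sql.py`, and a new `tools/dataframe.py` module appears. A minimal sketch of the presumed before/after module paths follows; apart from the two functions visible in the diff below, the exact public symbols are assumptions, not confirmed by this diff.

```python
# Presumed module relocations in 0.0.0.9010, inferred from the renames above.
# Old (0.0.0.9009) module paths -> new (0.0.0.9010) module paths:
#   ai_data_science_team.tools.parsers   -> ai_data_science_team.parsers.parsers
#   ai_data_science_team.tools.regex     -> ai_data_science_team.utils.regex
#   ai_data_science_team.tools.logging   -> ai_data_science_team.utils.logging
#   ai_data_science_team.tools.metadata  -> ai_data_science_team.tools.sql

# get_database_metadata stays in the renamed tools/sql.py (see the diff below);
# get_dataframe_summary is assumed to have moved to the new tools/dataframe.py.
from ai_data_science_team.tools.sql import get_database_metadata
from ai_data_science_team.tools.dataframe import get_dataframe_summary
```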
ai_data_science_team/tools/{metadata.py → sql.py}

@@ -1,143 +1,7 @@
-import io
+
 import pandas as pd
 import sqlalchemy as sql
 from sqlalchemy import inspect
-from typing import Union, List, Dict
-
-def get_dataframe_summary(
-    dataframes: Union[pd.DataFrame, List[pd.DataFrame], Dict[str, pd.DataFrame]],
-    n_sample: int = 30,
-    skip_stats: bool = False,
-) -> List[str]:
-    """
-    Generate a summary for one or more DataFrames. Accepts a single DataFrame, a list of DataFrames,
-    or a dictionary mapping names to DataFrames.
-
-    Parameters
-    ----------
-    dataframes : pandas.DataFrame or list of pandas.DataFrame or dict of (str -> pandas.DataFrame)
-        - Single DataFrame: produce a single summary (returned within a one-element list).
-        - List of DataFrames: produce a summary for each DataFrame, using index-based names.
-        - Dictionary of DataFrames: produce a summary for each DataFrame, using dictionary keys as names.
-    n_sample : int, default 30
-        Number of rows to display in the "Data (first 30 rows)" section.
-    skip_stats : bool, default False
-        If True, skip the descriptive statistics and DataFrame info sections.
-
-    Example:
-    --------
-    ``` python
-    import pandas as pd
-    from sklearn.datasets import load_iris
-    data = load_iris(as_frame=True)
-    dataframes = {
-        "iris": data.frame,
-        "iris_target": data.target,
-    }
-    summaries = get_dataframe_summary(dataframes)
-    print(summaries[0])
-    ```
-
-    Returns
-    -------
-    list of str
-        A list of summaries, one for each provided DataFrame. Each summary includes:
-        - Shape of the DataFrame (rows, columns)
-        - Column data types
-        - Missing value percentage
-        - Unique value counts
-        - First 30 rows
-        - Descriptive statistics
-        - DataFrame info output
-    """
-
-    summaries = []
-
-    # --- Dictionary Case ---
-    if isinstance(dataframes, dict):
-        for dataset_name, df in dataframes.items():
-            summaries.append(_summarize_dataframe(df, dataset_name, n_sample, skip_stats))
-
-    # --- Single DataFrame Case ---
-    elif isinstance(dataframes, pd.DataFrame):
-        summaries.append(_summarize_dataframe(dataframes, "Single_Dataset", n_sample, skip_stats))
-
-    # --- List of DataFrames Case ---
-    elif isinstance(dataframes, list):
-        for idx, df in enumerate(dataframes):
-            dataset_name = f"Dataset_{idx}"
-            summaries.append(_summarize_dataframe(df, dataset_name, n_sample, skip_stats))
-
-    else:
-        raise TypeError(
-            "Input must be a single DataFrame, a list of DataFrames, or a dictionary of DataFrames."
-        )
-
-    return summaries
-
-
-def _summarize_dataframe(df: pd.DataFrame, dataset_name: str, n_sample=30, skip_stats=False) -> str:
-    """Generate a summary string for a single DataFrame."""
-    # 1. Convert dictionary-type cells to strings
-    #    This prevents unhashable dict errors during df.nunique().
-    df = df.apply(lambda col: col.map(lambda x: str(x) if isinstance(x, dict) else x))
-
-    # 2. Capture df.info() output
-    buffer = io.StringIO()
-    df.info(buf=buffer)
-    info_text = buffer.getvalue()
-
-    # 3. Calculate missing value stats
-    missing_stats = (df.isna().sum() / len(df) * 100).sort_values(ascending=False)
-    missing_summary = "\n".join([f"{col}: {val:.2f}%" for col, val in missing_stats.items()])
-
-    # 4. Get column data types
-    column_types = "\n".join([f"{col}: {dtype}" for col, dtype in df.dtypes.items()])
-
-    # 5. Get unique value counts
-    unique_counts = df.nunique()  # Will no longer fail on unhashable dict
-    unique_counts_summary = "\n".join([f"{col}: {count}" for col, count in unique_counts.items()])
-
-    # 6. Generate the summary text
-    if not skip_stats:
-        summary_text = f"""
-        Dataset Name: {dataset_name}
-        ----------------------------
-        Shape: {df.shape[0]} rows x {df.shape[1]} columns
-
-        Column Data Types:
-        {column_types}
-
-        Missing Value Percentage:
-        {missing_summary}
-
-        Unique Value Counts:
-        {unique_counts_summary}
-
-        Data (first {n_sample} rows):
-        {df.head(n_sample).to_string()}
-
-        Data Description:
-        {df.describe().to_string()}
-
-        Data Info:
-        {info_text}
-        """
-    else:
-        summary_text = f"""
-        Dataset Name: {dataset_name}
-        ----------------------------
-        Shape: {df.shape[0]} rows x {df.shape[1]} columns
-
-        Column Data Types:
-        {column_types}
-
-        Data (first {n_sample} rows):
-        {df.head(n_sample).to_string()}
-        """
-
-    return summary_text.strip()
-
 
 
 def get_database_metadata(connection, n_samples=10) -> dict:
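Since `get_dataframe_summary` is removed from this module but its signature and docstring are shown above, here is a hedged usage sketch. The `tools.dataframe` import path is an assumption based on the new `tools/dataframe.py` file in this release; verify it against the installed package.

```python
import pandas as pd

# Assumed new location of the function removed above (tools/dataframe.py is new in 0.0.0.9010).
from ai_data_science_team.tools.dataframe import get_dataframe_summary

df = pd.DataFrame(
    {
        "id": [1, 2, 3, 4],
        "category": ["a", "b", "a", None],
        "value": [10.5, 3.2, 7.7, 1.1],
    }
)

# A single DataFrame yields a one-element list of summary strings.
summaries = get_dataframe_summary(df, n_sample=5)
print(summaries[0])

# skip_stats=True omits the descriptive statistics and df.info() sections.
print(get_dataframe_summary({"my_data": df}, skip_stats=True)[0])
```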
{ai_data_science_team-0.0.0.9009.dist-info → ai_data_science_team-0.0.0.9010.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ai-data-science-team
-Version: 0.0.0.9009
+Version: 0.0.0.9010
 Summary: Build and run an AI-powered data science team.
 Home-page: https://github.com/business-science/ai-data-science-team
 Author: Matt Dancho
@@ -27,10 +27,13 @@ Requires-Dist: plotly
 Requires-Dist: streamlit
 Requires-Dist: scikit-learn
 Requires-Dist: xgboost
-Provides-Extra: machine-learning-agent
-Requires-Dist: h2o; extra == "machine-learning-agent"
+Requires-Dist: psutil
+Provides-Extra: machine-learning
+Requires-Dist: h2o; extra == "machine-learning"
+Requires-Dist: mlflow; extra == "machine-learning"
 Provides-Extra: all
 Requires-Dist: h2o; extra == "all"
+Requires-Dist: mlflow; extra == "all"
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
@@ -45,7 +48,7 @@ Dynamic: summary
 <div align="center">
   <a href="https://github.com/business-science/ai-data-science-team">
     <picture>
-      <img src="/img/ai_data_science_team_logo.jpg" alt="AI Data Science Team" width="400">
+      <img src="/img/ai_data_science_team_logo_small.jpg" alt="AI Data Science Team" width="400">
     </picture>
   </a>
 </div>
@@ -86,8 +89,11 @@ The AI Data Science Team of Copilots includes Agents that specialize data cleani
 - [Generative AI for Data Scientists Workshop](#generative-ai-for-data-scientists-workshop)
 - [Data Science Agents](#data-science-agents)
 - [NEW: Multi-Agents](#new-multi-agents)
-- [Coming Soon: Data Science Apps](#coming-soon-data-science-apps)
+- [Data Science Apps](#data-science-apps)
+  - [Apps Available Now](#apps-available-now)
+  - [🔥 Agentic Applications](#-agentic-applications)
 - [Agents Available Now](#agents-available-now)
+  - [🔥🔥 NEW! Machine Learning Agents](#-new-machine-learning-agents)
   - [Data Science Agents](#data-science-agents-1)
   - [Multi-Agents](#multi-agents)
 - [Agents Coming Soon](#agents-coming-soon)
@@ -124,32 +130,44 @@ This is the internals of the SQL Data Analyst Agent that connects to SQL databas
 
 ![Business Intelligence SQL Agent](/img/multi_agent_sql_data_visualization.jpg)
 
-### Coming Soon: Data Science Apps
+### Data Science Apps
 
 This is a top secret project I'm working on. It's a multi-agent data science app that performs time series forecasting.
 
-![Multi-Agent Data Science App](/img/ai_powered_apps.jpg)
+![Multi-Agent Data Science App](/img/ai_powered_apps.jpg)
+
+### Apps Available Now
+
+[See all available apps here](/apps)
+
+#### 🔥 Agentic Applications
+
+1. **SQL Database Agent App:** Connects any SQL Database, generates SQL queries from natural language, and returns data as a downloadable table. [See Application](/apps/sql-database-agent-app/)
 
 ### Agents Available Now
 
+#### 🔥🔥 NEW! Machine Learning Agents
+
+1. **🔥 H2O Machine Learning Agent:** Builds and logs 100's of high-performance machine learning models. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/ml_agents/h2o_machine_learning_agent.ipynb)
+2. **🔥 MLflow Tools Agent (MLOps):** This agent has 11+ tools for managing models, ML projects, and making production ML predictions with MLflow. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/ml_agents/mlflow_tools_agent.ipynb)
+
 #### Data Science Agents
 
-1. **Data Wrangling Agent:** Merges, Joins, Preps and Wrangles data into a format that is ready for data analysis.
-2. **Data Visualization Agent:** Creates visualizations to help you understand your data. Returns JSON serializable plotly visualizations.
-3. **Data Cleaning Agent:** Performs Data Preparation steps including handling missing values, outliers, and data type conversions.
-4. **Feature Engineering Agent:** Converts the prepared data into ML-ready data. Adds features to increase predictive accuracy of ML models.
-5. **SQL Database Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations.
+1. **Data Wrangling Agent:** Merges, Joins, Preps and Wrangles data into a format that is ready for data analysis. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/data_wrangling_agent.ipynb)
+2. **Data Visualization Agent:** Creates visualizations to help you understand your data. Returns JSON serializable plotly visualizations. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/data_visualization_agent.ipynb)
+3. **Data Cleaning Agent:** Performs Data Preparation steps including handling missing values, outliers, and data type conversions. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/data_cleaning_agent.ipynb)
+4. **Feature Engineering Agent:** Converts the prepared data into ML-ready data. Adds features to increase predictive accuracy of ML models. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/feature_engineering_agent.ipynb)
+5. **SQL Database Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/sql_database_agent.ipynb)
 
 #### Multi-Agents
 
-1. **SQL Data Analyst Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations. Includes a Data Visualization Agent that creates visualizations to help you understand your data.
+1. **SQL Data Analyst Agent:** Connects to SQL databases to pull data into the data science environment. Creates pipelines to automate data extraction. Performs Joins, Aggregations, and other SQL Query operations. Includes a Data Visualization Agent that creates visualizations to help you understand your data. [See Example](https://github.com/business-science/ai-data-science-team/blob/master/examples/multiagents/sql_data_analyst.ipynb)
 
 ### Agents Coming Soon
 
 1. **Data Analyst:** Analyzes data structure, creates exploratory visualizations, and performs correlation analysis to identify relationships.
-2. **Machine Learning Agent:** Builds and logs the machine learning models.
-3. **Interpretability Agent:** Performs Interpretable ML to explain why the model returned predictions including which features were the most important to the model.
-4. **Supervisor:** Forms task list. Moderates sub-agents. Returns completed assignment.
+2. **Interpretability Agent:** Performs Interpretable ML to explain why the model returned predictions including which features were the most important to the model.
+3. **Supervisor:** Forms task list. Moderates sub-agents. Returns completed assignment.
 
 ## Disclaimer
 
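Per the METADATA changes above, the `machine-learning-agent` extra is renamed to `machine-learning` and now pulls in `mlflow` alongside `h2o`, `psutil` becomes a core dependency, and `mlflow` is also added to the `all` extra. After installing with, for example, `pip install "ai-data-science-team[machine-learning]"`, a quick stdlib-only check (a sketch, not part of the package) confirms the optional dependencies resolved:

```python
# Verify the optional dependencies declared under the "machine-learning" extra
# (plus the newly required psutil) are importable in the current environment.
from importlib import util

for pkg in ("h2o", "mlflow", "psutil"):
    status = "installed" if util.find_spec(pkg) is not None else "missing"
    print(f"{pkg}: {status}")
```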
ai_data_science_team-0.0.0.9010.dist-info/RECORD

@@ -0,0 +1,35 @@
+ai_data_science_team/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_data_science_team/_version.py,sha256=Jp-j7mSYzOstBlPFLGcnMvasT0m3q0YN3vk4cJAm2XM,26
+ai_data_science_team/orchestration.py,sha256=xiIFOsrLwPdkSmtme7wNCCGv8XopnMTNElNzlZokL-4,303
+ai_data_science_team/agents/__init__.py,sha256=KSwxfciazWyaDG-xM93SadiIyT6X4d3uJLTdvHvVKq0,553
+ai_data_science_team/agents/data_cleaning_agent.py,sha256=V5tJMwGJK0JwrF_H-7r3S0E8UkAY6ci4BGxqjhZiGBI,27352
+ai_data_science_team/agents/data_loader_tools_agent.py,sha256=N1PuepOaP0ocV3bDDJOj_DYf997c82k2dg-YCV4rG2E,1668
+ai_data_science_team/agents/data_visualization_agent.py,sha256=tJy9Ehnh9mvAu6H--TXI8esSHmK1RW_L1RDAdn7Xek4,28821
+ai_data_science_team/agents/data_wrangling_agent.py,sha256=LxzphH-TmrFG0GjejGOjulhPq4SsWFo5Y9tk4WEuN4M,32347
+ai_data_science_team/agents/feature_engineering_agent.py,sha256=KmPBkj7WUBz6LFUlDDfQHMi7ujXwsH5P9LWRS-F4tdM,31026
+ai_data_science_team/agents/sql_database_agent.py,sha256=1K2o3NiuKgGKdbMz_Tq9IeQ8xhXjpfGOxx9lArZh1yE,31173
+ai_data_science_team/ml_agents/__init__.py,sha256=qq3UlDCRV_z4FHQ1jj3YR6zPbA6kuCvYCisj_bHYfO4,190
+ai_data_science_team/ml_agents/h2o_ml_agent.py,sha256=1e0ozjBXHgZCSRf-k705gcmujVQFnsmVSqsm71LP1z4,33208
+ai_data_science_team/ml_agents/mlflow_tools_agent.py,sha256=IFc0oP9LO1EoIOq2iR8osrua2QoqhaL0cSd8koX0S2Q,10049
+ai_data_science_team/multiagents/__init__.py,sha256=aI4GztEwmkexZKT5XHcH3cAjO-xYUhncb3yfPJQDqTA,99
+ai_data_science_team/multiagents/sql_data_analyst.py,sha256=kmmED3gLf5STWWY6ZVJYd7_Pt8NMl6SHyBocuQzRDGk,14193
+ai_data_science_team/multiagents/supervised_data_analyst.py,sha256=uduCYpicga-UCf9nPQktQggW96-HDlqvioYmEdWejtI,158
+ai_data_science_team/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_data_science_team/parsers/parsers.py,sha256=hIsMZXRHz9hqs8R1ebymKA7D6NxOf5UVMpDAr_gGhE8,2027
+ai_data_science_team/templates/__init__.py,sha256=_IcyFUu_mM8dFtttz95h0csJZ-XWDP3cEFuf22-R5RM,330
+ai_data_science_team/templates/agent_templates.py,sha256=Lezp0ugtIP3m5WUOmjLwghNnjjyQVQecysONeIHWwi0,29133
+ai_data_science_team/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_data_science_team/tools/data_loader.py,sha256=qrDZGzkQ2pmDHVw2Ld-W1lKvuJx3ANPRFGeYy_Fw6o4,12640
+ai_data_science_team/tools/dataframe.py,sha256=qSflGDByqqCXv4TjuvOFvGPZmegzeOesb0Y4i4Y0gdQ,4551
+ai_data_science_team/tools/h2o.py,sha256=gSK0f2FULfAfipFTTjDMUS6DjHwFFvvl4jxshr6QpS0,38997
+ai_data_science_team/tools/mlflow.py,sha256=8NTkSOvbTk01GOmwFaMkLBRse80w9Kk7Ypi6Fv4kTII,29475
+ai_data_science_team/tools/sql.py,sha256=vvz_CiOg6GqXo2_mlF4kq5IS6if79dpaizAgLR9sRyg,4784
+ai_data_science_team/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ai_data_science_team/utils/logging.py,sha256=7wFOv6GGhXR_RPbh-8p0GyrS608XOnZtiaGK2IbDl_s,2081
+ai_data_science_team/utils/plotly.py,sha256=nST-NG0oizKVHhH6HsjHUpTUumq9bCccBdxjuaJWnVQ,504
+ai_data_science_team/utils/regex.py,sha256=lwarbLqTA2VfNQSyqKCl-PBlH_0WH3zXZvYGBYGUiu4,5144
+ai_data_science_team-0.0.0.9010.dist-info/LICENSE,sha256=Xif0IRLdd2HGLATxV2EVp91aSY6KOuacRr_6BorKGzA,1084
+ai_data_science_team-0.0.0.9010.dist-info/METADATA,sha256=EqD39-xaFz_EA_F92aGBKwogUL42wi74reGGKJLeoYs,11642
+ai_data_science_team-0.0.0.9010.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ai_data_science_team-0.0.0.9010.dist-info/top_level.txt,sha256=CnoMgOphCoAdGTLueWdCVByVyjwOubaGiTB1lchdy4M,21
+ai_data_science_team-0.0.0.9010.dist-info/RECORD,,
ai_data_science_team-0.0.0.9009.dist-info/RECORD

@@ -1,28 +0,0 @@
-ai_data_science_team/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_data_science_team/_version.py,sha256=IEp7uHd_8RlLYvLFCpp_wJCutdvJI7cJ73IN0GzK3ts,26
-ai_data_science_team/orchestration.py,sha256=xiIFOsrLwPdkSmtme7wNCCGv8XopnMTNElNzlZokL-4,303
-ai_data_science_team/agents/__init__.py,sha256=KSwxfciazWyaDG-xM93SadiIyT6X4d3uJLTdvHvVKq0,553
-ai_data_science_team/agents/data_cleaning_agent.py,sha256=OWJ3tEA5cy2fo92bTmKS8CDA48ZRRqmWg2kH7cacjDM,27337
-ai_data_science_team/agents/data_visualization_agent.py,sha256=pm7yln3GI91mOAjwDveenWwYXtJqh990oFvsoFhX3aA,28864
-ai_data_science_team/agents/data_wrangling_agent.py,sha256=UUoejYBmVFdM4At_CKQjYUyFHkaloowdd6yAElfeV9Q,32332
-ai_data_science_team/agents/feature_engineering_agent.py,sha256=bngc0COOYa8AolJwQrNuO1aDRgwBCp6LCN9_otIscWk,31011
-ai_data_science_team/agents/sql_database_agent.py,sha256=M_7IBOu7ISZZEtDAC9KGQIE7FPaXSyQ5IdD8vu91_DM,31164
-ai_data_science_team/ml_agents/__init__.py,sha256=fA5uX6dSVMAf2ApmBJXEArbnKNmsmuE0nbBsCeNAksk,86
-ai_data_science_team/ml_agents/h2o_ml_agent.py,sha256=h6kz8ZPw7ApCdfrKBqggHfBnwBJ3kDSzLMwxMxz_2tM,55181
-ai_data_science_team/multiagents/__init__.py,sha256=aI4GztEwmkexZKT5XHcH3cAjO-xYUhncb3yfPJQDqTA,99
-ai_data_science_team/multiagents/sql_data_analyst.py,sha256=2gETU9O5t9R5Ut1kEW1T3H-6Sh8xDzDfQmFV3i5lMKs,14233
-ai_data_science_team/multiagents/supervised_data_analyst.py,sha256=uduCYpicga-UCf9nPQktQggW96-HDlqvioYmEdWejtI,158
-ai_data_science_team/templates/__init__.py,sha256=_IcyFUu_mM8dFtttz95h0csJZ-XWDP3cEFuf22-R5RM,330
-ai_data_science_team/templates/agent_templates.py,sha256=pphuitXUVv21ljr_H-aof6Xq78KvDY0adF3K6lXGEz4,29107
-ai_data_science_team/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_data_science_team/tools/logging.py,sha256=7wFOv6GGhXR_RPbh-8p0GyrS608XOnZtiaGK2IbDl_s,2081
-ai_data_science_team/tools/metadata.py,sha256=3lPxLEUr3I9AF6wIKx5en-GV6JVkpUHDSLQxKj1N5Gs,9313
-ai_data_science_team/tools/parsers.py,sha256=BAi-fJT7BBt9nRS3w5n9LDTsu7JAJsH8CAI9-Qf7jCs,2086
-ai_data_science_team/tools/regex.py,sha256=lwarbLqTA2VfNQSyqKCl-PBlH_0WH3zXZvYGBYGUiu4,5144
-ai_data_science_team/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ai_data_science_team/utils/plotly.py,sha256=nST-NG0oizKVHhH6HsjHUpTUumq9bCccBdxjuaJWnVQ,504
-ai_data_science_team-0.0.0.9009.dist-info/LICENSE,sha256=Xif0IRLdd2HGLATxV2EVp91aSY6KOuacRr_6BorKGzA,1084
-ai_data_science_team-0.0.0.9009.dist-info/METADATA,sha256=iMIyksmkPmuj9TI_oIa1lXGJYKVspxhWvflBJ1zlW0o,9875
-ai_data_science_team-0.0.0.9009.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-ai_data_science_team-0.0.0.9009.dist-info/top_level.txt,sha256=CnoMgOphCoAdGTLueWdCVByVyjwOubaGiTB1lchdy4M,21
-ai_data_science_team-0.0.0.9009.dist-info/RECORD,,
The remaining entries in the file list (logging.py, regex.py, LICENSE, WHEEL, top_level.txt) are moves or renames with no content changes.