MindsDB 25.1.5.2-py3-none-any.whl → 25.2.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the package versions as they appear in the public registry.

Note: this version of MindsDB has been flagged as a potentially problematic release.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: MindsDB
3
- Version: 25.1.5.2
3
+ Version: 25.2.1.0
4
4
  Summary: MindsDB's AI SQL Server enables developers to build AI tools that need access to real-time data to perform their tasks
5
5
  Home-page: https://github.com/mindsdb/mindsdb
6
6
  Download-URL: https://pypi.org/project/mindsdb/
@@ -31,7 +31,7 @@ Requires-Dist: redis<6.0.0,>=5.0.0
31
31
  Requires-Dist: walrus==0.9.3
32
32
  Requires-Dist: flask-compress>=1.0.0
33
33
  Requires-Dist: appdirs>=1.0.0
34
- Requires-Dist: mindsdb-sql-parser~=0.2.0
34
+ Requires-Dist: mindsdb-sql-parser~=0.3.0
35
35
  Requires-Dist: pydantic~=2.7.0
36
36
  Requires-Dist: mindsdb-evaluator<0.1.0,>=0.0.7
37
37
  Requires-Dist: duckdb==0.9.1
@@ -80,13 +80,13 @@ Requires-Dist: virtualenv
80
80
  Requires-Dist: pyarrow==14.0.1
81
81
  Requires-Dist: scylla-driver
82
82
  Requires-Dist: clickhouse-sqlalchemy>=0.3.1
83
- Requires-Dist: tiktoken
84
83
  Requires-Dist: openai==1.55.3
85
84
  Requires-Dist: litellm==1.44.8
85
+ Requires-Dist: anthropic>=0.26.1
86
86
  Requires-Dist: chromadb~=0.6.3
87
- Requires-Dist: wikipedia==1.4.0
88
87
  Requires-Dist: openai<2.0.0,>=1.54.0
89
- Requires-Dist: anthropic>=0.26.1
88
+ Requires-Dist: wikipedia==1.4.0
89
+ Requires-Dist: tiktoken
90
90
  Requires-Dist: mysql-connector-python==9.1.0
91
91
  Requires-Dist: openai<2.0.0,>=1.54.0
92
92
  Requires-Dist: pydantic-settings>=2.1.0
@@ -99,8 +99,8 @@ Requires-Dist: slack_sdk==3.30.0
99
99
  Requires-Dist: statsforecast==1.6.0
100
100
  Requires-Dist: nixtla==0.5.0
101
101
  Requires-Dist: tweepy
102
- Requires-Dist: html2text
103
102
  Requires-Dist: bs4
103
+ Requires-Dist: html2text
104
104
  Provides-Extra: all-extras
105
105
  Provides-Extra: access
106
106
  Requires-Dist: sqlalchemy-access; extra == "access"
@@ -108,33 +108,33 @@ Requires-Dist: pyodbc; extra == "access"
108
108
  Provides-Extra: aerospike
109
109
  Requires-Dist: aerospike~=13.0.0; extra == "aerospike"
110
110
  Provides-Extra: altibase
111
- Requires-Dist: jaydebeapi; extra == "altibase"
112
111
  Requires-Dist: pyodbc; extra == "altibase"
112
+ Requires-Dist: jaydebeapi; extra == "altibase"
113
113
  Provides-Extra: anomaly-detection
114
- Requires-Dist: pyod>=1.1; extra == "anomaly-detection"
114
+ Requires-Dist: joblib; extra == "anomaly-detection"
115
115
  Requires-Dist: catboost>=1.2; extra == "anomaly-detection"
116
+ Requires-Dist: pyod>=1.1; extra == "anomaly-detection"
116
117
  Requires-Dist: xgboost; extra == "anomaly-detection"
117
- Requires-Dist: joblib; extra == "anomaly-detection"
118
118
  Provides-Extra: anthropic
119
119
  Requires-Dist: anthropic==0.18.1; extra == "anthropic"
120
120
  Provides-Extra: anyscale-endpoints
121
+ Requires-Dist: tiktoken; extra == "anyscale-endpoints"
121
122
  Requires-Dist: openai<2.0.0,>=1.54.0; extra == "anyscale-endpoints"
122
123
  Requires-Dist: pydantic-settings>=2.1.0; extra == "anyscale-endpoints"
123
124
  Requires-Dist: openai==1.24.0; extra == "anyscale-endpoints"
124
- Requires-Dist: tiktoken; extra == "anyscale-endpoints"
125
125
  Provides-Extra: apache-doris
126
126
  Requires-Dist: mysql-connector-python==9.1.0; extra == "apache-doris"
127
127
  Provides-Extra: aurora
128
128
  Requires-Dist: mysql-connector-python==9.1.0; extra == "aurora"
129
129
  Provides-Extra: autogluon
130
- Requires-Dist: type_infer==0.0.20; extra == "autogluon"
131
130
  Requires-Dist: autogluon; extra == "autogluon"
131
+ Requires-Dist: type_infer==0.0.20; extra == "autogluon"
132
132
  Provides-Extra: autokeras
133
133
  Requires-Dist: tensorflow; extra == "autokeras"
134
134
  Requires-Dist: autokeras; extra == "autokeras"
135
135
  Provides-Extra: autosklearn
136
- Requires-Dist: type_infer==0.0.20; extra == "autosklearn"
137
136
  Requires-Dist: auto-sklearn; extra == "autosklearn"
137
+ Requires-Dist: type_infer==0.0.20; extra == "autosklearn"
138
138
  Provides-Extra: azure-blob
139
139
  Requires-Dist: azure-storage-blob; extra == "azure-blob"
140
140
  Provides-Extra: bedrock
@@ -158,11 +158,11 @@ Requires-Dist: ckanapi; extra == "ckan"
158
158
  Provides-Extra: clickhouse
159
159
  Requires-Dist: clickhouse-sqlalchemy>=0.3.1; extra == "clickhouse"
160
160
  Provides-Extra: cloud-spanner
161
- Requires-Dist: google-cloud-spanner; extra == "cloud-spanner"
162
161
  Requires-Dist: sqlalchemy-spanner; extra == "cloud-spanner"
162
+ Requires-Dist: google-cloud-spanner; extra == "cloud-spanner"
163
163
  Provides-Extra: cloud-sql
164
- Requires-Dist: mysql-connector-python==9.1.0; extra == "cloud-sql"
165
164
  Requires-Dist: pymssql>=2.1.4; extra == "cloud-sql"
165
+ Requires-Dist: mysql-connector-python==9.1.0; extra == "cloud-sql"
166
166
  Provides-Extra: cohere
167
167
  Requires-Dist: cohere==4.5.1; extra == "cohere"
168
168
  Provides-Extra: coinbase
@@ -173,8 +173,8 @@ Requires-Dist: couchbase==4.3.1; extra == "couchbase"
173
173
  Provides-Extra: couchbasevector
174
174
  Requires-Dist: couchbase==4.3.1; extra == "couchbasevector"
175
175
  Provides-Extra: crate
176
- Requires-Dist: sqlalchemy-cratedb; extra == "crate"
177
176
  Requires-Dist: crate; extra == "crate"
177
+ Requires-Dist: sqlalchemy-cratedb; extra == "crate"
178
178
  Provides-Extra: d0lt
179
179
  Requires-Dist: pymysql; extra == "d0lt"
180
180
  Provides-Extra: databend
@@ -196,21 +196,21 @@ Requires-Dist: dropbox; extra == "dropbox"
196
196
  Provides-Extra: druid
197
197
  Requires-Dist: pydruid; extra == "druid"
198
198
  Provides-Extra: dspy
199
- Requires-Dist: tiktoken; extra == "dspy"
200
- Requires-Dist: chromadb; extra == "dspy"
201
- Requires-Dist: wikipedia==1.4.0; extra == "dspy"
202
199
  Requires-Dist: dspy-ai==2.4.12; extra == "dspy"
200
+ Requires-Dist: anthropic>=0.26.1; extra == "dspy"
203
201
  Requires-Dist: dspy==0.1.4; extra == "dspy"
204
202
  Requires-Dist: openai<2.0.0,>=1.54.0; extra == "dspy"
205
- Requires-Dist: anthropic>=0.26.1; extra == "dspy"
203
+ Requires-Dist: chromadb; extra == "dspy"
204
+ Requires-Dist: wikipedia==1.4.0; extra == "dspy"
205
+ Requires-Dist: tiktoken; extra == "dspy"
206
206
  Provides-Extra: edgelessdb
207
207
  Requires-Dist: mysql-connector-python==9.1.0; extra == "edgelessdb"
208
208
  Provides-Extra: elasticsearch
209
209
  Requires-Dist: elasticsearch-dbapi==0.2.11; extra == "elasticsearch"
210
210
  Requires-Dist: elasticsearch==7.13.4; extra == "elasticsearch"
211
211
  Provides-Extra: email
212
- Requires-Dist: bs4; extra == "email"
213
212
  Requires-Dist: chardet; extra == "email"
213
+ Requires-Dist: bs4; extra == "email"
214
214
  Provides-Extra: empress
215
215
  Requires-Dist: pyodbc; extra == "empress"
216
216
  Provides-Extra: eventbrite
@@ -222,14 +222,14 @@ Provides-Extra: firebird
222
222
  Requires-Dist: fdb; extra == "firebird"
223
223
  Requires-Dist: sqlalchemy-firebird<3.0.0,>=2.0.0; extra == "firebird"
224
224
  Provides-Extra: flaml
225
- Requires-Dist: type_infer==0.0.20; extra == "flaml"
226
225
  Requires-Dist: flaml<=1.2.3; extra == "flaml"
226
+ Requires-Dist: type_infer==0.0.20; extra == "flaml"
227
227
  Provides-Extra: frappe
228
228
  Provides-Extra: gcs
229
- Requires-Dist: google-auth; extra == "gcs"
230
- Requires-Dist: google-cloud-storage; extra == "gcs"
231
229
  Requires-Dist: gcsfs; extra == "gcs"
230
+ Requires-Dist: google-cloud-storage; extra == "gcs"
232
231
  Requires-Dist: fsspec; extra == "gcs"
232
+ Requires-Dist: google-auth; extra == "gcs"
233
233
  Provides-Extra: github
234
234
  Requires-Dist: pygithub; extra == "github"
235
235
  Provides-Extra: gitlab
@@ -238,8 +238,8 @@ Provides-Extra: gmail
238
238
  Requires-Dist: google-api-python-client; extra == "gmail"
239
239
  Requires-Dist: google-auth-httplib2; extra == "gmail"
240
240
  Provides-Extra: google-analytics
241
- Requires-Dist: google-api-python-client; extra == "google-analytics"
242
241
  Requires-Dist: google-analytics-admin; extra == "google-analytics"
242
+ Requires-Dist: google-api-python-client; extra == "google-analytics"
243
243
  Provides-Extra: google-books
244
244
  Requires-Dist: google-api-python-client; extra == "google-books"
245
245
  Requires-Dist: google-auth-httplib2; extra == "google-books"
@@ -250,12 +250,12 @@ Provides-Extra: google-content-shopping
250
250
  Requires-Dist: google-api-python-client; extra == "google-content-shopping"
251
251
  Requires-Dist: google-auth-httplib2; extra == "google-content-shopping"
252
252
  Provides-Extra: google-fit
253
+ Requires-Dist: google; extra == "google-fit"
253
254
  Requires-Dist: google-api-python-client; extra == "google-fit"
254
255
  Requires-Dist: tzlocal; extra == "google-fit"
255
- Requires-Dist: google; extra == "google-fit"
256
256
  Provides-Extra: google-gemini
257
- Requires-Dist: pillow; extra == "google-gemini"
258
257
  Requires-Dist: google-generativeai==0.3.2; extra == "google-gemini"
258
+ Requires-Dist: pillow; extra == "google-gemini"
259
259
  Provides-Extra: google-search
260
260
  Requires-Dist: google-api-python-client; extra == "google-search"
261
261
  Requires-Dist: google-auth-httplib2; extra == "google-search"
@@ -266,12 +266,12 @@ Requires-Dist: openai<2.0.0,>=1.54.0; extra == "groq"
266
266
  Requires-Dist: pydantic-settings>=2.1.0; extra == "groq"
267
267
  Requires-Dist: tiktoken; extra == "groq"
268
268
  Provides-Extra: hana
269
- Requires-Dist: sqlalchemy-hana; extra == "hana"
270
269
  Requires-Dist: hdbcli; extra == "hana"
270
+ Requires-Dist: sqlalchemy-hana; extra == "hana"
271
271
  Provides-Extra: hive
272
- Requires-Dist: pyhive; extra == "hive"
273
- Requires-Dist: thrift-sasl; extra == "hive"
274
272
  Requires-Dist: thrift; extra == "hive"
273
+ Requires-Dist: thrift-sasl; extra == "hive"
274
+ Requires-Dist: pyhive; extra == "hive"
275
275
  Provides-Extra: hsqldb
276
276
  Requires-Dist: pyodbc==4.0.34; extra == "hsqldb"
277
277
  Provides-Extra: hubspot
@@ -280,17 +280,17 @@ Provides-Extra: huggingface-api
280
280
  Requires-Dist: hugging_py_face; extra == "huggingface-api"
281
281
  Requires-Dist: huggingface-hub; extra == "huggingface-api"
282
282
  Provides-Extra: huggingface
283
- Requires-Dist: evaluate; extra == "huggingface"
284
- Requires-Dist: nltk; extra == "huggingface"
283
+ Requires-Dist: torch; extra == "huggingface"
285
284
  Requires-Dist: datasets==2.16.1; extra == "huggingface"
286
285
  Requires-Dist: huggingface-hub; extra == "huggingface"
287
- Requires-Dist: torch; extra == "huggingface"
286
+ Requires-Dist: evaluate; extra == "huggingface"
287
+ Requires-Dist: nltk; extra == "huggingface"
288
288
  Provides-Extra: huggingface-cpu
289
- Requires-Dist: evaluate; extra == "huggingface-cpu"
290
- Requires-Dist: torch==2.2.0+cpu; extra == "huggingface-cpu"
291
- Requires-Dist: nltk; extra == "huggingface-cpu"
292
289
  Requires-Dist: datasets==2.16.1; extra == "huggingface-cpu"
293
290
  Requires-Dist: huggingface-hub; extra == "huggingface-cpu"
291
+ Requires-Dist: evaluate; extra == "huggingface-cpu"
292
+ Requires-Dist: nltk; extra == "huggingface-cpu"
293
+ Requires-Dist: torch==2.2.0+cpu; extra == "huggingface-cpu"
294
294
  Provides-Extra: ibm-cos
295
295
  Requires-Dist: ibm-cos-sdk; extra == "ibm-cos"
296
296
  Provides-Extra: ignite
@@ -308,40 +308,40 @@ Provides-Extra: jira
308
308
  Requires-Dist: atlassian-python-api; extra == "jira"
309
309
  Provides-Extra: lancedb
310
310
  Requires-Dist: pyarrow~=14.0.1; extra == "lancedb"
311
- Requires-Dist: lancedb~=0.3.1; extra == "lancedb"
312
311
  Requires-Dist: lance; extra == "lancedb"
312
+ Requires-Dist: lancedb~=0.3.1; extra == "lancedb"
313
313
  Provides-Extra: langchain-embedding
314
- Requires-Dist: tiktoken; extra == "langchain-embedding"
315
314
  Requires-Dist: openai==1.55.3; extra == "langchain-embedding"
315
+ Requires-Dist: tiktoken; extra == "langchain-embedding"
316
316
  Provides-Extra: langchain
317
- Requires-Dist: tiktoken; extra == "langchain"
318
317
  Requires-Dist: openai==1.55.3; extra == "langchain"
319
318
  Requires-Dist: litellm==1.44.8; extra == "langchain"
319
+ Requires-Dist: anthropic>=0.26.1; extra == "langchain"
320
320
  Requires-Dist: chromadb~=0.6.3; extra == "langchain"
321
- Requires-Dist: wikipedia==1.4.0; extra == "langchain"
322
321
  Requires-Dist: openai<2.0.0,>=1.54.0; extra == "langchain"
323
- Requires-Dist: anthropic>=0.26.1; extra == "langchain"
322
+ Requires-Dist: wikipedia==1.4.0; extra == "langchain"
323
+ Requires-Dist: tiktoken; extra == "langchain"
324
324
  Provides-Extra: leonardoai
325
325
  Provides-Extra: libsql
326
326
  Requires-Dist: libsql-experimental; extra == "libsql"
327
327
  Provides-Extra: lightfm
328
328
  Requires-Dist: lightfm==1.17; extra == "lightfm"
329
329
  Provides-Extra: lightwood
330
+ Requires-Dist: lightwood[xai]~=24.12.3.0; extra == "lightwood"
330
331
  Requires-Dist: lightwood[extra]~=24.12.3.0; extra == "lightwood"
331
- Requires-Dist: type_infer==0.0.20; extra == "lightwood"
332
332
  Requires-Dist: lightwood~=24.12.3.0; extra == "lightwood"
333
- Requires-Dist: lightwood[xai]~=24.12.3.0; extra == "lightwood"
333
+ Requires-Dist: type_infer==0.0.20; extra == "lightwood"
334
334
  Provides-Extra: lindorm
335
335
  Requires-Dist: phoenixdb; extra == "lindorm"
336
336
  Requires-Dist: pyphoenix; extra == "lindorm"
337
337
  Provides-Extra: litellm
338
338
  Requires-Dist: litellm==1.44.8; extra == "litellm"
339
339
  Provides-Extra: llama-index
340
- Requires-Dist: llama-index-embeddings-openai; extra == "llama-index"
341
340
  Requires-Dist: llama-index==0.10.13; extra == "llama-index"
342
341
  Requires-Dist: llama-index-readers-web; extra == "llama-index"
343
- Requires-Dist: openai==1.24.0; extra == "llama-index"
344
342
  Requires-Dist: pydantic-settings>=2.1.0; extra == "llama-index"
343
+ Requires-Dist: openai==1.24.0; extra == "llama-index"
344
+ Requires-Dist: llama-index-embeddings-openai; extra == "llama-index"
345
345
  Provides-Extra: ludwig
346
346
  Requires-Dist: ray==2.8.1; extra == "ludwig"
347
347
  Requires-Dist: ludwig[distributed]>=0.5.2; extra == "ludwig"
@@ -358,8 +358,8 @@ Requires-Dist: mediawikiapi; extra == "mediawiki"
358
358
  Provides-Extra: mendeley
359
359
  Requires-Dist: mendeley; extra == "mendeley"
360
360
  Provides-Extra: merlion
361
- Requires-Dist: salesforce-merlion<=1.3.1,>=1.2.0; extra == "merlion"
362
361
  Requires-Dist: scipy; extra == "merlion"
362
+ Requires-Dist: salesforce-merlion<=1.3.1,>=1.2.0; extra == "merlion"
363
363
  Provides-Extra: milvus
364
364
  Requires-Dist: pymilvus==2.3; extra == "milvus"
365
365
  Provides-Extra: minds-endpoint
@@ -369,8 +369,8 @@ Requires-Dist: tiktoken; extra == "minds-endpoint"
369
369
  Provides-Extra: mlflow
370
370
  Requires-Dist: mlflow; extra == "mlflow"
371
371
  Provides-Extra: monetdb
372
- Requires-Dist: pymonetdb; extra == "monetdb"
373
372
  Requires-Dist: sqlalchemy-monetdb; extra == "monetdb"
373
+ Requires-Dist: pymonetdb; extra == "monetdb"
374
374
  Provides-Extra: monkeylearn
375
375
  Requires-Dist: monkeylearn==3.6.0; extra == "monkeylearn"
376
376
  Provides-Extra: ms-teams
@@ -381,11 +381,11 @@ Requires-Dist: pymssql>=2.1.4; extra == "mssql"
381
381
  Provides-Extra: mysql
382
382
  Requires-Dist: mysql-connector-python==9.1.0; extra == "mysql"
383
383
  Provides-Extra: neuralforecast
384
- Requires-Dist: ray[tune]>=2.8.1; extra == "neuralforecast"
385
384
  Requires-Dist: neuralforecast<1.7.0,>=1.6.0; extra == "neuralforecast"
385
+ Requires-Dist: ray[tune]>=2.8.1; extra == "neuralforecast"
386
386
  Provides-Extra: neuralforecast-extra
387
- Requires-Dist: ray[tune]>=2.2.0; extra == "neuralforecast-extra"
388
387
  Requires-Dist: neuralforecast<1.7.0,>=1.6.0; extra == "neuralforecast-extra"
388
+ Requires-Dist: ray[tune]>=2.2.0; extra == "neuralforecast-extra"
389
389
  Provides-Extra: newsapi
390
390
  Requires-Dist: newsapi-python; extra == "newsapi"
391
391
  Provides-Extra: notion
@@ -398,8 +398,8 @@ Provides-Extra: openai
398
398
  Requires-Dist: openai<2.0.0,>=1.54.0; extra == "openai"
399
399
  Requires-Dist: tiktoken; extra == "openai"
400
400
  Provides-Extra: openbb
401
- Requires-Dist: openbb==4.3.1; extra == "openbb"
402
401
  Requires-Dist: openbb-core==1.3.1; extra == "openbb"
402
+ Requires-Dist: openbb==4.3.1; extra == "openbb"
403
403
  Provides-Extra: openstreetmap
404
404
  Requires-Dist: overpy; extra == "openstreetmap"
405
405
  Provides-Extra: oracle
@@ -425,8 +425,8 @@ Requires-Dist: polars; extra == "popularity-recommender"
425
425
  Provides-Extra: portkey
426
426
  Requires-Dist: portkey-ai>=1.8.2; extra == "portkey"
427
427
  Provides-Extra: pycaret
428
- Requires-Dist: pycaret; extra == "pycaret"
429
428
  Requires-Dist: pycaret[models]; extra == "pycaret"
429
+ Requires-Dist: pycaret; extra == "pycaret"
430
430
  Provides-Extra: qdrant
431
431
  Requires-Dist: qdrant-client; extra == "qdrant"
432
432
  Provides-Extra: questdb
@@ -434,8 +434,8 @@ Requires-Dist: questdb; extra == "questdb"
434
434
  Provides-Extra: quickbooks
435
435
  Requires-Dist: qbosdk; extra == "quickbooks"
436
436
  Provides-Extra: rag
437
- Requires-Dist: writerai~=1.1.0; extra == "rag"
438
437
  Requires-Dist: openai==1.55.3; extra == "rag"
438
+ Requires-Dist: writerai~=1.1.0; extra == "rag"
439
439
  Requires-Dist: sentence-transformers; extra == "rag"
440
440
  Requires-Dist: faiss-cpu; extra == "rag"
441
441
  Requires-Dist: html2text; extra == "rag"
@@ -455,8 +455,8 @@ Requires-Dist: scylla-driver; extra == "scylla"
455
455
  Provides-Extra: sendinblue
456
456
  Requires-Dist: sib_api_v3_sdk; extra == "sendinblue"
457
457
  Provides-Extra: sentence-transformers
458
- Requires-Dist: writerai~=1.1.0; extra == "sentence-transformers"
459
458
  Requires-Dist: openai==1.55.3; extra == "sentence-transformers"
459
+ Requires-Dist: writerai~=1.1.0; extra == "sentence-transformers"
460
460
  Requires-Dist: sentence-transformers; extra == "sentence-transformers"
461
461
  Requires-Dist: faiss-cpu; extra == "sentence-transformers"
462
462
  Requires-Dist: html2text; extra == "sentence-transformers"
@@ -468,8 +468,8 @@ Requires-Dist: mysql-connector-python==9.1.0; extra == "singlestore"
468
468
  Provides-Extra: slack
469
469
  Requires-Dist: slack_sdk==3.30.0; extra == "slack"
470
470
  Provides-Extra: snowflake
471
- Requires-Dist: snowflake-sqlalchemy==1.7.0; extra == "snowflake"
472
471
  Requires-Dist: snowflake-connector-python==3.13.1; extra == "snowflake"
472
+ Requires-Dist: snowflake-sqlalchemy==1.7.0; extra == "snowflake"
473
473
  Provides-Extra: solace
474
474
  Requires-Dist: solace-pubsubplus; extra == "solace"
475
475
  Provides-Extra: solr
@@ -477,14 +477,14 @@ Requires-Dist: sqlalchemy-solr; extra == "solr"
477
477
  Provides-Extra: spacy
478
478
  Requires-Dist: spacy; extra == "spacy"
479
479
  Provides-Extra: sqlany
480
- Requires-Dist: sqlanydb; extra == "sqlany"
481
480
  Requires-Dist: sqlalchemy-sqlany; extra == "sqlany"
481
+ Requires-Dist: sqlanydb; extra == "sqlany"
482
482
  Provides-Extra: sqreamdb
483
- Requires-Dist: pysqream_sqlalchemy>=0.8; extra == "sqreamdb"
484
483
  Requires-Dist: pysqream>=3.2.5; extra == "sqreamdb"
484
+ Requires-Dist: pysqream_sqlalchemy>=0.8; extra == "sqreamdb"
485
485
  Provides-Extra: stabilityai
486
- Requires-Dist: pillow; extra == "stabilityai"
487
486
  Requires-Dist: stability-sdk; extra == "stabilityai"
487
+ Requires-Dist: pillow; extra == "stabilityai"
488
488
  Provides-Extra: starrocks
489
489
  Requires-Dist: mysql-connector-python==9.1.0; extra == "starrocks"
490
490
  Provides-Extra: statsforecast
@@ -515,8 +515,8 @@ Provides-Extra: trino
515
515
  Requires-Dist: trino~=0.313.0; extra == "trino"
516
516
  Requires-Dist: pyhive; extra == "trino"
517
517
  Provides-Extra: twelve-labs
518
- Requires-Dist: pydantic-settings>=2.1.0; extra == "twelve-labs"
519
518
  Requires-Dist: requests_toolbelt; extra == "twelve-labs"
519
+ Requires-Dist: pydantic-settings>=2.1.0; extra == "twelve-labs"
520
520
  Provides-Extra: twilio
521
521
  Requires-Dist: twilio; extra == "twilio"
522
522
  Provides-Extra: twitter
@@ -533,22 +533,22 @@ Requires-Dist: mysql-connector-python==9.1.0; extra == "vitess"
533
533
  Provides-Extra: weaviate
534
534
  Requires-Dist: weaviate-client~=3.24.2; extra == "weaviate"
535
535
  Provides-Extra: web
536
- Requires-Dist: html2text; extra == "web"
537
536
  Requires-Dist: bs4; extra == "web"
537
+ Requires-Dist: html2text; extra == "web"
538
538
  Provides-Extra: webz
539
539
  Requires-Dist: dotty-dict==1.3.1; extra == "webz"
540
540
  Requires-Dist: webzio==1.0.2; extra == "webz"
541
541
  Provides-Extra: whatsapp
542
542
  Requires-Dist: twilio; extra == "whatsapp"
543
543
  Provides-Extra: writer
544
- Requires-Dist: writerai~=1.1.0; extra == "writer"
545
544
  Requires-Dist: openai==1.55.3; extra == "writer"
545
+ Requires-Dist: writerai~=1.1.0; extra == "writer"
546
+ Requires-Dist: rouge-score>=0.1.2; extra == "writer"
546
547
  Requires-Dist: sentence-transformers; extra == "writer"
547
548
  Requires-Dist: nltk>=3.8.1; extra == "writer"
548
549
  Requires-Dist: faiss-cpu; extra == "writer"
549
- Requires-Dist: scipy; extra == "writer"
550
- Requires-Dist: rouge-score>=0.1.2; extra == "writer"
551
550
  Requires-Dist: html2text; extra == "writer"
551
+ Requires-Dist: scipy; extra == "writer"
552
552
  Requires-Dist: chromadb~=0.6.3; extra == "writer"
553
553
  Provides-Extra: xata
554
554
  Requires-Dist: xata; extra == "xata"
@@ -560,198 +560,198 @@ Requires-Dist: zenpy; extra == "zendesk"
560
560
  Provides-Extra: zotero
561
561
  Requires-Dist: pyzotero; extra == "zotero"
562
562
  Provides-Extra: all-handlers-extras
563
- Requires-Dist: pyhive; extra == "all-handlers-extras"
564
- Requires-Dist: mlflow; extra == "all-handlers-extras"
565
- Requires-Dist: sqlalchemy-ingres[all]; extra == "all-handlers-extras"
566
- Requires-Dist: plaid-python; extra == "all-handlers-extras"
567
- Requires-Dist: sqlalchemy-informix; extra == "all-handlers-extras"
568
- Requires-Dist: pyphoenix; extra == "all-handlers-extras"
563
+ Requires-Dist: datasets==2.16.1; extra == "all-handlers-extras"
564
+ Requires-Dist: google-cloud-spanner; extra == "all-handlers-extras"
565
+ Requires-Dist: elasticsearch-dbapi==0.2.11; extra == "all-handlers-extras"
566
+ Requires-Dist: tweepy; extra == "all-handlers-extras"
567
+ Requires-Dist: openbb-core==1.3.1; extra == "all-handlers-extras"
568
+ Requires-Dist: rouge-score>=0.1.2; extra == "all-handlers-extras"
569
+ Requires-Dist: atlassian-python-api; extra == "all-handlers-extras"
570
+ Requires-Dist: couchbase==4.3.1; extra == "all-handlers-extras"
571
+ Requires-Dist: scylla-driver; extra == "all-handlers-extras"
572
+ Requires-Dist: ray==2.8.1; extra == "all-handlers-extras"
573
+ Requires-Dist: neuralforecast<1.7.0,>=1.6.0; extra == "all-handlers-extras"
574
+ Requires-Dist: salesforce_api; extra == "all-handlers-extras"
569
575
  Requires-Dist: rocketchat_API; extra == "all-handlers-extras"
570
- Requires-Dist: monkeylearn==3.6.0; extra == "all-handlers-extras"
571
- Requires-Dist: sqlalchemy-spanner; extra == "all-handlers-extras"
576
+ Requires-Dist: bs4; extra == "all-handlers-extras"
572
577
  Requires-Dist: fsspec; extra == "all-handlers-extras"
573
- Requires-Dist: pycaret; extra == "all-handlers-extras"
578
+ Requires-Dist: evaluate; extra == "all-handlers-extras"
579
+ Requires-Dist: chromadb; extra == "all-handlers-extras"
580
+ Requires-Dist: influxdb3-python; extra == "all-handlers-extras"
581
+ Requires-Dist: snowflake-sqlalchemy==1.7.0; extra == "all-handlers-extras"
582
+ Requires-Dist: spacy; extra == "all-handlers-extras"
583
+ Requires-Dist: slack_sdk==3.30.0; extra == "all-handlers-extras"
584
+ Requires-Dist: lightfm==1.17; extra == "all-handlers-extras"
585
+ Requires-Dist: pillow; extra == "all-handlers-extras"
586
+ Requires-Dist: google-auth; extra == "all-handlers-extras"
587
+ Requires-Dist: sqlalchemy-solr; extra == "all-handlers-extras"
588
+ Requires-Dist: pinecone-client==5.0.1; extra == "all-handlers-extras"
589
+ Requires-Dist: huggingface-hub; extra == "all-handlers-extras"
590
+ Requires-Dist: plaid-python; extra == "all-handlers-extras"
591
+ Requires-Dist: jaydebeapi; extra == "all-handlers-extras"
592
+ Requires-Dist: praw; extra == "all-handlers-extras"
574
593
  Requires-Dist: autokeras; extra == "all-handlers-extras"
594
+ Requires-Dist: eventbrite-python; extra == "all-handlers-extras"
595
+ Requires-Dist: ibm-cos-sdk; extra == "all-handlers-extras"
596
+ Requires-Dist: dspy==0.1.4; extra == "all-handlers-extras"
597
+ Requires-Dist: type_infer==0.0.20; extra == "all-handlers-extras"
598
+ Requires-Dist: sqlalchemy-hana; extra == "all-handlers-extras"
575
599
  Requires-Dist: torch; extra == "all-handlers-extras"
576
- Requires-Dist: google; extra == "all-handlers-extras"
577
- Requires-Dist: azure-storage-blob; extra == "all-handlers-extras"
600
+ Requires-Dist: ray[tune]>=2.8.1; extra == "all-handlers-extras"
601
+ Requires-Dist: python-gitlab; extra == "all-handlers-extras"
602
+ Requires-Dist: snowflake-connector-python==3.13.1; extra == "all-handlers-extras"
603
+ Requires-Dist: ibm-db-sa; extra == "all-handlers-extras"
578
604
  Requires-Dist: pymssql>=2.1.4; extra == "all-handlers-extras"
579
- Requires-Dist: nltk; extra == "all-handlers-extras"
580
- Requires-Dist: scipy; extra == "all-handlers-extras"
581
- Requires-Dist: pysurrealdb; extra == "all-handlers-extras"
582
- Requires-Dist: llama-index-readers-web; extra == "all-handlers-extras"
583
- Requires-Dist: solace-pubsubplus; extra == "all-handlers-extras"
584
- Requires-Dist: openai==1.24.0; extra == "all-handlers-extras"
585
- Requires-Dist: lightwood[xai]~=24.12.3.0; extra == "all-handlers-extras"
586
- Requires-Dist: crate; extra == "all-handlers-extras"
587
- Requires-Dist: newsapi-python; extra == "all-handlers-extras"
588
- Requires-Dist: anthropic>=0.26.1; extra == "all-handlers-extras"
605
+ Requires-Dist: oracledb==2.4.1; extra == "all-handlers-extras"
606
+ Requires-Dist: databricks-sql-connector<4.0.0,>=3.7.1; extra == "all-handlers-extras"
607
+ Requires-Dist: google-cloud-aiplatform>=1.35.0; extra == "all-handlers-extras"
608
+ Requires-Dist: teradatasql; extra == "all-handlers-extras"
609
+ Requires-Dist: notion-client; extra == "all-handlers-extras"
610
+ Requires-Dist: pyhive; extra == "all-handlers-extras"
611
+ Requires-Dist: dropbox; extra == "all-handlers-extras"
612
+ Requires-Dist: llama-index==0.10.13; extra == "all-handlers-extras"
589
613
  Requires-Dist: weaviate-client~=3.24.2; extra == "all-handlers-extras"
590
- Requires-Dist: scylla-driver; extra == "all-handlers-extras"
591
- Requires-Dist: pillow; extra == "all-handlers-extras"
592
- Requires-Dist: praw; extra == "all-handlers-extras"
593
- Requires-Dist: sqlanydb; extra == "all-handlers-extras"
614
+ Requires-Dist: tensorflow; extra == "all-handlers-extras"
615
+ Requires-Dist: flaml<=1.2.3; extra == "all-handlers-extras"
616
+ Requires-Dist: azure-storage-blob; extra == "all-handlers-extras"
617
+ Requires-Dist: xata; extra == "all-handlers-extras"
618
+ Requires-Dist: openbb==4.3.1; extra == "all-handlers-extras"
619
+ Requires-Dist: unifyai==0.9.2; extra == "all-handlers-extras"
620
+ Requires-Dist: sqlalchemy-informix; extra == "all-handlers-extras"
621
+ Requires-Dist: pysqream_sqlalchemy>=0.8; extra == "all-handlers-extras"
622
+ Requires-Dist: anthropic==0.18.1; extra == "all-handlers-extras"
623
+ Requires-Dist: faunadb; extra == "all-handlers-extras"
594
624
  Requires-Dist: requests_toolbelt; extra == "all-handlers-extras"
595
- Requires-Dist: atlassian-python-api; extra == "all-handlers-extras"
596
- Requires-Dist: salesforce-merlion<=1.3.1,>=1.2.0; extra == "all-handlers-extras"
625
+ Requires-Dist: pyzotero; extra == "all-handlers-extras"
626
+ Requires-Dist: pycaret; extra == "all-handlers-extras"
627
+ Requires-Dist: chardet; extra == "all-handlers-extras"
628
+ Requires-Dist: thrift-sasl; extra == "all-handlers-extras"
629
+ Requires-Dist: sib_api_v3_sdk; extra == "all-handlers-extras"
630
+ Requires-Dist: faiss-cpu; extra == "all-handlers-extras"
631
+ Requires-Dist: sqlanydb; extra == "all-handlers-extras"
632
+ Requires-Dist: lancedb~=0.3.1; extra == "all-handlers-extras"
597
633
  Requires-Dist: qdrant-client; extra == "all-handlers-extras"
598
- Requires-Dist: chromadb~=0.6.3; extra == "all-handlers-extras"
634
+ Requires-Dist: pymysql; extra == "all-handlers-extras"
635
+ Requires-Dist: webzio==1.0.2; extra == "all-handlers-extras"
636
+ Requires-Dist: qbosdk; extra == "all-handlers-extras"
599
637
  Requires-Dist: sqlalchemy-cratedb; extra == "all-handlers-extras"
600
- Requires-Dist: sqlalchemy_dremio; extra == "all-handlers-extras"
601
- Requires-Dist: questdb; extra == "all-handlers-extras"
602
- Requires-Dist: ibm-db-sa; extra == "all-handlers-extras"
603
- Requires-Dist: google-cloud-aiplatform>=1.35.0; extra == "all-handlers-extras"
604
- Requires-Dist: replicate; extra == "all-handlers-extras"
605
- Requires-Dist: cohere==4.5.1; extra == "all-handlers-extras"
606
- Requires-Dist: sqlalchemy-hana; extra == "all-handlers-extras"
607
- Requires-Dist: pyodbc; extra == "all-handlers-extras"
608
- Requires-Dist: neuralforecast<1.7.0,>=1.6.0; extra == "all-handlers-extras"
609
- Requires-Dist: pygithub; extra == "all-handlers-extras"
610
- Requires-Dist: sqlalchemy-vertica-python; extra == "all-handlers-extras"
611
- Requires-Dist: couchbase==4.3.1; extra == "all-handlers-extras"
612
- Requires-Dist: hugging_py_face; extra == "all-handlers-extras"
613
- Requires-Dist: ShopifyAPI; extra == "all-handlers-extras"
614
- Requires-Dist: type_infer==0.0.20; extra == "all-handlers-extras"
615
- Requires-Dist: auto-sklearn; extra == "all-handlers-extras"
616
- Requires-Dist: chromadb~=0.6.3; extra == "all-handlers-extras"
617
- Requires-Dist: google-generativeai==0.3.2; extra == "all-handlers-extras"
618
- Requires-Dist: notion-client; extra == "all-handlers-extras"
619
- Requires-Dist: salesforce_api; extra == "all-handlers-extras"
620
- Requires-Dist: dspy-ai==2.4.12; extra == "all-handlers-extras"
621
- Requires-Dist: ibm-db; extra == "all-handlers-extras"
622
638
  Requires-Dist: hubspot-api-client; extra == "all-handlers-extras"
623
- Requires-Dist: sqlalchemy-bigquery; extra == "all-handlers-extras"
639
+ Requires-Dist: openai==1.55.3; extra == "all-handlers-extras"
640
+ Requires-Dist: newsapi-python; extra == "all-handlers-extras"
641
+ Requires-Dist: twilio; extra == "all-handlers-extras"
624
642
  Requires-Dist: impyla; extra == "all-handlers-extras"
625
- Requires-Dist: botbuilder-schema; extra == "all-handlers-extras"
626
- Requires-Dist: openbb==4.3.1; extra == "all-handlers-extras"
627
- Requires-Dist: pymilvus==2.3; extra == "all-handlers-extras"
628
- Requires-Dist: stripe; extra == "all-handlers-extras"
629
- Requires-Dist: writerai~=1.1.0; extra == "all-handlers-extras"
630
- Requires-Dist: rouge-score>=0.1.2; extra == "all-handlers-extras"
631
- Requires-Dist: phoenixdb; extra == "all-handlers-extras"
632
- Requires-Dist: google-analytics-admin; extra == "all-handlers-extras"
633
- Requires-Dist: thrift; extra == "all-handlers-extras"
634
- Requires-Dist: pinecone-client==5.0.1; extra == "all-handlers-extras"
635
- Requires-Dist: xgboost; extra == "all-handlers-extras"
643
+ Requires-Dist: lightwood[extra]~=24.12.3.0; extra == "all-handlers-extras"
644
+ Requires-Dist: sqlalchemy-monetdb; extra == "all-handlers-extras"
645
+ Requires-Dist: pyodbc==4.0.34; extra == "all-handlers-extras"
646
+ Requires-Dist: lightwood~=24.12.3.0; extra == "all-handlers-extras"
647
+ Requires-Dist: wikipedia==1.4.0; extra == "all-handlers-extras"
648
+ Requires-Dist: taospy; extra == "all-handlers-extras"
649
+ Requires-Dist: pysqream>=3.2.5; extra == "all-handlers-extras"
636
650
  Requires-Dist: lance; extra == "all-handlers-extras"
637
- Requires-Dist: lancedb~=0.3.1; extra == "all-handlers-extras"
638
- Requires-Dist: statsforecast==1.6.0; extra == "all-handlers-extras"
639
- Requires-Dist: pydruid; extra == "all-handlers-extras"
640
- Requires-Dist: trino~=0.313.0; extra == "all-handlers-extras"
641
- Requires-Dist: pyignite; extra == "all-handlers-extras"
642
- Requires-Dist: webzio==1.0.2; extra == "all-handlers-extras"
651
+ Requires-Dist: portkey-ai>=1.8.2; extra == "all-handlers-extras"
652
+ Requires-Dist: sqlalchemy-firebird<3.0.0,>=2.0.0; extra == "all-handlers-extras"
653
+ Requires-Dist: teradatasqlalchemy; extra == "all-handlers-extras"
654
+ Requires-Dist: sentence-transformers; extra == "all-handlers-extras"
655
+ Requires-Dist: zenpy; extra == "all-handlers-extras"
656
+ Requires-Dist: pycaret[models]; extra == "all-handlers-extras"
657
+ Requires-Dist: dask; extra == "all-handlers-extras"
658
+ Requires-Dist: scipy; extra == "all-handlers-extras"
659
+ Requires-Dist: ray[tune]>=2.2.0; extra == "all-handlers-extras"
660
+ Requires-Dist: hdbcli; extra == "all-handlers-extras"
643
661
  Requires-Dist: torch==2.2.0+cpu; extra == "all-handlers-extras"
644
- Requires-Dist: ibm-cos-sdk; extra == "all-handlers-extras"
662
+ Requires-Dist: pygithub; extra == "all-handlers-extras"
645
663
  Requires-Dist: elasticsearch==7.13.4; extra == "all-handlers-extras"
646
- Requires-Dist: openai==1.55.3; extra == "all-handlers-extras"
647
- Requires-Dist: google-api-python-client; extra == "all-handlers-extras"
648
- Requires-Dist: faunadb; extra == "all-handlers-extras"
649
- Requires-Dist: litellm==1.44.8; extra == "all-handlers-extras"
650
- Requires-Dist: botframework-connector; extra == "all-handlers-extras"
651
- Requires-Dist: sqlalchemy-access; extra == "all-handlers-extras"
652
- Requires-Dist: dotty-dict==1.3.1; extra == "all-handlers-extras"
653
- Requires-Dist: sib_api_v3_sdk; extra == "all-handlers-extras"
654
- Requires-Dist: dspy==0.1.4; extra == "all-handlers-extras"
655
- Requires-Dist: sqlalchemy-firebird<3.0.0,>=2.0.0; extra == "all-handlers-extras"
656
- Requires-Dist: qbosdk; extra == "all-handlers-extras"
657
- Requires-Dist: influxdb3-python; extra == "all-handlers-extras"
658
- Requires-Dist: thrift-sasl; extra == "all-handlers-extras"
659
- Requires-Dist: portkey-ai>=1.8.2; extra == "all-handlers-extras"
660
- Requires-Dist: ray==2.8.1; extra == "all-handlers-extras"
664
+ Requires-Dist: sqlalchemy-bigquery; extra == "all-handlers-extras"
661
665
  Requires-Dist: pinotdb; extra == "all-handlers-extras"
662
- Requires-Dist: snowflake-sqlalchemy==1.7.0; extra == "all-handlers-extras"
663
- Requires-Dist: llama-index==0.10.13; extra == "all-handlers-extras"
666
+ Requires-Dist: dotty-dict==1.3.1; extra == "all-handlers-extras"
667
+ Requires-Dist: xgboost; extra == "all-handlers-extras"
668
+ Requires-Dist: google-api-python-client; extra == "all-handlers-extras"
669
+ Requires-Dist: writerai~=1.1.0; extra == "all-handlers-extras"
670
+ Requires-Dist: symbl; extra == "all-handlers-extras"
671
+ Requires-Dist: ludwig[distributed]>=0.5.2; extra == "all-handlers-extras"
672
+ Requires-Dist: google-cloud-bigquery[pandas]; extra == "all-handlers-extras"
673
+ Requires-Dist: google-generativeai==0.3.2; extra == "all-handlers-extras"
674
+ Requires-Dist: monkeylearn==3.6.0; extra == "all-handlers-extras"
675
+ Requires-Dist: mlflow; extra == "all-handlers-extras"
676
+ Requires-Dist: sqlalchemy-sqlany; extra == "all-handlers-extras"
677
+ Requires-Dist: mendeley; extra == "all-handlers-extras"
678
+ Requires-Dist: lightwood[xai]~=24.12.3.0; extra == "all-handlers-extras"
679
+ Requires-Dist: pymonetdb; extra == "all-handlers-extras"
680
+ Requires-Dist: nixtla==0.5.0; extra == "all-handlers-extras"
681
+ Requires-Dist: openai==1.24.0; extra == "all-handlers-extras"
682
+ Requires-Dist: replicate; extra == "all-handlers-extras"
683
+ Requires-Dist: tzlocal; extra == "all-handlers-extras"
664
684
  Requires-Dist: fdb; extra == "all-handlers-extras"
665
- Requires-Dist: oracledb==2.4.1; extra == "all-handlers-extras"
666
- Requires-Dist: google-cloud-spanner; extra == "all-handlers-extras"
667
- Requires-Dist: pysqream>=3.2.5; extra == "all-handlers-extras"
668
- Requires-Dist: zenpy; extra == "all-handlers-extras"
669
- Requires-Dist: sqlalchemy-monetdb; extra == "all-handlers-extras"
670
- Requires-Dist: pymysql; extra == "all-handlers-extras"
671
- Requires-Dist: llama-index-embeddings-openai; extra == "all-handlers-extras"
672
- Requires-Dist: anthropic==0.18.1; extra == "all-handlers-extras"
673
- Requires-Dist: google-auth-httplib2; extra == "all-handlers-extras"
674
- Requires-Dist: joblib; extra == "all-handlers-extras"
675
- Requires-Dist: taospy; extra == "all-handlers-extras"
676
- Requires-Dist: chardet; extra == "all-handlers-extras"
677
- Requires-Dist: clickhouse-sqlalchemy>=0.3.1; extra == "all-handlers-extras"
678
- Requires-Dist: openai<2.0.0,>=1.54.0; extra == "all-handlers-extras"
685
+ Requires-Dist: auto-sklearn; extra == "all-handlers-extras"
686
+ Requires-Dist: gcsfs; extra == "all-handlers-extras"
679
687
  Requires-Dist: catboost>=1.2; extra == "all-handlers-extras"
680
- Requires-Dist: google-auth; extra == "all-handlers-extras"
681
- Requires-Dist: mysql-connector-python==9.1.0; extra == "all-handlers-extras"
688
+ Requires-Dist: google-analytics-admin; extra == "all-handlers-extras"
689
+ Requires-Dist: questdb; extra == "all-handlers-extras"
690
+ Requires-Dist: pydruid; extra == "all-handlers-extras"
682
691
  Requires-Dist: vertica-python; extra == "all-handlers-extras"
683
- Requires-Dist: pyarrow==14.0.1; extra == "all-handlers-extras"
684
- Requires-Dist: dropbox; extra == "all-handlers-extras"
685
- Requires-Dist: evaluate; extra == "all-handlers-extras"
686
- Requires-Dist: sqlalchemy-solr; extra == "all-handlers-extras"
687
- Requires-Dist: tweepy; extra == "all-handlers-extras"
688
- Requires-Dist: pyzotero; extra == "all-handlers-extras"
689
- Requires-Dist: ludwig[distributed]>=0.5.2; extra == "all-handlers-extras"
690
- Requires-Dist: flaml<=1.2.3; extra == "all-handlers-extras"
691
- Requires-Dist: python-gitlab; extra == "all-handlers-extras"
692
- Requires-Dist: bs4; extra == "all-handlers-extras"
693
- Requires-Dist: nixtla==0.5.0; extra == "all-handlers-extras"
694
- Requires-Dist: nltk>=3.8.1; extra == "all-handlers-extras"
695
- Requires-Dist: datasets==2.16.1; extra == "all-handlers-extras"
696
- Requires-Dist: pyodbc==4.0.34; extra == "all-handlers-extras"
697
- Requires-Dist: pyod>=1.1; extra == "all-handlers-extras"
698
- Requires-Dist: wikipedia==1.4.0; extra == "all-handlers-extras"
699
- Requires-Dist: eventbrite-python; extra == "all-handlers-extras"
700
- Requires-Dist: virtualenv; extra == "all-handlers-extras"
701
- Requires-Dist: huggingface-hub; extra == "all-handlers-extras"
702
- Requires-Dist: box-sdk-gen; extra == "all-handlers-extras"
703
- Requires-Dist: mendeley; extra == "all-handlers-extras"
704
- Requires-Dist: openbb-core==1.3.1; extra == "all-handlers-extras"
705
- Requires-Dist: lightwood[extra]~=24.12.3.0; extra == "all-handlers-extras"
706
692
  Requires-Dist: tiktoken; extra == "all-handlers-extras"
707
- Requires-Dist: sentence-transformers; extra == "all-handlers-extras"
708
- Requires-Dist: jaydebeapi; extra == "all-handlers-extras"
709
- Requires-Dist: pymonetdb; extra == "all-handlers-extras"
710
- Requires-Dist: mediawikiapi; extra == "all-handlers-extras"
711
- Requires-Dist: binance-connector; extra == "all-handlers-extras"
712
- Requires-Dist: teradatasql; extra == "all-handlers-extras"
713
- Requires-Dist: databricks-sql-connector<4.0.0,>=3.7.1; extra == "all-handlers-extras"
714
- Requires-Dist: lightwood~=24.12.3.0; extra == "all-handlers-extras"
715
- Requires-Dist: unifyai==0.9.2; extra == "all-handlers-extras"
716
- Requires-Dist: ray[tune]>=2.8.1; extra == "all-handlers-extras"
717
- Requires-Dist: hdbcli; extra == "all-handlers-extras"
718
- Requires-Dist: twilio; extra == "all-handlers-extras"
719
- Requires-Dist: chromadb; extra == "all-handlers-extras"
693
+ Requires-Dist: autogluon; extra == "all-handlers-extras"
694
+ Requires-Dist: pyodbc; extra == "all-handlers-extras"
695
+ Requires-Dist: pyarrow==14.0.1; extra == "all-handlers-extras"
696
+ Requires-Dist: ibm-db; extra == "all-handlers-extras"
697
+ Requires-Dist: sqlalchemy_dremio; extra == "all-handlers-extras"
698
+ Requires-Dist: botframework-connector; extra == "all-handlers-extras"
699
+ Requires-Dist: openai<2.0.0,>=1.54.0; extra == "all-handlers-extras"
700
+ Requires-Dist: stability-sdk; extra == "all-handlers-extras"
701
+ Requires-Dist: pysurrealdb; extra == "all-handlers-extras"
702
+ Requires-Dist: sqlalchemy-vertica-python; extra == "all-handlers-extras"
720
703
  Requires-Dist: databend-sqlalchemy; extra == "all-handlers-extras"
721
- Requires-Dist: libsql-experimental; extra == "all-handlers-extras"
722
704
  Requires-Dist: ckanapi; extra == "all-handlers-extras"
723
- Requires-Dist: dask; extra == "all-handlers-extras"
724
- Requires-Dist: google-cloud-bigquery[pandas]; extra == "all-handlers-extras"
725
- Requires-Dist: pycaret[models]; extra == "all-handlers-extras"
726
- Requires-Dist: xata; extra == "all-handlers-extras"
705
+ Requires-Dist: botbuilder-schema; extra == "all-handlers-extras"
706
+ Requires-Dist: trino~=0.313.0; extra == "all-handlers-extras"
707
+ Requires-Dist: html2text; extra == "all-handlers-extras"
708
+ Requires-Dist: box-sdk-gen; extra == "all-handlers-extras"
709
+ Requires-Dist: solace-pubsubplus; extra == "all-handlers-extras"
710
+ Requires-Dist: pymilvus==2.3; extra == "all-handlers-extras"
711
+ Requires-Dist: pyphoenix; extra == "all-handlers-extras"
712
+ Requires-Dist: mediawikiapi; extra == "all-handlers-extras"
713
+ Requires-Dist: aerospike~=13.0.0; extra == "all-handlers-extras"
714
+ Requires-Dist: stravalib; extra == "all-handlers-extras"
715
+ Requires-Dist: statsforecast==1.6.0; extra == "all-handlers-extras"
716
+ Requires-Dist: nltk; extra == "all-handlers-extras"
717
+ Requires-Dist: sqlalchemy-spanner; extra == "all-handlers-extras"
718
+ Requires-Dist: paypalrestsdk; extra == "all-handlers-extras"
719
+ Requires-Dist: nltk>=3.8.1; extra == "all-handlers-extras"
720
+ Requires-Dist: sqlalchemy-access; extra == "all-handlers-extras"
721
+ Requires-Dist: thrift; extra == "all-handlers-extras"
722
+ Requires-Dist: crate; extra == "all-handlers-extras"
723
+ Requires-Dist: llama-index-embeddings-openai; extra == "all-handlers-extras"
724
+ Requires-Dist: stripe; extra == "all-handlers-extras"
725
+ Requires-Dist: dspy-ai==2.4.12; extra == "all-handlers-extras"
727
726
  Requires-Dist: overpy; extra == "all-handlers-extras"
728
- Requires-Dist: elasticsearch-dbapi==0.2.11; extra == "all-handlers-extras"
727
+ Requires-Dist: tpot<=0.11.7; extra == "all-handlers-extras"
728
+ Requires-Dist: litellm==1.44.8; extra == "all-handlers-extras"
729
+ Requires-Dist: mysql-connector-python==9.1.0; extra == "all-handlers-extras"
730
+ Requires-Dist: pyignite; extra == "all-handlers-extras"
729
731
  Requires-Dist: youtube-transcript-api; extra == "all-handlers-extras"
730
- Requires-Dist: snowflake-connector-python==3.13.1; extra == "all-handlers-extras"
731
- Requires-Dist: html2text; extra == "all-handlers-extras"
732
+ Requires-Dist: joblib; extra == "all-handlers-extras"
733
+ Requires-Dist: pyod>=1.1; extra == "all-handlers-extras"
734
+ Requires-Dist: chromadb~=0.6.3; extra == "all-handlers-extras"
735
+ Requires-Dist: binance-connector; extra == "all-handlers-extras"
736
+ Requires-Dist: phoenixdb; extra == "all-handlers-extras"
737
+ Requires-Dist: salesforce-merlion<=1.3.1,>=1.2.0; extra == "all-handlers-extras"
738
+ Requires-Dist: anthropic>=0.26.1; extra == "all-handlers-extras"
739
+ Requires-Dist: google-auth-httplib2; extra == "all-handlers-extras"
740
+ Requires-Dist: llama-index-readers-web; extra == "all-handlers-extras"
732
741
  Requires-Dist: pydantic-settings>=2.1.0; extra == "all-handlers-extras"
733
- Requires-Dist: tzlocal; extra == "all-handlers-extras"
734
- Requires-Dist: teradatasqlalchemy; extra == "all-handlers-extras"
735
- Requires-Dist: tensorflow; extra == "all-handlers-extras"
742
+ Requires-Dist: hugging_py_face; extra == "all-handlers-extras"
736
743
  Requires-Dist: polars; extra == "all-handlers-extras"
737
- Requires-Dist: pysqream_sqlalchemy>=0.8; extra == "all-handlers-extras"
738
- Requires-Dist: tpot<=0.11.7; extra == "all-handlers-extras"
739
- Requires-Dist: stravalib; extra == "all-handlers-extras"
740
- Requires-Dist: faiss-cpu; extra == "all-handlers-extras"
741
- Requires-Dist: stability-sdk; extra == "all-handlers-extras"
742
- Requires-Dist: aerospike~=13.0.0; extra == "all-handlers-extras"
743
- Requires-Dist: autogluon; extra == "all-handlers-extras"
744
+ Requires-Dist: virtualenv; extra == "all-handlers-extras"
745
+ Requires-Dist: clickhouse-sqlalchemy>=0.3.1; extra == "all-handlers-extras"
746
+ Requires-Dist: sqlalchemy-ingres[all]; extra == "all-handlers-extras"
747
+ Requires-Dist: cohere==4.5.1; extra == "all-handlers-extras"
748
+ Requires-Dist: pyarrow~=14.0.1; extra == "all-handlers-extras"
744
749
  Requires-Dist: google-generativeai>=0.1.0; extra == "all-handlers-extras"
745
- Requires-Dist: symbl; extra == "all-handlers-extras"
746
- Requires-Dist: gcsfs; extra == "all-handlers-extras"
747
- Requires-Dist: lightfm==1.17; extra == "all-handlers-extras"
748
750
  Requires-Dist: google-cloud-storage; extra == "all-handlers-extras"
749
- Requires-Dist: spacy; extra == "all-handlers-extras"
750
- Requires-Dist: paypalrestsdk; extra == "all-handlers-extras"
751
- Requires-Dist: sqlalchemy-sqlany; extra == "all-handlers-extras"
752
- Requires-Dist: ray[tune]>=2.2.0; extra == "all-handlers-extras"
753
- Requires-Dist: pyarrow~=14.0.1; extra == "all-handlers-extras"
754
- Requires-Dist: slack_sdk==3.30.0; extra == "all-handlers-extras"
751
+ Requires-Dist: chromadb~=0.6.3; extra == "all-handlers-extras"
752
+ Requires-Dist: libsql-experimental; extra == "all-handlers-extras"
753
+ Requires-Dist: google; extra == "all-handlers-extras"
754
+ Requires-Dist: ShopifyAPI; extra == "all-handlers-extras"
755
755
  Dynamic: author
756
756
  Dynamic: author-email
757
757
  Dynamic: classifier
@@ -1,4 +1,4 @@
1
- mindsdb/__about__.py,sha256=yTKWTlVHwoFNow5QlIHB7ZMW57IFpBcKN7fnskXu75M,444
1
+ mindsdb/__about__.py,sha256=gZuL0aW-A1CmFJGqMUkLlhUDyYWCrmFcJRCud93D-mo,444
2
2
  mindsdb/__init__.py,sha256=fZopLiAYa9MzMZ0d48JgHc_LddfFKDzh7n_8icsjrVs,54
3
3
  mindsdb/__main__.py,sha256=VQ3RetGs34NhFRT9d76o5S3UpKxdr-G3c0138kz3f8Y,21435
4
4
  mindsdb/api/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1275,7 +1275,7 @@ mindsdb/integrations/handlers/rag_handler/settings.py,sha256=5T4_oZt9dUD67AJXeZr
1275
1275
  mindsdb/integrations/handlers/ray_serve_handler/__about__.py,sha256=42oqKM1C-nIisvPvAL5fIxBDP-rxEduXnGrUUjrfLIU,345
1276
1276
  mindsdb/integrations/handlers/ray_serve_handler/__init__.py,sha256=MYDLydWeimQ74znwnrrgnh9S6YgtE7UnJcYBVlWv6r8,596
1277
1277
  mindsdb/integrations/handlers/ray_serve_handler/icon.svg,sha256=spxNMpiGLzxt46VM70-m-z4xButrIQ_dzLTarJKYSOA,1990
1278
- mindsdb/integrations/handlers/ray_serve_handler/ray_serve_handler.py,sha256=8nvYD8IqGQGpWqsD63JBdWMSehfvrk8F8U6V0pCJiAw,2924
1278
+ mindsdb/integrations/handlers/ray_serve_handler/ray_serve_handler.py,sha256=X_7Y4mUjU45M3ieP-SlQ2Ez8-0Q91sGxJi4VLhzZI1c,3432
1279
1279
  mindsdb/integrations/handlers/reddit_handler/__about__.py,sha256=Xp4s3kJp-REycH83S1RLSaPZHpFqbn-MjRuQZ5BpBuQ,336
1280
1280
  mindsdb/integrations/handlers/reddit_handler/__init__.py,sha256=n3dhcPIHT_89hQxEXKQbHApOGBGQxtvWxp-fvFL083s,496
1281
1281
  mindsdb/integrations/handlers/reddit_handler/icon.svg,sha256=JAhHkvjiEfOz_BSbBVosQs8VXegNanxdnNUNz1bHyRM,3986
@@ -1742,7 +1742,7 @@ mindsdb/integrations/utilities/rag/settings.py,sha256=aV4u5syOXMBOU-hXgOD9MfhRRP
1742
1742
  mindsdb/integrations/utilities/rag/utils.py,sha256=AAMW1gybfAntUkAPb9AYUeWZUMtZAwWaYiLJcTHNB4A,1620
1743
1743
  mindsdb/integrations/utilities/rag/vector_store.py,sha256=EwCdCf0dXwJXKOYfqTUPWEDOPLumWl2EKQiiXzgy8XA,3782
1744
1744
  mindsdb/integrations/utilities/rag/chains/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1745
- mindsdb/integrations/utilities/rag/chains/map_reduce_summarizer_chain.py,sha256=DsYSCJqjjVqOHiHsO5UrrsRVFV5KcU2hcl01-CIt9JI,10245
1745
+ mindsdb/integrations/utilities/rag/chains/map_reduce_summarizer_chain.py,sha256=ExhMuc2KOqkx5-StmHcnVKLatWTFyH0Bte0WkM-Bgno,10655
1746
1746
  mindsdb/integrations/utilities/rag/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1747
1747
  mindsdb/integrations/utilities/rag/loaders/file_loader.py,sha256=CZWcr1F2LHmiKxrQwqPModAlHCgJ4F9OLuVpaf14DR8,1868
1748
1748
  mindsdb/integrations/utilities/rag/loaders/vector_store_loader/MDBVectorStore.py,sha256=TgLU4hFPc-eKJPuN8Gn9UnwqXWF_EhCUGTZNMEP-_vQ,1476
@@ -1752,7 +1752,7 @@ mindsdb/integrations/utilities/rag/loaders/vector_store_loader/vector_store_load
1752
1752
  mindsdb/integrations/utilities/rag/pipelines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1753
1753
  mindsdb/integrations/utilities/rag/pipelines/rag.py,sha256=fFFwts-6Vai8rNaPmj8znLEvkTKyra3L9qQ0LEG581I,15095
1754
1754
  mindsdb/integrations/utilities/rag/rerankers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1755
- mindsdb/integrations/utilities/rag/rerankers/reranker_compressor.py,sha256=ivSLr6ZA51pFNIrP5vr1STs54fHvnnj_CGqxK-HLFAc,9083
1755
+ mindsdb/integrations/utilities/rag/rerankers/reranker_compressor.py,sha256=L-qCCGtNNdFck034wkeMgvRgO8ii-uNTh9Rp5Ga6ddM,9497
1756
1756
  mindsdb/integrations/utilities/rag/retrievers/__init__.py,sha256=Kuo3AJxzHVXMxPFxGqz2AXNPzjBzyMuk2yQj9pFpOsI,128
1757
1757
  mindsdb/integrations/utilities/rag/retrievers/auto_retriever.py,sha256=ODNXqeBuDfatGQLvKvogO0aA-A5v3Z4xbCbvO5ICvt4,3923
1758
1758
  mindsdb/integrations/utilities/rag/retrievers/base.py,sha256=fomZCUibDLKg-g4_uoTWz6OlhRG-GzqdPPoAR6XyPtk,264
@@ -1766,8 +1766,9 @@ mindsdb/interfaces/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVG
1766
1766
  mindsdb/interfaces/agents/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1767
1767
  mindsdb/interfaces/agents/agents_controller.py,sha256=4lb8t7S9IGG-ZbBCNQsN36-5JN40qHEBvDNq4JaQJqo,18159
1768
1768
  mindsdb/interfaces/agents/callback_handlers.py,sha256=90mGvx6ZIXRA_PAoV6vf8OHjJN65GHgoM3ip_ULOVN8,4711
1769
- mindsdb/interfaces/agents/constants.py,sha256=VrtxjycDDsZ1z1kgVuz84yjfJicvDedeut3J2x0NMWE,4462
1770
- mindsdb/interfaces/agents/langchain_agent.py,sha256=GswT0iPmQThJsoEIBOmWZG3K1eqwuLMuLQtCI36LQPg,25878
1769
+ mindsdb/interfaces/agents/constants.py,sha256=JUXpSW5PZxDZ84BD9zqZpU8u2tI2kUjHLZ1kVcDM7tY,4556
1770
+ mindsdb/interfaces/agents/event_dispatch_callback_handler.py,sha256=-76yTtxTHO5AkFTtr_RvYfkdUROJHcKZx6KJDZvj_-M,1331
1771
+ mindsdb/interfaces/agents/langchain_agent.py,sha256=M4NsaCeUvu9grnAamMI1BqxvV3Cb7CsWYlsuymKQel8,27389
1771
1772
  mindsdb/interfaces/agents/langfuse_callback_handler.py,sha256=EIea9jsKgcGANPCZpdLe929bJy85SVA_bjdsyPiwp_g,4900
1772
1773
  mindsdb/interfaces/agents/mindsdb_chat_model.py,sha256=9e_LxCKrCSOZWqURHWavw-FQUK9PLJ5O18IGYSHD9us,6051
1773
1774
  mindsdb/interfaces/agents/mindsdb_database_agent.py,sha256=lk7UyE7tK807GXLBDr4-b2VVFUUzDtpMx2GjVtywv3o,2459
@@ -1810,7 +1811,7 @@ mindsdb/interfaces/skills/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-
1810
1811
  mindsdb/interfaces/skills/retrieval_tool.py,sha256=zuEEPky--GdKHa1rqd4VhM2wgtlGas5G72eFbooj-Hg,4480
1811
1812
  mindsdb/interfaces/skills/skill_tool.py,sha256=8YjAmQ8PM0dhO8pRfKuwO2Bf3bKizLARelztjbwNz4c,12925
1812
1813
  mindsdb/interfaces/skills/skills_controller.py,sha256=CUY0B_9DBCUX7LzeODrdBs4WDNRivGPTPHYcGtH7b-M,6146
1813
- mindsdb/interfaces/skills/sql_agent.py,sha256=bZBrv2Ya-eRXvEkd4-BCXvXIzKMpTrERUmqEDh2_b7Y,14286
1814
+ mindsdb/interfaces/skills/sql_agent.py,sha256=QlhGaAPVrlFJYQB0x9jQMT4bqbs1QSBaVeaPRQpxwK0,14283
1814
1815
  mindsdb/interfaces/skills/custom/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1815
1816
  mindsdb/interfaces/skills/custom/text2sql/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1816
1817
  mindsdb/interfaces/skills/custom/text2sql/mindsdb_sql_tool.py,sha256=CDi2v2Ym3u-0nr8jq7wyf8CymWRFy_wziCov4Y9b3Iw,1253
@@ -1928,9 +1929,9 @@ mindsdb/utilities/otel/metric_handlers/__init__.py,sha256=3jGsLt5KkdhqaAUUw8ALxE
1928
1929
  mindsdb/utilities/profiler/__init__.py,sha256=d4VXl80uSm1IotR-WwbBInPmLmACiK0AzxXGBA40I-0,251
1929
1930
  mindsdb/utilities/profiler/profiler.py,sha256=KCUtOupkbM_nCoof9MtiuhUzDGezx4a4NsBX6vGWbPA,3936
1930
1931
  mindsdb/utilities/render/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1931
- mindsdb/utilities/render/sqlalchemy_render.py,sha256=XnG2IvB5tTF65EK-xV14HXrfGxyz2cQw7K6zEr9dclI,28287
1932
- MindsDB-25.1.5.2.dist-info/LICENSE,sha256=ziqdjujs6WDn-9g3t0SISjHCBc2pLRht3gnRbQoXmIs,5804
1933
- MindsDB-25.1.5.2.dist-info/METADATA,sha256=nY4sr8A-xoqcA4-VUSC8qeEjPeDFKsEh-YFBWjBEZAk,42706
1934
- MindsDB-25.1.5.2.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
1935
- MindsDB-25.1.5.2.dist-info/top_level.txt,sha256=10wPR96JDf3hM8aMP7Fz0lDlmClEP480zgXISJKr5jE,8
1936
- MindsDB-25.1.5.2.dist-info/RECORD,,
1932
+ mindsdb/utilities/render/sqlalchemy_render.py,sha256=DXKn55Fyb-Bd8tpLL4UUbfsDJI1E03ER8urpaM1nlJI,28621
1933
+ MindsDB-25.2.1.0.dist-info/LICENSE,sha256=ziqdjujs6WDn-9g3t0SISjHCBc2pLRht3gnRbQoXmIs,5804
1934
+ MindsDB-25.2.1.0.dist-info/METADATA,sha256=EWk1ytWP9eYQCaGFQEb-fe0VWtKLQwZu8KRwlSq_mOE,42706
1935
+ MindsDB-25.2.1.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
1936
+ MindsDB-25.2.1.0.dist-info/top_level.txt,sha256=10wPR96JDf3hM8aMP7Fz0lDlmClEP480zgXISJKr5jE,8
1937
+ MindsDB-25.2.1.0.dist-info/RECORD,,
mindsdb/__about__.py CHANGED
@@ -1,6 +1,6 @@
1
1
  __title__ = 'MindsDB'
2
2
  __package_name__ = 'mindsdb'
3
- __version__ = '25.1.5.2'
3
+ __version__ = '25.2.1.0'
4
4
  __description__ = "MindsDB's AI SQL Server enables developers to build AI tools that need access to real-time data to perform their tasks"
5
5
  __email__ = "jorge@mindsdb.com"
6
6
  __author__ = 'MindsDB Inc'
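
The only change to mindsdb/__about__.py is the version bump, so an upgrade can be sanity-checked directly from Python. A minimal sketch, assuming the new wheel installed cleanly:

```python
# Verify that the upgraded package reports the new version.
from mindsdb.__about__ import __version__

assert __version__ == '25.2.1.0', __version__
print(__version__)
```
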
mindsdb/integrations/handlers/ray_serve_handler/ray_serve_handler.py CHANGED
@@ -1,3 +1,5 @@
1
+ import json
2
+
1
3
  import requests
2
4
  from typing import Dict, Optional
3
5
 
@@ -6,6 +8,10 @@ import pandas as pd
6
8
  from mindsdb.integrations.libs.base import BaseMLEngine
7
9
 
8
10
 
11
+ class RayServeException(Exception):
12
+ pass
13
+
14
+
9
15
  class RayServeHandler(BaseMLEngine):
10
16
  """
11
17
  The Ray Serve integration engine needs to have a working connection to Ray Serve. For this:
@@ -37,9 +43,17 @@ class RayServeHandler(BaseMLEngine):
37
43
  except requests.exceptions.InvalidSchema:
38
44
  raise Exception("Error: The URL provided for the training endpoint is invalid.")
39
45
 
40
- resp = resp.json()
41
- if resp['status'] != 'ok':
42
- raise Exception("Error: Training failed: " + resp['status'])
46
+ error = None
47
+ try:
48
+ resp = resp.json()
49
+ except json.JSONDecodeError:
50
+ error = resp.text
51
+ else:
52
+ if resp.get('status') != 'ok':
53
+ error = resp['status']
54
+
55
+ if error:
56
+ raise RayServeException(f"Error: {error}")
43
57
 
44
58
  def predict(self, df, args=None):
45
59
  args = {**(self.model_storage.json_get('args')), **args} # merge incoming args
@@ -48,15 +62,23 @@ class RayServeHandler(BaseMLEngine):
48
62
  resp = requests.post(args['predict_url'],
49
63
  json={'df': df.to_json(orient='records'), 'pred_args': pred_args},
50
64
  headers={'content-type': 'application/json; format=pandas-records'})
51
- response = resp.json()
52
65
 
53
- target = args['target']
54
- if target != 'prediction':
55
- # rename prediction to target
56
- response[target] = response.pop('prediction')
66
+ try:
67
+ response = resp.json()
68
+ except json.JSONDecodeError:
69
+ error = resp.text
70
+ else:
71
+ if 'prediction' in response:
72
+ target = args['target']
73
+ if target != 'prediction':
74
+ # rename prediction to target
75
+ response[target] = response.pop('prediction')
76
+ return pd.DataFrame(response)
77
+ else:
78
+ # something wrong
79
+ error = response
57
80
 
58
- predictions = pd.DataFrame(response)
59
- return predictions
81
+ raise RayServeException(f"Error: {error}")
60
82
 
61
83
  def describe(self, key: Optional[str] = None) -> pd.DataFrame:
62
84
  args = self.model_storage.json_get('args')
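
The Ray Serve handler changes above replace the bare `Exception` with a dedicated `RayServeException` and tolerate endpoints that answer with a non-JSON body. A minimal sketch of the resulting pattern for the training path; the `parse_train_response` helper is hypothetical and assumes `requests` is using the standard-library JSON decoder:

```python
import json

import requests


class RayServeException(Exception):
    """Raised when the Ray Serve endpoint reports an error or returns a non-JSON body."""


def parse_train_response(resp: requests.Response) -> dict:
    # Non-JSON bodies fall back to the raw response text; a non-'ok' status is also an error.
    try:
        payload = resp.json()
    except json.JSONDecodeError:
        raise RayServeException(f"Error: {resp.text}")
    if payload.get('status') != 'ok':
        raise RayServeException(f"Error: {payload.get('status')}")
    return payload
```
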
@@ -7,6 +7,7 @@ from langchain.chains.base import Chain
  from langchain.chains.combine_documents.stuff import StuffDocumentsChain
  from langchain.chains.llm import LLMChain
  from langchain.chains.combine_documents.map_reduce import MapReduceDocumentsChain, ReduceDocumentsChain
+ from langchain_core.callbacks import dispatch_custom_event
  from langchain_core.callbacks.manager import CallbackManagerForChainRun
  from langchain_core.documents import Document
  from langchain_core.prompts import PromptTemplate
@@ -153,6 +154,10 @@ class MapReduceSummarizerChain(Chain):
  summary = await map_reduce_documents_chain.ainvoke(source_chunks)
  content = summary.get('output_text', '')
  logger.debug(f"Generated summary for source ID {source_id}: {content[:100]}...")
+
+ # Stream summarization update.
+ dispatch_custom_event('summary', {'source_id': source_id, 'content': content})
+
  return Summary(source_id=source_id, content=content)

  async def _get_source_summaries(self, source_ids: List[str], map_reduce_documents_chain: MapReduceDocumentsChain) -> List[Summary]:
@@ -181,6 +186,7 @@ class MapReduceSummarizerChain(Chain):
  map_reduce_documents_chain = create_map_reduce_documents_chain(self.summarization_config, question)
  # For each document ID associated with one or more chunks, build the full document by
  # getting ALL chunks associated with that ID. Then, map reduce summarize the complete document.
+ dispatch_custom_event('summary_begin', {'num_documents': len(unique_document_ids)})
  try:
  logger.debug("Starting async summary generation")
  summaries = asyncio.get_event_loop().run_until_complete(self._get_source_summaries(unique_document_ids, map_reduce_documents_chain))
@@ -211,4 +217,7 @@ class MapReduceSummarizerChain(Chain):
  logger.warning(f"No summary found for doc_id: {doc_id}")
  chunk.metadata['summary'] = ''

+ # Stream summarization update.
+ dispatch_custom_event('summary_end', {'num_documents': len(source_id_to_summary)})
+
  return inputs
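
Both this summarizer and the reranker changed below report progress through dispatch_custom_event from langchain_core.callbacks; any callback handler attached to the run can receive those events via on_custom_event. A small standalone sketch under that assumption (the PrintEventsHandler and summarize function are made up for illustration, and a reasonably recent langchain-core is assumed):

    from langchain_core.callbacks import BaseCallbackHandler, dispatch_custom_event
    from langchain_core.runnables import RunnableLambda


    class PrintEventsHandler(BaseCallbackHandler):
        # Receives anything emitted via dispatch_custom_event during the run.
        def on_custom_event(self, name, data, **kwargs):
            print(f"event={name} data={data}")


    def summarize(text, config):
        # Passing the run config keeps the dispatch working on older Python versions.
        dispatch_custom_event('summary', {'content': text[:20]}, config=config)
        return text.upper()


    RunnableLambda(summarize).invoke(
        "some document text",
        config={'callbacks': [PrintEventsHandler()]},
    )
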
@@ -8,7 +8,7 @@ import random
  from typing import Any, Dict, List, Optional, Sequence, Tuple

  from langchain.retrievers.document_compressors.base import BaseDocumentCompressor
- from langchain_core.callbacks import Callbacks
+ from langchain_core.callbacks import Callbacks, dispatch_custom_event
  from langchain_core.documents import Document
  from openai import AsyncOpenAI

@@ -75,8 +75,15 @@ class LLMReranker(BaseDocumentCompressor):
  # Extract response and logprobs
  answer = response.choices[0].message.content
  logprob = response.choices[0].logprobs.content[0].logprob
+ rerank_data = {
+ "document": document,
+ "answer": answer,
+ "logprob": logprob
+ }

- return {"answer": answer, "logprob": logprob}
+ # Stream reranking update.
+ dispatch_custom_event("rerank", rerank_data)
+ return rerank_data

  except Exception as e:
  if attempt == self.max_retries - 1:
@@ -154,6 +161,9 @@ class LLMReranker(BaseDocumentCompressor):
  await callbacks.on_retriever_end({"documents": []})
  return []

+ # Stream reranking update.
+ dispatch_custom_event('rerank_begin', {'num_documents': len(documents)})
+
  try:
  # Prepare query-document pairs
  query_document_pairs = [(query, doc.page_content) for doc in documents]
@@ -178,3 +178,4 @@ USER_COLUMN = "question"
  DEFAULT_EMBEDDINGS_MODEL_PROVIDER = "openai"
  DEFAULT_EMBEDDINGS_MODEL_CLASS = OpenAIEmbeddings
  DEFAULT_TIKTOKEN_MODEL_NAME = os.getenv('DEFAULT_TIKTOKEN_MODEL_NAME', 'gpt-4')
+ AGENT_CHUNK_POLLING_INTERVAL_SECONDS = os.getenv('AGENT_CHUNK_POLLING_INTERVAL_SECONDS', 1.0)
@@ -0,0 +1,50 @@
+ import queue
+ from typing import Any, Dict, List, Optional, Sequence
+ from uuid import UUID
+
+ from langchain_core.callbacks import BaseCallbackHandler
+ from langchain_core.documents import Document
+
+
+ class EventDispatchCallbackHandler(BaseCallbackHandler):
+ '''Puts dispatched events onto an event queue to be processed as a streaming chunk'''
+ def __init__(self, queue: queue.Queue):
+ self.queue = queue
+
+ def on_custom_event(
+ self,
+ name: str,
+ data: Any,
+ *,
+ run_id: UUID,
+ tags: Optional[List[str]] = None,
+ metadata: Optional[Dict[str, Any]] = None,
+ **kwargs
+ ):
+ self.queue.put({
+ 'type': 'event',
+ 'name': name,
+ 'data': data
+ })
+
+ def on_retriever_end(
+ self,
+ documents: Sequence[Document],
+ *,
+ run_id: UUID,
+ parent_run_id: Optional[UUID] = None,
+ **kwargs: Any,
+ ) -> Any:
+ document_objects = []
+ for d in documents:
+ document_objects.append({
+ 'content': d.page_content,
+ 'metadata': d.metadata
+ })
+ self.queue.put({
+ 'type': 'event',
+ 'name': 'retriever_end',
+ 'data': {
+ 'documents': document_objects
+ }
+ })
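
A plausible way to exercise the new EventDispatchCallbackHandler: hand it a queue, attach it to whatever runnable emits custom events, and drain the queue afterwards. The runnable below is a stand-in rather than MindsDB's agent executor, and the import path is the one the diff adds further down:

    import queue

    from langchain_core.callbacks import dispatch_custom_event
    from langchain_core.runnables import RunnableLambda

    from mindsdb.interfaces.agents.event_dispatch_callback_handler import EventDispatchCallbackHandler

    chunk_queue = queue.Queue()
    handler = EventDispatchCallbackHandler(chunk_queue)


    def step(question, config):
        dispatch_custom_event('rerank_begin', {'num_documents': 3}, config=config)
        return question


    RunnableLambda(step).invoke('a query', config={'callbacks': [handler]})

    # Every dispatched event is now a dict chunk on the queue, e.g.
    # {'type': 'event', 'name': 'rerank_begin', 'data': {'num_documents': 3}}
    while not chunk_queue.empty():
        print(chunk_queue.get())
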
@@ -2,7 +2,9 @@ import json
  from concurrent.futures import as_completed, TimeoutError
  from typing import Dict, Iterable, List, Optional
  from uuid import uuid4
+ import queue
  import re
+ import threading
  import numpy as np
  import pandas as pd

@@ -14,6 +16,7 @@ from langchain_community.chat_models import (
  ChatLiteLLM,
  ChatOllama)
  from langchain_core.agents import AgentAction, AgentStep
+ from langchain_core.callbacks.base import BaseCallbackHandler

  from langchain_nvidia_ai_endpoints import ChatNVIDIA
  from langchain_core.messages.base import BaseMessage
@@ -26,6 +29,8 @@ from mindsdb.integrations.handlers.openai_handler.constants import (
  from mindsdb.integrations.libs.llm.utils import get_llm_config
  from mindsdb.integrations.utilities.handler_utils import get_api_key
  from mindsdb.integrations.utilities.rag.settings import DEFAULT_RAG_PROMPT_TEMPLATE
+ from mindsdb.interfaces.agents.event_dispatch_callback_handler import EventDispatchCallbackHandler
+ from mindsdb.interfaces.agents.constants import AGENT_CHUNK_POLLING_INTERVAL_SECONDS
  from mindsdb.utilities import log
  from mindsdb.utilities.context_executor import ContextThreadPoolExecutor
  from mindsdb.interfaces.storage import db
@@ -575,6 +580,39 @@ AI: {response}"""
  chunk["trace_id"] = self.langfuse_client_wrapper.get_trace_id()
  return chunk

+ def _stream_agent_executor(self, agent_executor: AgentExecutor, prompt: str, callbacks: List[BaseCallbackHandler]):
+ chunk_queue = queue.Queue()
+ # Add event dispatch callback handler only to streaming completions.
+ event_dispatch_callback_handler = EventDispatchCallbackHandler(chunk_queue)
+ callbacks.append(event_dispatch_callback_handler)
+ stream_iterator = agent_executor.stream(prompt, config={'callbacks': callbacks})
+
+ agent_executor_finished_event = threading.Event()
+
+ def stream_worker(context: dict):
+ try:
+ ctx.load(context)
+ for chunk in stream_iterator:
+ chunk_queue.put(chunk)
+ finally:
+ # Wrap in try/finally to always set the thread event even if there's an exception.
+ agent_executor_finished_event.set()
+
+ # Enqueue Langchain agent streaming chunks in a separate thread to not block event chunks.
+ executor_stream_thread = threading.Thread(target=stream_worker, daemon=True, args=(ctx.dump(),))
+ executor_stream_thread.start()
+
+ while not agent_executor_finished_event.is_set():
+ try:
+ chunk = chunk_queue.get(block=True, timeout=AGENT_CHUNK_POLLING_INTERVAL_SECONDS)
+ except queue.Empty:
+ continue
+ logger.debug(f'Processing streaming chunk {chunk}')
+ processed_chunk = self.process_chunk(chunk)
+ logger.info(f'Processed chunk: {processed_chunk}')
+ yield self.add_chunk_metadata(processed_chunk)
+ chunk_queue.task_done()
+
  def stream_agent(self, df: pd.DataFrame, agent_executor: AgentExecutor, args: Dict) -> Iterable[Dict]:
  base_template = args.get('prompt_template', args['prompt_template'])
  input_variables = re.findall(r"{{(.*?)}}", base_template)
@@ -589,17 +627,9 @@ AI: {response}"""
  if not hasattr(agent_executor, 'stream') or not callable(agent_executor.stream):
  raise AttributeError("The agent_executor does not have a 'stream' method")

- stream_iterator = agent_executor.stream(prompts[0],
- config={'callbacks': callbacks})
-
- if not hasattr(stream_iterator, '__iter__'):
- raise TypeError("The stream method did not return an iterable")
-
+ stream_iterator = self._stream_agent_executor(agent_executor, prompts[0], callbacks)
  for chunk in stream_iterator:
- logger.debug(f'Processing streaming chunk {chunk}')
- processed_chunk = self.process_chunk(chunk)
- logger.info(f'Processed chunk: {processed_chunk}')
- yield self.add_chunk_metadata(processed_chunk)
+ yield chunk

  if return_context:
  # Yield context if required
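
The new _stream_agent_executor is a producer/consumer arrangement: a worker thread drains agent_executor.stream(...) into a queue (the same queue the callback handler above writes events to), while the generator polls that queue until a threading.Event signals the worker is done. Stripped of the MindsDB specifics, the shape is roughly this; the produce function and poll interval are illustrative:

    import queue
    import threading
    import time


    def stream_with_events(produce, poll_interval=1.0):
        """Yield items pushed by a producer thread, polling so other queue writers can interleave."""
        chunk_queue = queue.Queue()
        finished = threading.Event()

        def worker():
            try:
                for item in produce():
                    chunk_queue.put(item)
            finally:
                # Always signal completion, even if the producer raises.
                finished.set()

        threading.Thread(target=worker, daemon=True).start()

        # Keep draining until the producer is done and nothing is left queued.
        while not finished.is_set() or not chunk_queue.empty():
            try:
                yield chunk_queue.get(block=True, timeout=poll_interval)
            except queue.Empty:
                continue


    def produce():
        for i in range(3):
            time.sleep(0.2)
            yield {'chunk': i}


    for chunk in stream_with_events(produce, poll_interval=0.5):
        print(chunk)
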
@@ -121,16 +121,17 @@ class SQLAgent:

  # Check tables
  if self._tables_to_include:
+ tables_parts = [split_table_name(x) for x in self._tables_to_include]
+ no_schema_parts = []
+ for t in tables_parts:
+ if len(t) == 3:
+ no_schema_parts.append([t[0], t[2]])
+ tables_parts += no_schema_parts
+
  def _check_f(node, is_table=None, **kwargs):
  if is_table and isinstance(node, Identifier):
- name1 = node.to_string()
- name2 = '.'.join(node.parts)
- if len(node.parts) == 3:
- name3 = '.'.join(node.parts[1:])
- else:
- name3 = node.parts[-1]
- if not {name1, name2, name3}.intersection(self._tables_to_include):
- raise ValueError(f"Table {name1} not found. Available tables: {', '.join(self._tables_to_include)}")
+ if node.parts not in tables_parts:
+ raise ValueError(f"Table {'.'.join(node.parts)} not found. Available tables: {', '.join(self._tables_to_include)}")

  query_traversal(ast_query, _check_f)
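
The rewritten check compares the parsed identifier's parts against the allowed table names split into parts, additionally accepting a schema-less form of any three-part name. The matching idea in isolation, with a simplified stand-in for split_table_name (the real helper in MindsDB also has to respect quoted identifiers):

    def split_table_name(name):
        # Simplified stand-in; the real helper must handle quoting.
        return name.split('.')


    def allowed_table_parts(tables_to_include):
        tables_parts = [split_table_name(x) for x in tables_to_include]
        # For integration.schema.table also accept integration.table.
        no_schema_parts = [[t[0], t[2]] for t in tables_parts if len(t) == 3]
        return tables_parts + no_schema_parts


    allowed = allowed_table_parts(['mydb.public.orders', 'mydb.users'])
    assert ['mydb', 'public', 'orders'] in allowed
    assert ['mydb', 'orders'] in allowed          # schema may be omitted
    assert ['mydb', 'customers'] not in allowed   # unknown table is rejected
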
@@ -85,19 +85,25 @@ class SqlalchemyRender:
  # update version for support float cast
  self.dialect.server_version_info = (8, 0, 17)

- def to_column(self, parts):
+ def to_column(self, identifier: ast.Identifier) -> sa.Column:
  # because sqlalchemy doesn't allow columns consist from parts therefore we do it manually

  parts2 = []

- for i in parts:
+ quoted = getattr(identifier, 'is_quoted', [])
+ # len can be different
+ quoted = quoted + [None] * (len(identifier.parts) - len(quoted))
+
+ for i, is_quoted in zip(identifier.parts, quoted):
  if isinstance(i, ast.Star):
  part = '*'
+ elif is_quoted:
+ part = self.dialect.identifier_preparer.quote(i)
  else:
  part = str(sa.column(i).compile(dialect=self.dialect))

  if not i.islower():
- # if lower value is not be quoted
+ # if lower value is not quoted
  # then it is quoted only because of mixed case
  # in that case use origin string

@@ -130,7 +136,7 @@ class SqlalchemyRender:
  if isinstance(t, ast.Star):
  col = sa.text('*')
  elif isinstance(t, ast.Last):
- col = self.to_column(['last'])
+ col = self.to_column(ast.Identifier(parts=['last']))
  elif isinstance(t, ast.Constant):
  col = sa.literal(t.value)
  if t.alias:
@@ -156,7 +162,7 @@ class SqlalchemyRender:
  elif name == 'CURRENT_USER':
  col = sa_fnc.current_user()
  if col is None:
- col = self.to_column(t.parts)
+ col = self.to_column(t)
  if t.alias:
  col = col.label(self.get_alias(t.alias))
  elif isinstance(t, ast.Select):
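
The to_column change separates two quoting paths: parts the parser marked as explicitly quoted go through the dialect's identifier_preparer, while the rest are compiled via sa.column, which quotes only when the name requires it. A rough standalone illustration of that difference against a MySQL dialect (the column names are arbitrary, and exact output depends on the installed SQLAlchemy version):

    import sqlalchemy as sa
    from sqlalchemy.dialects import mysql
    from sqlalchemy.sql.elements import quoted_name

    dialect = mysql.dialect()
    preparer = dialect.identifier_preparer

    # Unquoted path: quoting is applied only when the name needs it.
    print(sa.column('amount').compile(dialect=dialect))   # amount
    print(sa.column('Amount').compile(dialect=dialect))   # `Amount` (mixed case forces quoting)

    # Quoted path: let the preparer decide, or force it for an explicitly quoted name.
    print(preparer.quote('order'))                             # `order` (reserved word)
    print(preparer.quote(quoted_name('amount', quote=True)))   # `amount`
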