chdb-3.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of chdb has been flagged as potentially problematic; see the registry listing for more details.
- chdb/__init__.py +134 -0
- chdb/__main__.py +38 -0
- chdb/_chdb.abi3.so +0 -0
- chdb/dataframe/__init__.py +19 -0
- chdb/dataframe/query.py +356 -0
- chdb/dbapi/__init__.py +79 -0
- chdb/dbapi/connections.py +100 -0
- chdb/dbapi/constants/FIELD_TYPE.py +31 -0
- chdb/dbapi/constants/__init__.py +0 -0
- chdb/dbapi/converters.py +293 -0
- chdb/dbapi/cursors.py +351 -0
- chdb/dbapi/err.py +61 -0
- chdb/dbapi/times.py +20 -0
- chdb/libpybind11nonlimitedapi_chdb_3.10.so +0 -0
- chdb/libpybind11nonlimitedapi_chdb_3.11.so +0 -0
- chdb/libpybind11nonlimitedapi_chdb_3.12.so +0 -0
- chdb/libpybind11nonlimitedapi_chdb_3.13.so +0 -0
- chdb/libpybind11nonlimitedapi_chdb_3.8.so +0 -0
- chdb/libpybind11nonlimitedapi_chdb_3.9.so +0 -0
- chdb/libpybind11nonlimitedapi_stubs.so +0 -0
- chdb/rwabc.py +65 -0
- chdb/session/__init__.py +3 -0
- chdb/session/state.py +124 -0
- chdb/state/__init__.py +3 -0
- chdb/state/sqlitelike.py +505 -0
- chdb/udf/__init__.py +3 -0
- chdb/udf/udf.py +106 -0
- chdb/utils/__init__.py +9 -0
- chdb/utils/trace.py +74 -0
- chdb/utils/types.py +234 -0
- chdb-3.6.0.dist-info/LICENSE.txt +203 -0
- chdb-3.6.0.dist-info/METADATA +554 -0
- chdb-3.6.0.dist-info/RECORD +36 -0
- chdb-3.6.0.dist-info/WHEEL +6 -0
- chdb-3.6.0.dist-info/top_level.txt +2 -0
- chdb.libs/libpybind11nonlimitedapi_stubs-b5a2bd7f.so +0 -0

+++ chdb-3.6.0.dist-info/METADATA
@@ -0,0 +1,554 @@
Metadata-Version: 2.1
Name: chdb
Version: 3.6.0
Summary: chDB is an in-process SQL OLAP Engine powered by ClickHouse
Home-page: https://github.com/chdb-io/chdb
Author: auxten
Author-email: auxten@clickhouse.com
License: Apache-2.0
Project-URL: Homepage, https://clickhouse.com/chdb
Project-URL: Documentation, https://clickhouse.com/docs/en/chdb
Project-URL: Source, https://github.com/chdb-io/chdb
Project-URL: Download, https://pypi.org/project/chdb/#files
Project-URL: Twitter, https://twitter.com/chdb_io
Platform: Mac
Platform: Linux
Classifier: Development Status :: 4 - Beta
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: MacOS :: MacOS X
Classifier: Operating System :: POSIX
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Topic :: Database
Classifier: Topic :: Scientific/Engineering :: Information Analysis
Requires-Python: >=3.8
Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
License-File: LICENSE.txt
Requires-Dist: pyarrow >=13.0.0
Requires-Dist: pandas >=2.0.0

<div align="center">
<a href="https://clickhouse.com/blog/chdb-joins-clickhouse-family">📢 chDB joins the ClickHouse family 🐍+🚀</a>
</div>
<div align="center">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/chdb-io/chdb/raw/main/docs/_static/snake-chdb-dark.png" height="130">
<img src="https://github.com/chdb-io/chdb/raw/main/docs/_static/snake-chdb.png" height="130">
</picture>

[Build](https://github.com/chdb-io/chdb/actions/workflows/build_linux_x86_wheels.yml)
[PyPI](https://pypi.org/project/chdb/)
[Downloads](https://pepy.tech/project/chdb)
[Discord](https://discord.gg/D2Daa2fM5K)
[Twitter](https://twitter.com/chdb_io)
</div>

# chDB

> chDB is an in-process SQL OLAP Engine powered by ClickHouse [^1]
> For more details: [The birth of chDB](https://auxten.com/the-birth-of-chdb/)

## Features

* In-process SQL OLAP Engine, powered by ClickHouse
* No need to install ClickHouse
* Minimized data copy from C++ to Python with [python memoryview](https://docs.python.org/3/c-api/memoryview.html)
* Input & output support Parquet, CSV, JSON, Arrow, ORC, and [60+ more formats](https://clickhouse.com/docs/en/interfaces/formats); see [samples](tests/format_output.py)
* Supports Python DB API 2.0, [example](examples/dbapi.py)

## Arch
<div align="center">
<img src="https://github.com/chdb-io/chdb/raw/main/docs/_static/arch-chdb3.png" width="450">
</div>

## Get Started
Get started with **chdb** using our [Installation and Usage Examples](https://clickhouse.com/docs/en/chdb)

<br>

## Installation
Currently, chDB supports Python 3.8+ on macOS and Linux (x86_64 and ARM64).
```bash
pip install chdb
```
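
To check that the install works, the same engine can be invoked directly from Python; this mirrors the `chdb.query` examples shown later in this README:

```python
import chdb

# Prints the bundled ClickHouse engine version
print(chdb.query("SELECT version()", "CSV"))
```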

## Usage

### Run in command line
> `python3 -m chdb SQL [OutputFormat]`
```bash
python3 -m chdb "SELECT 1,'abc'" Pretty
```

<br>

### Data Input
The following methods are available to access on-disk and in-memory data formats:

<details>
<summary><h4>🗂️ Connection based API (recommended)</h4></summary>

```python
import chdb

# Create a connection (in-memory by default)
conn = chdb.connect(":memory:")
# Or use file-based: conn = chdb.connect("test.db")

# Create a cursor
cur = conn.cursor()

# Execute queries
cur.execute("SELECT number, toString(number) as str FROM system.numbers LIMIT 3")

# Fetch data in different ways
print(cur.fetchone())    # Single row: (0, '0')
print(cur.fetchmany(2))  # Multiple rows: ((1, '1'), (2, '2'))

# Get column information
print(cur.column_names())  # ['number', 'str']
print(cur.column_types())  # ['UInt64', 'String']

# Use the cursor as an iterator
cur.execute("SELECT number FROM system.numbers LIMIT 3")
for row in cur:
    print(row)

# Always close resources when done
cur.close()
conn.close()
```
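
If you want the whole result at once, the cursor can also be drained into a pandas DataFrame. A minimal sketch, assuming the cursor exposes a DB-API-style `fetchall()` in addition to the methods shown above:

```python
import chdb
import pandas as pd

conn = chdb.connect(":memory:")
cur = conn.cursor()
cur.execute("SELECT number, toString(number) AS str FROM system.numbers LIMIT 3")

# Assumption: fetchall() returns the remaining rows as tuples;
# column_names() is documented above and supplies the header.
df = pd.DataFrame(cur.fetchall(), columns=cur.column_names())
print(df)

cur.close()
conn.close()
```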

For more details, see [examples/connect.py](examples/connect.py).
</details>

<details>
<summary><h4>🗂️ Query On File</h4> (Parquet, CSV, JSON, Arrow, ORC and 60+)</summary>

You can run SQL and get the result back in the desired format.

```python
import chdb
res = chdb.query('select version()', 'Pretty'); print(res)
```

### Work with Parquet or CSV
```python
# See more data type format in tests/format_output.py
res = chdb.query('select * from file("data.parquet", Parquet)', 'JSON'); print(res)
res = chdb.query('select * from file("data.csv", CSV)', 'CSV'); print(res)
print(f"SQL read {res.rows_read()} rows, {res.bytes_read()} bytes, storage read {res.storage_rows_read()} rows, {res.storage_bytes_read()} bytes, elapsed {res.elapsed()} seconds")
```

### Pandas dataframe output
```python
# See more in https://clickhouse.com/docs/en/interfaces/formats
chdb.query('select * from file("data.parquet", Parquet)', 'Dataframe')
```
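
Since the `Dataframe` output format returns a regular pandas DataFrame, the result can be processed with ordinary pandas calls afterwards. A small sketch using a built-in table instead of a file:

```python
import chdb

# The 'Dataframe' output format returns a pandas.DataFrame
df = chdb.query(
    "SELECT number AS n, number % 3 AS bucket FROM system.numbers LIMIT 100",
    "Dataframe",
)
# From here on it is plain pandas
print(df.groupby("bucket")["n"].sum())
```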
</details>

<details>
<summary><h4>🗂️ Query On Table</h4> (Pandas DataFrame, Parquet file/bytes, Arrow bytes)</summary>

### Query On Pandas DataFrame
```python
import chdb
import chdb.dataframe as cdf
import pandas as pd
# Join 2 DataFrames
df1 = pd.DataFrame({'a': [1, 2, 3], 'b': ["one", "two", "three"]})
df2 = pd.DataFrame({'c': [1, 2, 3], 'd': ["①", "②", "③"]})
ret_tbl = cdf.query(sql="select * from __tbl1__ t1 join __tbl2__ t2 on t1.a = t2.c",
                    tbl1=df1, tbl2=df2)
print(ret_tbl)
# Query on the DataFrame Table
print(ret_tbl.query('select b, sum(a) from __table__ group by b'))
# Pandas DataFrames are automatically registered as temporary tables in ClickHouse
chdb.query("SELECT * FROM Python(df1) t1 JOIN Python(df2) t2 ON t1.a = t2.c").show()
```
</details>

<details>
<summary><h4>🗂️ Query with Stateful Session</h4></summary>

```python
from chdb import session as chs

## Create DB, table and view in a temporary session; everything is cleaned up automatically when the session is deleted.
sess = chs.Session()
sess.query("CREATE DATABASE IF NOT EXISTS db_xxx ENGINE = Atomic")
sess.query("CREATE TABLE IF NOT EXISTS db_xxx.log_table_xxx (x String, y Int) ENGINE = Log;")
sess.query("INSERT INTO db_xxx.log_table_xxx VALUES ('a', 1), ('b', 3), ('c', 2), ('d', 5);")
sess.query(
    "CREATE VIEW db_xxx.view_xxx AS SELECT * FROM db_xxx.log_table_xxx LIMIT 4;"
)
print("Select from view:\n")
print(sess.query("SELECT * FROM db_xxx.view_xxx", "Pretty"))
```
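
Sessions can also be kept on disk so that databases and tables outlive the Python process. A minimal sketch, assuming `Session` accepts a filesystem path the same way `chdb.connect` does (treat the path argument as an assumption and check the chDB docs):

```python
from chdb import session as chs

# Assumption: passing a directory path keeps the session data on disk
# instead of in a temporary directory that is removed on exit.
sess = chs.Session("./chdb_demo_session")
sess.query("CREATE DATABASE IF NOT EXISTS demo ENGINE = Atomic")
sess.query("CREATE TABLE IF NOT EXISTS demo.greetings (msg String) ENGINE = Log")
sess.query("INSERT INTO demo.greetings VALUES ('hello'), ('world')")
print(sess.query("SELECT * FROM demo.greetings", "CSV"))
sess.close()
```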

see also: [test_stateful.py](tests/test_stateful.py).
</details>

<details>
<summary><h4>🗂️ Query with Python DB-API 2.0</h4></summary>

```python
import chdb.dbapi as dbapi
print("chdb driver version: {0}".format(dbapi.get_client_info()))

conn1 = dbapi.connect()
cur1 = conn1.cursor()
cur1.execute('select version()')
print("description: ", cur1.description)
print("data: ", cur1.fetchone())
cur1.close()
conn1.close()
```
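
Because the cursor follows DB-API 2.0, the usual `fetchall()`/`description` combination can be used to turn a result set into a list of dicts. A small sketch, assuming the standard `fetchall()` is implemented:

```python
import chdb.dbapi as dbapi

conn = dbapi.connect()
cur = conn.cursor()
cur.execute("SELECT number, number * 2 AS doubled FROM system.numbers LIMIT 3")

# Per PEP 249, description is a sequence of 7-item tuples whose first item is the column name
columns = [col[0] for col in cur.description]
rows = [dict(zip(columns, row)) for row in cur.fetchall()]
print(rows)  # e.g. [{'number': 0, 'doubled': 0}, ...]

cur.close()
conn.close()
```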
</details>


<details>
<summary><h4>🗂️ Query with UDF (User Defined Functions)</h4></summary>

```python
from chdb.udf import chdb_udf
from chdb import query

@chdb_udf()
def sum_udf(lhs, rhs):
    return int(lhs) + int(rhs)

print(query("select sum_udf(12,22)"))
```

Some notes on the chDB Python UDF (User Defined Function) decorator:
1. The function should be stateless. Only UDFs are supported, not UDAFs (User Defined Aggregation Functions).
2. The default return type is String. If you want to change the return type, pass it as an argument to the decorator (see the sketch after these notes). The return type should be one of the following: https://clickhouse.com/docs/en/sql-reference/data-types
3. The function should take arguments of type String. As the input is TabSeparated, all arguments are strings.
4. The function will be called for each line of input, roughly like this:
    ```
    def sum_udf(lhs, rhs):
        return int(lhs) + int(rhs)

    for line in sys.stdin:
        args = line.strip().split('\t')
        lhs = args[0]
        rhs = args[1]
        print(sum_udf(lhs, rhs))
        sys.stdout.flush()
    ```
5. The function should be a pure Python function. You SHOULD import all Python modules used inside the function:
    ```
    def func_use_json(arg):
        import json
        ...
    ```
6. The Python interpreter used is the same one that runs the script; it is taken from `sys.executable`.
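
As an illustration of note 2, a UDF returning an integer could be declared roughly as follows. This is a sketch: it assumes the decorator takes a `return_type` argument naming the ClickHouse type, so verify the exact keyword against the chDB UDF docs.

```python
from chdb.udf import chdb_udf
from chdb import query

# Assumption: return_type names the ClickHouse type of the UDF result.
@chdb_udf(return_type="Int32")
def mul_udf(lhs, rhs):
    # Arguments always arrive as strings (TabSeparated input), so convert first.
    return int(lhs) * int(rhs)

print(query("select mul_udf(6, 7)"))
```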

see also: [test_udf.py](tests/test_udf.py).
</details>


<details>
<summary><h4>🗂️ Streaming Query</h4></summary>

Process large datasets with constant memory usage through chunked streaming.

```python
from chdb import session as chs

sess = chs.Session()

# Example 1: Basic example of using streaming query
rows_cnt = 0
with sess.send_query("SELECT * FROM numbers(200000)", "CSV") as stream_result:
    for chunk in stream_result:
        rows_cnt += chunk.rows_read()

print(rows_cnt)  # 200000

# Example 2: Manual iteration with fetch()
rows_cnt = 0
stream_result = sess.send_query("SELECT * FROM numbers(200000)", "CSV")
while True:
    chunk = stream_result.fetch()
    if chunk is None:
        break
    rows_cnt += chunk.rows_read()

print(rows_cnt)  # 200000

# Example 3: Early cancellation demo
rows_cnt = 0
stream_result = sess.send_query("SELECT * FROM numbers(200000)", "CSV")
while True:
    chunk = stream_result.fetch()
    if chunk is None:
        break
    if rows_cnt > 0:
        stream_result.close()
        break
    rows_cnt += chunk.rows_read()

print(rows_cnt)  # 65409

# Example 4: Using PyArrow RecordBatchReader for batch export and integration with other libraries
import pyarrow as pa
from deltalake import write_deltalake

# Get streaming result in arrow format
stream_result = sess.send_query("SELECT * FROM numbers(100000)", "Arrow")

# Create RecordBatchReader with custom batch size (default rows_per_batch=1000000)
batch_reader = stream_result.record_batch(rows_per_batch=10000)

# Use RecordBatchReader with external libraries like Delta Lake
write_deltalake(
    table_or_uri="./my_delta_table",
    data=batch_reader,
    mode="overwrite"
)

stream_result.close()

sess.close()
```
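
The same `RecordBatchReader` can also be drained in-process instead of being handed to Delta Lake, using standard PyArrow calls; this sketch opens a fresh session so it runs on its own:

```python
from chdb import session as chs

sess = chs.Session()
stream_result = sess.send_query("SELECT * FROM numbers(100000)", "Arrow")
batch_reader = stream_result.record_batch(rows_per_batch=10000)

# read_all() is standard pyarrow.RecordBatchReader API and returns a pyarrow.Table
arrow_table = batch_reader.read_all()
print(arrow_table.num_rows)            # 100000
print(arrow_table.to_pandas().head())  # first rows as a pandas DataFrame

stream_result.close()
sess.close()
```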

**Important Note**: When using streaming queries, if the `StreamingResult` is not fully consumed (due to errors or early termination), you must explicitly call `stream_result.close()` to release resources, or use the `with` statement for automatic cleanup. Failure to do so may block subsequent queries.

For more details, see [test_streaming_query.py](tests/test_streaming_query.py) and [test_arrow_record_reader_deltalake.py](tests/test_arrow_record_reader_deltalake.py).
</details>


<details>
<summary><h4>🗂️ Python Table Engine</h4></summary>

### Query on Pandas DataFrame

```python
import chdb
import pandas as pd
df = pd.DataFrame(
    {
        "a": [1, 2, 3, 4, 5, 6],
        "b": ["tom", "jerry", "auxten", "tom", "jerry", "auxten"],
        "dict_col": [
            {'id': 1, 'tags': ['urgent', 'important'], 'metadata': {'created': '2024-01-01'}},
            {'id': 2, 'tags': ['normal'], 'metadata': {'created': '2024-02-01'}},
            {'id': 3, 'name': 'tom'},
            {'id': 4, 'value': '100'},
            {'id': 5, 'value': 101},
            {'id': 6, 'value': 102},
        ],
    }
)

chdb.query("SELECT b, sum(a) FROM Python(df) GROUP BY b ORDER BY b").show()
chdb.query("SELECT dict_col.id FROM Python(df) WHERE dict_col.value='100'").show()
```

### Query on Arrow Table

```python
import chdb
import pyarrow as pa
arrow_table = pa.table(
    {
        "a": [1, 2, 3, 4, 5, 6],
        "b": ["tom", "jerry", "auxten", "tom", "jerry", "auxten"],
        "dict_col": [
            {'id': 1, 'value': 'tom'},
            {'id': 2, 'value': 'jerry'},
            {'id': 3, 'value': 'auxten'},
            {'id': 4, 'value': 'tom'},
            {'id': 5, 'value': 'jerry'},
            {'id': 6, 'value': 'auxten'},
        ],
    }
)

chdb.query("SELECT b, sum(a) FROM Python(arrow_table) GROUP BY b ORDER BY b").show()
chdb.query("SELECT dict_col.id FROM Python(arrow_table) WHERE dict_col.value='tom'").show()
```

### Query on chdb.PyReader class instance

1. You must inherit from the chdb.PyReader class and implement the `read` method.
2. The `read` method should:
    1. return a list of lists, where the first dimension is the column and the second dimension is the row; the column order should match the first argument `col_names` of `read`.
    1. return an empty list when there is no more data to read.
    1. be stateful; the cursor should be updated inside the `read` method.
3. An optional `get_schema` method can be implemented to return the schema of the table. The prototype is `def get_schema(self) -> List[Tuple[str, str]]:`; the return value is a list of tuples, each containing the column name and the column type. The column type should be one of the following: https://clickhouse.com/docs/en/sql-reference/data-types

```python
import chdb

class myReader(chdb.PyReader):
    def __init__(self, data):
        self.data = data
        self.cursor = 0
        super().__init__(data)

    def read(self, col_names, count):
        print("Python func read", col_names, count, self.cursor)
        if self.cursor >= len(self.data["a"]):
            self.cursor = 0
            return []
        block = [self.data[col] for col in col_names]
        self.cursor += len(block[0])
        return block

    def get_schema(self):
        return [
            ("a", "int"),
            ("b", "str"),
            ("dict_col", "json")
        ]

reader = myReader(
    {
        "a": [1, 2, 3, 4, 5, 6],
        "b": ["tom", "jerry", "auxten", "tom", "jerry", "auxten"],
        "dict_col": [
            {'id': 1, 'tags': ['urgent', 'important'], 'metadata': {'created': '2024-01-01'}},
            {'id': 2, 'tags': ['normal'], 'metadata': {'created': '2024-02-01'}},
            {'id': 3, 'name': 'tom'},
            {'id': 4, 'value': '100'},
            {'id': 5, 'value': 101},
            {'id': 6, 'value': 102}
        ],
    }
)

chdb.query("SELECT b, sum(a) FROM Python(reader) GROUP BY b ORDER BY b").show()
chdb.query("SELECT dict_col.id FROM Python(reader) WHERE dict_col.value='100'").show()
```

see also: [test_query_py.py](tests/test_query_py.py) and [test_query_json.py](tests/test_query_json.py).

### JSON Type Inference

chDB automatically converts Python dictionary objects to ClickHouse JSON types from these sources:

1. **Pandas DataFrame**
   - Columns with `object` dtype are sampled (default 10,000 rows) to detect JSON structures.
   - Control sampling via SQL settings:
     ```sql
     SET pandas_analyze_sample = 10000 -- Default sampling
     SET pandas_analyze_sample = 0     -- Force String type
     SET pandas_analyze_sample = -1    -- Force JSON type
     ```
   - Columns are converted to `String` if sampling finds non-dictionary values.

2. **Arrow Table**
   - `struct` type columns are automatically mapped to JSON columns.
   - Nested structures preserve type information.

3. **chdb.PyReader**
   - Implement custom schema mapping in `get_schema()`:
     ```python
     def get_schema(self):
         return [
             ("c1", "JSON"),  # Explicit JSON mapping
             ("c2", "String")
         ]
     ```
   - Column types declared as "JSON" will bypass auto-detection.

When converting Python dictionary objects to JSON columns:

1. **Nested Structures**
   - Nested dictionaries, lists, tuples, and NumPy arrays are processed recursively.

2. **Primitive Types**
   - Basic types such as integers, floats, strings, and booleans are recognized automatically.

3. **Complex Objects**
   - Non-primitive types are converted to strings.

### Limitations

1. Column types supported: pandas.Series, pyarrow.array, chdb.PyReader
1. Data types supported: Int, UInt, Float, String, Date, DateTime, Decimal
1. Python Object type will be converted to String
1. Pandas DataFrame gives the best performance; Arrow Table is faster than PyReader

</details>

For more examples, see [examples](examples) and [tests](tests).

<br>

## Demos and Examples

- [Project Documentation](https://clickhouse.com/docs/en/chdb) and [Usage Examples](https://clickhouse.com/docs/en/chdb/install/python)
- [Colab Notebooks](https://colab.research.google.com/drive/1-zKB6oKfXeptggXi0kUX87iR8ZTSr4P3?usp=sharing) and other [Script Examples](examples)

## Benchmark

- [ClickBench of embedded engines](https://benchmark.clickhouse.com/#eyJzeXN0ZW0iOnsiQXRoZW5hIChwYXJ0aXRpb25lZCkiOnRydWUsIkF0aGVuYSAoc2luZ2xlKSI6dHJ1ZSwiQXVyb3JhIGZvciBNeVNRTCI6dHJ1ZSwiQXVyb3JhIGZvciBQb3N0Z3JlU1FMIjp0cnVlLCJCeXRlSG91c2UiOnRydWUsImNoREIiOnRydWUsIkNpdHVzIjp0cnVlLCJjbGlja2hvdXNlLWxvY2FsIChwYXJ0aXRpb25lZCkiOnRydWUsImNsaWNraG91c2UtbG9jYWwgKHNpbmdsZSkiOnRydWUsIkNsaWNrSG91c2UiOnRydWUsIkNsaWNrSG91c2UgKHR1bmVkKSI6dHJ1ZSwiQ2xpY2tIb3VzZSAoenN0ZCkiOnRydWUsIkNsaWNrSG91c2UgQ2xvdWQiOnRydWUsIkNsaWNrSG91c2UgKHdlYikiOnRydWUsIkNyYXRlREIiOnRydWUsIkRhdGFiZW5kIjp0cnVlLCJEYXRhRnVzaW9uIChzaW5nbGUpIjp0cnVlLCJBcGFjaGUgRG9yaXMiOnRydWUsIkRydWlkIjp0cnVlLCJEdWNrREIgKFBhcnF1ZXQpIjp0cnVlLCJEdWNrREIiOnRydWUsIkVsYXN0aWNzZWFyY2giOnRydWUsIkVsYXN0aWNzZWFyY2ggKHR1bmVkKSI6ZmFsc2UsIkdyZWVucGx1bSI6dHJ1ZSwiSGVhdnlBSSI6dHJ1ZSwiSHlkcmEiOnRydWUsIkluZm9icmlnaHQiOnRydWUsIktpbmV0aWNhIjp0cnVlLCJNYXJpYURCIENvbHVtblN0b3JlIjp0cnVlLCJNYXJpYURCIjpmYWxzZSwiTW9uZXREQiI6dHJ1ZSwiTW9uZ29EQiI6dHJ1ZSwiTXlTUUwgKE15SVNBTSkiOnRydWUsIk15U1FMIjp0cnVlLCJQaW5vdCI6dHJ1ZSwiUG9zdGdyZVNRTCI6dHJ1ZSwiUG9zdGdyZVNRTCAodHVuZWQpIjpmYWxzZSwiUXVlc3REQiAocGFydGl0aW9uZWQpIjp0cnVlLCJRdWVzdERCIjp0cnVlLCJSZWRzaGlmdCI6dHJ1ZSwiU2VsZWN0REIiOnRydWUsIlNpbmdsZVN0b3JlIjp0cnVlLCJTbm93Zmxha2UiOnRydWUsIlNRTGl0ZSI6dHJ1ZSwiU3RhclJvY2tzIjp0cnVlLCJUaW1lc2NhbGVEQiAoY29tcHJlc3Npb24pIjp0cnVlLCJUaW1lc2NhbGVEQiI6dHJ1ZX0sInR5cGUiOnsic3RhdGVsZXNzIjpmYWxzZSwibWFuYWdlZCI6ZmFsc2UsIkphdmEiOmZhbHNlLCJjb2x1bW4tb3JpZW50ZWQiOmZhbHNlLCJDKysiOmZhbHNlLCJNeVNRTCBjb21wYXRpYmxlIjpmYWxzZSwicm93LW9yaWVudGVkIjpmYWxzZSwiQyI6ZmFsc2UsIlBvc3RncmVTUUwgY29tcGF0aWJsZSI6ZmFsc2UsIkNsaWNrSG91c2UgZGVyaXZhdGl2ZSI6ZmFsc2UsImVtYmVkZGVkIjp0cnVlLCJzZXJ2ZXJsZXNzIjpmYWxzZSwiUnVzdCI6ZmFsc2UsInNlYXJjaCI6ZmFsc2UsImRvY3VtZW50IjpmYWxzZSwidGltZS1zZXJpZXMiOmZhbHNlfSwibWFjaGluZSI6eyJzZXJ2ZXJsZXNzIjp0cnVlLCIxNmFjdSI6dHJ1ZSwiTCI6dHJ1ZSwiTSI6dHJ1ZSwiUyI6dHJ1ZSwiWFMiOnRydWUsImM2YS5tZXRhbCwgNTAwZ2IgZ3AyIjp0cnVlLCJjNmEuNHhsYXJnZSwgNTAwZ2IgZ3AyIjp0cnVlLCJjNS40eGxhcmdlLCA1MDBnYiBncDIiOnRydWUsIjE2IHRocmVhZHMiOnRydWUsIjIwIHRocmVhZHMiOnRydWUsIjI0IHRocmVhZHMiOnRydWUsIjI4IHRocmVhZHMiOnRydWUsIjMwIHRocmVhZHMiOnRydWUsIjQ4IHRocmVhZHMiOnRydWUsIjYwIHRocmVhZHMiOnRydWUsIm01ZC4yNHhsYXJnZSI6dHJ1ZSwiYzVuLjR4bGFyZ2UsIDIwMGdiIGdwMiI6dHJ1ZSwiYzZhLjR4bGFyZ2UsIDE1MDBnYiBncDIiOnRydWUsImRjMi44eGxhcmdlIjp0cnVlLCJyYTMuMTZ4bGFyZ2UiOnRydWUsInJhMy40eGxhcmdlIjp0cnVlLCJyYTMueGxwbHVzIjp0cnVlLCJTMjQiOnRydWUsIlMyIjp0cnVlLCIyWEwiOnRydWUsIjNYTCI6dHJ1ZSwiNFhMIjp0cnVlLCJYTCI6dHJ1ZX0sImNsdXN0ZXJfc2l6ZSI6eyIxIjp0cnVlLCIyIjp0cnVlLCI0Ijp0cnVlLCI4Ijp0cnVlLCIxNiI6dHJ1ZSwiMzIiOnRydWUsIjY0Ijp0cnVlLCIxMjgiOnRydWUsInNlcnZlcmxlc3MiOnRydWUsInVuZGVmaW5lZCI6dHJ1ZX0sIm1ldHJpYyI6ImhvdCIsInF1ZXJpZXMiOlt0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlXX0=)
- [chDB vs Pandas](https://colab.research.google.com/drive/1FogLujJ_-ds7RGurDrUnK-U0IW8a8Qd0)
- [Benchmark on DataFrame: chDB Pandas DuckDB Polars](https://benchmark.clickhouse.com/#eyJzeXN0ZW0iOnsiQWxsb3lEQiI6dHJ1ZSwiQWxsb3lEQiAodHVuZWQpIjp0cnVlLCJBdGhlbmEgKHBhcnRpdGlvbmVkKSI6dHJ1ZSwiQXRoZW5hIChzaW5nbGUpIjp0cnVlLCJBdXJvcmEgZm9yIE15U1FMIjp0cnVlLCJBdXJvcmEgZm9yIFBvc3RncmVTUUwiOnRydWUsIkJ5Q29uaXR5Ijp0cnVlLCJCeXRlSG91c2UiOnRydWUsImNoREIgKERhdGFGcmFtZSkiOnRydWUsImNoREIgKFBhcnF1ZXQsIHBhcnRpdGlvbmVkKSI6dHJ1ZSwiY2hEQiI6dHJ1ZSwiQ2l0dXMiOnRydWUsIkNsaWNrSG91c2UgQ2xvdWQgKGF3cykiOnRydWUsIkNsaWNrSG91c2UgQ2xvdWQgKGF6dXJlKSI6dHJ1ZSwiQ2xpY2tIb3VzZSBDbG91ZCAoZ2NwKSI6dHJ1ZSwiQ2xpY2tIb3VzZSAoZGF0YSBsYWtlLCBwYXJ0aXRpb25lZCkiOnRydWUsIkNsaWNrSG91c2UgKGRhdGEgbGFrZSwgc2luZ2xlKSI6dHJ1ZSwiQ2xpY2tIb3VzZSAoUGFycXVldCwgcGFydGl0aW9uZWQpIjp0cnVlLCJDbGlja0hvdXNlIChQYXJxdWV0LCBzaW5nbGUpIjp0cnVlLCJDbGlja0hvdXNlICh3ZWIpIjp0cnVlLCJDbGlja0hvdXNlIjp0cnVlLCJDbGlja0hvdXNlICh0dW5lZCkiOnRydWUsIkNsaWNrSG91c2UgKHR1bmVkLCBtZW1vcnkpIjp0cnVlLCJDbG91ZGJlcnJ5Ijp0cnVlLCJDcmF0ZURCIjp0cnVlLCJDcnVuY2h5IEJyaWRnZSBmb3IgQW5hbHl0aWNzIChQYXJxdWV0KSI6dHJ1ZSwiRGF0YWJlbmQiOnRydWUsIkRhdGFGdXNpb24gKFBhcnF1ZXQsIHBhcnRpdGlvbmVkKSI6dHJ1ZSwiRGF0YUZ1c2lvbiAoUGFycXVldCwgc2luZ2xlKSI6dHJ1ZSwiQXBhY2hlIERvcmlzIjp0cnVlLCJEcnVpZCI6dHJ1ZSwiRHVja0RCIChEYXRhRnJhbWUpIjp0cnVlLCJEdWNrREIgKFBhcnF1ZXQsIHBhcnRpdGlvbmVkKSI6dHJ1ZSwiRHVja0RCIjp0cnVlLCJFbGFzdGljc2VhcmNoIjp0cnVlLCJFbGFzdGljc2VhcmNoICh0dW5lZCkiOmZhbHNlLCJHbGFyZURCIjp0cnVlLCJHcmVlbnBsdW0iOnRydWUsIkhlYXZ5QUkiOnRydWUsIkh5ZHJhIjp0cnVlLCJJbmZvYnJpZ2h0Ijp0cnVlLCJLaW5ldGljYSI6dHJ1ZSwiTWFyaWFEQiBDb2x1bW5TdG9yZSI6dHJ1ZSwiTWFyaWFEQiI6ZmFsc2UsIk1vbmV0REIiOnRydWUsIk1vbmdvREIiOnRydWUsIk1vdGhlcmR1Y2siOnRydWUsIk15U1FMIChNeUlTQU0pIjp0cnVlLCJNeVNRTCI6dHJ1ZSwiT3hsYSI6dHJ1ZSwiUGFuZGFzIChEYXRhRnJhbWUpIjp0cnVlLCJQYXJhZGVEQiAoUGFycXVldCwgcGFydGl0aW9uZWQpIjp0cnVlLCJQYXJhZGVEQiAoUGFycXVldCwgc2luZ2xlKSI6dHJ1ZSwiUGlub3QiOnRydWUsIlBvbGFycyAoRGF0YUZyYW1lKSI6dHJ1ZSwiUG9zdGdyZVNRTCAodHVuZWQpIjpmYWxzZSwiUG9zdGdyZVNRTCI6dHJ1ZSwiUXVlc3REQiAocGFydGl0aW9uZWQpIjp0cnVlLCJRdWVzdERCIjp0cnVlLCJSZWRzaGlmdCI6dHJ1ZSwiU2luZ2xlU3RvcmUiOnRydWUsIlNub3dmbGFrZSI6dHJ1ZSwiU1FMaXRlIjp0cnVlLCJTdGFyUm9ja3MiOnRydWUsIlRhYmxlc3BhY2UiOnRydWUsIlRlbWJvIE9MQVAgKGNvbHVtbmFyKSI6dHJ1ZSwiVGltZXNjYWxlREIgKGNvbXByZXNzaW9uKSI6dHJ1ZSwiVGltZXNjYWxlREIiOnRydWUsIlVtYnJhIjp0cnVlfSwidHlwZSI6eyJDIjpmYWxzZSwiY29sdW1uLW9yaWVudGVkIjpmYWxzZSwiUG9zdGdyZVNRTCBjb21wYXRpYmxlIjpmYWxzZSwibWFuYWdlZCI6ZmFsc2UsImdjcCI6ZmFsc2UsInN0YXRlbGVzcyI6ZmFsc2UsIkphdmEiOmZhbHNlLCJDKysiOmZhbHNlLCJNeVNRTCBjb21wYXRpYmxlIjpmYWxzZSwicm93LW9yaWVudGVkIjpmYWxzZSwiQ2xpY2tIb3VzZSBkZXJpdmF0aXZlIjpmYWxzZSwiZW1iZWRkZWQiOmZhbHNlLCJzZXJ2ZXJsZXNzIjpmYWxzZSwiZGF0YWZyYW1lIjp0cnVlLCJhd3MiOmZhbHNlLCJhenVyZSI6ZmFsc2UsImFuYWx5dGljYWwiOmZhbHNlLCJSdXN0IjpmYWxzZSwic2VhcmNoIjpmYWxzZSwiZG9jdW1lbnQiOmZhbHNlLCJzb21ld2hhdCBQb3N0Z3JlU1FMIGNvbXBhdGlibGUiOmZhbHNlLCJ0aW1lLXNlcmllcyI6ZmFsc2V9LCJtYWNoaW5lIjp7IjE2IHZDUFUgMTI4R0IiOnRydWUsIjggdkNQVSA2NEdCIjp0cnVlLCJzZXJ2ZXJsZXNzIjp0cnVlLCIxNmFjdSI6dHJ1ZSwiYzZhLjR4bGFyZ2UsIDUwMGdiIGdwMiI6dHJ1ZSwiTCI6dHJ1ZSwiTSI6dHJ1ZSwiUyI6dHJ1ZSwiWFMiOnRydWUsImM2YS5tZXRhbCwgNTAwZ2IgZ3AyIjp0cnVlLCIxOTJHQiI6dHJ1ZSwiMjRHQiI6dHJ1ZSwiMzYwR0IiOnRydWUsIjQ4R0IiOnRydWUsIjcyMEdCIjp0cnVlLCI5NkdCIjp0cnVlLCJkZXYiOnRydWUsIjcwOEdCIjp0cnVlLCJjNW4uNHhsYXJnZSwgNTAwZ2IgZ3AyIjp0cnVlLCJBbmFseXRpY3MtMjU2R0IgKDY0IHZDb3JlcywgMjU2IEdCKSI6dHJ1ZSwiYzUuNHhsYXJnZSwgNTAwZ2IgZ3AyIjp0cnVlLCJjNmEuNHhsYXJnZSwgMTUwMGdiIGdwMiI6dHJ1ZSwiY2xvdWQiOnRydWUsImRjMi44eGxhcmdlIjp0cnVlLCJyYTMuMTZ4bGFyZ2UiOnRydWUsInJhMy40eGxhcmdlIjp0cnVlLCJyYTMueGxwbHVzIjp0cnVlLCJTMiI6dHJ1ZSwiUzI0Ijp0cnVlLCIyWEwiOnRydWUsIjNYTCI6dHJ1ZSwiNFhMIjp0cnVlLCJYTCI6dHJ1ZSwiTDEgLSA
xNkNQVSAzMkdCIjp0cnVlLCJjNmEuNHhsYXJnZSwgNTAwZ2IgZ3AzIjp0cnVlfSwiY2x1c3Rlcl9zaXplIjp7IjEiOnRydWUsIjIiOnRydWUsIjQiOnRydWUsIjgiOnRydWUsIjE2Ijp0cnVlLCIzMiI6dHJ1ZSwiNjQiOnRydWUsIjEyOCI6dHJ1ZSwic2VydmVybGVzcyI6dHJ1ZX0sIm1ldHJpYyI6ImhvdCIsInF1ZXJpZXMiOlt0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlLHRydWUsdHJ1ZSx0cnVlXX0=)

<div align="center">
<img src="https://github.com/chdb-io/chdb/raw/main/docs/_static/df_bench.png" width="800">
</div>

## Documentation
- For chDB-specific examples and documentation, refer to the [chDB docs](https://clickhouse.com/docs/en/chdb)
- For SQL syntax, please refer to the [ClickHouse SQL Reference](https://clickhouse.com/docs/en/sql-reference/syntax)

## Events

- Demo chDB at [ClickHouse v23.7 livehouse!](https://t.co/todc13Kn19) and [Slides](https://docs.google.com/presentation/d/1ikqjOlimRa7QAg588TAB_Fna-Tad2WMg7_4AgnbQbFA/edit?usp=sharing)

## Contributing
Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
Here are some ways you can help:
- [ ] Help test and report bugs
- [ ] Help improve documentation
- [ ] Help improve code quality and performance

### Bindings

We welcome bindings for other languages; please refer to [bindings](bindings.md) for more details.

## Version Guide

Please refer to [VERSION-GUIDE.md](VERSION-GUIDE.md) for more details.

## Paper

- [ClickHouse - Lightning Fast Analytics for Everyone](https://www.vldb.org/pvldb/vol17/p3731-schulze.pdf)

## License
Apache 2.0, see [LICENSE](LICENSE.txt) for more information.

## Acknowledgments
chDB is mainly based on [ClickHouse](https://github.com/ClickHouse/ClickHouse) [^1]; for trademark and other reasons, it is named chDB.

## Contact
- Discord: [https://discord.gg/D2Daa2fM5K](https://discord.gg/D2Daa2fM5K)
- Email: auxten@clickhouse.com
- Twitter: [@chdb_io](https://twitter.com/chdb_io)

<br>

[^1]: ClickHouse® is a trademark of ClickHouse Inc. All trademarks, service marks, and logos mentioned or depicted are the property of their respective owners. The use of any third-party trademarks, brand names, product names, and company names does not imply endorsement, affiliation, or association with the respective owners.

+++ chdb-3.6.0.dist-info/RECORD
@@ -0,0 +1,36 @@
chdb/__init__.py,sha256=Wn5pgJ95tX05ILVnRAXbek_ucF7ZHZ40h3eH6HPDbDY,3762
chdb/__main__.py,sha256=vl-gorTYCT9Uh_h4jbQ8O-a5_pokCJPFbF_yplIgKYc,1336
chdb/_chdb.abi3.so,sha256=GuPQlHaMxcaiWgiMS--ZJ4KmL-wru7bjfvK-n0VN690,708287913
chdb/libpybind11nonlimitedapi_chdb_3.10.so,sha256=hvB4knN938BCAVgB6AxMOvAlxy5almKc3A4IZZ2QgkY,406337
chdb/libpybind11nonlimitedapi_chdb_3.11.so,sha256=lrKGIHzhDN-DQDlzUCk5tkEMh8bF89JdfYSmyk4803c,406393
chdb/libpybind11nonlimitedapi_chdb_3.12.so,sha256=ZKUZwomH8EzbTq2591M5Y5z6mvfEAtxlsxBfFnaIxDY,406393
chdb/libpybind11nonlimitedapi_chdb_3.13.so,sha256=b_CaRSPGmJn6cTDgsOVKK0UeYOXawZntJ3aE4nL21Gk,406465
chdb/libpybind11nonlimitedapi_chdb_3.8.so,sha256=u6M92MQIKIWMsy5FnckqgelFRZqdbEg5PCEMkBEnhbA,406273
chdb/libpybind11nonlimitedapi_chdb_3.9.so,sha256=QEcCC13I7HEIJw6JvdAYvdCGlO1gsnn7_nYa619yIPw,406409
chdb/libpybind11nonlimitedapi_stubs.so,sha256=taK9f109lvUOZGHqvGUeskQS764ru5s7oODPOrQ6V9A,550192
chdb/rwabc.py,sha256=tbiwCrXirfrfx46wCJxS64yvFe6pVWIPGdSuvrAL5Ys,2102
chdb/dataframe/__init__.py,sha256=1_mrZZiJwqBTnH_P8_FCbbYXIWWY5sxnaFpe3-tDLF4,680
chdb/dataframe/query.py,sha256=ggvE8A5vtabFg9gSTp99S7LCrnIEwbWtb-PtJVT8Ct0,12759
chdb/dbapi/__init__.py,sha256=aaNhxXNBC1ZkFr260cbGR8msOinTp0VoNTT_j8AXGUc,2205
chdb/dbapi/connections.py,sha256=RW0EcusyKueMGp7VmSaCO-ukyzY7l2ps_ibA9-pXDvo,2754
chdb/dbapi/converters.py,sha256=0SDqgixUTCz0LtWke_HHzgF1lFJhpsQrR_-ky3b-JRY,7447
chdb/dbapi/cursors.py,sha256=CpV-JOr7fVQfq1mYcYF7o08dLnYT2mNhZ2eQLtJa1N4,11961
chdb/dbapi/err.py,sha256=kUI9-A8LNqBoMoo4jh2NFsLCOLoPEwh9YIuz_qMoLoM,2017
chdb/dbapi/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360
chdb/dbapi/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
chdb/dbapi/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
chdb/session/__init__.py,sha256=fCUROZ5L1-92o2lcASiWJpFu-80-kDoSrNfouLEmLg8,50
chdb/session/state.py,sha256=eIZ8CbZGnvHziw-arGrv-0vHibNppW116XF1VnthdMk,4462
chdb/state/__init__.py,sha256=RVUIWDqDi7gte4Os7Mz1wPXFyFpdHT_p1klJC7QtluI,55
chdb/state/sqlitelike.py,sha256=PHdIJVfbSUvJWU6doMnrg0jVpovtHUG12I_-acZHOko,18338
chdb/udf/__init__.py,sha256=qSMaPEre7w1pYz8uJ-iZtuu8wYOUNRcI_8UNuaOymGE,80
chdb/udf/udf.py,sha256=z0A1RmyZrx55bykpvvS-LpVt1lMrQOexjvU5zxCdCSA,3935
chdb/utils/__init__.py,sha256=tXRcwBRGW2YQNBZWV4Mitw5QlCu_qlSRCjllw15XHbs,171
chdb/utils/trace.py,sha256=W-pvDoKlnzq6H_7FiWjr5_teN40UNE4E5--zbUrjOIc,2511
chdb/utils/types.py,sha256=MGLFIjoDvu7Uc2Wy8EDY60jjue66HmMPxbhrujjrZxQ,7530
chdb.libs/libpybind11nonlimitedapi_stubs-b5a2bd7f.so,sha256=2vsxG2Njg0nlG-qB4O9tjcOrHpiWwM0nz1QVYdLPGRg,620041
chdb-3.6.0.dist-info/LICENSE.txt,sha256=isYVtNCO5910aj6e9bJJ6kQceivkLqsMlFSNYwzGGKI,11366
chdb-3.6.0.dist-info/METADATA,sha256=E6-S8Ha_hYHaVwGrShfPWxGwu9tZy6I1XVqIOGnv_Ys,25714
chdb-3.6.0.dist-info/WHEEL,sha256=IrCu7Y0-MCf1vI2OljwmGC-5WA5m2iRketNWZSAES5w,149
chdb-3.6.0.dist-info/top_level.txt,sha256=se0Jj0A2-ijfMW51hIjiuNyDJPqy5xJU1G8a_IEdllI,11
chdb-3.6.0.dist-info/RECORD,,