vastdb 1.3.11__py3-none-any.whl → 2.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vastdb/_ibis_support.py +28 -0
- vastdb/_internal.py +167 -180
- vastdb/_table_interface.py +136 -0
- vastdb/bench/perf_bench/orchestrate/results_helpers.py +1 -1
- vastdb/bucket.py +1 -1
- vastdb/conftest.py +42 -19
- vastdb/schema.py +15 -3
- vastdb/session.py +3 -1
- vastdb/table.py +599 -339
- vastdb/table_metadata.py +221 -0
- vastdb/tests/test_duckdb.py +30 -30
- vastdb/tests/test_fixed_list.py +56 -6
- vastdb/tests/test_imports.py +2 -1
- vastdb/tests/test_nested.py +0 -5
- vastdb/tests/test_table_in_tx.py +249 -0
- vastdb/tests/test_tables.py +63 -16
- vastdb/tests/util.py +109 -2
- vastdb/transaction.py +27 -0
- {vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/METADATA +21 -6
- {vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/RECORD +23 -19
- {vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/WHEEL +1 -1
- {vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info/licenses}/LICENSE +0 -0
- {vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/top_level.txt +0 -0
vastdb/tests/test_tables.py
CHANGED
@@ -8,23 +8,24 @@ from contextlib import closing
 from tempfile import NamedTemporaryFile
 
 import ibis
+import pandas as pd
 import pyarrow as pa
 import pyarrow.compute as pc
 import pyarrow.parquet as pq
 import pytest
 from requests.exceptions import HTTPError
 
-from vastdb
+from vastdb import errors
+from vastdb.session import Session
+from vastdb.table import INTERNAL_ROW_ID, QueryConfig
 
-from
-from ..table import INTERNAL_ROW_ID, QueryConfig
-from .util import prepare_data
+from .util import assert_row_ids_ascending_on_first_insertion_to_table, prepare_data
 
 log = logging.getLogger(__name__)
 
 
 @pytest.fixture
-def elysium_session(session):
+def elysium_session(session: Session) -> Session:
     with session.transaction() as tx:
         try:
             tx._rpc.features.check_elysium()

@@ -125,7 +126,7 @@ def test_insert_empty(session, clean_bucket_name):
     assert row_ids == []
 
 
-def test_exists(session, clean_bucket_name):
+def test_exists(session: Session, clean_bucket_name: str):
     with session.transaction() as tx:
         s = tx.bucket(clean_bucket_name).create_schema('s1')
         assert s.tables() == []

@@ -899,7 +900,7 @@ def test_select_stop(session, clean_bucket_name):
     # the tabular server splits the batches is not true anymore and we need to
     # rewrite the test.
     assert read_batches == qc.num_splits * qc.num_sub_splits
-    qc.query_id = str(random.randint(0, 2**32))
+    qc.query_id = str(random.randint(0, 2 ** 32 - 1))
     log.info("query id is: %s", qc.query_id)
 
     def active_threads():

@@ -953,12 +954,18 @@ def test_audit_log_select(session, clean_bucket_name):
 def test_catalog_snapshots_select(session, clean_bucket_name):
     with session.transaction() as tx:
         snaps = tx.catalog_snapshots()
-
+        filtered_snaps = []
+        for snap in snaps:
+            log.info("Snapshot: %s", snap)
+            if snap.name.startswith("vast-big-catalog-bucket/.snapshot/bc_table"):
+                filtered_snaps.append(snap)
+        if not filtered_snaps:
             raise NotReady
-        latest =
+        latest = filtered_snaps[-1]
+        log.info("Latest snapshot: %s", latest)
         t = tx.catalog(latest)
         assert t.columns()
-        rows = t.select().read_all()
+        rows = t.select(limit_rows=10).read_all()
         if not rows:
             raise NotReady
 

@@ -1131,7 +1138,7 @@ def test_tables_elysium(elysium_session, clean_bucket_name):
         # assert sorted_columns[1].name == 'b'
 
 
-def test_elysium_tx(elysium_session, clean_bucket_name):
+def test_elysium_tx(elysium_session: Session, clean_bucket_name: str):
     columns = pa.schema([
         ('a', pa.int8()),
         ('b', pa.int32()),

@@ -1150,7 +1157,7 @@ def test_elysium_tx(elysium_session, clean_bucket_name):
         t = s.create_table(table_name, arrow_table.schema)
         row_ids_array = t.insert(arrow_table)
         row_ids = row_ids_array.to_pylist()
-
+        assert_row_ids_ascending_on_first_insertion_to_table(row_ids, arrow_table.num_rows, t.sorted_table)
         sorted_columns = t.sorted_columns()
         assert len(sorted_columns) == 0
         t.add_sorting_key(sorting)

@@ -1178,7 +1185,7 @@ def test_elysium_double_enable(elysium_session, clean_bucket_name):
         [111, 222, 333],
     ])
     sorting = [2, 1]
-    with pytest.raises(BadRequest):
+    with pytest.raises(errors.BadRequest):
         with prepare_data(elysium_session, clean_bucket_name, 's', 't', expected, sorting_key=sorting) as t:
             sorted_columns = t.sorted_columns()
             assert sorted_columns[0].name == 'c'

@@ -1186,7 +1193,7 @@ def test_elysium_double_enable(elysium_session, clean_bucket_name):
             t.add_sorting_key(sorting)
 
 
-def test_elysium_update_table_tx(elysium_session, clean_bucket_name):
+def test_elysium_update_table_tx(elysium_session: Session, clean_bucket_name):
     columns = pa.schema([
         ('a', pa.int64()),
         ('b', pa.float32()),

@@ -1205,7 +1212,7 @@ def test_elysium_update_table_tx(elysium_session, clean_bucket_name):
         t = s.create_table(table_name, arrow_table.schema, sorting_key=sorting)
         row_ids_array = t.insert(arrow_table)
         row_ids = row_ids_array.to_pylist()
-
+        assert_row_ids_ascending_on_first_insertion_to_table(row_ids, arrow_table.num_rows, t.sorted_table)
         sorted_columns = t.sorted_columns()
         assert sorted_columns[0].name == 's'
         assert sorted_columns[1].name == 'b'

@@ -1278,7 +1285,7 @@ def test_elysium_splits(elysium_session, clean_bucket_name):
         t = s.create_table(table_name, arrow_table.schema, sorting_key=sorting)
         row_ids_array = t.insert(arrow_table)
         row_ids = row_ids_array.to_pylist()
-
+        assert_row_ids_ascending_on_first_insertion_to_table(row_ids, arrow_table.num_rows, t.sorted_table)
         sorted_columns = t.sorted_columns()
         assert sorted_columns[0].name == 'a'
 

@@ -1291,3 +1298,43 @@ def test_elysium_splits(elysium_session, clean_bucket_name):
 
         actual = t.select(columns=['a'], predicate=(t['a'] == 1), config=config).read_all()
         assert len(actual) == 10000
+
+
+def to_df(table: pa.Table) -> pd.DataFrame:
+    return table.to_pandas().sort_values(by='a').reset_index(drop=True)
+
+
+def test_select_splits_sanity(session, clean_bucket_name, check):
+    columns = pa.schema([
+        ('a', pa.int64()),
+        ('b', pa.float32()),
+        ('c', pa.utf8()),
+    ])
+
+    length = 1000000
+
+    expected = pa.table(schema=columns, data=[
+        list(range(length)),
+        [i * 0.001 for i in range(length)],
+        [f'a{i}' for i in range(length)],
+    ])
+
+    query_config = QueryConfig(
+        num_sub_splits=1,
+        num_splits=4,
+        limit_rows_per_sub_split=2500,
+        num_row_groups_per_sub_split=1,
+    )
+
+    with prepare_data(session, clean_bucket_name, 's', 't', expected) as t:
+        splits_readers = t.select_splits(
+            columns=['a', 'b', 'c'], config=query_config)
+        splits_reader_tables = [splits_reader.read_all().combine_chunks()
+                                for splits_reader in splits_readers]
+
+        for splits_reader_table in splits_reader_tables:
+            check.greater(splits_reader_table.num_rows, 0, "if splits readers are empty test is not interesting")
+
+        actual = pa.concat_tables(splits_reader_tables).combine_chunks()
+
+        check.is_true(to_df(actual).equals(to_df(expected)))
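For context, the new test_select_splits_sanity above exercises Table.select_splits, which returns one reader per split so a query can be drained in parallel. A minimal usage sketch under assumed names (the endpoint, credentials, bucket/schema/table and column names below are placeholders, not taken from this diff):

import pyarrow as pa

import vastdb
from vastdb.table import QueryConfig

# Placeholder connection details -- substitute a real endpoint and credentials.
session = vastdb.connect(endpoint="http://vip-pool.example", access="KEY", secret="SECRET")

# One reader per split; the tuning values here are illustrative only.
config = QueryConfig(num_splits=4, num_sub_splits=1)

with session.transaction() as tx:
    table = tx.bucket("my-bucket").schema("my-schema").table("my-table")
    readers = table.select_splits(columns=["a", "b"], config=config)
    # Each reader can be drained independently, e.g. one worker per split.
    result = pa.concat_tables([reader.read_all() for reader in readers])
    print(result.num_rows)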
vastdb/tests/util.py
CHANGED
@@ -1,24 +1,53 @@
 import logging
 from contextlib import contextmanager
+from typing import Any, Iterator, List, cast
 
+import numpy as np
+import pandas as pd
 import pyarrow as pa
+from packaging.version import Version
+
+from vastdb.session import Session
+from vastdb.table import Table
+from vastdb.transaction import Transaction
 
 log = logging.getLogger(__name__)
 
 
+def assert_row_ids_ascending_on_first_insertion_to_table(row_ids, expected_num_rows, sorted_table):
+    adjusted_row_ids = [
+        int(row_id) & 0xFFFFFFFFFFFFFF for row_id in row_ids
+    ] if sorted_table else row_ids
+
+    assert adjusted_row_ids == list(range(expected_num_rows))
+
+
 @contextmanager
-def prepare_data(session
+def prepare_data(session: Session,
+                 clean_bucket_name: str, schema_name: str, table_name: str,
+                 arrow_table: pa.Table, sorting_key: List[str] = []) -> Iterator[Table]:
     with session.transaction() as tx:
         s = tx.bucket(clean_bucket_name).create_schema(schema_name)
         t = s.create_table(table_name, arrow_table.schema, sorting_key=sorting_key)
         row_ids_array = t.insert(arrow_table)
         row_ids = row_ids_array.to_pylist()
-
+        assert_row_ids_ascending_on_first_insertion_to_table(row_ids, arrow_table.num_rows, t.sorted_table)
         yield t
         t.drop()
         s.drop()
 
 
+@contextmanager
+def prepare_data_get_tx(session: Session,
+                        clean_bucket_name: str,
+                        schema_name: str,
+                        table_name: str,
+                        arrow_table: pa.Table,
+                        sorting_key: List[str] = []) -> Iterator[Transaction]:
+    with prepare_data(session, clean_bucket_name, schema_name, table_name, arrow_table, sorting_key) as table:
+        yield table.tx
+
+
 def compare_pyarrow_tables(t1, t2):
 
     def sort_table(table):

@@ -36,3 +65,81 @@ def compare_pyarrow_tables(t1, t2):
     sorted_table1 = sort_table(t1)
     sorted_table2 = sort_table(t2)
     return compare_tables(sorted_table1, sorted_table2)
+
+
+def convert_pandas_df_to_hashable_values(df: pd.DataFrame) -> pd.DataFrame:
+    """
+    Convert all values in the DataFrame to hashable types.
+    This is useful for comparing DataFrames or using them as keys in dictionaries.
+
+    :param df: Input DataFrame.
+    :return: DataFrame with all values converted to hashable types.
+    """
+
+    def _to_hashable(x: Any) -> Any:
+        if isinstance(x, (list, set, np.ndarray)):
+            return tuple(x)  # type: ignore
+        return x
+
+    if Version(pd.__version__) >= Version("2.1.0"):
+        return df.map(_to_hashable)  # type: ignore
+    else:
+        return df.applymap(_to_hashable)  # type: ignore
+
+
+def assert_pandas_df_equal(a: pd.DataFrame, b: pd.DataFrame, ignore_columns_order: bool = True,
+                           ignore_rows_order: bool = True, **kwargs) -> None:
+    """ Assert 2 Pandas DataFrames are equal.
+
+    :param a: First DataFrame.
+    :param b: Second DataFrame.
+    :param ignore_columns_order: Whether to ignore the column order.
+    :param ignore_rows_order: Whether to ignore the rows order.
+    :param kwargs: Additional keyword arguments to pass to `pd.testing.assert_frame_equal`.
+    """
+    assert set(a.columns) == set(b.columns), f'unmatched columns {a.columns} {b.columns}'
+    # Sort columns. Done instead of using pd.testing.assert_frame_equal(check_like=False) in order to allow the sort
+    # of the rows according to the same order, their columns order.
+    if ignore_columns_order:
+        b = b[a.columns]
+    pd.testing.assert_index_equal(a.columns, b.columns)
+
+    # Sort rows.
+    if ignore_rows_order:
+        a = convert_pandas_df_to_hashable_values(a)
+        b = convert_pandas_df_to_hashable_values(b)
+        a = a.sort_values(by=a.columns.tolist()).reset_index(drop=True)
+        b = b.sort_values(by=b.columns.tolist()).reset_index(drop=True)
+
+    pd.testing.assert_frame_equal(a, b, **kwargs)
+
+
+def assert_pandas_df_contained(a: pd.DataFrame, b: pd.DataFrame, ignore_value_count=False, ignore_columns_order=True):
+    """
+    Assert A is contained in B.
+
+    :param a: First DataFrame.
+    :param b: Second DataFrame.
+    :param ignore_value_count: Succeed even in case a value in A exists in B, but has more occurrences in A.
+    :param ignore_columns_order: Whether to ignore the column order.
+    :note: Expecting A and B to have the same columns.
+    """
+    assert set(a.columns) == set(b.columns), f'unmatched columns {a.columns} {b.columns}'
+    if ignore_columns_order:
+        b = b[a.columns]
+    pd.testing.assert_index_equal(a.columns, b.columns)
+
+    a = convert_pandas_df_to_hashable_values(a)
+    b = convert_pandas_df_to_hashable_values(b)
+
+    if ignore_value_count:
+        merged = a.merge(b, on=a.columns, how='left', indicator=True)
+        assert cast(pd.Series, (merged['_merge'] == 'both')).all(), f'values are not contained {merged=}'
+    else:
+        a_counts = a.value_counts(dropna=False) if Version(pd.__version__) >= Version("1.3.0") else a.value_counts()
+        b_counts = b.value_counts(dropna=False) if Version(pd.__version__) >= Version("1.3.0") else b.value_counts()
+
+        # Check that each row in A occurs in B with at least the same frequency.
+        count_compare = cast(pd.Series, a_counts <= b_counts.reindex(a_counts.index, fill_value=0))
+        assert count_compare.all(), (f'some values have lower frequency, all values with False has higher frequency in '
+                                     f'A than B {count_compare=}')
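For context, the new DataFrame helpers in vastdb/tests/util.py compare frames irrespective of row and column order and check containment by row frequency. A small illustrative sketch (the frames below are made up, not taken from this diff):

import pandas as pd

from vastdb.tests.util import assert_pandas_df_contained, assert_pandas_df_equal

a = pd.DataFrame({"x": [1, 2], "y": ["p", "q"]})
b = pd.DataFrame({"y": ["q", "p"], "x": [2, 1]})

# Equal up to row and column order (both are ignored by default).
assert_pandas_df_equal(a, b)

# Every row of `a` occurs in the doubled frame at least as often as in `a`.
assert_pandas_df_contained(a, pd.concat([b, b], ignore_index=True))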
vastdb/transaction.py
CHANGED
@@ -7,9 +7,14 @@ A transcation is used as a context manager, since every Database-related operat
 """
 
 import logging
+from copy import deepcopy
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Iterable, Optional
 
+from vastdb._table_interface import ITable
+from vastdb.table import TableInTransaction
+from vastdb.table_metadata import TableMetadata
+
 from . import bucket, errors, schema, session
 
 if TYPE_CHECKING:

@@ -28,6 +33,12 @@ AUDIT_LOG_SCHEMA_NAME = 'vast_audit_log_schema'
 AUDIT_LOG_TABLE_NAME = 'vast_audit_log_table'
 
 
+class TransactionNotActiveError(Exception):
+    """Transaction is not active error."""
+
+    pass
+
+
 @dataclass
 class Transaction:
     """A holder of a single VAST transaction."""

@@ -83,3 +94,19 @@ class Transaction:
         b = bucket.Bucket(AUDIT_LOG_BUCKET_NAME, self)
         s = schema.Schema(AUDIT_LOG_SCHEMA_NAME, b)
         return s.table(name=AUDIT_LOG_TABLE_NAME, fail_if_missing=fail_if_missing)
+
+    @property
+    def is_active(self) -> bool:
+        """Return whether transaction is active."""
+        return self.txid is not None
+
+    @property
+    def active_txid(self) -> int:
+        """Return active transaction ID."""
+        if self.txid is None:
+            raise TransactionNotActiveError()
+        return self.txid
+
+    def table_from_metadata(self, metadata: TableMetadata) -> ITable:
+        """Create Table from TableMetadata."""
+        return TableInTransaction(deepcopy(metadata), tx=self)
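For context, Transaction now exposes is_active and active_txid on top of the existing optional txid, plus table_from_metadata for building a table handle from a TableMetadata object (whose construction lives in the new table_metadata.py and is not shown in this diff). A rough sketch of the new properties, with placeholder connection details:

import vastdb

# Placeholder endpoint and credentials -- substitute your own.
session = vastdb.connect(endpoint="http://vip-pool.example", access="KEY", secret="SECRET")

with session.transaction() as tx:
    # Inside the context the server has assigned a transaction ID.
    assert tx.is_active
    print("active txid:", tx.active_txid)
    # When no txid is set, active_txid raises TransactionNotActiveError
    # instead of silently returning None.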
{vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: vastdb
-Version:
+Version: 2.0.0
 Summary: VAST Data SDK
 Home-page: https://github.com/vast-data/vastdb_sdk
 Author: VAST DATA

@@ -9,22 +9,37 @@ License: Copyright (C) VAST Data Ltd.
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Database
 Classifier: Topic :: Database :: Front-Ends
-Requires-Python: >=3.
+Requires-Python: >=3.10.0
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aws-requests-auth
+Requires-Dist: ibis-framework~=10.1
+Requires-Dist: pyarrow~=18.0
+Requires-Dist: pyarrow-hotfix==0.7
 Requires-Dist: flatbuffers
-Requires-Dist:
-Requires-Dist:
+Requires-Dist: packaging
+Requires-Dist: pandas
+Requires-Dist: numpy
 Requires-Dist: requests
 Requires-Dist: xmltodict
 Requires-Dist: backoff==2.2.1
+Dynamic: author
+Dynamic: author-email
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: home-page
+Dynamic: license
+Dynamic: license-file
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
 
 
 `vastdb` is a Python-based SDK designed for interacting
{vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/RECORD
CHANGED

@@ -1,14 +1,17 @@
 vastdb/__init__.py,sha256=uf-AXdzsD4nPxFP7WxkcAXGG0whv8BHLrrXCJtsPGaQ,436
-vastdb/
-vastdb/
+vastdb/_ibis_support.py,sha256=sJieOMvDWpsciPKh1mJzS56jxLtCRVlvK41hW84vexM,866
+vastdb/_internal.py,sha256=ZR9da7t-dQGtax0Cz-3CgtX1C0nLKc5VI9dkfO349RY,108764
+vastdb/_table_interface.py,sha256=EdS50_x9ZVzRWDyV6b5hB9qIUdA7S4hNBgNq3t8deHo,3611
+vastdb/bucket.py,sha256=ulkTI_7xw5FYVzcrTFC7G2ijmTTVSmvJZUdgzycGHR0,2588
 vastdb/config.py,sha256=OehnsWrjzv0-SUouEXmkrKBugiWyhXOn4XiSLV3s9yk,2342
-vastdb/conftest.py,sha256=
+vastdb/conftest.py,sha256=Xa9N0N3176_dbk4GdSKn57sfOiIUOijBoTEDgoGUo5s,4813
 vastdb/errors.py,sha256=NiKdwbfVsWJIixP2Tf3JgiBoEt8rRaZ0VeCyD9mXnoM,5645
 vastdb/features.py,sha256=ivYbvhiGA858B00vhs_CNzlVV9QDUe53yW6V3J5EoxM,1874
-vastdb/schema.py,sha256=
-vastdb/session.py,sha256=
-vastdb/table.py,sha256=
-vastdb/
+vastdb/schema.py,sha256=0o7HOGR2STJQ6itCDP5Lez6OOfKBl6tS0T7KLgL5Gbk,7104
+vastdb/session.py,sha256=Au0L67SzSQXG9ZlrfN5MEqIEX2LeIgzUpdFkGLIpjMs,2126
+vastdb/table.py,sha256=DipsklV03_oEvQGcE67ST8uQbtC_s-HtyCTs_majfNQ,44756
+vastdb/table_metadata.py,sha256=ecQ7u0fMbJtdKByQ9CZ0kPnZN20QGZU8_YGCM3RXQe4,6685
+vastdb/transaction.py,sha256=GR-urtN4XCIUbbK5TV4xWGvDY6vit-HJyMGRvfnIi8k,3799
 vastdb/util.py,sha256=8CUnVRsJukC3uNHNoB5D0qPf0FxS8OSdVB84nNoLJKc,6290
 vastdb/bench/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vastdb/bench/test_perf.py,sha256=0kbCxK8U9vYO0zCMUYcZHzEICaaII3I0-6FeR5-CNtM,4040

@@ -30,7 +33,7 @@ vastdb/bench/perf_bench/dataset/schemas.py,sha256=CvCAxCWHsWdI6jE9on2Mm6b0NTDhZX
 vastdb/bench/perf_bench/dataset/secmaster.py,sha256=Y3yt8B_RsFvGlhMWKvDqax31UV_ShxZM-7CJO4YmxL0,188169
 vastdb/bench/perf_bench/orchestrate/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vastdb/bench/perf_bench/orchestrate/bench_spec.py,sha256=XMUVlKPyWuhkRkz_Z2-iKYxqDlMj0EKcA3N46MLIX2c,2469
-vastdb/bench/perf_bench/orchestrate/results_helpers.py,sha256=
+vastdb/bench/perf_bench/orchestrate/results_helpers.py,sha256=lioubu6LjAgHAiYW0ZqMqAirua4XH5o6cJqvnGhE_fI,4188
 vastdb/bench/perf_bench/orchestrate/scenario.py,sha256=DUsIWyVmoLyYbKqPcLpd4veNHVbJsmL9JE-RTdmmyiw,3482
 vastdb/bench/perf_bench/orchestrate/scenario_generator.py,sha256=Plnij1hHqwmMndYpG4EA6L-HNAMJUB-M6-KXm3KZtlk,5978
 vastdb/bench/perf_bench/query/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -40,16 +43,17 @@ vastdb/bench/perf_bench/query/query_pyarrow.py,sha256=Dj5YPUvb4dAj7RskHfJcPijJnM
 vastdb/bench/perf_bench/query/query_vastdb.py,sha256=SZYem_EmsaynEftAa_VFobjSJZDAcli9BckyRS3SFvg,2810
 vastdb/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vastdb/tests/metrics.py,sha256=ZCSeBYFSPMG3yI0JrAHs2CrY6wFjx_5GwRTYHVAwLKA,1026
-vastdb/tests/test_duckdb.py,sha256=
-vastdb/tests/test_fixed_list.py,sha256=
-vastdb/tests/test_imports.py,sha256=
-vastdb/tests/test_nested.py,sha256=
+vastdb/tests/test_duckdb.py,sha256=XZxlw3SXb8tbd2BsJdr9F7jBBka4dW9PVtwvwqEcOXo,2057
+vastdb/tests/test_fixed_list.py,sha256=1KpsijmGvxHcM-pHJcm4zn2o_G8-ISbCFuoKSkXjmfk,14099
+vastdb/tests/test_imports.py,sha256=FhIHjYpttiQW_x3fMe8ENd9ynn7OyUtjxr2ca4z1F-s,21783
+vastdb/tests/test_nested.py,sha256=SKiXDX8RhR20DSdJNTZLHOGWduXVUPuG1uwfC_C5hTk,6663
 vastdb/tests/test_projections.py,sha256=3y1kubwVrzO-xoR0hyps7zrjOJI8niCYspaFTN16Q9w,4540
 vastdb/tests/test_sanity.py,sha256=bv1ypGDzvOgmMvGbucDYiLQu8krQLlE6NB3M__q87x8,3303
 vastdb/tests/test_schemas.py,sha256=l70YQMlx2UL1KRQhApriiG2ZM7GJF-IzWU31H3Yqn1U,3312
-vastdb/tests/
+vastdb/tests/test_table_in_tx.py,sha256=-OFGaZMZQc78HOmR23xHIsH7d0VBneFG3tGdDlogceM,9105
+vastdb/tests/test_tables.py,sha256=pMlOOu_7Tg2x75zYRmLDl8Imm1PuGM0CUD2vz_4Pt34,53142
 vastdb/tests/test_util.py,sha256=n7gvT5Wg6b6bxgqkFXkYqvFd_W1GlUdVfmPv66XYXyA,1956
-vastdb/tests/util.py,sha256=
+vastdb/tests/util.py,sha256=f-0sAij1uv5p7hu-YLZ-cmsNolU4KWNUjezeP5b2nwk,5915
 vastdb/vast_flatbuf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vastdb/vast_flatbuf/org/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vastdb/vast_flatbuf/org/apache/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0

@@ -210,8 +214,8 @@ vastdb/vast_flatbuf/tabular/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMp
 vastdb/vast_tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vastdb/vast_tests/test_ha.py,sha256=744P4G6VJ09RIkHhMQL4wlipCBJWQVMhyvUrSc4k1HQ,975
 vastdb/vast_tests/test_scale.py,sha256=5jGwOdZH6Tv5tPdZYPWoqcxOceI2jA5i2D1zNKZHER4,3958
-vastdb-
-vastdb-
-vastdb-
-vastdb-
-vastdb-
+vastdb-2.0.0.dist-info/licenses/LICENSE,sha256=obffan7LYrq7hLHNrY7vHcn2pKUTBUYXMKu-VOAvDxU,11333
+vastdb-2.0.0.dist-info/METADATA,sha256=GeJUYt9gomO12Gouliy_4TtHlx1-IwuZU6hwtpaT9mA,1686
+vastdb-2.0.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+vastdb-2.0.0.dist-info/top_level.txt,sha256=nnKAaZaQa8GFbYpWAexr_B9HrhonZbUlX6hL6AC--yA,7
+vastdb-2.0.0.dist-info/RECORD,,

{vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info/licenses}/LICENSE
File without changes

{vastdb-1.3.11.dist-info → vastdb-2.0.0.dist-info}/top_level.txt
File without changes