vastdb-0.0.4.0-py3-none-any.whl → vastdb-0.0.5.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vastdb/api.py CHANGED
@@ -7,7 +7,7 @@ from datetime import datetime
  from enum import Enum
  from typing import List, Union, Optional, Iterator
  import xmltodict
- import concurrent
+ import concurrent.futures
  import threading
  import queue
  import math
@@ -22,7 +22,6 @@ import hashlib
  import hmac
  import json
  import itertools
- import concurrent.futures
  from aws_requests_auth.aws_auth import AWSRequestsAuth
  from io import BytesIO
 
@@ -72,8 +71,8 @@ import vast_flatbuf.tabular.S3File as tabular_s3_file
  import vast_flatbuf.tabular.CreateProjectionRequest as tabular_create_projection
  import vast_flatbuf.tabular.Column as tabular_projecion_column
  import vast_flatbuf.tabular.ColumnType as tabular_proj_column_type
- #import vast_protobuf.tabular.rpc_pb2 as rpc_pb
- #import vast_protobuf.substrait.type_pb2 as type_pb
+ import vast_protobuf.tabular.rpc_pb2 as rpc_pb
+ import vast_protobuf.substrait.type_pb2 as type_pb
 
  from vast_flatbuf.org.apache.arrow.computeir.flatbuf.Deref import Deref
  from vast_flatbuf.org.apache.arrow.computeir.flatbuf.ExpressionImpl import ExpressionImpl
@@ -125,6 +124,11 @@ class AuthType(Enum):
      SIGV2 = "s3"
      BASIC = "basic"
 
+
+ class TabularException(Exception):
+     pass
+
+
  def get_unit_to_flatbuff_time_unit(type):
      unit_to_flatbuff_time_unit = {
          'ns': TimeUnit.NANOSECOND,
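
The new `TabularException` type is what the blocking import path (further down in this diff) raises when the server reports an unexpected status, letting callers separate tabular-level failures from transport errors. A minimal sketch of catching it (the `api` argument stands in for a constructed `VastdbApi` client):

```python
from vastdb.api import TabularException

def import_with_handling(api, bucket, schema, table, source_files):
    # TabularException signals a tabular-level failure reported by the server;
    # IOError and requests exceptions still indicate transport problems.
    try:
        api.import_data(bucket, schema, table, source_files)
    except TabularException as e:
        print(f"import failed on the tabular layer: {e}")
        raise
```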
@@ -999,7 +1003,7 @@ class VastdbApi:
 
      def create_table_from_parquet_schema(self, bucket, schema, name, parquet_path=None,
                                           parquet_bucket_name=None, parquet_object_name=None,
-                                          txid=0, client_tags=[]):
+                                          txid=0, client_tags=[], expected_retvals=[]):
 
          # Use pyarrow.parquet.ParquetDataset to open the Parquet file
          if parquet_path:
@@ -1020,7 +1024,7 @@ class VastdbApi:
              raise RuntimeError(f'invalid type(parquet_ds.schema) = {type(parquet_ds.schema)}')
 
          # create the table
-         return self.create_table(bucket, schema, name, arrow_schema, txid, client_tags)
+         return self.create_table(bucket, schema, name, arrow_schema, txid, client_tags, expected_retvals)
 
 
      def get_table_stats(self, bucket, schema, name, txid=0, client_tags=[], expected_retvals=[]):
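
`create_table_from_parquet_schema` now forwards `expected_retvals` to `create_table`, bringing it in line with the other methods that let callers whitelist server return values. A hedged sketch (the retval string is illustrative, not a documented value):

```python
def create_from_parquet(api, bucket, schema, name, parquet_path):
    # entries in expected_retvals are passed straight through to create_table's
    # response check; "TabularAlreadyExists" is a hypothetical example value
    return api.create_table_from_parquet_schema(
        bucket, schema, name,
        parquet_path=parquet_path,
        expected_retvals=["TabularAlreadyExists"],
    )
```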
@@ -1225,6 +1229,11 @@ class VastdbApi:
          if bc_list_internals:
              headers['tabular-bc-list-internal-col'] = "true"
 
+         if exact_match:
+             headers['tabular-name-exact-match'] = name_prefix
+         else:
+             headers['tabular-name-prefix'] = name_prefix
+
          res = self.session.get(self._api_prefix(bucket=bucket, schema=schema, table=table, command="column"),
                                 headers=headers, stream=True)
          self._check_res(res, "list_columns", expected_retvals)
@@ -1238,11 +1247,8 @@ class VastdbApi:
              schema_buf = b''.join(res.iter_content(chunk_size=128))
              schema_out = pa.ipc.open_stream(schema_buf).schema
              # _logger.info(f"schema={schema_out}")
-             for i in range(len(schema_out)):
-                 f = schema_out.field(i)
-                 add_column = not name_prefix or (exact_match and f.name == name_prefix) or (not exact_match and f.name.startswith(name_prefix))
-                 if add_column:
-                     columns.append([f.name, f.type, f.metadata, f])
+             for f in schema_out:
+                 columns.append([f.name, f.type, f.metadata, f])
 
          return columns, next_key, is_truncated, count
 
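Column-name filtering in `list_columns` has moved server-side: the filter now travels in the `tabular-name-exact-match` or `tabular-name-prefix` header, and the client appends every field of the returned schema instead of re-filtering it locally. A sketch of the two modes (the keyword names follow the variables visible in the diff and are an assumption about the signature):

```python
def find_columns(api, bucket, schema, table):
    # exact match: the server returns only the column named exactly "user_id"
    exact, *_ = api.list_columns(bucket, schema, table,
                                 name_prefix="user_id", exact_match=True)
    # prefix match: the server returns every column starting with "user_"
    prefixed, *_ = api.list_columns(bucket, schema, table, name_prefix="user_")
    return exact, prefixed
```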
@@ -1806,7 +1812,7 @@ class VastdbApi:
      source_files: list of (bucket_name, file_name)
      """
      def import_data(self, bucket, schema, table, source_files, txid=0, client_tags=[], expected_retvals=[], case_sensitive=True,
-                     schedule_id=None, retry_count=0):
+                     schedule_id=None, retry_count=0, blocking=True):
          """
          POST /mybucket/myschema/mytable?data HTTP/1.1
          Content-Length: ContentLength
@@ -1854,6 +1860,23 @@ class VastdbApi:
          builder.Finish(params)
          import_req = builder.Output()
 
+         def iterate_over_import_data_response(response, expected_retvals):
+             if response.status_code != 200:
+                 return response
+
+             chunk_size = 1024
+             for chunk in res.iter_content(chunk_size=chunk_size):
+                 chunk_dict = json.loads(chunk)
+                 _logger.info(f"import data chunk={chunk}, result: {chunk_dict['res']}")
+                 if chunk_dict['res'] in expected_retvals:
+                     _logger.info(f"import finished with expected result={chunk_dict['res']}, error message: {chunk_dict['err_msg']}")
+                     return response
+                 elif chunk_dict['res'] != 'Success' and chunk_dict['res'] != 'TabularInProgress':
+                     raise TabularException(f"Received unexpected error in import_data. "
+                                            f"status: {chunk_dict['res']}, error message: {chunk_dict['err_msg']}")
+                 _logger.info(f"import_data is in progress. status: {chunk_dict['res']}")
+             return response
+
          headers = self._fill_common_headers(txid=txid, client_tags=client_tags)
          headers['Content-Length'] = str(len(import_req))
          headers['tabular-case-sensitive'] = str(case_sensitive)
@@ -1863,6 +1886,8 @@ class VastdbApi:
          headers['tabular-retry-count'] = str(retry_count)
          res = self.session.post(self._api_prefix(bucket=bucket, schema=schema, table=table, command="data"),
                                  data=import_req, headers=headers, stream=True)
+         if blocking:
+             res = iterate_over_import_data_response(res, expected_retvals)
 
          return self._check_res(res, "import_data", expected_retvals)
 
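With the new `blocking=True` default, `import_data` consumes the streamed JSON status chunks before returning: a chunk whose `res` appears in `expected_retvals` ends the wait, `Success` and `TabularInProgress` keep it going, and anything else raises `TabularException`. (Note that the nested helper iterates `res` from the enclosing scope rather than its `response` parameter, so it is bound to this one request.) A hedged usage sketch with placeholder names:

```python
def run_import(api):
    source_files = [("source-bucket", "/staging/data.parquet")]  # (bucket_name, file_name)
    # blocking=True (default): returns only after the server stops
    # reporting TabularInProgress, or raises TabularException
    api.import_data("data-bucket", "my_schema", "my_table", source_files)
    # blocking=False: returns the raw streaming response immediately and
    # leaves consuming the status chunks to the caller
    return api.import_data("data-bucket", "my_schema", "my_table", source_files,
                           blocking=False)
```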
@@ -2272,6 +2297,12 @@ class VastdbApi:
          headers['tabular-max-keys'] = str(max_keys)
          headers['tabular-next-key'] = str(next_key)
          headers['tabular-list-count-only'] = str(count_only)
+
+         if exact_match:
+             headers['tabular-name-exact-match'] = name_prefix
+         else:
+             headers['tabular-name-prefix'] = name_prefix
+
          url_params = {'name': projection}
 
          res = self.session.get(self._api_prefix(bucket=bucket, schema=schema, table=table, command="projection-columns", url_params=url_params),
@@ -2287,12 +2318,9 @@ class VastdbApi:
          if not count_only:
              schema_buf = b''.join(res.iter_content(chunk_size=128))
              schema_out = pa.ipc.open_stream(schema_buf).schema
-             for i in range(len(schema_out)):
-                 f = schema_out.field(i)
+             for f in schema_out:
+                 columns.append([f.name, f.type, f.metadata])
                  # sort_type = f.metadata[b'VAST:sort_type'].decode()
-                 add_column = not name_prefix or (exact_match and f.name == name_prefix) or (not exact_match and f.name.startswith(name_prefix))
-                 if add_column:
-                     columns.append([f.name, f.type, f.metadata])
 
          return columns, next_key, is_truncated, count
 
@@ -2308,7 +2336,6 @@ def parse_proto_buf_message(conn, msg_type):
      return msg
 
  def parse_rpc_message(conn, msg_name):
-     import vast_protobuf.tabular.rpc_pb2 as rpc_pb
      rpc_msg = parse_proto_buf_message(conn, rpc_pb.Rpc)
      if not rpc_msg.HasField(msg_name):
          raise IOError(f"expected {msg_name} but got rpc_msg={rpc_msg}")
@@ -2320,23 +2347,30 @@ def parse_rpc_message(conn, msg_name):
 
  def parse_select_row_ids_response(conn, debug=False):
      rows_arr = array.array('Q', [])
+     subsplits_state = {}
      while True:
          select_rows_msg, content = parse_rpc_message(conn, 'select_row_ids_response_packet')
          msg_type = select_rows_msg.WhichOneof('type')
          if msg_type == "body":
+             subsplit_id = select_rows_msg.body.subsplit.id
+             if select_rows_msg.body.subsplit.HasField("state"):
+                 subsplits_state[subsplit_id] = select_rows_msg.body.subsplit.state
+
              arr = array.array('Q', content)
              rows_arr += arr
              if debug:
-                 _logger.info(f"arr={arr}")
+                 _logger.info(f"arr={arr} metrics={select_rows_msg.body.metrics}")
+             else:
+                 _logger.info(f"num_rows={len(arr)} metrics={select_rows_msg.body.metrics}")
          elif msg_type == "trailing":
              status_code = select_rows_msg.trailing.status.code
              finished_pagination = select_rows_msg.trailing.finished_pagination
-             _logger.info(f"completed finished_pagination={finished_pagination} res={status_code}")
-             assert finished_pagination
+             total_metrics = select_rows_msg.trailing.metrics
+             _logger.info(f"completed finished_pagination={finished_pagination} res={status_code} metrics={total_metrics}")
              if status_code != 0:
                  raise IOError(f"Query data stream failed res={select_rows_msg.trailing.status}")
 
-             return rows_arr.tobytes()
+             return rows_arr, subsplits_state, finished_pagination
          else:
              raise EOFError(f"unknown response type={msg_type}")
 
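The return contract changes here: instead of `bytes`, the function now returns the accumulated row-id array, a dict of per-subsplit pagination state harvested from `body.subsplit.state`, and the `finished_pagination` flag; the old `assert finished_pagination` is gone because a stream may now legitimately end mid-pagination. A sketch of adapting a caller that used to consume bytes (assuming `conn` is the same response stream the old code passed in):

```python
from vastdb.api import parse_select_row_ids_response

def collect_row_ids(conn) -> bytes:
    rows_arr, subsplits_state, finished = parse_select_row_ids_response(conn)
    if not finished:
        # replay the state via build_select_rows_request(subsplits_state=...),
        # shown in a later hunk, to fetch the remaining pages
        raise RuntimeError("pagination not finished; resume with subsplits_state")
    return rows_arr.tobytes()  # what the 0.0.4.0 version returned directly
```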
@@ -2346,7 +2380,7 @@ def parse_count_rows_response(conn):
      assert count_rows_msg.WhichOneof('type') == "body"
      subsplit_id = count_rows_msg.body.subsplit.id
      num_rows = count_rows_msg.body.amount_of_rows
-     _logger.info(f"num_rows={num_rows} subsplit_id={subsplit_id}")
+     _logger.info(f"completed num_rows={num_rows} subsplit_id={subsplit_id} metrics={count_rows_msg.trailing.metrics}")
 
      count_rows_msg, _ = parse_rpc_message(conn, 'count_rows_response_packet')
      assert count_rows_msg.WhichOneof('type') == "trailing"
@@ -2357,7 +2391,6 @@ def parse_count_rows_response(conn):
 
 
  def get_proto_field_type(f):
-     import vast_protobuf.substrait.type_pb2 as type_pb
      t = type_pb.Type()
      if f.type.equals(pa.string()):
          t.string.nullability = 0
@@ -2385,7 +2418,6 @@ def serialize_proto_request(req):
      return buf
 
  def build_read_column_request(ids, schema, handles = [], num_subsplits = 1):
-     import vast_protobuf.tabular.rpc_pb2 as rpc_pb
      rpc_msg = rpc_pb.Rpc()
      req = rpc_msg.read_columns_request
      req.num_subsplits = num_subsplits
@@ -2404,8 +2436,7 @@ def build_read_column_request(ids, schema, handles = [], num_subsplits = 1):
      return serialize_proto_request(rpc_msg) + ids
 
  def build_count_rows_request(schema: 'pa.Schema' = pa.schema([]), filters: dict = None, field_names: list = None,
-                              split=(0, 1, 1), num_subsplits=1):
-     import vast_protobuf.tabular.rpc_pb2 as rpc_pb
+                              split=(0, 1, 1), num_subsplits=1, build_relation=False):
      rpc_msg = rpc_pb.Rpc()
      req = rpc_msg.count_rows_request
      req.split.id = split[0]
@@ -2416,12 +2447,21 @@ def build_count_rows_request(schema: 'pa.Schema' = pa.schema([]), filters: dict
      for _ in range(num_subsplits):
          req.subsplits.states.append(state)
 
-     for f in schema:
-         req.relation.read.base_schema.names.append(f.name)
-         t = get_proto_field_type(f)
-         req.relation.read.base_schema.struct.types.append(t)
-
-     return serialize_proto_request(rpc_msg)
+     if build_relation:
+         # TODO use ibis or other library to build substrait relation
+         # meanwhile can be similar to build_count_rows_request
+         for field in schema:
+             req.relation.read.base_schema.names.append(field.name)
+             field_type = get_proto_field_type(field)
+             req.relation.read.base_schema.struct.types.append(field_type)
+         return serialize_proto_request(rpc_msg)
+     else:
+         query_data_flatbuffer = build_query_data_request(schema, filters, field_names)
+         serialized_flatbuffer = query_data_flatbuffer.serialized
+         req.legacy_relation.size = len(serialized_flatbuffer)
+         req.legacy_relation.offset = 0
+         rpc_msg.content_size = req.legacy_relation.size
+         return serialize_proto_request(rpc_msg) + serialized_flatbuffer
 
  """
  Expected messages in the ReadColumns flow:
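
`build_count_rows_request` now emits one of two encodings. With `build_relation=True` it keeps the old behavior of describing the Arrow schema in the protobuf's substrait `relation`; by default it serializes the legacy flatbuffer query from `build_query_data_request` and appends it after the protobuf, with `legacy_relation.size` and `content_size` describing the trailer. A sketch of both paths (the single string column is illustrative):

```python
import pyarrow as pa
from vastdb.api import build_count_rows_request

schema = pa.schema([pa.field("name", pa.string())])

# default: protobuf header followed by the serialized legacy flatbuffer
legacy_payload = build_count_rows_request(schema=schema)

# opt-in: substrait relation embedded in the protobuf itself
relation_payload = build_count_rows_request(schema=schema, build_relation=True)
```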
@@ -2446,14 +2486,14 @@ def _iter_read_column_resp_columns(conn, readers):
 
          msg_type = read_column_resp.WhichOneof('type')
          if msg_type == "body":
-             body = read_column_resp.body
-             stream_id = body.subsplit_id
-             start_row_offset = body.start_row_offset
-             arrow_msg_size = body.arrow_ipc_info.size
-             _logger.info(f"start stream_id={stream_id} arrow_msg_size={arrow_msg_size} start_row_offset={start_row_offset}")
+             stream_id = read_column_resp.body.subsplit_id
+             start_row_offset = read_column_resp.body.start_row_offset
+             arrow_msg_size = read_column_resp.body.arrow_ipc_info.size
+             metrics = read_column_resp.body.metrics
+             _logger.info(f"start stream_id={stream_id} arrow_msg_size={arrow_msg_size} start_row_offset={start_row_offset} metrics={metrics}")
          elif msg_type == "trailing":
              status_code = read_column_resp.trailing.status.code
-             _logger.info(f"completed stream_id={stream_id} res={status_code}")
+             _logger.info(f"completed stream_id={stream_id} res={status_code} metrics{read_column_resp.trailing.metrics}")
              if status_code != 0:
                  raise IOError(f"Query data stream failed res={read_column_resp.trailing.status}")
 
@@ -2754,8 +2794,12 @@ def build_field(builder: flatbuffers.Builder, f: pa.Field, name: str):
              builder.PrependUOffsetTRelative(offset)
          children = builder.EndVector()
 
+         field_type, field_type_type = get_field_type(builder, f)
+
          child_col_name = builder.CreateString("entries")
          fb_field.Start(builder)
+         fb_field.AddTypeType(builder, field_type_type)
+         fb_field.AddType(builder, field_type)
          fb_field.AddName(builder, child_col_name)
          fb_field.AddChildren(builder, children)
 
@@ -2769,9 +2813,12 @@ def build_field(builder: flatbuffers.Builder, f: pa.Field, name: str):
      children = builder.EndVector()
 
      col_name = builder.CreateString(name)
-     _logger.info(f"add col_name={name} to fb")
+     field_type, field_type_type = get_field_type(builder, f)
+     _logger.info(f"add col_name={name} type_type={field_type_type} to fb")
      fb_field.Start(builder)
      fb_field.AddName(builder, col_name)
+     fb_field.AddTypeType(builder, field_type_type)
+     fb_field.AddType(builder, field_type)
      if children is not None:
          _logger.info(f"add col_name={name} childern")
          fb_field.AddChildren(builder, children)
@@ -2791,25 +2838,39 @@ class QueryDataRequest:
 
 
  def build_select_rows_request(schema: 'pa.Schema' = pa.schema([]), filters: dict = None, field_names: list = None, split_id=0,
-                               total_split=1, row_group_per_split=8, num_subsplits=1):
-     import vast_protobuf.tabular.rpc_pb2 as rpc_pb
+                               total_split=1, row_group_per_split=8, num_subsplits=1, build_relation=False, limit_rows=0,
+                               subsplits_state=None):
      rpc_msg = rpc_pb.Rpc()
      select_rows_req = rpc_msg.select_row_ids_request
      select_rows_req.split.id = split_id
      select_rows_req.split.config.total = total_split
      select_rows_req.split.config.row_groups_per_split = row_group_per_split
+     if limit_rows:
+         select_rows_req.limit_rows = limit_rows
 
      # add empty state
-     state = rpc_pb.SubSplit.State()
-     for _ in range(num_subsplits):
-         select_rows_req.subsplits.states.append(state)
-
-     for field in schema:
-         select_rows_req.relation.read.base_schema.names.append(field.name)
-         field_type = get_proto_field_type(field)
-         select_rows_req.relation.read.base_schema.struct.types.append(field_type)
-
-     return serialize_proto_request(rpc_msg)
+     empty_state = rpc_pb.SubSplit.State()
+     for i in range(num_subsplits):
+         if subsplits_state and i in subsplits_state:
+             select_rows_req.subsplits.states.append(subsplits_state[i])
+         else:
+             select_rows_req.subsplits.states.append(empty_state)
+
+     if build_relation:
+         # TODO use ibis or other library to build substrait relation
+         # meanwhile can be similar to build_count_rows_request
+         for field in schema:
+             select_rows_req.relation.read.base_schema.names.append(field.name)
+             field_type = get_proto_field_type(field)
+             select_rows_req.relation.read.base_schema.struct.types.append(field_type)
+         return serialize_proto_request(rpc_msg)
+     else:
+         query_data_flatbuffer = build_query_data_request(schema, filters, field_names)
+         serialized_flatbuffer = query_data_flatbuffer.serialized
+         select_rows_req.legacy_relation.size = len(serialized_flatbuffer)
+         select_rows_req.legacy_relation.offset = 0
+         rpc_msg.content_size = select_rows_req.legacy_relation.size
+         return serialize_proto_request(rpc_msg) + serialized_flatbuffer
 
  # TODO use ibis or other library to build SelectRowIds protobuf
  # meanwhile can be similar to build_count_rows_request
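
`build_select_rows_request` gains the same `build_relation` split plus `limit_rows` and `subsplits_state`, so a SelectRowIds query can resume from the state that `parse_select_row_ids_response` now returns. An end-to-end sketch (the `send` callable, standing in for however the payload reaches the server and yields a response stream, is hypothetical):

```python
import pyarrow as pa
from vastdb.api import build_select_rows_request, parse_select_row_ids_response

def select_all_row_ids(send, schema: pa.Schema, limit_rows=0):
    # loop until the server reports finished_pagination, feeding each
    # subsplit's state back into the next request
    subsplits_state, all_rows = None, []
    while True:
        payload = build_select_rows_request(schema=schema, limit_rows=limit_rows,
                                            subsplits_state=subsplits_state)
        rows, subsplits_state, finished = parse_select_row_ids_response(send(payload))
        all_rows.extend(rows)
        if finished:
            return all_rows
```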
vastdb-{0.0.4.0 → 0.0.5.1}.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vastdb
- Version: 0.0.4.0
+ Version: 0.0.5.1
  Summary: VAST Data SDK
  Home-page: https://github.com/vast-data/vastdb_sdk
  Author: VAST DATA
vastdb-{0.0.4.0 → 0.0.5.1}.dist-info/RECORD RENAMED
@@ -147,11 +147,26 @@ vast_flatbuf/tabular/ListTablesResponse.py,sha256=V7jZAS8ryKY8s6o_QyjWzgan-rsGm1
  vast_flatbuf/tabular/ObjectDetails.py,sha256=qW0WtbkCYYE_L-Kw6VNRDCLYaRm5lKvTbLNkfD4zV4A,3589
  vast_flatbuf/tabular/S3File.py,sha256=KC9c2oS5-JXwTTriUVFdjOvRG0B54Cq9kviSDZY3NI0,4450
  vast_flatbuf/tabular/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vast_protobuf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vast_protobuf/substrait/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vast_protobuf/substrait/algebra_pb2.py,sha256=Y0RBz_IszyfCTgyNA5fh-tJPq6IJs3QqhYZoyFOy2Wg,100838
+ vast_protobuf/substrait/capabilities_pb2.py,sha256=NDfdXUrGPNGfB11h3QI1OrYtZypfAlu9lE17BAVimMQ,2453
+ vast_protobuf/substrait/ddl_pb2.py,sha256=2MDXdDznqoD6vtMSWwvkxpiZ-yPBnSXchc8jDQyoKZw,2683
+ vast_protobuf/substrait/extended_expression_pb2.py,sha256=Rs8A8HmNcEevxO3jVsNlIeYqyXCaIwGb2xTK8p7_eFU,3481
+ vast_protobuf/substrait/function_pb2.py,sha256=dtVctHDJC-BsofPY0ktPYLJkAWxLAKVMsOhGER36hoo,13339
+ vast_protobuf/substrait/parameterized_types_pb2.py,sha256=hCTBDXbqunSuSmrxnkvFDNUjCxr8UPE8XrghpX2mqsM,15074
+ vast_protobuf/substrait/plan_pb2.py,sha256=LDQFI5QE-KpVItmqzG9k-9XuiAT-eaXuOTMUaMoeODQ,3831
+ vast_protobuf/substrait/type_expressions_pb2.py,sha256=hpvSwkZjmpEfh0q6pGeCt_5ARzyOoX2HVXYXNShSh3s,17633
+ vast_protobuf/substrait/type_pb2.py,sha256=w-FzIb2OJNpaOCp1f1ox_CVDevB1ID5wudju8e1NkBY,20790
+ vast_protobuf/substrait/extensions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vast_protobuf/substrait/extensions/extensions_pb2.py,sha256=I_6c6nMmMaYvVtzF-5ycqpzFYlsAVlKQDyatoU8RewQ,6110
+ vast_protobuf/tabular/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ vast_protobuf/tabular/rpc_pb2.py,sha256=7kW2WrA2sGk6WVbD83mc_cKkZ2MxoImSO5GOVz6NbbE,23776
  vastdb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- vastdb/api.py,sha256=2-c42ThW1UdtSz6OgibrnJxKYYO_idpNLSRMaXdxilA,132436
+ vastdb/api.py,sha256=1AWblvumGOElc79AT7SJ0W9ofGhmng2ZzAK3OtWyaNU,135723
  vastdb/v2.py,sha256=0fLulaIQGlIbVNBBFGd6iwYPuGhaaJIHTiJORyio_YQ,2438
- vastdb-0.0.4.0.dist-info/LICENSE,sha256=obffan7LYrq7hLHNrY7vHcn2pKUTBUYXMKu-VOAvDxU,11333
- vastdb-0.0.4.0.dist-info/METADATA,sha256=oNKCtB25jHcDenM5ftxUZJhovekQYQLGZ0q329ExUYg,1404
- vastdb-0.0.4.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
- vastdb-0.0.4.0.dist-info/top_level.txt,sha256=Vsj2MKtlhPg0J4so64slQtnwjhgoPmJgcG-6YcVAwVc,20
- vastdb-0.0.4.0.dist-info/RECORD,,
+ vastdb-0.0.5.1.dist-info/LICENSE,sha256=obffan7LYrq7hLHNrY7vHcn2pKUTBUYXMKu-VOAvDxU,11333
+ vastdb-0.0.5.1.dist-info/METADATA,sha256=-qCDf3o5nRkc4NHiqoAmEycmeWlw2tJswd_Sxsp-mL8,1404
+ vastdb-0.0.5.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+ vastdb-0.0.5.1.dist-info/top_level.txt,sha256=34x_PO17U_yvzCKNMDpipTYsWMat2I0U3D4Df_lWwBM,34
+ vastdb-0.0.5.1.dist-info/RECORD,,
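
The wheel now ships the generated protobuf modules as a new top-level `vast_protobuf` package (substrait types plus the tabular RPC definitions), which `vastdb/api.py` imports once at module scope instead of inside each function. A quick import sanity check mirroring those module-level imports:

```python
import vast_protobuf.tabular.rpc_pb2 as rpc_pb
import vast_protobuf.substrait.type_pb2 as type_pb

rpc_msg = rpc_pb.Rpc()        # RPC envelope used by the request builders in api.py
field_type = type_pb.Type()   # substrait type descriptor used by get_proto_field_type
```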
vastdb-{0.0.4.0 → 0.0.5.1}.dist-info/top_level.txt RENAMED
@@ -1,2 +1,3 @@
  vast_flatbuf
+ vast_protobuf
  vastdb