quasardb 3.13.6__tar.gz → 3.13.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (64)
  1. {quasardb-3.13.6 → quasardb-3.13.7}/LICENSE.md +1 -1
  2. {quasardb-3.13.6 → quasardb-3.13.7}/PKG-INFO +1 -1
  3. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/__init__.py +1 -1
  4. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/numpy/__init__.py +76 -7
  5. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/pandas/__init__.py +1 -1
  6. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb.egg-info/PKG-INFO +1 -1
  7. quasardb-3.13.7/quasardb.egg-info/SOURCES.txt +62 -0
  8. {quasardb-3.13.6 → quasardb-3.13.7}/setup.py +8 -3
  9. quasardb-3.13.7/tests/test_basic.py +19 -0
  10. quasardb-3.13.7/tests/test_batch_inserter.py +343 -0
  11. quasardb-3.13.7/tests/test_bench_pinned_writer.py +194 -0
  12. quasardb-3.13.7/tests/test_blob.py +122 -0
  13. quasardb-3.13.7/tests/test_buf_size.py +66 -0
  14. quasardb-3.13.7/tests/test_connection.py +114 -0
  15. quasardb-3.13.7/tests/test_continuous.py +156 -0
  16. quasardb-3.13.7/tests/test_convert.py +53 -0
  17. quasardb-3.13.7/tests/test_double.py +40 -0
  18. quasardb-3.13.7/tests/test_entry.py +10 -0
  19. quasardb-3.13.7/tests/test_error.py +53 -0
  20. quasardb-3.13.7/tests/test_examples.py +16 -0
  21. quasardb-3.13.7/tests/test_expiry.py +65 -0
  22. quasardb-3.13.7/tests/test_firehose.py +109 -0
  23. quasardb-3.13.7/tests/test_getTimeout.py +10 -0
  24. quasardb-3.13.7/tests/test_info.py +36 -0
  25. quasardb-3.13.7/tests/test_integer.py +40 -0
  26. quasardb-3.13.7/tests/test_logging.py +76 -0
  27. quasardb-3.13.7/tests/test_node.py +148 -0
  28. quasardb-3.13.7/tests/test_numpy.py +387 -0
  29. quasardb-3.13.7/tests/test_option_client_max_parallelism.py +27 -0
  30. quasardb-3.13.7/tests/test_pandas.py +415 -0
  31. quasardb-3.13.7/tests/test_pandas_benchmark.py +162 -0
  32. quasardb-3.13.7/tests/test_perf.py +68 -0
  33. quasardb-3.13.7/tests/test_pinned_writer.py +688 -0
  34. quasardb-3.13.7/tests/test_pool.py +96 -0
  35. quasardb-3.13.7/tests/test_prefix.py +25 -0
  36. quasardb-3.13.7/tests/test_query.py +369 -0
  37. quasardb-3.13.7/tests/test_query_find.py +42 -0
  38. quasardb-3.13.7/tests/test_setCompression.py +18 -0
  39. quasardb-3.13.7/tests/test_setMaxCardinality.py +18 -0
  40. quasardb-3.13.7/tests/test_setTimeout.py +17 -0
  41. quasardb-3.13.7/tests/test_stats.py +129 -0
  42. quasardb-3.13.7/tests/test_string.py +124 -0
  43. quasardb-3.13.7/tests/test_suffix.py +26 -0
  44. quasardb-3.13.7/tests/test_table.py +211 -0
  45. quasardb-3.13.7/tests/test_table_blob.py +103 -0
  46. quasardb-3.13.7/tests/test_table_double.py +101 -0
  47. quasardb-3.13.7/tests/test_table_int64.py +116 -0
  48. quasardb-3.13.7/tests/test_table_reader.py +214 -0
  49. quasardb-3.13.7/tests/test_table_string.py +103 -0
  50. quasardb-3.13.7/tests/test_table_timestamp.py +117 -0
  51. quasardb-3.13.7/tests/test_tag.py +39 -0
  52. quasardb-3.13.7/tests/test_timestamp.py +30 -0
  53. quasardb-3.13.6/quasardb.egg-info/SOURCES.txt +0 -18
  54. {quasardb-3.13.6 → quasardb-3.13.7}/README.md +0 -0
  55. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/extensions/__init__.py +0 -0
  56. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/extensions/pinned_writer.py +0 -0
  57. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/firehose.py +0 -0
  58. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/pool.py +0 -0
  59. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb/stats.py +0 -0
  60. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb.egg-info/dependency_links.txt +0 -0
  61. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb.egg-info/not-zip-safe +0 -0
  62. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb.egg-info/requires.txt +0 -0
  63. {quasardb-3.13.6 → quasardb-3.13.7}/quasardb.egg-info/top_level.txt +0 -0
  64. {quasardb-3.13.6 → quasardb-3.13.7}/setup.cfg +0 -0
{quasardb-3.13.6 → quasardb-3.13.7}/LICENSE.md
@@ -1,4 +1,4 @@
- Copyright (c) 2009-2022, quasardb SAS. All rights reserved.
+ Copyright (c) 2009-2023, quasardb SAS. All rights reserved.
 
  Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
 
{quasardb-3.13.6 → quasardb-3.13.7}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: quasardb
- Version: 3.13.6
+ Version: 3.13.7
  Summary: Python API for quasardb
  Home-page: https://www.quasardb.net/
  Author: quasardb SAS
{quasardb-3.13.6 → quasardb-3.13.7}/quasardb/__init__.py
@@ -1,6 +1,6 @@
  # pylint: disable=C0103,C0111,C0302,R0903
 
- # Copyright (c) 2009-2022, quasardb SAS. All rights reserved.
+ # Copyright (c) 2009-2023, quasardb SAS. All rights reserved.
  # All rights reserved.
  #
  # Redistribution and use in source and binary forms, with or without
{quasardb-3.13.6 → quasardb-3.13.7}/quasardb/numpy/__init__.py
@@ -81,6 +81,19 @@ class IncompatibleDtypeErrors(TypeError):
      def msg(self):
          return "\n".join(x.msg() for x in self.xs)
 
+ class InvalidDataCardinalityError(ValueError):
+     """
+     Raised when the provided data arrays doesn't match the table's columns.
+     """
+     def __init__(self, data, cinfos):
+         self.data = data
+         self.cinfos = cinfos
+         super().__init__(self.msg())
+
+     def msg(self):
+         return "Provided data array length '{}' exceeds amount of table columns '{}', unable to map data to columns".format(len(self.data), len(self.cinfos))
+
+
  # Based on QuasarDB column types, which dtype do we accept?
  # First entry will always be the 'preferred' dtype, other ones
  # those that we can natively convert in native code.
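The new exception is raised from `write_arrays` (see the later hunk in this file) when the number of data arrays does not match the number of table columns. As a rough illustration only (a hedged sketch: the cluster URI, table name, and exact `write_arrays` call shape are assumptions, not taken from this diff):

    import numpy as np
    import quasardb
    import quasardb.numpy as qdbnp

    # Hypothetical two-column table; 'qdb://127.0.0.1:2836' and 'ts' are placeholders.
    with quasardb.Cluster("qdb://127.0.0.1:2836") as conn:
        table = conn.table("ts")
        idx = np.array([np.datetime64("2023-01-01", "ns")])
        data = [np.array([1.0])]  # only one array for a two-column table

        # With 3.13.7 this should fail fast with InvalidDataCardinalityError
        # instead of silently mis-mapping arrays to columns.
        qdbnp.write_arrays(data, conn, table, index=idx)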
@@ -259,6 +272,42 @@ def _coerce_deduplicate(deduplicate, deduplication_mode, columns):
 
      return deduplicate
 
+ def _clean_nulls(xs, dtype):
+     """
+     Numpy's masked arrays have a downside that in case they're not able to convert a (masked!) value to
+     the desired dtype, they raise an error. So, for example, if I have a masked array of objects that
+     look like this
+
+     xs: [1.234 <pd.NA> 5.678]
+     mask: [1 0 1]
+
+     even though pd.NA is not "visible", because it cannot be converted to a float(), the operation will
+     fail!
+
+     This function fixes this by replacing the null values with an acceptable value that can always be
+     converted to the desired dtype.
+     """
+
+     assert ma.isMA(xs)
+
+     if xs.dtype is not np.dtype('object'):
+         return xs
+
+     fill_value = None
+     if dtype == np.float64 or dtype == np.float32 or dtype == np.float16:
+         fill_value = float('nan')
+     elif dtype == np.int64 or dtype == np.int32 or dtype == np.int16:
+         fill_value = -1
+     elif dtype == np.dtype('datetime64[ns]'):
+         fill_value = np.datetime64('nat')
+
+     mask = xs.mask
+     xs_ = xs.filled(fill_value)
+
+     return ma.array(xs_, mask=mask)
+
+
+
  def _coerce_data(data, dtype):
      """
      Coerces each numpy array of `data` to the dtype present in `dtype`.
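The docstring above describes the failure mode; a minimal standalone sketch of the same behaviour, using only numpy and pandas (independent of this package), would look roughly like this:

    import numpy as np
    import numpy.ma as ma
    import pandas as pd

    # An object array whose pd.NA element is masked out.
    xs = ma.array(np.array([1.234, pd.NA, 5.678], dtype=object),
                  mask=[False, True, False])

    # xs.astype(np.float64) would still raise a TypeError here, because astype()
    # visits the masked pd.NA, which cannot be converted to a float.

    # The workaround mirrors _clean_nulls: fill masked slots with a convertible
    # value (NaN for floats), then re-apply the original mask.
    cleaned = ma.array(xs.filled(float("nan")), mask=xs.mask)
    print(cleaned.astype(np.float64))  # masked element stays masked, conversion succeeds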
@@ -271,25 +320,34 @@ def _coerce_data(data, dtype):
          data_ = data[i]
 
          if dtype_ is not None and dtypes_equal(data_.dtype, dtype_) == False:
+             data_ = _clean_nulls(data_, dtype_)
+
+             assert ma.isMA(data_)
+
              logger.debug("data for column with offset %d was provided in dtype '%s', but need '%s': converting data...", i, data_.dtype, dtype_)
 
-             logger.debug("type of data[%d] after: %s", i, type(data[i]))
-             logger.debug("size of data[%d] after: %s", i, ma.size(data[i]))
-             logger.debug("data of data[%d] after: %s", i, data[i])
+             logger.debug("dtype of data[%d] before: %s", i, data_.dtype)
+             logger.debug("type of data[%d] after: %s", i, type(data_))
+             logger.debug("size of data[%d] after: %s", i, ma.size(data_))
+             logger.debug("data of data[%d] after: %s", i, data_)
 
              try:
                  data[i] = data_.astype(dtype_)
              except TypeError as err:
                  # One 'bug' is that, if everything is masked, the underlying data type can be
                  # pretty much anything.
-                 if not _is_all_masked(data_):
+                 if _is_all_masked(data_):
+                     logger.debug("array completely empty, re-initializing to empty array of '%s'", dtype_)
+                     data[i] = ma.masked_all(ma.size(data_),
+                                             dtype=dtype_)
+
+                 # Another 'bug' is that when the input data is objects, we may have null-like values (like pd.NA)
+                 # that cannot easily be converted to, say, float.
+                 else:
                      logger.error("An error occured while coercing input data type from dtype '%s' to dtype '%s': ", data_.dtype, dtype_)
                      logger.exception(err)
                      raise err
 
-                 logger.debug("array completely empty, re-initializing to empty array of '%s'", dtype_)
-                 data[i] = ma.masked_all(ma.size(data_),
-                                         dtype=dtype_)
              assert data[i].dtype.kind == dtype_.kind
 
          logger.debug("type of data[%d] after: %s", i, type(data[i]))
@@ -321,6 +379,13 @@ def _ensure_list(xs, cinfos):
      if isinstance(xs, list):
          return xs
 
+     if isinstance(xs, np.ndarray):
+         ret = []
+         for x in xs:
+             ret.append(x)
+
+         return ret
+
      # As we only accept list-likes or dicts as input data, it *must* be a dict at this
      # point
      assert isinstance(xs, dict)
@@ -612,6 +677,10 @@ def write_arrays(
      dtype = _add_desired_dtypes(dtype, cinfos)
 
      data = _ensure_list(data, cinfos)
+
+     if len(data) != len(cinfos):
+         raise InvalidDataCardinalityError(data, cinfos)
+
      data = ensure_ma(data, dtype=dtype)
      data = _coerce_data(data, dtype)
 
{quasardb-3.13.6 → quasardb-3.13.7}/quasardb/pandas/__init__.py
@@ -1,6 +1,6 @@
  # pylint: disable=C0103,C0111,C0302,R0903
 
- # Copyright (c) 2009-2022, quasardb SAS. All rights reserved.
+ # Copyright (c) 2009-2023, quasardb SAS. All rights reserved.
  # All rights reserved.
  #
  # Redistribution and use in source and binary forms, with or without
{quasardb-3.13.6 → quasardb-3.13.7}/quasardb.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: quasardb
- Version: 3.13.6
+ Version: 3.13.7
  Summary: Python API for quasardb
  Home-page: https://www.quasardb.net/
  Author: quasardb SAS
quasardb-3.13.7/quasardb.egg-info/SOURCES.txt
@@ -0,0 +1,62 @@
+ LICENSE.md
+ README.md
+ setup.cfg
+ setup.py
+ quasardb/__init__.py
+ quasardb/firehose.py
+ quasardb/pool.py
+ quasardb/stats.py
+ quasardb.egg-info/PKG-INFO
+ quasardb.egg-info/SOURCES.txt
+ quasardb.egg-info/dependency_links.txt
+ quasardb.egg-info/not-zip-safe
+ quasardb.egg-info/requires.txt
+ quasardb.egg-info/top_level.txt
+ quasardb/extensions/__init__.py
+ quasardb/extensions/pinned_writer.py
+ quasardb/numpy/__init__.py
+ quasardb/pandas/__init__.py
+ tests/test_basic.py
+ tests/test_batch_inserter.py
+ tests/test_bench_pinned_writer.py
+ tests/test_blob.py
+ tests/test_buf_size.py
+ tests/test_connection.py
+ tests/test_continuous.py
+ tests/test_convert.py
+ tests/test_double.py
+ tests/test_entry.py
+ tests/test_error.py
+ tests/test_examples.py
+ tests/test_expiry.py
+ tests/test_firehose.py
+ tests/test_getTimeout.py
+ tests/test_info.py
+ tests/test_integer.py
+ tests/test_logging.py
+ tests/test_node.py
+ tests/test_numpy.py
+ tests/test_option_client_max_parallelism.py
+ tests/test_pandas.py
+ tests/test_pandas_benchmark.py
+ tests/test_perf.py
+ tests/test_pinned_writer.py
+ tests/test_pool.py
+ tests/test_prefix.py
+ tests/test_query.py
+ tests/test_query_find.py
+ tests/test_setCompression.py
+ tests/test_setMaxCardinality.py
+ tests/test_setTimeout.py
+ tests/test_stats.py
+ tests/test_string.py
+ tests/test_suffix.py
+ tests/test_table.py
+ tests/test_table_blob.py
+ tests/test_table_double.py
+ tests/test_table_int64.py
+ tests/test_table_reader.py
+ tests/test_table_string.py
+ tests/test_table_timestamp.py
+ tests/test_tag.py
+ tests/test_timestamp.py
{quasardb-3.13.6 → quasardb-3.13.7}/setup.py
@@ -21,7 +21,7 @@ from setuptools.command.install import install
  from pkg_resources import get_build_platform
  from wheel.bdist_wheel import bdist_wheel as old_bdist_wheel
 
- qdb_version = "3.13.6"
+ qdb_version = "3.13.7"
 
  # package_modules are our 'extra' files. Our cmake configuration copies our QDB_API_LIB
  # into our source directory, and by adding this to `package_modules` we tell setuptools to
@@ -73,6 +73,9 @@ class CMakeBuild(build_ext):
          return out.decode().strip()
 
      def build_extension(self, ext):
+         self.do_build_extension(ext)
+
+     def do_build_extension(self, ext):
          extdir = os.path.join(
              os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name))), 'quasardb')
 
@@ -82,7 +85,7 @@ class CMakeBuild(build_ext):
          cmake_args = [
              '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
              '-DPYTHON_EXECUTABLE=' + sys.executable,
-             '-DQDB_PY_VERSION=' + qdb_version,
+             '-DQDB_PY_VERSION=' + qdb_version
          ]
 
          if platform.system() == "Darwin":
@@ -95,12 +98,14 @@ class CMakeBuild(build_ext):
              'CMAKE_C_COMPILER': ['-D', 'CMAKE_C_COMPILER={}'],
              'CMAKE_CXX_COMPILER': ['-D', 'CMAKE_CXX_COMPILER={}'],
              'QDB_LINKER': ['-D', 'QDB_LINKER={}'],
+             'QDB_TESTS_ENABLED': ['-D', 'QDB_TESTS_ENABLED={}'],
              'CMAKE_BUILD_TYPE': ['-D', 'CMAKE_BUILD_TYPE={}'],
              'CMAKE_OSX_DEPLOYMENT_TARGET': ['-D', 'CMAKE_OSX_DEPLOYMENT_TARGET={}'],
              'CMAKE_OSX_SYSROOT': ['-D', 'CMAKE_OSX_SYSROOT={}'],
              'CMAKE_VERBOSE_MAKEFILE': ['-D', 'CMAKE_VERBOSE_MAKEFILE={}'],
          }
-         default_proxy_vals = {'CMAKE_BUILD_TYPE': 'Release'}
+         default_proxy_vals = {'CMAKE_BUILD_TYPE': 'Release',
+                               'QDB_TESTS_ENABLED': 'OFF'}
 
          for (env_var, cmake_args_) in proxied_env_vars.items():
              default_ = default_proxy_vals.get(env_var, 0)
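For context, the proxied variables above are read from the environment and forwarded to CMake as `-D` definitions, so the new `QDB_TESTS_ENABLED` flag (default `OFF`) could be switched on for a source build roughly like this (a hedged sketch; the build invocation itself is an assumption, not part of this diff):

    import os
    import subprocess
    import sys

    # setup.py forwards this as '-DQDB_TESTS_ENABLED=ON'; left unset it defaults to OFF.
    env = dict(os.environ, QDB_TESTS_ENABLED="ON", CMAKE_BUILD_TYPE="Debug")
    subprocess.check_call([sys.executable, "setup.py", "build_ext", "--inplace"], env=env)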
quasardb-3.13.7/tests/test_basic.py
@@ -0,0 +1,19 @@
+ # pylint: disable=C0103,C0111,C0302,W0212
+ import datetime
+
+ import pytest
+ import quasardb
+
+
+ def test_build():
+     build = quasardb.build()
+     assert len(build) > 0
+
+
+ def test_version():
+     build = quasardb.version()
+     assert len(build) > 0
+
+
+ def test_can_purge_all(qdbd_secure_connection):
+     qdbd_secure_connection.purge_all(datetime.timedelta(minutes=1))
quasardb-3.13.7/tests/test_batch_inserter.py
@@ -0,0 +1,343 @@
+ # pylint: disable=C0103,C0111,C0302,W0212
+ from builtins import range as xrange, int as long # pylint: disable=W0622
+ from functools import reduce # pylint: disable=W0622
+ import datetime
+ import test_table as tslib
+ from time import sleep
+
+ import pytest
+ import quasardb
+ import numpy as np
+
+
+ def _row_insertion_method(
+         inserter,
+         dates,
+         doubles,
+         blobs,
+         strings,
+         integers,
+         timestamps,
+         symbols):
+     for i in range(len(dates)):
+         inserter.start_row(dates[i])
+         inserter.set_double(0, doubles[i])
+         inserter.set_blob(1, blobs[i])
+         inserter.set_string(2, strings[i])
+         inserter.set_int64(3, integers[i])
+         inserter.set_timestamp(4, timestamps[i])
+         inserter.set_string(5, symbols[i]) # symbol columns use string representation
+
+
+ def _regular_push(inserter):
+     inserter.push()
+
+
+ def _async_push(inserter):
+     inserter.push_async()
+     # Wait for push_async to complete
+     # Ideally we could be able to get the proper flush interval
+     sleep(15)
+
+
+ def _fast_push(inserter):
+     inserter.push_fast()
+
+
+ def _make_inserter_info(table):
+     return [quasardb.BatchColumnInfo(table.get_name(), tslib._double_col_name(table), 1000),
+             quasardb.BatchColumnInfo(
+                 table.get_name(), tslib._blob_col_name(table), 1000),
+             quasardb.BatchColumnInfo(
+                 table.get_name(), tslib._string_col_name(table), 1000),
+             quasardb.BatchColumnInfo(
+                 table.get_name(), tslib._int64_col_name(table), 1000),
+             quasardb.BatchColumnInfo(table.get_name(), tslib._ts_col_name(table), 1000),
+             quasardb.BatchColumnInfo(table.get_name(), tslib._symbol_col_name(table), 1000)]
+
+
+ def test_non_existing_bulk_insert(qdbd_connection, entry_name):
+     with pytest.raises(quasardb.AliasNotFoundError):
+         qdbd_connection.inserter(
+             [quasardb.BatchColumnInfo(entry_name, "col", 10)])
+
+
+ def _generate_data(count, start=np.datetime64('2017-01-01', 'ns')):
+     doubles = np.random.uniform(-100.0, 100.0, count)
+     integers = np.random.randint(-100, 100, count)
+     blobs = np.array(list(np.random.bytes(np.random.randint(8, 16))
+                           for i in range(count)), 'O')
+     strings = np.array([("content_" + str(item)) for item in range(count)])
+     timestamps = tslib._generate_dates(
+         start + np.timedelta64('1', 'D'), count)
+     symbols = np.array([("symbol_" + str(item)) for item in range(count)])
+
+     return (doubles, integers, blobs, strings, timestamps, symbols)
+
+
+ def _set_batch_inserter_data(inserter, intervals, data, start=0):
+     (doubles, integers, blobs, strings, timestamps, symbols) = data
+
+     for i in range(start, len(intervals)):
+         inserter.start_row(intervals[i])
+         inserter.set_double(0, doubles[i])
+         inserter.set_blob(1, blobs[i])
+         inserter.set_string(2, strings[i])
+         inserter.set_int64(3, integers[i])
+         inserter.set_timestamp(4, timestamps[i])
+         inserter.set_string(5, symbols[i])
+
+
+ def _assert_results(table, intervals, data):
+     (doubles, integers, blobs, strings, timestamps, symbols) = data
+
+     whole_range = (intervals[0], intervals[-1:][0] + np.timedelta64(2, 's'))
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+
+     np.testing.assert_array_equal(results[0], intervals)
+     np.testing.assert_array_equal(results[1], doubles)
+
+     results = table.blob_get_ranges(tslib._blob_col_name(table), [whole_range])
+     np.testing.assert_array_equal(results[0], intervals)
+     np.testing.assert_array_equal(results[1], blobs)
+
+     results = table.string_get_ranges(
+         tslib._string_col_name(table), [whole_range])
+     np.testing.assert_array_equal(results[0], intervals)
+     np.testing.assert_array_equal(results[1], strings)
+
+     results = table.int64_get_ranges(
+         tslib._int64_col_name(table), [whole_range])
+     np.testing.assert_array_equal(results[0], intervals)
+     np.testing.assert_array_equal(results[1], integers)
+
+     results = table.timestamp_get_ranges(
+         tslib._ts_col_name(table), [whole_range])
+     np.testing.assert_array_equal(results[0], intervals)
+     np.testing.assert_array_equal(results[1], timestamps)
+
+     results = table.string_get_ranges(
+         tslib._symbol_col_name(table), [whole_range])
+     np.testing.assert_array_equal(results[0], intervals)
+     np.testing.assert_array_equal(results[1], symbols)
+
+
+ def _test_with_table(
+         inserter,
+         table,
+         intervals,
+         push_method=_regular_push,
+         data=None):
+
+     if data is None:
+         data = _generate_data(len(intervals))
+
+     # range is right exclusive, so the timestamp has to be beyond
+     whole_range = (intervals[0], intervals[-1:][0] + np.timedelta64(2, 's'))
+
+     (doubles, integers, blobs, strings, timestamps, symbols) = data
+
+     _set_batch_inserter_data(inserter, intervals, data)
+
+     # before the push, there is nothing
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+     assert len(results[0]) == 0
+
+     results = table.blob_get_ranges(tslib._blob_col_name(table), [whole_range])
+     assert len(results[0]) == 0
+
+     results = table.string_get_ranges(
+         tslib._string_col_name(table), [whole_range])
+     assert len(results[0]) == 0
+
+     results = table.int64_get_ranges(
+         tslib._int64_col_name(table), [whole_range])
+     assert len(results[0]) == 0
+
+     results = table.timestamp_get_ranges(
+         tslib._ts_col_name(table), [whole_range])
+     assert len(results[0]) == 0
+
+     results = table.string_get_ranges(
+         tslib._symbol_col_name(table), [whole_range])
+     assert len(results[0]) == 0
+
+     # after push, there is everything
+     push_method(inserter)
+     if push_method == _async_push:
+         sleep(20)
+
+     _assert_results(table, intervals, data)
+
+     return doubles, blobs, strings, integers, timestamps, symbols
+
+
+ def test_successful_bulk_row_insert(qdbd_connection, table, many_intervals):
+     inserter = qdbd_connection.inserter(_make_inserter_info(table))
+
+     _test_with_table(
+         inserter,
+         table,
+         many_intervals,
+         _regular_push)
+
+
+ def test_successful_secure_bulk_row_insert(
+         qdbd_secure_connection,
+         secure_table,
+         many_intervals):
+     inserter = qdbd_secure_connection.inserter(
+         _make_inserter_info(secure_table))
+
+     _test_with_table(
+         inserter,
+         secure_table,
+         many_intervals,
+         _regular_push)
+
+
+
+ @pytest.mark.skip(reason="Skip slow tests")
+ def test_successful_async_bulk_row_insert(
+         qdbd_connection, table, many_intervals):
+
+     # Same test as `test_successful_bulk_row_insert` but using `push_async` to push the entries
+     # This allows us to test the `push_async` feature
+
+     inserter = qdbd_connection.inserter(_make_inserter_info(table))
+     _test_with_table(
+         inserter,
+         table,
+         many_intervals,
+         _async_push)
+
+
+ def test_successful_fast_bulk_row_insert(
+         qdbd_connection, table, many_intervals):
+     # Same test as `test_successful_bulk_row_insert` but using `push_async` to push the entries
+     # This allows us to test the `push_async` feature
+
+     inserter = qdbd_connection.inserter(_make_inserter_info(table))
+     _test_with_table(
+         inserter,
+         table,
+         many_intervals,
+         _fast_push)
+
+
+ def test_failed_local_table_with_wrong_columns(qdbd_connection, entry_name):
+     columns = [quasardb.BatchColumnInfo(entry_name, "1000flavorsofwrong", 10)]
+     with pytest.raises(quasardb.AliasNotFoundError):
+         qdbd_connection.inserter(columns)
+
+
+ def test_push_truncate_implicit_range(qdbd_connection, table, many_intervals):
+
+     whole_range = (
+         many_intervals[0], many_intervals[-1:][0] + np.timedelta64(2, 's'))
+
+     # Generate our dataset
+     data = _generate_data(len(many_intervals))
+     # (doubles, integers, blobs, strings, timestamps) = data
+     (doubles, _, _, _, _, _) = data
+
+     # Insert once
+     inserter = qdbd_connection.inserter(_make_inserter_info(table))
+     _set_batch_inserter_data(inserter, many_intervals, data)
+     inserter.push()
+
+     # Compare results, should be equal
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+
+     np.testing.assert_array_equal(results[0], many_intervals)
+     np.testing.assert_array_equal(results[1], doubles)
+
+     # Insert regular, twice
+     _set_batch_inserter_data(inserter, many_intervals, data)
+     inserter.push()
+
+     # Compare results, should now have the same data twice
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+
+     assert len(results[1]) == 2 * len(doubles)
+
+     # Insert truncate, should now have original data again
+     _set_batch_inserter_data(inserter, many_intervals, data)
+     inserter.push_truncate()
+
+     # Verify results, truncating should now make things the same
+     # as the beginning again.
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+
+     np.testing.assert_array_equal(results[0], many_intervals)
+     np.testing.assert_array_equal(results[1], doubles)
+
+
+ def test_push_truncate_explicit_range(qdbd_connection, table, many_intervals):
+
+     whole_range = (
+         many_intervals[0], many_intervals[-1:][0] + np.timedelta64(2, 's'))
+
+     # Generate our dataset
+     data = _generate_data(len(many_intervals))
+     # (doubles, integers, blobs, strings, timestamps) = data
+     (doubles, _, _, _, _, _) = data
+
+     inserter = qdbd_connection.inserter(_make_inserter_info(table))
+
+     # Insert once
+     truncate_range = (whole_range[0],
+                       whole_range[1] + np.timedelta64(1, 'ns'))
+
+     _set_batch_inserter_data(inserter, many_intervals, data)
+     inserter.push()
+
+     # Verify results, truncating should now make things the same
+     # as the beginning again.
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+
+     np.testing.assert_array_equal(results[0], many_intervals)
+     np.testing.assert_array_equal(results[1], doubles)
+
+     # If we now set the same data, skip the first element, but keep
+     # the same time range, the first element will *not* be present in
+     # the resulting dataset.
+     _set_batch_inserter_data(inserter, many_intervals, data, start=1)
+     inserter.push_truncate(range=truncate_range)
+
+     # Verify results, truncating should now make things the same
+     # as the beginning again.
+     results = table.double_get_ranges(
+         tslib._double_col_name(table), [whole_range])
+
+     np.testing.assert_array_equal(results[0], many_intervals[1:])
+     np.testing.assert_array_equal(results[1], doubles[1:])
+
+
+ def test_push_truncate_throws_error_on_invalid_range(
+         qdbd_connection, table, many_intervals):
+     print("table = {}".format(table.get_name()))
+     whole_range = (
+         many_intervals[0], many_intervals[-1:][0] + np.timedelta64(2, 's'))
+
+     # Generate our dataset
+     data = _generate_data(len(many_intervals))
+     # (doubles, integers, blobs, strings, timestamps) = data
+     (_, _, _, _, _, _) = data
+
+     # Insert truncate with explicit timerange, we point the start right after the
+     # first element in our dataset. This means that the range does not overlap all
+     # the data anymore.
+     truncate_range = (whole_range[0] + np.timedelta64(1, 'ns'),
+                       whole_range[1] + np.timedelta64(1, 'ns'))
+
+     inserter = qdbd_connection.inserter(_make_inserter_info(table))
+     _set_batch_inserter_data(inserter, many_intervals, data)
+     with pytest.raises(quasardb.InvalidArgumentError):
+         inserter.push_truncate(range=truncate_range)