quasardb 3.13.5.post3.tar.gz → 3.13.7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/LICENSE.md +1 -1
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/PKG-INFO +2 -1
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/__init__.py +1 -1
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/numpy/__init__.py +109 -26
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/pandas/__init__.py +6 -4
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/stats.py +78 -20
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb.egg-info/PKG-INFO +2 -1
- quasardb-3.13.7/quasardb.egg-info/SOURCES.txt +62 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/setup.py +9 -3
- quasardb-3.13.7/tests/test_basic.py +19 -0
- quasardb-3.13.7/tests/test_batch_inserter.py +343 -0
- quasardb-3.13.7/tests/test_bench_pinned_writer.py +194 -0
- quasardb-3.13.7/tests/test_blob.py +122 -0
- quasardb-3.13.7/tests/test_buf_size.py +66 -0
- quasardb-3.13.7/tests/test_connection.py +114 -0
- quasardb-3.13.7/tests/test_continuous.py +156 -0
- quasardb-3.13.7/tests/test_convert.py +53 -0
- quasardb-3.13.7/tests/test_double.py +40 -0
- quasardb-3.13.7/tests/test_entry.py +10 -0
- quasardb-3.13.7/tests/test_error.py +53 -0
- quasardb-3.13.7/tests/test_examples.py +16 -0
- quasardb-3.13.7/tests/test_expiry.py +65 -0
- quasardb-3.13.7/tests/test_firehose.py +109 -0
- quasardb-3.13.7/tests/test_getTimeout.py +10 -0
- quasardb-3.13.7/tests/test_info.py +36 -0
- quasardb-3.13.7/tests/test_integer.py +40 -0
- quasardb-3.13.7/tests/test_logging.py +76 -0
- quasardb-3.13.7/tests/test_node.py +148 -0
- quasardb-3.13.7/tests/test_numpy.py +387 -0
- quasardb-3.13.7/tests/test_option_client_max_parallelism.py +27 -0
- quasardb-3.13.7/tests/test_pandas.py +415 -0
- quasardb-3.13.7/tests/test_pandas_benchmark.py +162 -0
- quasardb-3.13.7/tests/test_perf.py +68 -0
- quasardb-3.13.7/tests/test_pinned_writer.py +688 -0
- quasardb-3.13.7/tests/test_pool.py +96 -0
- quasardb-3.13.7/tests/test_prefix.py +25 -0
- quasardb-3.13.7/tests/test_query.py +369 -0
- quasardb-3.13.7/tests/test_query_find.py +42 -0
- quasardb-3.13.7/tests/test_setCompression.py +18 -0
- quasardb-3.13.7/tests/test_setMaxCardinality.py +18 -0
- quasardb-3.13.7/tests/test_setTimeout.py +17 -0
- quasardb-3.13.7/tests/test_stats.py +129 -0
- quasardb-3.13.7/tests/test_string.py +124 -0
- quasardb-3.13.7/tests/test_suffix.py +26 -0
- quasardb-3.13.7/tests/test_table.py +211 -0
- quasardb-3.13.7/tests/test_table_blob.py +103 -0
- quasardb-3.13.7/tests/test_table_double.py +101 -0
- quasardb-3.13.7/tests/test_table_int64.py +116 -0
- quasardb-3.13.7/tests/test_table_reader.py +214 -0
- quasardb-3.13.7/tests/test_table_string.py +103 -0
- quasardb-3.13.7/tests/test_table_timestamp.py +117 -0
- quasardb-3.13.7/tests/test_tag.py +39 -0
- quasardb-3.13.7/tests/test_timestamp.py +30 -0
- quasardb-3.13.5.post3/quasardb.egg-info/SOURCES.txt +0 -18
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/README.md +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/extensions/__init__.py +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/extensions/pinned_writer.py +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/firehose.py +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/pool.py +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb.egg-info/dependency_links.txt +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb.egg-info/not-zip-safe +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb.egg-info/requires.txt +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb.egg-info/top_level.txt +0 -0
- {quasardb-3.13.5.post3 → quasardb-3.13.7}/setup.cfg +0 -0
{quasardb-3.13.5.post3 → quasardb-3.13.7}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: quasardb
-Version: 3.13.5.post3
+Version: 3.13.7
 Summary: Python API for quasardb
 Home-page: https://www.quasardb.net/
 Author: quasardb SAS
@@ -19,6 +19,7 @@ Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Database
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: License :: OSI Approved :: BSD License
{quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/__init__.py

@@ -1,6 +1,6 @@
 # pylint: disable=C0103,C0111,C0302,R0903
 
-# Copyright (c) 2009-
+# Copyright (c) 2009-2023, quasardb SAS. All rights reserved.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
{quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/numpy/__init__.py

@@ -81,6 +81,19 @@ class IncompatibleDtypeErrors(TypeError):
     def msg(self):
         return "\n".join(x.msg() for x in self.xs)
 
+class InvalidDataCardinalityError(ValueError):
+    """
+    Raised when the provided data arrays doesn't match the table's columns.
+    """
+    def __init__(self, data, cinfos):
+        self.data = data
+        self.cinfos = cinfos
+        super().__init__(self.msg())
+
+    def msg(self):
+        return "Provided data array length '{}' exceeds amount of table columns '{}', unable to map data to columns".format(len(self.data), len(self.cinfos))
+
+
 # Based on QuasarDB column types, which dtype do we accept?
 # First entry will always be the 'preferred' dtype, other ones
 # those that we can natively convert in native code.
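The guard that raises this new error appears in a later hunk of write_arrays(): the number of provided data arrays must match the number of table columns. A minimal sketch of the failure mode — the column names and arrays are made up, and the cinfos entries are simplified to (name, type) pairs:

```python
import numpy as np
from quasardb.numpy import InvalidDataCardinalityError

# Hypothetical: three data arrays for a two-column table.
data = [np.array([1.0]), np.array([2.0]), np.array([3.0])]
cinfos = [('open', None), ('close', None)]  # (name, type) pairs; types elided

# Same guard write_arrays() now applies before mapping data to columns:
if len(data) != len(cinfos):
    raise InvalidDataCardinalityError(data, cinfos)
```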
@@ -233,25 +246,67 @@ def _validate_dtypes(data, columns):
     if len(errors) > 0:
         raise IncompatibleDtypeErrors(errors)
 
-
-def _validate_drop_duplicates(drop_duplicates, columns):
+def _coerce_deduplicate(deduplicate, deduplication_mode, columns):
     """
-    Throws an error when '
+    Throws an error when 'deduplicate' options are incorrect.
     """
-
-    return True
+    cnames = [cname for (cname, ctype) in columns]
 
-    if not
-        raise
+    if deduplication_mode not in ['drop', 'upsert']:
+        raise RuntimeError("deduplication_mode should be one of ['drop', 'upsert'], got: {}".format(deduplication_mode))
 
-
+    if isinstance(deduplicate, bool):
+        return deduplicate
+
+    # Special value of $timestamp, hardcoded
+    if isinstance(deduplicate, str) and deduplicate == '$timestamp':
+        deduplicate = ['$timestamp']
+        cnames.append('$timestamp')
+
+    if not isinstance(deduplicate, list):
+        raise TypeError("drop_duplicates should be either a bool or a list, got: " + type(deduplicate))
 
-    for column_name in
+    for column_name in deduplicate:
         if not column_name in cnames:
             raise RuntimeError("Provided deduplication column name '{}' not found in table columns.".format(column_name))
 
+    return deduplicate
+
+def _clean_nulls(xs, dtype):
+    """
+    Numpy's masked arrays have a downside that in case they're not able to convert a (masked!) value to
+    the desired dtype, they raise an error. So, for example, if I have a masked array of objects that
+    look like this
+
+    xs:   [1.234 <pd.NA> 5.678]
+    mask: [1 0 1]
+
+    even though pd.NA is not "visible", because it cannot be converted to a float(), the operation will
+    fail!
+
+    This function fixes this by replacing the null values with an acceptable value that can always be
+    converted to the desired dtype.
+    """
+
+    assert ma.isMA(xs)
+
+    if xs.dtype is not np.dtype('object'):
+        return xs
+
+    fill_value = None
+    if dtype == np.float64 or dtype == np.float32 or dtype == np.float16:
+        fill_value = float('nan')
+    elif dtype == np.int64 or dtype == np.int32 or dtype == np.int16:
+        fill_value = -1
+    elif dtype == np.dtype('datetime64[ns]'):
+        fill_value = np.datetime64('nat')
+
+    mask = xs.mask
+    xs_ = xs.filled(fill_value)
+
+    return ma.array(xs_, mask=mask)
+
 
-    return True
 
 def _coerce_data(data, dtype):
     """
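The problem _clean_nulls() works around is reproducible with numpy alone. A small sketch, using None in place of pd.NA:

```python
import numpy as np
import numpy.ma as ma

xs = ma.masked_array(np.array([1.234, None, 5.678], dtype=object),
                     mask=[False, True, False])

# Fails: astype() converts the underlying data, masked or not, and
# float(None) raises a TypeError even though the None is masked out.
try:
    xs.astype(np.float64)
except TypeError:
    pass

# The _clean_nulls approach: fill masked slots with a convertible
# placeholder first, then re-apply the mask.
cleaned = ma.array(xs.filled(float('nan')), mask=xs.mask)
print(cleaned.astype(np.float64))  # [1.234 -- 5.678]
```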
@@ -265,25 +320,34 @@ def _coerce_data(data, dtype):
         data_ = data[i]
 
         if dtype_ is not None and dtypes_equal(data_.dtype, dtype_) == False:
+            data_ = _clean_nulls(data_, dtype_)
+
+            assert ma.isMA(data_)
+
             logger.debug("data for column with offset %d was provided in dtype '%s', but need '%s': converting data...", i, data_.dtype, dtype_)
 
-            logger.debug("
-            logger.debug("
-            logger.debug("
+            logger.debug("dtype of data[%d] before: %s", i, data_.dtype)
+            logger.debug("type of data[%d] after: %s", i, type(data_))
+            logger.debug("size of data[%d] after: %s", i, ma.size(data_))
+            logger.debug("data of data[%d] after: %s", i, data_)
 
             try:
                 data[i] = data_.astype(dtype_)
             except TypeError as err:
                 # One 'bug' is that, if everything is masked, the underlying data type can be
                 # pretty much anything.
-                if
+                if _is_all_masked(data_):
+                    logger.debug("array completely empty, re-initializing to empty array of '%s'", dtype_)
+                    data[i] = ma.masked_all(ma.size(data_),
+                                            dtype=dtype_)
+
+                # Another 'bug' is that when the input data is objects, we may have null-like values (like pd.NA)
+                # that cannot easily be converted to, say, float.
+                else:
                     logger.error("An error occured while coercing input data type from dtype '%s' to dtype '%s': ", data_.dtype, dtype_)
                     logger.exception(err)
                     raise err
 
-                logger.debug("array completely empty, re-initializing to empty array of '%s'", dtype_)
-                data[i] = ma.masked_all(ma.size(data_),
-                                        dtype=dtype_)
             assert data[i].dtype.kind == dtype_.kind
 
             logger.debug("type of data[%d] after: %s", i, type(data[i]))
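The all-masked branch above is also easy to demonstrate: np.empty() fills object arrays with None, so a fully-masked object column can still blow up astype(). A sketch of the same recovery, assuming nothing beyond numpy:

```python
import numpy as np
import numpy.ma as ma

col = ma.masked_all(3, dtype=object)   # underlying data is all None

try:
    col = col.astype(np.float64)       # float(None) raises TypeError
except TypeError:
    # Same recovery as _coerce_data: discard the unusable data and
    # recreate an all-masked array of the desired dtype.
    col = ma.masked_all(ma.size(col), dtype=np.float64)

assert col.dtype == np.float64 and col.mask.all()
```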
@@ -315,6 +379,13 @@ def _ensure_list(xs, cinfos):
     if isinstance(xs, list):
         return xs
 
+    if isinstance(xs, np.ndarray):
+        ret = []
+        for x in xs:
+            ret.append(x)
+
+        return ret
+
     # As we only accept list-likes or dicts as input data, it *must* be a dict at this
     # point
     assert isinstance(xs, dict)
@@ -494,7 +565,8 @@ def write_arrays(
         _async = False,
         fast = False,
         truncate = False,
-
+        deduplicate=False,
+        deduplication_mode='drop',
         infer_types = True,
         writer = None):
     """
@@ -537,13 +609,20 @@ def write_arrays(
       If a dtype for a column is provided in this argument, and infer_types is also
       True, this argument takes precedence.
 
-
+    deduplicate: bool or list[str]
       Enables server-side deduplication of data when it is written into the table.
       When True, automatically deduplicates rows when all values of a row are identical.
       When a list of strings is provided, deduplicates only based on the values of
       these columns.
 
-
+      Defaults to False.
+
+    deduplication_mode: 'drop' or 'upsert'
+      When `deduplicate` is enabled, decides how deduplication is performed. 'drop' means
+      any newly written duplicates are dropped, where 'upsert' means that the previously
+      written data is updated to reflect the new data.
+
+      Defaults to 'drop'.
 
     infer_types: optional bool
       If true, will attemp to convert types from Python to QuasarDB natives types if
@@ -598,6 +677,10 @@ def write_arrays(
     dtype = _add_desired_dtypes(dtype, cinfos)
 
     data = _ensure_list(data, cinfos)
+
+    if len(data) != len(cinfos):
+        raise InvalidDataCardinalityError(data, cinfos)
+
     data = ensure_ma(data, dtype=dtype)
     data = _coerce_data(data, dtype)
@@ -607,7 +690,7 @@ def write_arrays(
     # code as it generally makes for somewhat better error context.
     _validate_dtypes(data, cinfos)
 
-
+    deduplicate = _coerce_deduplicate(deduplicate, deduplication_mode, cinfos)
 
     write_with = {
         quasardb.ColumnType.Double: writer.set_double_column,
@@ -633,15 +716,15 @@ def write_arrays(
     start = time.time()
 
     if fast is True:
-        writer.push_fast(
+        writer.push_fast(deduplicate=deduplicate, deduplication_mode=deduplication_mode)
     elif truncate is True:
-        writer.push_truncate(
+        writer.push_truncate(deduplicate=deduplicate, deduplication_mode=deduplication_mode)
     elif isinstance(truncate, tuple):
-        writer.push_truncate(range=truncate,
+        writer.push_truncate(range=truncate, deduplicate=deduplicate, deduplication_mode=deduplication_mode)
     elif _async is True:
-        writer.push_async(
+        writer.push_async(deduplicate=deduplicate, deduplication_mode=deduplication_mode)
     else:
-        writer.push(
+        writer.push(deduplicate=deduplicate, deduplication_mode=deduplication_mode)
 
     logger.debug("pushed %d rows in %s seconds",
                  len(index), (time.time() - start))
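Putting the new options together, a hedged usage sketch: the cluster URI, table and column names are assumptions, as is the (data, cluster, table) argument order, which this diff does not show:

```python
import numpy as np
import quasardb
import quasardb.numpy as qdbnp

# Sketch only; names and argument order are assumed, not taken from this diff.
with quasardb.Cluster('qdb://127.0.0.1:2836') as conn:
    table = conn.table('measurements')

    idx = np.array(['2023-01-01T00:00:00', '2023-01-01T00:00:01'],
                   dtype='datetime64[ns]')
    data = {'open': np.array([1.1, 2.2]),
            'close': np.array([1.5, 2.5])}

    # New in this release: rows colliding on $timestamp are upserted
    # instead of written as duplicates.
    qdbnp.write_arrays(data, conn, table,
                       index=idx,
                       deduplicate='$timestamp',
                       deduplication_mode='upsert')
```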
{quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/pandas/__init__.py

@@ -1,6 +1,6 @@
 # pylint: disable=C0103,C0111,C0302,R0903
 
-# Copyright (c) 2009-
+# Copyright (c) 2009-2023, quasardb SAS. All rights reserved.
 # All rights reserved.
 #
 # Redistribution and use in source and binary forms, with or without
@@ -293,7 +293,8 @@ def write_dataframe(
         _async=False,
         fast=False,
         truncate=False,
-
+        deduplicate=False,
+        deduplication_mode='drop',
         infer_types=True,
         writer=None):
     """
@@ -326,7 +327,7 @@ def write_dataframe(
     shard_size: optional datetime.timedelta
       The shard size of the timeseries you wish to create.
 
-
+    deduplicate: bool or list[str]
       Enables server-side deduplication of data when it is written into the table.
       When True, automatically deduplicates rows when all values of a row are identical.
       When a list of strings is provided, deduplicates only based on the values of
@@ -399,7 +400,8 @@ def write_dataframe(
         _async=_async,
         fast=fast,
         truncate=truncate,
-
+        deduplicate=deduplicate,
+        deduplication_mode=deduplication_mode,
         infer_types=infer_types,
         writer=writer)
 
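The pandas wrapper simply forwards both options into numpy's write_arrays(), as the hunk above shows. A similar sketch, with the same caveats about names and argument order:

```python
import pandas as pd
import quasardb
import quasardb.pandas as qdbpd

df = pd.DataFrame({'open': [1.1, 2.2], 'close': [1.5, 2.5]},
                  index=pd.to_datetime(['2023-01-01', '2023-01-02']))

with quasardb.Cluster('qdb://127.0.0.1:2836') as conn:
    table = conn.table('measurements')
    # Deduplicate on the 'open' column only; duplicate rows are dropped.
    qdbpd.write_dataframe(df, conn, table,
                          deduplicate=['open'],
                          deduplication_mode='drop')
```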
{quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb/stats.py

@@ -63,39 +63,91 @@ of_node(dconn):
 
     return ret
 
+_stat_types = {'node_id': ('constant', None),
+               'operating_system': ('constant', None),
+               'partitions_count': ('constant', 'count'),
 
-
-    '
-    '
-    '
-    '
-    '
-    'async_pipelines.write.time_us': 'counter'}
+               'cpu.system': ('counter', 'ns'),
+               'cpu.user': ('counter', 'ns'),
+               'cpu.idle': ('counter', 'ns'),
+               'startup': ('constant', None),
+               'startup_time': ('constant', None),
+               'shutdown_time': ('constant', None),
 
+               'network.current_users_count': ('gauge', 'count'),
+               'hardware_concurrency': ('gauge', 'count'),
+
+               'check.online': ('gauge', 'count'),
+               'check.duration_ms': ('constant', 'ms'),
+
+               'requests.bytes_in': ('counter', 'bytes'),
+               'requests.bytes_out': ('counter', 'bytes'),
+               'requests.errors_count': ('counter', 'count'),
+               'requests.successes_count': ('counter', 'count'),
+               'requests.total_count': ('counter', 'count'),
+
+               'async_pipelines.merge.bucket_count': ('counter', 'count'),
+               'async_pipelines.merge.duration_us': ('counter', 'us'),
+               'async_pipelines.write.successes_count': ('counter', 'count'),
+               'async_pipelines.write.failures_count': ('counter', 'count'),
+               'async_pipelines.write.time_us': ('counter', 'us'),
+
+               'async_pipelines.merge.max_bucket_count': ('gauge', 'count'),
+               'async_pipelines.merge.max_depth_count': ('gauge', 'count'),
+               'async_pipelines.merge.requests_count': ('counter', 'count'),
+
+               'evicted.count': ('counter', 'count'),
+               'pageins.count': ('counter', 'count'),
+
+               }
 
 async_pipeline_bytes_pattern = re.compile(r'async_pipelines.pipe_[0-9]+.merge_map.bytes')
 async_pipeline_count_pattern = re.compile(r'async_pipelines.pipe_[0-9]+.merge_map.count')
 
-
 def _stat_type(stat_id):
+    if stat_id in _stat_types:
+        return _stat_types[stat_id]
+    elif stat_id.endswith('total_ns'):
+        return ('counter', 'ns')
+    elif stat_id.endswith('total_bytes'):
+        return ('counter', 'bytes')
+    elif stat_id.endswith('read_bytes'):
+        return ('counter', 'bytes')
+    elif stat_id.endswith('written_bytes'):
+        return ('counter', 'bytes')
+    elif stat_id.endswith('total_count'):
+        return ('counter', 'count')
+    elif stat_id.startswith('network.sessions.'):
+        return ('gauge', 'count')
+    elif stat_id.startswith('memory.'):
+        # memory statistics are all gauges i think, describes how much memory currently allocated where
+        return ('gauge', 'bytes')
+    elif stat_id.startswith('persistence.') or stat_id.startswith('disk'):
+        # persistence are also all gauges, describes mostly how much is currently available/used on storage
+        return ('gauge', 'bytes')
+    elif stat_id.startswith('license.'):
+        return ('gauge', None)
+    elif stat_id.startswith('engine_'):
+        return ('constant', None)
+    elif async_pipeline_bytes_pattern.match(stat_id):
+        return ('gauge', 'bytes')
+    elif async_pipeline_count_pattern.match(stat_id):
+        return ('gauge', 'count')
+    else:
+        return None
+
+def stat_type(stat_id):
     """
     Returns the statistic type by a stat id. Returns one of:
 
     - 'gauge'
     - 'counter'
+    - None in case of unrecognized statistics
 
     This is useful for determining which value should be reported in a dashboard.
     """
-
-
-    elif stat_id.endswith('total_ns'):
-        return 'counter'
-    elif async_pipeline_bytes_pattern.match(stat_id):
-        return 'gauge'
-    elif async_pipeline_count_pattern.match(stat_id):
-        return 'gauge'
-    else:
-        return None
+    return _stat_type(stat_id)
+
 
 def _calculate_delta_stat(stat_id, prev, cur):
     logger.info("calculating delta for stat_id = {}, prev = {}. cur = {}".format(stat_id, prev, cur))
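stat_type() now returns a (type, unit) tuple rather than a bare string, first consulting the _stat_types table and then falling back to suffix/prefix heuristics. A few illustrative calls — the 'memory.' key name below is hypothetical; only the prefix rule is from the diff:

```python
import quasardb.stats as qdbst

qdbst.stat_type('cpu.user')                # ('counter', 'ns') — exact table match
qdbst.stat_type('requests.bytes_out')      # ('counter', 'bytes')
qdbst.stat_type('memory.resident_bytes')   # ('gauge', 'bytes') via 'memory.' prefix; key name made up
qdbst.stat_type('no.such.statistic')       # None — no rule matches

# Counters are only meaningful as deltas between two samples; gauges can
# be reported as-is. This is what drives dashboard rendering.
kind, unit = qdbst.stat_type('async_pipelines.write.time_us')  # ('counter', 'us')
```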
@@ -137,14 +189,20 @@ calculate_delta(prev, cur):
 
     return ret
 
+def _clean_blob(x):
+    x_ = x.decode('utf-8', 'replace')
+
+    # remove trailing zero-terminator
+    return ''.join(c for c in x_ if ord(c) != 0)
+
+
 def _get_stat(dconn, k):
     # Ugly, but works: try to retrieve as integer, if not an int, retrieve as
     # blob
     try:
         return dconn.integer(k).get()
     except quasardb.quasardb.AliasNotFoundError:
-
-        return blob.decode('utf-8', 'replace')
+        return _clean_blob(dconn.blob(k).get())
 
 def _by_uid(stats):
     xs = {}
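_clean_blob() exists because blob-backed statistics may carry a zero-terminator and arbitrary bytes: it decodes lossily, then strips NULs. A quick illustration of the private helper:

```python
import quasardb.stats as qdbst

# Shown for explanation only; this is a private helper.
qdbst._clean_blob(b'3.13.7\x00')   # -> '3.13.7' (NUL stripped)
qdbst._clean_blob(b'\xff\xfeok')   # -> '\ufffd\ufffdok' (invalid bytes replaced)
```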
{quasardb-3.13.5.post3 → quasardb-3.13.7}/quasardb.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: quasardb
-Version: 3.13.5.post3
+Version: 3.13.7
 Summary: Python API for quasardb
 Home-page: https://www.quasardb.net/
 Author: quasardb SAS
@@ -19,6 +19,7 @@ Classifier: Programming Language :: Python :: 3.7
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
 Classifier: Topic :: Database
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: License :: OSI Approved :: BSD License
quasardb-3.13.7/quasardb.egg-info/SOURCES.txt

@@ -0,0 +1,62 @@
+LICENSE.md
+README.md
+setup.cfg
+setup.py
+quasardb/__init__.py
+quasardb/firehose.py
+quasardb/pool.py
+quasardb/stats.py
+quasardb.egg-info/PKG-INFO
+quasardb.egg-info/SOURCES.txt
+quasardb.egg-info/dependency_links.txt
+quasardb.egg-info/not-zip-safe
+quasardb.egg-info/requires.txt
+quasardb.egg-info/top_level.txt
+quasardb/extensions/__init__.py
+quasardb/extensions/pinned_writer.py
+quasardb/numpy/__init__.py
+quasardb/pandas/__init__.py
+tests/test_basic.py
+tests/test_batch_inserter.py
+tests/test_bench_pinned_writer.py
+tests/test_blob.py
+tests/test_buf_size.py
+tests/test_connection.py
+tests/test_continuous.py
+tests/test_convert.py
+tests/test_double.py
+tests/test_entry.py
+tests/test_error.py
+tests/test_examples.py
+tests/test_expiry.py
+tests/test_firehose.py
+tests/test_getTimeout.py
+tests/test_info.py
+tests/test_integer.py
+tests/test_logging.py
+tests/test_node.py
+tests/test_numpy.py
+tests/test_option_client_max_parallelism.py
+tests/test_pandas.py
+tests/test_pandas_benchmark.py
+tests/test_perf.py
+tests/test_pinned_writer.py
+tests/test_pool.py
+tests/test_prefix.py
+tests/test_query.py
+tests/test_query_find.py
+tests/test_setCompression.py
+tests/test_setMaxCardinality.py
+tests/test_setTimeout.py
+tests/test_stats.py
+tests/test_string.py
+tests/test_suffix.py
+tests/test_table.py
+tests/test_table_blob.py
+tests/test_table_double.py
+tests/test_table_int64.py
+tests/test_table_reader.py
+tests/test_table_string.py
+tests/test_table_timestamp.py
+tests/test_tag.py
+tests/test_timestamp.py
{quasardb-3.13.5.post3 → quasardb-3.13.7}/setup.py

@@ -21,7 +21,7 @@ from setuptools.command.install import install
 from pkg_resources import get_build_platform
 from wheel.bdist_wheel import bdist_wheel as old_bdist_wheel
 
-qdb_version = "3.13.
+qdb_version = "3.13.7"
 
 # package_modules are our 'extra' files. Our cmake configuration copies our QDB_API_LIB
 # into our source directory, and by adding this to `package_modules` we tell setuptools to
@@ -73,6 +73,9 @@ class CMakeBuild(build_ext):
         return out.decode().strip()
 
     def build_extension(self, ext):
+        self.do_build_extension(ext)
+
+    def do_build_extension(self, ext):
         extdir = os.path.join(
             os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name))), 'quasardb')
 
@@ -82,7 +85,7 @@ class CMakeBuild(build_ext):
         cmake_args = [
             '-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
             '-DPYTHON_EXECUTABLE=' + sys.executable,
-            '-DQDB_PY_VERSION=' + qdb_version
+            '-DQDB_PY_VERSION=' + qdb_version
         ]
 
         if platform.system() == "Darwin":
@@ -95,12 +98,14 @@ class CMakeBuild(build_ext):
             'CMAKE_C_COMPILER': ['-D', 'CMAKE_C_COMPILER={}'],
             'CMAKE_CXX_COMPILER': ['-D', 'CMAKE_CXX_COMPILER={}'],
             'QDB_LINKER': ['-D', 'QDB_LINKER={}'],
+            'QDB_TESTS_ENABLED': ['-D', 'QDB_TESTS_ENABLED={}'],
             'CMAKE_BUILD_TYPE': ['-D', 'CMAKE_BUILD_TYPE={}'],
             'CMAKE_OSX_DEPLOYMENT_TARGET': ['-D', 'CMAKE_OSX_DEPLOYMENT_TARGET={}'],
             'CMAKE_OSX_SYSROOT': ['-D', 'CMAKE_OSX_SYSROOT={}'],
             'CMAKE_VERBOSE_MAKEFILE': ['-D', 'CMAKE_VERBOSE_MAKEFILE={}'],
         }
-        default_proxy_vals = {'CMAKE_BUILD_TYPE': 'Release'
+        default_proxy_vals = {'CMAKE_BUILD_TYPE': 'Release',
+                              'QDB_TESTS_ENABLED': 'OFF'}
 
         for (env_var, cmake_args_) in proxied_env_vars.items():
             default_ = default_proxy_vals.get(env_var, 0)
@@ -216,6 +221,7 @@ setup(name=package_name,
           'Programming Language :: Python :: 3.8',
           'Programming Language :: Python :: 3.9',
           'Programming Language :: Python :: 3.10',
+          'Programming Language :: Python :: 3.11',
           'Topic :: Database',
           'Topic :: Software Development :: Libraries :: Python Modules',
           "License :: OSI Approved :: BSD License",
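The new QDB_TESTS_ENABLED switch rides the existing environment-to-CMake proxy: each listed environment variable becomes a '-D NAME=value' pair, with a default applied when unset. A simplified, standalone sketch of that mapping (the real code differs in minor details):

```python
import os

# Two of the proxied variables from the hunk above.
proxied_env_vars = {
    'CMAKE_BUILD_TYPE':  ['-D', 'CMAKE_BUILD_TYPE={}'],
    'QDB_TESTS_ENABLED': ['-D', 'QDB_TESTS_ENABLED={}'],
}
default_proxy_vals = {'CMAKE_BUILD_TYPE': 'Release',
                      'QDB_TESTS_ENABLED': 'OFF'}

cmake_args = []
for env_var, args in proxied_env_vars.items():
    val = os.environ.get(env_var, default_proxy_vals.get(env_var))
    if val:
        cmake_args.extend(a.format(val) for a in args)

# e.g. with QDB_TESTS_ENABLED=ON in the environment:
# ['-D', 'CMAKE_BUILD_TYPE=Release', '-D', 'QDB_TESTS_ENABLED=ON']
print(cmake_args)
```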
quasardb-3.13.7/tests/test_basic.py

@@ -0,0 +1,19 @@
+# pylint: disable=C0103,C0111,C0302,W0212
+import datetime
+
+import pytest
+import quasardb
+
+
+def test_build():
+    build = quasardb.build()
+    assert len(build) > 0
+
+
+def test_version():
+    build = quasardb.version()
+    assert len(build) > 0
+
+
+def test_can_purge_all(qdbd_secure_connection):
+    qdbd_secure_connection.purge_all(datetime.timedelta(minutes=1))