quasardb 3.14.2.dev7__cp310-cp310-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of quasardb might be problematic. Click here for more details.
- quasardb/CMakeFiles/CMakeDirectoryInformation.cmake +16 -0
- quasardb/CMakeFiles/progress.marks +1 -0
- quasardb/Makefile +189 -0
- quasardb/__init__.py +140 -0
- quasardb/__init__.pyi +72 -0
- quasardb/cmake_install.cmake +58 -0
- quasardb/date/CMakeFiles/CMakeDirectoryInformation.cmake +16 -0
- quasardb/date/CMakeFiles/Export/b76006b2b7125baf1b0b4d4ca4db82bd/dateTargets.cmake +108 -0
- quasardb/date/CMakeFiles/progress.marks +1 -0
- quasardb/date/Makefile +189 -0
- quasardb/date/cmake_install.cmake +81 -0
- quasardb/date/dateConfigVersion.cmake +65 -0
- quasardb/date/dateTargets.cmake +63 -0
- quasardb/extensions/__init__.py +8 -0
- quasardb/extensions/writer.py +191 -0
- quasardb/firehose.py +103 -0
- quasardb/libqdb_api.so +0 -0
- quasardb/numpy/__init__.py +1045 -0
- quasardb/pandas/__init__.py +533 -0
- quasardb/pool.py +311 -0
- quasardb/pybind11/CMakeFiles/CMakeDirectoryInformation.cmake +16 -0
- quasardb/pybind11/CMakeFiles/progress.marks +1 -0
- quasardb/pybind11/Makefile +189 -0
- quasardb/pybind11/cmake_install.cmake +50 -0
- quasardb/quasardb/__init__.pyi +97 -0
- quasardb/quasardb/_batch_column.pyi +5 -0
- quasardb/quasardb/_batch_inserter.pyi +30 -0
- quasardb/quasardb/_blob.pyi +16 -0
- quasardb/quasardb/_cluster.pyi +100 -0
- quasardb/quasardb/_continuous.pyi +16 -0
- quasardb/quasardb/_double.pyi +7 -0
- quasardb/quasardb/_entry.pyi +60 -0
- quasardb/quasardb/_error.pyi +15 -0
- quasardb/quasardb/_integer.pyi +7 -0
- quasardb/quasardb/_node.pyi +26 -0
- quasardb/quasardb/_options.pyi +105 -0
- quasardb/quasardb/_perf.pyi +5 -0
- quasardb/quasardb/_properties.pyi +5 -0
- quasardb/quasardb/_query.pyi +2 -0
- quasardb/quasardb/_reader.pyi +9 -0
- quasardb/quasardb/_retry.pyi +16 -0
- quasardb/quasardb/_string.pyi +12 -0
- quasardb/quasardb/_table.pyi +125 -0
- quasardb/quasardb/_tag.pyi +5 -0
- quasardb/quasardb/_timestamp.pyi +9 -0
- quasardb/quasardb/_writer.pyi +111 -0
- quasardb/quasardb/metrics/__init__.pyi +20 -0
- quasardb/quasardb.cpython-310-aarch64-linux-gnu.so +0 -0
- quasardb/range-v3/CMakeFiles/CMakeDirectoryInformation.cmake +16 -0
- quasardb/range-v3/CMakeFiles/Export/48a02d54b5e9e60c30c5f249b431a911/range-v3-targets.cmake +128 -0
- quasardb/range-v3/CMakeFiles/progress.marks +1 -0
- quasardb/range-v3/CMakeFiles/range.v3.headers.dir/DependInfo.cmake +22 -0
- quasardb/range-v3/CMakeFiles/range.v3.headers.dir/build.make +86 -0
- quasardb/range-v3/CMakeFiles/range.v3.headers.dir/cmake_clean.cmake +5 -0
- quasardb/range-v3/CMakeFiles/range.v3.headers.dir/compiler_depend.make +2 -0
- quasardb/range-v3/CMakeFiles/range.v3.headers.dir/compiler_depend.ts +2 -0
- quasardb/range-v3/CMakeFiles/range.v3.headers.dir/progress.make +1 -0
- quasardb/range-v3/Makefile +204 -0
- quasardb/range-v3/cmake_install.cmake +93 -0
- quasardb/range-v3/include/range/v3/version.hpp +24 -0
- quasardb/range-v3/range-v3-config-version.cmake +83 -0
- quasardb/range-v3/range-v3-config.cmake +80 -0
- quasardb/stats.py +358 -0
- quasardb/table_cache.py +56 -0
- quasardb-3.14.2.dev7.dist-info/METADATA +41 -0
- quasardb-3.14.2.dev7.dist-info/RECORD +69 -0
- quasardb-3.14.2.dev7.dist-info/WHEEL +6 -0
- quasardb-3.14.2.dev7.dist-info/licenses/LICENSE.md +11 -0
- quasardb-3.14.2.dev7.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
# Install script for directory: /home/teamcity/buildAgent/work/938b0bdf6727d1ad/thirdparty/range-v3
|
|
2
|
+
|
|
3
|
+
# Set the install prefix
|
|
4
|
+
if(NOT DEFINED CMAKE_INSTALL_PREFIX)
|
|
5
|
+
set(CMAKE_INSTALL_PREFIX "/usr/local")
|
|
6
|
+
endif()
|
|
7
|
+
string(REGEX REPLACE "/$" "" CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
|
|
8
|
+
|
|
9
|
+
# Set the install configuration name.
|
|
10
|
+
if(NOT DEFINED CMAKE_INSTALL_CONFIG_NAME)
|
|
11
|
+
if(BUILD_TYPE)
|
|
12
|
+
string(REGEX REPLACE "^[^A-Za-z0-9_]+" ""
|
|
13
|
+
CMAKE_INSTALL_CONFIG_NAME "${BUILD_TYPE}")
|
|
14
|
+
else()
|
|
15
|
+
set(CMAKE_INSTALL_CONFIG_NAME "Release")
|
|
16
|
+
endif()
|
|
17
|
+
message(STATUS "Install configuration: \"${CMAKE_INSTALL_CONFIG_NAME}\"")
|
|
18
|
+
endif()
|
|
19
|
+
|
|
20
|
+
# Set the component getting installed.
|
|
21
|
+
if(NOT CMAKE_INSTALL_COMPONENT)
|
|
22
|
+
if(COMPONENT)
|
|
23
|
+
message(STATUS "Install component: \"${COMPONENT}\"")
|
|
24
|
+
set(CMAKE_INSTALL_COMPONENT "${COMPONENT}")
|
|
25
|
+
else()
|
|
26
|
+
set(CMAKE_INSTALL_COMPONENT)
|
|
27
|
+
endif()
|
|
28
|
+
endif()
|
|
29
|
+
|
|
30
|
+
# Install shared libraries without execute permission?
|
|
31
|
+
if(NOT DEFINED CMAKE_INSTALL_SO_NO_EXE)
|
|
32
|
+
set(CMAKE_INSTALL_SO_NO_EXE "0")
|
|
33
|
+
endif()
|
|
34
|
+
|
|
35
|
+
# Is this installation the result of a crosscompile?
|
|
36
|
+
if(NOT DEFINED CMAKE_CROSSCOMPILING)
|
|
37
|
+
set(CMAKE_CROSSCOMPILING "FALSE")
|
|
38
|
+
endif()
|
|
39
|
+
|
|
40
|
+
# Set path to fallback-tool for dependency-resolution.
|
|
41
|
+
if(NOT DEFINED CMAKE_OBJDUMP)
|
|
42
|
+
set(CMAKE_OBJDUMP "/opt/rh/gcc-toolset-14/root/usr/bin/objdump")
|
|
43
|
+
endif()
|
|
44
|
+
|
|
45
|
+
if(CMAKE_INSTALL_COMPONENT STREQUAL "Unspecified" OR NOT CMAKE_INSTALL_COMPONENT)
|
|
46
|
+
include("/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/CMakeFiles/range-v3-concepts.dir/install-cxx-module-bmi-Release.cmake" OPTIONAL)
|
|
47
|
+
endif()
|
|
48
|
+
|
|
49
|
+
if(CMAKE_INSTALL_COMPONENT STREQUAL "Unspecified" OR NOT CMAKE_INSTALL_COMPONENT)
|
|
50
|
+
include("/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/CMakeFiles/range-v3-meta.dir/install-cxx-module-bmi-Release.cmake" OPTIONAL)
|
|
51
|
+
endif()
|
|
52
|
+
|
|
53
|
+
if(CMAKE_INSTALL_COMPONENT STREQUAL "Unspecified" OR NOT CMAKE_INSTALL_COMPONENT)
|
|
54
|
+
include("/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/CMakeFiles/range-v3.dir/install-cxx-module-bmi-Release.cmake" OPTIONAL)
|
|
55
|
+
endif()
|
|
56
|
+
|
|
57
|
+
if(CMAKE_INSTALL_COMPONENT STREQUAL "Unspecified" OR NOT CMAKE_INSTALL_COMPONENT)
|
|
58
|
+
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib64/cmake/range-v3/range-v3-targets.cmake")
|
|
59
|
+
file(DIFFERENT _cmake_export_file_changed FILES
|
|
60
|
+
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib64/cmake/range-v3/range-v3-targets.cmake"
|
|
61
|
+
"/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/CMakeFiles/Export/48a02d54b5e9e60c30c5f249b431a911/range-v3-targets.cmake")
|
|
62
|
+
if(_cmake_export_file_changed)
|
|
63
|
+
file(GLOB _cmake_old_config_files "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib64/cmake/range-v3/range-v3-targets-*.cmake")
|
|
64
|
+
if(_cmake_old_config_files)
|
|
65
|
+
string(REPLACE ";" ", " _cmake_old_config_files_text "${_cmake_old_config_files}")
|
|
66
|
+
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/lib64/cmake/range-v3/range-v3-targets.cmake\" will be replaced. Removing files [${_cmake_old_config_files_text}].")
|
|
67
|
+
unset(_cmake_old_config_files_text)
|
|
68
|
+
file(REMOVE ${_cmake_old_config_files})
|
|
69
|
+
endif()
|
|
70
|
+
unset(_cmake_old_config_files)
|
|
71
|
+
endif()
|
|
72
|
+
unset(_cmake_export_file_changed)
|
|
73
|
+
endif()
|
|
74
|
+
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib64/cmake/range-v3" TYPE FILE FILES "/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/CMakeFiles/Export/48a02d54b5e9e60c30c5f249b431a911/range-v3-targets.cmake")
|
|
75
|
+
endif()
|
|
76
|
+
|
|
77
|
+
if(CMAKE_INSTALL_COMPONENT STREQUAL "Unspecified" OR NOT CMAKE_INSTALL_COMPONENT)
|
|
78
|
+
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib64/cmake/range-v3" TYPE FILE FILES
|
|
79
|
+
"/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/range-v3-config-version.cmake"
|
|
80
|
+
"/home/teamcity/buildAgent/work/938b0bdf6727d1ad/thirdparty/range-v3/cmake/range-v3-config.cmake"
|
|
81
|
+
)
|
|
82
|
+
endif()
|
|
83
|
+
|
|
84
|
+
if(CMAKE_INSTALL_COMPONENT STREQUAL "Unspecified" OR NOT CMAKE_INSTALL_COMPONENT)
|
|
85
|
+
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include" TYPE DIRECTORY FILES "/home/teamcity/buildAgent/work/938b0bdf6727d1ad/thirdparty/range-v3/include/" FILES_MATCHING REGEX "/[^/]*$")
|
|
86
|
+
endif()
|
|
87
|
+
|
|
88
|
+
string(REPLACE ";" "\n" CMAKE_INSTALL_MANIFEST_CONTENT
|
|
89
|
+
"${CMAKE_INSTALL_MANIFEST_FILES}")
|
|
90
|
+
if(CMAKE_INSTALL_LOCAL_ONLY)
|
|
91
|
+
file(WRITE "/home/teamcity/buildAgent/work/938b0bdf6727d1ad/build/lib.linux-aarch64-cpython-310/quasardb/range-v3/install_local_manifest.txt"
|
|
92
|
+
"${CMAKE_INSTALL_MANIFEST_CONTENT}")
|
|
93
|
+
endif()
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
/// \file
|
|
2
|
+
// Range v3 library
|
|
3
|
+
//
|
|
4
|
+
// Copyright Eric Niebler 2017-present
|
|
5
|
+
//
|
|
6
|
+
// Use, modification and distribution is subject to the
|
|
7
|
+
// Boost Software License, Version 1.0. (See accompanying
|
|
8
|
+
// file LICENSE_1_0.txt or copy at
|
|
9
|
+
// http://www.boost.org/LICENSE_1_0.txt)
|
|
10
|
+
//
|
|
11
|
+
// Project home: https://github.com/ericniebler/range-v3
|
|
12
|
+
//
|
|
13
|
+
|
|
14
|
+
#ifndef RANGES_V3_VERSION_HPP
|
|
15
|
+
#define RANGES_V3_VERSION_HPP
|
|
16
|
+
|
|
17
|
+
#define RANGE_V3_MAJOR 0
|
|
18
|
+
#define RANGE_V3_MINOR 11
|
|
19
|
+
#define RANGE_V3_PATCHLEVEL 0
|
|
20
|
+
|
|
21
|
+
#define RANGE_V3_VERSION \
|
|
22
|
+
(RANGE_V3_MAJOR * 10000 + RANGE_V3_MINOR * 100 + RANGE_V3_PATCHLEVEL)
|
|
23
|
+
|
|
24
|
+
#endif
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
# This is a basic version file for the Config-mode of find_package().
|
|
2
|
+
# It is used by write_basic_package_version_file() as input file for configure_file()
|
|
3
|
+
# to create a version-file which can be installed along a config.cmake file.
|
|
4
|
+
#
|
|
5
|
+
# The created file sets PACKAGE_VERSION_EXACT if the current version string and
|
|
6
|
+
# the requested version string are exactly the same and it sets
|
|
7
|
+
# PACKAGE_VERSION_COMPATIBLE if the current version is equal to the requested version.
|
|
8
|
+
# The tweak version component is ignored.
|
|
9
|
+
# The variable CVF_VERSION must be set before calling configure_file().
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
if (PACKAGE_FIND_VERSION_RANGE)
|
|
13
|
+
message(AUTHOR_WARNING
|
|
14
|
+
"`find_package()` specify a version range but the version strategy "
|
|
15
|
+
"(ExactVersion) of the module `${PACKAGE_FIND_NAME}` is incompatible "
|
|
16
|
+
"with this request. Only the lower endpoint of the range will be used.")
|
|
17
|
+
endif()
|
|
18
|
+
|
|
19
|
+
set(PACKAGE_VERSION "0.11.0")
|
|
20
|
+
|
|
21
|
+
if("0.11.0" MATCHES "^([0-9]+)\\.([0-9]+)\\.([0-9]+)") # strip the tweak version
|
|
22
|
+
set(CVF_VERSION_MAJOR "${CMAKE_MATCH_1}")
|
|
23
|
+
set(CVF_VERSION_MINOR "${CMAKE_MATCH_2}")
|
|
24
|
+
set(CVF_VERSION_PATCH "${CMAKE_MATCH_3}")
|
|
25
|
+
|
|
26
|
+
if(NOT CVF_VERSION_MAJOR VERSION_EQUAL 0)
|
|
27
|
+
string(REGEX REPLACE "^0+" "" CVF_VERSION_MAJOR "${CVF_VERSION_MAJOR}")
|
|
28
|
+
endif()
|
|
29
|
+
if(NOT CVF_VERSION_MINOR VERSION_EQUAL 0)
|
|
30
|
+
string(REGEX REPLACE "^0+" "" CVF_VERSION_MINOR "${CVF_VERSION_MINOR}")
|
|
31
|
+
endif()
|
|
32
|
+
if(NOT CVF_VERSION_PATCH VERSION_EQUAL 0)
|
|
33
|
+
string(REGEX REPLACE "^0+" "" CVF_VERSION_PATCH "${CVF_VERSION_PATCH}")
|
|
34
|
+
endif()
|
|
35
|
+
|
|
36
|
+
set(CVF_VERSION_NO_TWEAK "${CVF_VERSION_MAJOR}.${CVF_VERSION_MINOR}.${CVF_VERSION_PATCH}")
|
|
37
|
+
else()
|
|
38
|
+
set(CVF_VERSION_NO_TWEAK "0.11.0")
|
|
39
|
+
endif()
|
|
40
|
+
|
|
41
|
+
if(PACKAGE_FIND_VERSION MATCHES "^([0-9]+)\\.([0-9]+)\\.([0-9]+)") # strip the tweak version
|
|
42
|
+
set(REQUESTED_VERSION_MAJOR "${CMAKE_MATCH_1}")
|
|
43
|
+
set(REQUESTED_VERSION_MINOR "${CMAKE_MATCH_2}")
|
|
44
|
+
set(REQUESTED_VERSION_PATCH "${CMAKE_MATCH_3}")
|
|
45
|
+
|
|
46
|
+
if(NOT REQUESTED_VERSION_MAJOR VERSION_EQUAL 0)
|
|
47
|
+
string(REGEX REPLACE "^0+" "" REQUESTED_VERSION_MAJOR "${REQUESTED_VERSION_MAJOR}")
|
|
48
|
+
endif()
|
|
49
|
+
if(NOT REQUESTED_VERSION_MINOR VERSION_EQUAL 0)
|
|
50
|
+
string(REGEX REPLACE "^0+" "" REQUESTED_VERSION_MINOR "${REQUESTED_VERSION_MINOR}")
|
|
51
|
+
endif()
|
|
52
|
+
if(NOT REQUESTED_VERSION_PATCH VERSION_EQUAL 0)
|
|
53
|
+
string(REGEX REPLACE "^0+" "" REQUESTED_VERSION_PATCH "${REQUESTED_VERSION_PATCH}")
|
|
54
|
+
endif()
|
|
55
|
+
|
|
56
|
+
set(REQUESTED_VERSION_NO_TWEAK
|
|
57
|
+
"${REQUESTED_VERSION_MAJOR}.${REQUESTED_VERSION_MINOR}.${REQUESTED_VERSION_PATCH}")
|
|
58
|
+
else()
|
|
59
|
+
set(REQUESTED_VERSION_NO_TWEAK "${PACKAGE_FIND_VERSION}")
|
|
60
|
+
endif()
|
|
61
|
+
|
|
62
|
+
if(REQUESTED_VERSION_NO_TWEAK STREQUAL CVF_VERSION_NO_TWEAK)
|
|
63
|
+
set(PACKAGE_VERSION_COMPATIBLE TRUE)
|
|
64
|
+
else()
|
|
65
|
+
set(PACKAGE_VERSION_COMPATIBLE FALSE)
|
|
66
|
+
endif()
|
|
67
|
+
|
|
68
|
+
if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
|
|
69
|
+
set(PACKAGE_VERSION_EXACT TRUE)
|
|
70
|
+
endif()
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
# if the installed or the using project don't have CMAKE_SIZEOF_VOID_P set, ignore it:
|
|
74
|
+
if("${CMAKE_SIZEOF_VOID_P}" STREQUAL "" OR "" STREQUAL "")
|
|
75
|
+
return()
|
|
76
|
+
endif()
|
|
77
|
+
|
|
78
|
+
# check that the installed version has the same 32/64bit-ness as the one which is currently searching:
|
|
79
|
+
if(NOT CMAKE_SIZEOF_VOID_P STREQUAL "")
|
|
80
|
+
math(EXPR installedBits " * 8")
|
|
81
|
+
set(PACKAGE_VERSION "${PACKAGE_VERSION} (${installedBits}bit)")
|
|
82
|
+
set(PACKAGE_VERSION_UNSUITABLE TRUE)
|
|
83
|
+
endif()
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
# Generated by CMake
|
|
2
|
+
|
|
3
|
+
if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
|
|
4
|
+
message(FATAL_ERROR "CMake >= 3.0.0 required")
|
|
5
|
+
endif()
|
|
6
|
+
if(CMAKE_VERSION VERSION_LESS "3.0.0")
|
|
7
|
+
message(FATAL_ERROR "CMake >= 3.0.0 required")
|
|
8
|
+
endif()
|
|
9
|
+
cmake_policy(PUSH)
|
|
10
|
+
cmake_policy(VERSION 3.0.0...3.31)
|
|
11
|
+
#----------------------------------------------------------------
|
|
12
|
+
# Generated CMake target import file.
|
|
13
|
+
#----------------------------------------------------------------
|
|
14
|
+
|
|
15
|
+
# Commands may need to know the format version.
|
|
16
|
+
set(CMAKE_IMPORT_FILE_VERSION 1)
|
|
17
|
+
|
|
18
|
+
# Protect against multiple inclusion, which would fail when already imported targets are added once more.
|
|
19
|
+
set(_cmake_targets_defined "")
|
|
20
|
+
set(_cmake_targets_not_defined "")
|
|
21
|
+
set(_cmake_expected_targets "")
|
|
22
|
+
foreach(_cmake_expected_target IN ITEMS range-v3-concepts range-v3-meta range-v3)
|
|
23
|
+
list(APPEND _cmake_expected_targets "${_cmake_expected_target}")
|
|
24
|
+
if(TARGET "${_cmake_expected_target}")
|
|
25
|
+
list(APPEND _cmake_targets_defined "${_cmake_expected_target}")
|
|
26
|
+
else()
|
|
27
|
+
list(APPEND _cmake_targets_not_defined "${_cmake_expected_target}")
|
|
28
|
+
endif()
|
|
29
|
+
endforeach()
|
|
30
|
+
unset(_cmake_expected_target)
|
|
31
|
+
if(_cmake_targets_defined STREQUAL _cmake_expected_targets)
|
|
32
|
+
unset(_cmake_targets_defined)
|
|
33
|
+
unset(_cmake_targets_not_defined)
|
|
34
|
+
unset(_cmake_expected_targets)
|
|
35
|
+
unset(CMAKE_IMPORT_FILE_VERSION)
|
|
36
|
+
cmake_policy(POP)
|
|
37
|
+
return()
|
|
38
|
+
endif()
|
|
39
|
+
if(NOT _cmake_targets_defined STREQUAL "")
|
|
40
|
+
string(REPLACE ";" ", " _cmake_targets_defined_text "${_cmake_targets_defined}")
|
|
41
|
+
string(REPLACE ";" ", " _cmake_targets_not_defined_text "${_cmake_targets_not_defined}")
|
|
42
|
+
message(FATAL_ERROR "Some (but not all) targets in this export set were already defined.\nTargets Defined: ${_cmake_targets_defined_text}\nTargets not yet defined: ${_cmake_targets_not_defined_text}\n")
|
|
43
|
+
endif()
|
|
44
|
+
unset(_cmake_targets_defined)
|
|
45
|
+
unset(_cmake_targets_not_defined)
|
|
46
|
+
unset(_cmake_expected_targets)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# Create imported target range-v3-concepts
|
|
50
|
+
add_library(range-v3-concepts INTERFACE IMPORTED)
|
|
51
|
+
|
|
52
|
+
set_target_properties(range-v3-concepts PROPERTIES
|
|
53
|
+
INTERFACE_COMPILE_OPTIONS "\$<\$<COMPILE_LANG_AND_ID:CXX,MSVC>:/permissive->;\$<\$<COMPILE_LANG_AND_ID:CUDA,MSVC>:-Xcompiler=/permissive->"
|
|
54
|
+
INTERFACE_INCLUDE_DIRECTORIES "/home/teamcity/buildAgent/work/938b0bdf6727d1ad/thirdparty/range-v3/include/"
|
|
55
|
+
INTERFACE_LINK_LIBRARIES "range-v3-meta"
|
|
56
|
+
)
|
|
57
|
+
|
|
58
|
+
# Create imported target range-v3-meta
|
|
59
|
+
add_library(range-v3-meta INTERFACE IMPORTED)
|
|
60
|
+
|
|
61
|
+
set_target_properties(range-v3-meta PROPERTIES
|
|
62
|
+
INTERFACE_COMPILE_OPTIONS "\$<\$<COMPILE_LANG_AND_ID:CXX,MSVC>:/permissive->;\$<\$<COMPILE_LANG_AND_ID:CUDA,MSVC>:-Xcompiler=/permissive->"
|
|
63
|
+
INTERFACE_INCLUDE_DIRECTORIES "/home/teamcity/buildAgent/work/938b0bdf6727d1ad/thirdparty/range-v3/include/"
|
|
64
|
+
)
|
|
65
|
+
|
|
66
|
+
# Create imported target range-v3
|
|
67
|
+
add_library(range-v3 INTERFACE IMPORTED)
|
|
68
|
+
|
|
69
|
+
set_target_properties(range-v3 PROPERTIES
|
|
70
|
+
INTERFACE_COMPILE_OPTIONS "\$<\$<COMPILE_LANG_AND_ID:CXX,MSVC>:/permissive->;\$<\$<COMPILE_LANG_AND_ID:CUDA,MSVC>:-Xcompiler=/permissive->"
|
|
71
|
+
INTERFACE_INCLUDE_DIRECTORIES "/home/teamcity/buildAgent/work/938b0bdf6727d1ad/thirdparty/range-v3/include/"
|
|
72
|
+
INTERFACE_LINK_LIBRARIES "range-v3-concepts;range-v3-meta"
|
|
73
|
+
)
|
|
74
|
+
|
|
75
|
+
# This file does not depend on other imported targets which have
|
|
76
|
+
# been exported from the same project but in a separate export set.
|
|
77
|
+
|
|
78
|
+
# Commands beyond this point should not need to know the version.
|
|
79
|
+
set(CMAKE_IMPORT_FILE_VERSION)
|
|
80
|
+
cmake_policy(POP)
|
quasardb/stats.py
ADDED
|
@@ -0,0 +1,358 @@
|
|
|
1
|
+
import re
|
|
2
|
+
|
|
3
|
+
import quasardb
|
|
4
|
+
import logging
|
|
5
|
+
from collections import defaultdict
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from enum import Enum
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger("quasardb.stats")
|
|
10
|
+
|
|
11
|
+
MAX_KEYS = 4 * 1024 * 1024 # 4 million max keys
|
|
12
|
+
stats_prefix = "$qdb.statistics."
|
|
13
|
+
|
|
14
|
+
# Compile these regexes once for speed
|
|
15
|
+
user_pattern = re.compile(r"\$qdb.statistics.(.*).uid_([0-9]+)$")
|
|
16
|
+
total_pattern = re.compile(r"\$qdb.statistics.(.*)$")
|
|
17
|
+
user_clean_pattern = re.compile(r"\.uid_\d+")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def is_user_stat(s):
    """Return True when *s* names a per-user statistic key (suffixed with 'uid_<n>')."""
    match = user_pattern.match(s)
    return match is not None
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def is_cumulative_stat(s):
    """
    Return True when *s* names a cluster-cumulative statistic, i.e. any key
    that is not tied to a specific user.
    """
    # NOTE(leon): It's quite difficult to express in Python that you don't want
    # any regex to _end_ with uid_[0-9]+, because Python's regex engine doesn't
    # support variable width look-behind.
    #
    # An alternative would be to use the PyPi regex library (for POSIX regexes),
    # but we want to stay light on dependencies.
    #
    # As such, we define a 'cumulative' stat as anything that's not a user stat.
    # Simple but effective.
    return user_pattern.match(s) is None
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def by_node(conn):
    """
    Returns statistics grouped by node URI.

    Parameters:
      conn: quasardb.Cluster
        Active connection to the QuasarDB cluster
    """
    stats = {}
    for uri in conn.endpoints():
        stats[uri] = of_node(conn.node(uri))
    return stats
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
def of_node(dconn):
    """
    Returns statistics for a single node.

    Parameters:
      dconn: quasardb.Node
        Direct node connection to the node we wish to connect to

    Returns:
      dict with two sections:
        "by_uid":     per-user statistics, keyed by user id
        "cumulative": cluster-wide statistics, keyed by metric id
      Two synthetic metrics are added under "cumulative":
        "check.online" (1 when the node responded) and
        "check.duration_ms" (how long the full retrieval took).
    """

    # Time the whole retrieval; reported below as 'check.duration_ms'.
    start = datetime.now()

    # Fetch all statistics keys, index their type/unit metadata, then fetch
    # every key's value.
    ks = _get_all_keys(dconn)
    idx = _index_keys(dconn, ks)
    raw = {k: _get_stat_value(dconn, k) for k in ks}

    ret = {"by_uid": _by_uid(raw, idx), "cumulative": _cumulative(raw, idx)}

    check_duration = datetime.now() - start

    # Synthetic health metric: reaching this point means the node answered.
    ret["cumulative"]["check.online"] = {
        "value": 1,
        "type": Type.ACCUMULATOR,
        "unit": Unit.NONE,
    }
    # Synthetic health metric: total wall-clock duration of this check.
    ret["cumulative"]["check.duration_ms"] = {
        "value": int(check_duration.total_seconds() * 1000),
        "type": Type.ACCUMULATOR,
        "unit": Unit.MILLISECONDS,
    }

    return ret
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
async_pipeline_bytes_pattern = re.compile(
|
|
83
|
+
r"async_pipelines.pipe_[0-9]+.merge_map.bytes"
|
|
84
|
+
)
|
|
85
|
+
async_pipeline_count_pattern = re.compile(
|
|
86
|
+
r"async_pipelines.pipe_[0-9]+.merge_map.count"
|
|
87
|
+
)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def stat_type(stat_id):
    """
    Deprecated: always returns None and emits a DeprecationWarning.

    Historically returned the statistic type for a stat id, one of:

    - 'gauge'
    - 'counter'
    - None in case of unrecognized statistics

    The stat type and unit are now part of the return value of the
    'of_node' and 'by_node' methods.
    """
    import warnings

    # Fix: the two message fragments were concatenated without a separating
    # space ("...release.The stat type..."), producing a garbled warning.
    warnings.warn(
        "The 'stat_type' method is deprecated and will be removed in a future release. "
        "The stat type and unit are now part of the return value of invocations to the 'of_node' and 'by_node' methods.",
        DeprecationWarning,
        stacklevel=2,
    )

    return None
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _get_all_keys(dconn, n=1024):
    """
    Returns all statistics keys from a single node.

    Parameters:
      dconn: quasardb.Node
        Direct node connection to the node we wish to connect to.

      n: int
        Initial number of keys to retrieve; grown automatically when the
        result set fills the limit.
    """
    growth_factor = 8
    keys = None

    # A full result set (len >= n) may be truncated: grow the limit and retry
    # until we get fewer results than the limit.
    while keys is None or len(keys) >= n:
        if keys is not None:
            n = n * growth_factor
            if n >= MAX_KEYS:
                raise Exception(f"ERROR: Cannot fetch more than {MAX_KEYS} keys.")
        keys = dconn.prefix_get(stats_prefix, n)

    return keys
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
class Type(Enum):
    """Kind of a statistic, decoded from the server's '.type' metadata keys."""

    ACCUMULATOR = 1
    GAUGE = 2
    LABEL = 3
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
class Unit(Enum):
    """Unit of a statistic's value, decoded from the server's '.unit' metadata keys."""

    NONE = 0
    COUNT = 1

    # Size units
    BYTES = 32

    # Time/duration units
    EPOCH = 64
    NANOSECONDS = 65
    MICROSECONDS = 66
    MILLISECONDS = 67
    SECONDS = 68
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
# Maps the decoded contents of a '.type' blob onto Type members
# (used by _lookup_type via _lookup_enum).
_type_string_to_enum = {
    "accumulator": Type.ACCUMULATOR,
    "gauge": Type.GAUGE,
    "label": Type.LABEL,
}

# Maps the decoded contents of a '.unit' blob onto Unit members
# (used by _lookup_unit via _lookup_enum).
_unit_string_to_enum = {
    "none": Unit.NONE,
    "count": Unit.COUNT,
    "bytes": Unit.BYTES,
    "epoch": Unit.EPOCH,
    "nanoseconds": Unit.NANOSECONDS,
    "microseconds": Unit.MICROSECONDS,
    "milliseconds": Unit.MILLISECONDS,
    "seconds": Unit.SECONDS,
}
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def _lookup_enum(dconn, k, m):
    """
    Utility function to avoid code duplication: fetches the blob stored under
    key *k*, decodes it, and translates the resulting string through the
    mapping *m*. Raises when the decoded value is not a key of *m*.
    """
    decoded = _clean_blob(dconn.blob(k).get())

    if decoded in m:
        return m[decoded]

    raise Exception(f"Unrecognized unit/type {decoded} from key {k}")
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def _lookup_type(dconn, k):
    """
    Looks up and parses/validates the metric type stored under key *k*.
    """
    # Type metadata always lives under a '.type'-suffixed key.
    assert k.endswith(".type")
    result = _lookup_enum(dconn, k, _type_string_to_enum)
    return result
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def _lookup_unit(dconn, k):
    """
    Looks up and parses/validates the metric unit.

    Fix: the docstring previously said "metric type" (copy-paste from
    _lookup_type); this function validates the '.unit' metadata key.
    """
    # Unit metadata always lives under a '.unit'-suffixed key.
    assert k.endswith(".unit")

    return _lookup_enum(dconn, k, _unit_string_to_enum)
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def _index_keys(dconn, ks):
    """
    Takes all statistics keys that are retrieved, and "indexes" them in such a way
    that we end up with a dict of all statistic keys, their type and their unit.
    """

    ###
    # The keys generally look like this, for example:
    #
    # $qdb.statistics.requests.out_bytes
    # $qdb.statistics.requests.out_bytes.type
    # $qdb.statistics.requests.out_bytes.uid_1
    # $qdb.statistics.requests.out_bytes.uid_1.type
    # $qdb.statistics.requests.out_bytes.uid_1.unit
    # $qdb.statistics.requests.out_bytes.unit
    #
    # For this purpose, we simply get rid of the "uid" part, as the per-uid metrics
    # are guaranteed to be of the exact same type as all the others. After trimming
    # and deduplication the keys look like this:
    #
    # $qdb.statistics.requests.out_bytes
    # $qdb.statistics.requests.out_bytes.type
    # $qdb.statistics.requests.out_bytes.unit
    #
    # In which case we'll store `requests.out_bytes` as the statistic id, and look
    # up the type and unit for those metrics once, with a placeholder value.

    ret = defaultdict(lambda: {"value": None, "type": None, "unit": None})

    for k in ks:
        # Remove any 'uid_[0-9]+' part from the string
        k_ = user_clean_pattern.sub("", k)

        matches = total_pattern.match(k_)

        parts = matches.groups()[0].rsplit(".", 1)
        metric_id = parts[0]

        if len(parts) > 1 and parts[1] == "type":
            # Only resolve once per metric; identity comparison with None is
            # the correct idiom (the original used '== None').
            if ret[metric_id]["type"] is None:
                ret[metric_id]["type"] = _lookup_type(dconn, k)
        elif len(parts) > 1 and parts[1] == "unit":
            if ret[metric_id]["unit"] is None:
                ret[metric_id]["unit"] = _lookup_unit(dconn, k)
        else:
            # It's a value key; values are looked up later
            pass

    return ret
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
def _clean_blob(x):
|
|
271
|
+
"""
|
|
272
|
+
Utility function that decodes a blob as an UTF-8 string, as the direct node C API
|
|
273
|
+
does not yet support 'string' types and as such all statistics are stored as blobs.
|
|
274
|
+
"""
|
|
275
|
+
x_ = x.decode("utf-8", "replace")
|
|
276
|
+
|
|
277
|
+
# remove trailing zero-terminator
|
|
278
|
+
return "".join(c for c in x_ if ord(c) != 0)
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def _get_stat_value(dconn, k):
    """
    Fetch the value stored under statistics key *k* via direct node
    connection *dconn*.

    Returns an int when the key holds an integer entry, otherwise the
    blob's contents decoded as text.
    """
    # Ugly, but works: try to retrieve as integer, if not an int, retrieve as
    # blob
    #
    # XXX(leon): we could use the index we built to get a much stronger hint
    # on what the type is.
    try:
        return dconn.integer(k).get()

    # Older versions of qdb api returned 'alias not found'
    except quasardb.quasardb.AliasNotFoundError:
        return _clean_blob(dconn.blob(k).get())

    # Since ~ 3.14.2, it returns 'Incompatible Type'
    except quasardb.quasardb.IncompatibleTypeError:
        return _clean_blob(dconn.blob(k).get())
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def _by_uid(stats, idx):
    """
    Group per-user statistics: returns {uid -> {metric_id -> {"value", "type", "unit"}}}.

    Parameters:
      stats: dict
        Raw key -> value mapping of all statistics.
      idx: dict
        Metric index built by _index_keys, providing type/unit per metric id.
    """
    xs = {}
    for k, v in stats.items():
        # Fix: the original called is_user_stat(k) *and* matched the same
        # pattern again; one match is sufficient.
        matches = user_pattern.match(k)
        if matches is None:
            continue

        (metric, uid_str) = matches.groups()

        if metric.split(".")[-1] in ["type", "unit"]:
            # We already indexed the type and unit in our idx, this is not interesting
            continue

        if metric.startswith("serialized"):
            # Internal stuff we don't care about nor cannot do anything with
            continue

        if metric not in idx:
            raise Exception(f"Metric not in internal index: {metric}")

        # Parse user id
        uid = int(uid_str)

        # Prepare our metric dict
        x = idx[metric].copy()
        x["value"] = v

        xs.setdefault(uid, {})[metric] = x

    return xs
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
def _cumulative(stats, idx):
    """
    Collect cluster-cumulative (non per-user) statistics:
    returns {metric_id -> {"value", "type", "unit"}}.

    Parameters:
      stats: dict
        Raw key -> value mapping of all statistics.
      idx: dict
        Metric index built by _index_keys, providing type/unit per metric id.
    """
    xs = {}

    for k, v in stats.items():
        matches = total_pattern.match(k)
        if not (is_cumulative_stat(k) and matches):
            continue

        metric = matches.groups()[0]

        if metric.split(".")[-1] in ["type", "unit"]:
            # We already indexed the type and unit in our idx, this is not interesting
            continue

        if metric.startswith("serialized"):
            # Internal stuff we don't care about nor cannot do anything with
            continue

        # Fix: 'metric not in idx' is the idiomatic spelling of 'not metric in idx'.
        if metric not in idx:
            raise Exception(f"Metric not in internal index: {metric}")

        x = idx[metric].copy()
        x["value"] = v
        xs[metric] = x

    return xs
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
# async_pipelines.buffer.total_bytes
|
quasardb/table_cache.py
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
logger = logging.getLogger("quasardb.table_cache")
|
|
4
|
+
|
|
5
|
+
_cache = {}
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def clear():
|
|
9
|
+
logger.info("Clearing table cache")
|
|
10
|
+
_cache = {}
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def exists(table_name: str) -> bool:
    """
    Tell whether a table named *table_name* is currently held in the cache.
    """
    found = table_name in _cache
    return found
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def store(table, table_name=None, force_retrieve_metadata=True):
    """
    Stores a table into the cache. Ensures metadata is retrieved when
    `force_retrieve_metadata` is True (the default). This is useful if you want
    to retrieve all table metadata at the beginning of a process, to avoid doing
    expensive lookups in undesired code paths.

    Returns a reference to the table being stored.
    """
    if table_name is None:
        table_name = table.get_name()

    if exists(table_name):
        # Fix: logger.warn is a deprecated alias of logger.warning.
        logger.warning("Table already in cache, overwriting: %s", table_name)

    logger.debug("Caching table %s", table_name)
    _cache[table_name] = table

    # Fix: the flag was previously accepted but ignored — metadata was always
    # retrieved. Honor it now; the default (True) preserves the old behavior.
    if force_retrieve_metadata:
        table.retrieve_metadata()

    return table
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def lookup(table_name: str, conn, force_retrieve_metadata=True):
    """
    Retrieves table from _cache if already exists. If it does not exist,
    looks up the table from `conn` and puts it in the cache.

    If `force_retrieve_metadata` equals True, we will ensure that the table's
    metadata is retrieved when storing it.
    """
    if exists(table_name):
        return _cache[table_name]

    logger.debug("table %s not yet found, looking up", table_name)
    return store(
        conn.table(table_name),
        table_name,
        force_retrieve_metadata=force_retrieve_metadata,
    )
|