qdb-prometheus-exporter 3.14.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
@@ -0,0 +1,141 @@
+ import logging
+ from typing import Generator
+
+ import quasardb.stats as qdbst
+ from prometheus_client.core import (
+     CounterMetricFamily,
+     GaugeMetricFamily,
+     InfoMetricFamily,
+ )
+ from prometheus_client.registry import Collector
+
+ from .stats import fetch_qdb_stats
+ import re
+ from typing import Union
+
+
+ class QdbStatsCollector(Collector):
+     def __init__(
+         self,
+         qdb_conn_args: dict,
+         filter_include: Union[list[str], None],
+         filter_exclude: Union[list[str], None],
+         logger: logging.Logger,
+     ):
+         self.qdb_conn_args = qdb_conn_args
+         self.filter_include = filter_include
+         self.filter_exclude = filter_exclude
+         self.logger = logger
+
+         self.qdbst_types_to_prometheus_types: dict[
+             qdbst.Type,
+             type[Union[GaugeMetricFamily, CounterMetricFamily, InfoMetricFamily]],
+         ] = {
+             qdbst.Type.GAUGE: GaugeMetricFamily,
+             qdbst.Type.ACCUMULATOR: CounterMetricFamily,
+             qdbst.Type.LABEL: InfoMetricFamily,
+         }
+
+         self.suffix_regex = re.compile(r"(^|_)(total|count|ns)(?=_|$)")
+
+     def _conform_qdb_name_to_prometheus_name(self, name: str) -> str:
+         """
+         Converts a QuasarDB statistic name to an OpenMetrics/Prometheus-compliant name.
+         """
+         # Prometheus adds suffixes based on metric type (_total for counters, _info for
+         # info metrics, no suffix for gauges); units are also added as suffixes for gauges.
+         # e.g. a metric named `async_pipelines.pulled.total_count` would be transformed by
+         # the Prometheus client internally into `async_pipelines_pulled_total_count_total`.
+         # We want to avoid redundant suffixes in the final metric name. This results in
+         # slightly different names compared to native QuasarDB stats, but is more compliant
+         # with Prometheus conventions.
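+         # Illustrative examples of the conversion below (the second stat name is hypothetical):
+         #   "async_pipelines.pulled.total_count" -> "async_pipelines_pulled"
+         #       (the client then re-appends "_total" when exposing the counter)
+         #   "requests.total_ns"                  -> "requests"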
+
+         # 1. Replace dots with underscores to conform to the OpenMetrics hierarchy convention.
+         name = name.replace(".", "_")
+         # 2. Remove suffixes that will be added by Prometheus:
+         #    - total : Prometheus counter convention
+         #    - count : histogram / summary aggregation
+         #    - ns    : unit suffix (units belong in metadata, not names)
+         name = self.suffix_regex.sub("", name)
+         return name
+
+     def _parse_qdb_statistics_entry(
+         self, labels: dict[str, str], metric_name: str, metric_info: dict
+     ) -> Union[GaugeMetricFamily, CounterMetricFamily, InfoMetricFamily, None]:
+         """
+         Converts a single QuasarDB statistic into a Prometheus metric.
+         """
+         metric_type = self.qdbst_types_to_prometheus_types.get(
+             metric_info["type"], None
+         )
+
+         if metric_type is None:
+             self.logger.error(
+                 "Unknown metric type for metric '%s': %s",
+                 metric_name,
+                 str(metric_info["type"]),
+             )
+             return None
+
+         metric_name = self._conform_qdb_name_to_prometheus_name(metric_name)
+
+         # Info metrics don't have units in Prometheus, so metric creation differs from the others.
+         #
+         # A value for `documentation` is required by Prometheus but not available from the
+         # QuasarDB stats module itself, so we set it to an empty string.
+         metric_kwargs = {
+             "name": metric_name,
+             "documentation": "",
+             "labels": list(labels.keys()),
+         }
+         add_metric_kwargs = {
+             "labels": list(labels.values()),
+             "value": metric_info["value"],
+         }
+
+         # Depending on the metric type, adjust the metric creation parameters.
+         if metric_type is InfoMetricFamily:
+             add_metric_kwargs["value"] = {"value": metric_info["value"]}
+         else:
+             metric_kwargs["unit"] = metric_info["unit"].name.lower()
+
+         metric = metric_type(**metric_kwargs)
+         metric.add_metric(**add_metric_kwargs)
+         return metric
+
+     def _parse_qdb_statistics(self, qdb_metrics: dict) -> Generator[
+         Union[GaugeMetricFamily, CounterMetricFamily, InfoMetricFamily, None],
+         None,
+         None,
+     ]:
+         """
+         Yields Prometheus metrics parsed from QuasarDB statistics dictionary.
+         """
+         for endpoint, metric_types in qdb_metrics.items():
+             for metric_name, metric_info in metric_types["cumulative"].items():
+                 labels = {"endpoint": endpoint, "stat_type": "cumulative"}
+                 yield self._parse_qdb_statistics_entry(labels, metric_name, metric_info)
+
+             for user_id, metrics in metric_types["by_uid"].items():
+                 for metric_name, metric_info in metrics.items():
+                     labels = {
+                         "endpoint": endpoint,
+                         "stat_type": "by_uid",
+                         "user_id": f"{user_id}",
+                     }
+                     yield self._parse_qdb_statistics_entry(
+                         labels, metric_name, metric_info
+                     )
+
+     def collect(self):
+         """
+         Method called by Prometheus to collect metrics.
+         Yields Prometheus metrics collected from QuasarDB.
+         """
+
+         try:
+             stats = fetch_qdb_stats(
+                 self.qdb_conn_args,
+                 self.filter_include,
+                 self.filter_exclude,
+                 self.logger,
+             )
+             for metric in self._parse_qdb_statistics(stats):
+                 # Entries with an unrecognized type are yielded as None (and already
+                 # logged); skip them so the registry never receives a non-metric value.
+                 if metric is not None:
+                     yield metric
+         except Exception:
+             self.logger.exception("Failed to collect metrics")
@@ -0,0 +1,90 @@
+ import logging
+
+ import click
+ import prometheus_client
+ import uvicorn
+ from fastapi import FastAPI
+ from prometheus_client import make_asgi_app
+ from prometheus_client.core import REGISTRY
+
+ from .collector import QdbStatsCollector
+
+
+ def _set_up_prometheus_metrics_app(
+     conn_args, filter_include, filter_exclude, logger: logging.Logger
+ ):
+     metrics_app = make_asgi_app()
+     # By default the Prometheus client also exposes metrics about the Python process
+     # itself; we want to forward QuasarDB metrics only.
+     REGISTRY.unregister(prometheus_client.GC_COLLECTOR)
+     REGISTRY.unregister(prometheus_client.PLATFORM_COLLECTOR)
+     REGISTRY.unregister(prometheus_client.PROCESS_COLLECTOR)
+     REGISTRY.register(
+         QdbStatsCollector(conn_args, filter_include, filter_exclude, logger)
+     )
+     return metrics_app
+
+
+ app = FastAPI(debug=False)
+
+
+ @app.get("/health")
+ def health():
+     return {"status": "ok"}
+
+
+ def _parse_list(x):
+     """
+     Parses a comma-separated string into a list; returns None for empty input.
+     """
+
+     if x is None or not x.strip():
+         return None
+
+     return [token.strip() for token in x.split(",") if token.strip()]
+
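+ # Illustrative behaviour of _parse_list (assumed inputs):
+ #   "a, b,,c " -> ["a", "b", "c"]
+ #   ""         -> None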
+
+ @click.command()
+ @click.option("--cluster", default="qdb://127.0.0.1:2836", type=str)
+ @click.option("--cluster-public-key-file", default=None, type=str)
+ @click.option("--user-security-file", default=None, type=str)
+ @click.option(
+     "--filter-include",
+     default="",
+     type=str,
+     help="Optional comma-separated list of regex patterns to filter metrics. Only metrics that match at least one of the patterns will be reported.",
+ )
+ @click.option(
+     "--filter-exclude",
+     default="",
+     type=str,
+     help="Optional comma-separated list of regex patterns to filter metrics. Only metrics that match none of the patterns will be reported.",
+ )
+ @click.option("--exporter-port", default=9000, type=int)
+ @click.option(
+     "--listen-address",
+     default="127.0.0.1",
+     type=str,
+     help="Address on which the exporter will listen.",
+ )
+ def start_server(
+     cluster: str,
+     cluster_public_key_file: str,
+     user_security_file: str,
+     filter_include: str,
+     filter_exclude: str,
+     exporter_port: int,
+     listen_address: str,
+ ):
+     conn_args = {
+         "uri": cluster,
+     }
+
+     if cluster_public_key_file and user_security_file:
+         conn_args["cluster_public_key_file"] = cluster_public_key_file
+         conn_args["user_security_file"] = user_security_file
+
+     logger = logging.getLogger("uvicorn.error")
+     metrics_app = _set_up_prometheus_metrics_app(
+         conn_args, _parse_list(filter_include), _parse_list(filter_exclude), logger
+     )
+     app.mount("/metrics", metrics_app)
+     uvicorn.run(app, host=listen_address, port=exporter_port)
+
+
+ if __name__ == "__main__":
+     start_server()
@@ -0,0 +1,175 @@
+ import copy
+ import logging
+ import random
+ import re
+ import uuid
+
+ import quasardb
+ import quasardb.stats as qdbst
+
+ from typing import Union
+
+
+ def _do_filter_metrics(metrics: dict, fn):
+     return {key: metrics[key] for key in metrics if fn(key)}
+
+
+ def _do_filter(stats: dict, fn):
+     """
+     Performs the actual filtering of stats, keeping only the metrics for which fn(name) is True.
+     """
+
+     for node_id in stats:
+         for group_id in stats[node_id]:
+             if group_id == "cumulative":
+                 stats[node_id][group_id] = _do_filter_metrics(
+                     stats[node_id][group_id], fn
+                 )
+             elif group_id == "by_uid":
+                 for uid in stats[node_id][group_id]:
+                     stats[node_id][group_id][uid] = _do_filter_metrics(
+                         stats[node_id][group_id][uid], fn
+                     )
+             else:
+                 raise RuntimeError(
+                     "Internal error: unrecognized stats group id: {}".format(group_id)
+                 )
+     return stats
+
+
+ def filter_stats(
+     stats: dict,
+     include: Union[list[str], None],
+     exclude: Union[list[str], None],
+     logger: logging.Logger,
+ ):
+     logger.info("Filtering stats based on include/exclude filters")
+     stats_ = copy.deepcopy(stats)
+
+     if include is not None:
+         # Keep a metric if any of the `include` patterns is found in its name.
+         def _filter_include(metric_name):
+             return any(re.search(pattern, metric_name) for pattern in include)
+
+         stats_ = _do_filter(stats_, _filter_include)
+
+     if exclude is not None:
+         # Drop a metric if any of the `exclude` patterns is found in its name.
+         def _filter_exclude(metric_name):
+             return not any(re.search(pattern, metric_name) for pattern in exclude)
+
+         stats_ = _do_filter(stats_, _filter_exclude)
+
+     return stats_
+
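+ # Illustrative filter semantics (hypothetical stat names and patterns):
+ #   include=["async_pipelines"], exclude=["bytes"]
+ #     keeps "async_pipelines.pulled.count"
+ #     drops "async_pipelines.bytes_in" (excluded) and "requests.count" (not included)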
+
+ def _check_node_online(conn: quasardb.Cluster, logger: logging.Logger):
+     logger.info("Checking node online")
+
+     ret = {}
+
+     for endpoint in conn.endpoints():
+         ret[endpoint] = 0  # pessimistic
+         node = conn.node(endpoint)
+         entry = node.integer("$qdb.statistics.startup_epoch")  # entry always exists
+
+         try:
+             entry.get()
+             ret[endpoint] = 1
+         except quasardb.Error as e:
+             logger.error("[%s] Failed to read sample entry: %s", endpoint, str(e))
+
+     return ret
+
+
+ def _check_node_writable(conn: quasardb.Cluster, logger: logging.Logger):
+     logger.info("Checking node writable")
+     key = f"_qdb_write_check_{uuid.uuid4().hex}"  # almost zero chance of collision
+     value = random.randint(-9223372036854775808, 9223372036854775807)
+     ret = {}
+
+     for endpoint in conn.endpoints():
+         ret[endpoint] = 0  # pessimistic
+         node = conn.node(endpoint)
+         entry = node.integer(key)
+
+         try:
+             entry.put(value)
+             if entry.get() == value:
+                 ret[endpoint] = 1
+         except quasardb.Error as e:
+             logger.error("[%s] Failed to put/get test entry '%s': %s", endpoint, key, e)
+         finally:
+             try:
+                 entry.remove()
+             except quasardb.AliasNotFoundError as e:
+                 logger.error(
+                     "[%s] Test entry '%s' was not found during cleanup: %s", endpoint, key, e
+                 )
+             except quasardb.Error as e:
+                 logger.error(
+                     "[%s] Failed to clean up test entry '%s': %s", endpoint, key, e
+                 )
+
+     return ret
+
+
+ def _get_base_qdb_metrics(conn: quasardb.Cluster, logger: logging.Logger):
+     """
+     Returns basic QuasarDB metrics such as node online and writable status.
+     Those metrics are most commonly used to determine QuasarDB cluster health.
+     """
+     logger.info("Getting base QuasarDB metrics")
+     ret = {endpoint: {"cumulative": {}, "by_uid": {}} for endpoint in conn.endpoints()}
+     online_stats = _check_node_online(conn, logger)
+     writable_stats = _check_node_writable(conn, logger)
+
+     for endpoint in conn.endpoints():
+         ret[endpoint]["cumulative"]["check.online"] = {
+             "value": online_stats.get(endpoint, 0),
+             "type": qdbst.Type.GAUGE,
+             "unit": qdbst.Unit.NONE,
+         }
+         ret[endpoint]["cumulative"]["node.writable"] = {
+             "value": writable_stats.get(endpoint, 0),
+             "type": qdbst.Type.GAUGE,
+             "unit": qdbst.Unit.NONE,
+         }
+
+     return ret
+
+
+ def fetch_qdb_stats(
+     qdb_conn_args: dict,
+     include: Union[list[str], None],
+     exclude: Union[list[str], None],
+     logger: logging.Logger,
+ ):
+     base_stats, node_stats = {}, {}
+     logger.info("Getting QuasarDB connection")
+     try:
+         with quasardb.Cluster(**qdb_conn_args) as conn:
+             base_stats = _get_base_qdb_metrics(conn, logger)
+             node_stats = qdbst.by_node(conn)
+     except Exception as e:
+         logger.error("Failed to fetch stats from QuasarDB: %s", str(e))
+         raise
+
+     # Merge base_stats into node_stats
+     combined_stats = node_stats
+     for endpoint, data in base_stats.items():
+         if endpoint not in combined_stats:
+             combined_stats[endpoint] = data
+         else:
+             if "cumulative" in data:
+                 combined_stats[endpoint].setdefault("cumulative", {}).update(
+                     data["cumulative"]
+                 )
+             if "by_uid" in data:
+                 combined_stats[endpoint].setdefault("by_uid", {}).update(data["by_uid"])
+
+     return filter_stats(combined_stats, include, exclude, logger)
@@ -0,0 +1,15 @@
+ Metadata-Version: 2.4
+ Name: qdb-prometheus-exporter
+ Version: 3.14.2
+ Summary: Statistics exporter for Prometheus monitoring system for QuasarDB time-series database.
+ Author: quasardb SAS
+ Author-email: support@quasar.ai
+ Project-URL: Homepage, https://www.quasar.ai
+ Requires-Python: >=3.9
+ License-File: LICENSE
+ Requires-Dist: prometheus-client
+ Requires-Dist: quasardb==3.14.2
+ Requires-Dist: click
+ Requires-Dist: uvicorn
+ Requires-Dist: fastapi
+ Dynamic: license-file
@@ -0,0 +1,10 @@
+ qdb_prometheus_exporter/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ qdb_prometheus_exporter/collector.py,sha256=NZ_ihNiP3Lq1VUBbzRTRgeUzBLvdzX7R14Q02ss8Dhg,5493
+ qdb_prometheus_exporter/main.py,sha256=DG-FP2OIXLN5Dd70fVE9g8zJeLB7PN5xCoDLHGespFo,2715
+ qdb_prometheus_exporter/stats.py,sha256=UeBJFuLfJx_qOyf1rVcTk1IZ6Gx7oKrIUilJtcH0-78,5648
+ qdb_prometheus_exporter-3.14.2.dist-info/licenses/LICENSE,sha256=D4FxifKokzhemZloJq4ytQjoDdWHF6Yf11itdMlqzDI,1496
+ qdb_prometheus_exporter-3.14.2.dist-info/METADATA,sha256=D0Pv5Y32_UatMFY8fVoP6qIiVaQYNOugn6a_6KTeF_U,462
+ qdb_prometheus_exporter-3.14.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ qdb_prometheus_exporter-3.14.2.dist-info/entry_points.txt,sha256=bTj7lJtxFFqs0W8NYv16phryQ_4M_4sHDT8hfQDYbpA,86
+ qdb_prometheus_exporter-3.14.2.dist-info/top_level.txt,sha256=nZG-KKjflJ9MUQa5TzVpdqOR0GZlQwzll6vMOLJJ90M,24
+ qdb_prometheus_exporter-3.14.2.dist-info/RECORD,,
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.10.2)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
@@ -0,0 +1,2 @@
+ [console_scripts]
+ qdb-prometheus-exporter = qdb_prometheus_exporter.main:start_server
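
For reference only (not part of the packaged files): the entry point above resolves to start_server in qdb_prometheus_exporter/main.py, so a typical invocation using the defaults defined there (the filter pattern below is hypothetical) would be

    qdb-prometheus-exporter --cluster qdb://127.0.0.1:2836 --exporter-port 9000 --filter-exclude "partitions"

after which Prometheus can scrape http://127.0.0.1:9000/metrics, with a liveness probe available at /health.
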
@@ -0,0 +1,28 @@
+ BSD 3-Clause License
+
+ Copyright (c) 2026, Quasar AI
+
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this
+    list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice,
+    this list of conditions and the following disclaimer in the documentation
+    and/or other materials provided with the distribution.
+
+ 3. Neither the name of the copyright holder nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+ FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1 @@
+ qdb_prometheus_exporter