c2cwsgiutils 5.1.7.dev20230901073305__py3-none-any.whl → 5.2.1.dev197__py3-none-any.whl
This diff compares two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
- c2cwsgiutils/__init__.py +13 -13
- c2cwsgiutils/acceptance/connection.py +5 -2
- c2cwsgiutils/acceptance/image.py +98 -4
- c2cwsgiutils/acceptance/package-lock.json +1933 -0
- c2cwsgiutils/acceptance/package.json +7 -0
- c2cwsgiutils/acceptance/print.py +4 -4
- c2cwsgiutils/acceptance/screenshot.js +62 -0
- c2cwsgiutils/acceptance/utils.py +14 -22
- c2cwsgiutils/auth.py +4 -4
- c2cwsgiutils/broadcast/__init__.py +15 -7
- c2cwsgiutils/broadcast/interface.py +3 -2
- c2cwsgiutils/broadcast/local.py +3 -2
- c2cwsgiutils/broadcast/redis.py +8 -7
- c2cwsgiutils/client_info.py +5 -5
- c2cwsgiutils/config_utils.py +2 -1
- c2cwsgiutils/coverage_setup.py +2 -2
- c2cwsgiutils/db.py +58 -37
- c2cwsgiutils/db_maintenance_view.py +2 -1
- c2cwsgiutils/debug/_listeners.py +10 -9
- c2cwsgiutils/debug/_views.py +12 -11
- c2cwsgiutils/debug/utils.py +5 -5
- c2cwsgiutils/errors.py +7 -6
- c2cwsgiutils/health_check.py +96 -85
- c2cwsgiutils/index.py +90 -105
- c2cwsgiutils/loader.py +3 -3
- c2cwsgiutils/logging_view.py +3 -2
- c2cwsgiutils/models_graph.py +8 -6
- c2cwsgiutils/prometheus.py +175 -57
- c2cwsgiutils/pyramid.py +4 -2
- c2cwsgiutils/pyramid_logging.py +2 -1
- c2cwsgiutils/redis_stats.py +13 -11
- c2cwsgiutils/redis_utils.py +15 -14
- c2cwsgiutils/request_tracking/__init__.py +36 -30
- c2cwsgiutils/request_tracking/_sql.py +3 -1
- c2cwsgiutils/scripts/genversion.py +4 -4
- c2cwsgiutils/scripts/stats_db.py +130 -68
- c2cwsgiutils/scripts/test_print.py +1 -1
- c2cwsgiutils/sentry.py +2 -1
- c2cwsgiutils/setup_process.py +13 -17
- c2cwsgiutils/sql_profiler/_impl.py +12 -5
- c2cwsgiutils/sqlalchemylogger/README.md +48 -0
- c2cwsgiutils/sqlalchemylogger/_models.py +7 -4
- c2cwsgiutils/sqlalchemylogger/examples/example.py +15 -0
- c2cwsgiutils/sqlalchemylogger/handlers.py +11 -8
- c2cwsgiutils/static/favicon-16x16.png +0 -0
- c2cwsgiutils/static/favicon-32x32.png +0 -0
- c2cwsgiutils/stats_pyramid/__init__.py +7 -11
- c2cwsgiutils/stats_pyramid/_db_spy.py +14 -11
- c2cwsgiutils/stats_pyramid/_pyramid_spy.py +29 -20
- c2cwsgiutils/templates/index.html.mako +50 -0
- c2cwsgiutils/version.py +49 -16
- c2cwsgiutils-5.2.1.dev197.dist-info/LICENSE +22 -0
- {c2cwsgiutils-5.1.7.dev20230901073305.dist-info → c2cwsgiutils-5.2.1.dev197.dist-info}/METADATA +187 -135
- c2cwsgiutils-5.2.1.dev197.dist-info/RECORD +67 -0
- {c2cwsgiutils-5.1.7.dev20230901073305.dist-info → c2cwsgiutils-5.2.1.dev197.dist-info}/WHEEL +1 -2
- c2cwsgiutils-5.2.1.dev197.dist-info/entry_points.txt +21 -0
- c2cwsgiutils/acceptance/composition.py +0 -129
- c2cwsgiutils/metrics.py +0 -110
- c2cwsgiutils/scripts/check_es.py +0 -130
- c2cwsgiutils/scripts/coverage_report.py +0 -36
- c2cwsgiutils/stats.py +0 -355
- c2cwsgiutils/stats_pyramid/_views.py +0 -16
- c2cwsgiutils-5.1.7.dev20230901073305.data/scripts/c2cwsgiutils-run +0 -32
- c2cwsgiutils-5.1.7.dev20230901073305.dist-info/LICENSE.txt +0 -28
- c2cwsgiutils-5.1.7.dev20230901073305.dist-info/RECORD +0 -69
- c2cwsgiutils-5.1.7.dev20230901073305.dist-info/entry_points.txt +0 -25
- c2cwsgiutils-5.1.7.dev20230901073305.dist-info/top_level.txt +0 -2
- tests/acceptance/__init__.py +0 -0
- tests/acceptance/test_utils.py +0 -13
c2cwsgiutils/metrics.py
DELETED
@@ -1,110 +0,0 @@
-"""Used to publish metrics to Prometheus."""
-
-import re
-import socket
-import warnings
-from os import listdir
-from typing import Any, Dict, List, Optional, Tuple, Union
-
-import pyramid.request
-import pyramid.response
-
-from c2cwsgiutils.debug.utils import dump_memory_maps
-
-
-class Provider:
-    """The provider interface."""
-
-    def __init__(self, name: str, help_: str, type_: str = "gauge", extend: bool = True):
-        self.name = name
-        self.help = help_
-        self.type = type_
-        self.extend = extend
-
-    def get_data(self) -> List[Tuple[Dict[str, str], Union[int, float]]]:
-        """Get empty response, should be defined in the specific provider."""
-        return []
-
-
-_PROVIDERS = []
-
-
-POD_NAME = socket.gethostname()
-SERVICE_NAME = re.match("^(.+)-[0-9a-f]+-[0-9a-z]+$", POD_NAME)
-
-
-def add_provider(provider: Provider) -> None:
-    """Add the provider."""
-    _PROVIDERS.append(provider)
-
-
-def _metrics() -> pyramid.response.Response:
-    result: List[str] = []
-
-    for provider in _PROVIDERS:
-        result += [
-            f"# HELP {provider.name} {provider.help}",
-            f"# TYPE {provider.name} {provider.type}",
-        ]
-        for attributes, value in provider.get_data():
-            attrib = {}
-            if provider.extend:
-                attrib["pod_name"] = POD_NAME
-                if SERVICE_NAME is not None:
-                    attrib["service_name"] = SERVICE_NAME.group(1)
-            attrib.update(attributes)
-            dbl_quote = '"'
-            printable_attribs = ",".join([f'{k}="{v.replace(dbl_quote, "_")}"' for k, v in attrib.items()])
-            result.append(f"{provider.name}{{{printable_attribs}}} {value}")
-
-    return "\n".join(result)
-
-
-def _view(request: pyramid.request.Request) -> pyramid.response.Response:
-    request.response.text = _metrics()
-    return request.response
-
-
-NUMBER_RE = re.compile(r"^[0-9]+$")
-
-
-class MemoryMapProvider(Provider):
-    """The Linux memory map provider."""
-
-    def __init__(self, memory_type: str = "pss", pids: Optional[List[str]] = None):
-        """
-        Initialize.
-
-        Arguments:
-
-            memory_type: can be rss, pss or size
-            pids: the list of pids or none
-        """
-        super().__init__(
-            f"pod_process_smap_{memory_type}_kb",
-            f"Container smap used {memory_type.capitalize()}",
-        )
-        self.memory_type = memory_type
-        self.pids = pids
-
-    def get_data(self) -> List[Tuple[Dict[str, Any], Union[int, float]]]:
-        """Get empty response, should be defined in the specific provider."""
-        results: List[Tuple[Dict[str, Any], Union[int, float]]] = []
-        for pid in [p for p in listdir("/proc/") if NUMBER_RE.match(p)] if self.pids is None else self.pids:
-            results += [
-                ({"pid": pid, "name": e["name"]}, e[self.memory_type + "_kb"]) for e in dump_memory_maps(pid)
-            ]
-        return results
-
-
-def init(config: pyramid.config.Configurator) -> None:
-    """Initialize the metrics view, , for backward compatibility."""
-    warnings.warn("init function is deprecated; use includeme instead")
-    includeme(config)
-
-
-def includeme(config: pyramid.config.Configurator) -> None:
-    """Initialize the metrics view."""
-    config.add_route("c2c_metrics", r"/metrics", request_method="GET")
-    config.add_view(_view, route_name="c2c_metrics", http_cache=0)
-    add_provider(MemoryMapProvider())
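
For reference, the removed `c2cwsgiutils.metrics` module above exposed a plain-text `/metrics` view fed by registered `Provider` objects (the Prometheus support now lives in `c2cwsgiutils/prometheus.py`, which grows substantially in this release). A minimal sketch of how a 5.1.x application typically wired it, using a hypothetical `QueueLengthProvider`:

```python
# Sketch of the removed 5.1.x API; QueueLengthProvider is a hypothetical example.
from typing import Dict, List, Tuple, Union

import pyramid.config

from c2cwsgiutils import metrics  # removed in 5.2


class QueueLengthProvider(metrics.Provider):
    """Hypothetical provider exposing a single gauge."""

    def __init__(self) -> None:
        super().__init__("app_queue_length", "Number of jobs waiting in the queue")

    def get_data(self) -> List[Tuple[Dict[str, str], Union[int, float]]]:
        # Rendered by the /metrics view as, e.g.:
        # app_queue_length{queue="default",pod_name="...",service_name="..."} 42
        return [({"queue": "default"}, 42)]


def includeme(config: pyramid.config.Configurator) -> None:
    config.include(metrics.includeme)  # adds the GET /metrics route and the memory-map provider
    metrics.add_provider(QueueLengthProvider())
```

Each `(labels, value)` pair returned by `get_data()` became one exposition line, with `pod_name` and `service_name` labels added automatically when `extend` was true.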
c2cwsgiutils/scripts/check_es.py
DELETED
@@ -1,130 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import datetime
-import logging
-import os
-import sys
-import time
-import uuid
-import warnings
-from typing import Any, List, Optional
-
-import requests.exceptions
-from dateutil import parser as dp
-
-import c2cwsgiutils.setup_process
-from c2cwsgiutils import stats
-
-
-def _ensure_slash(txt: Optional[str]) -> Optional[str]:
-    if txt is None:
-        return None
-    if txt.endswith("/"):
-        return txt
-    return txt + "/"
-
-
-LOGGER_NAME = "check_elasticsearch"
-LOG_TIMEOUT = int(os.environ["LOG_TIMEOUT"])
-LOG = logging.getLogger(LOGGER_NAME)
-ES_URL = _ensure_slash(os.environ.get("ES_URL"))
-ES_INDEXES = os.environ.get("ES_INDEXES")
-ES_AUTH = os.environ.get("ES_AUTH")
-ES_FILTERS = os.environ.get("ES_FILTERS", "")
-
-SEARCH_HEADERS = {"Content-Type": "application/json;charset=UTF-8", "Accept": "application/json"}
-if ES_AUTH is not None:
-    SEARCH_HEADERS["Authorization"] = ES_AUTH
-SEARCH_URL = f"{ES_URL}{ES_INDEXES}/_search"
-
-
-def _max_timestamp() -> datetime.datetime:
-    must: List[Any] = []
-    query = {
-        "aggs": {"max_timestamp": {"max": {"field": "@timestamp"}}},
-        "query": {"bool": {"must": must}},
-    }
-    if ES_FILTERS != "":
-        for filter_ in ES_FILTERS.split(","):
-            name, value = filter_.split("=")
-            must.append({"term": {name: value}})
-    else:
-        del query["query"]
-
-    r = requests.post(SEARCH_URL, json=query, headers=SEARCH_HEADERS)
-    r.raise_for_status()
-    json = r.json()
-    return dp.parse(json["aggregations"]["max_timestamp"]["value_as_string"])
-
-
-def _check_roundtrip() -> None:
-    check_uuid = str(uuid.uuid4())
-
-    # emit the log we are going to look for
-    logger_name = LOGGER_NAME + "." + check_uuid
-    logger = logging.getLogger(logger_name)
-    logger.setLevel(logging.INFO)
-    logger.info("Test roundtrip")
-
-    query = {"query": {"match_phrase": {"log.logger": logger_name}}}
-    start = time.monotonic()
-    while time.monotonic() < start + LOG_TIMEOUT:
-        exception = None
-        for _ in range(int(os.environ.get("C2CWSGIUTILS_CHECK_ES_TRYNUMBER", 10))):
-            try:
-                r = requests.post(SEARCH_URL, json=query, headers=SEARCH_HEADERS)
-                exception = None
-            except requests.exceptions.RequestException as e:
-                logger.exception("Error on querying Elasticsearch")
-                exception = e
-            if r.ok:
-                continue
-            time.sleep(float(os.environ.get("C2CWSGIUTILS_CHECK_ES_SLEEP", 1)))
-        if exception is not None:
-            raise exception
-        r.raise_for_status()
-        json = r.json()
-        found = json["hits"]["total"]
-        if isinstance(found, dict):
-            found = found["value"]
-        if found > 0:
-            LOG.info("Found the test log line.")
-            stats.set_gauge(["roundtrip"], time.monotonic() - start)
-            return
-        else:
-            LOG.info("Didn't find the test log line. Wait 1s...")
-            time.sleep(1)
-    LOG.warning("Timeout waiting for the test log line")
-    stats.set_gauge(["roundtrip"], LOG_TIMEOUT * 2)
-
-
-def deprecated() -> None:
-    """Run the command and print a deprecated notice."""
-    warnings.warn("c2cwsgiutils_check_es.py is deprecated; use c2cwsgiutils-check-es instead")
-    return main()
-
-
-def main() -> None:
-    """Run the command."""
-    try:
-        argparser = argparse.ArgumentParser(description="Check logs on Elasticsearch")
-        c2cwsgiutils.setup_process.fill_arguments(argparser)
-        args = argparser.parse_args()
-        c2cwsgiutils.setup_process.bootstrap_application_from_options(args)
-
-        with stats.outcome_timer_context(["get_max_timestamp"]):
-            max_ts = _max_timestamp()
-        now = datetime.datetime.now(max_ts.tzinfo)
-        age = round((now - max_ts).total_seconds())
-        LOG.info("Last log age: %ss", age)
-        stats.set_gauge(["max_age"], age)
-
-        if "LOG_TIMEOUT" in os.environ:
-            _check_roundtrip()
-    except: # pylint: disable=bare-except
-        LOG.exception("Exception during run")
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    main()
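
The removed `c2cwsgiutils-check-es` script was configured entirely through environment variables read at import time. A sketch of that configuration (the values are illustrative examples, not defaults shipped by the package):

```python
# Environment expected by the removed check_es script; example values only.
import os

os.environ.update(
    {
        "ES_URL": "https://elasticsearch.example.com",  # _ensure_slash() adds the trailing "/"
        "ES_INDEXES": "logs-*",  # queried through {ES_URL}{ES_INDEXES}/_search
        "ES_AUTH": "Basic dXNlcjpzZWNyZXQ=",  # optional, copied verbatim into the Authorization header
        "ES_FILTERS": "kubernetes.namespace=prod",  # optional comma-separated name=value term filters
        "LOG_TIMEOUT": "300",  # required; its presence also enables the log roundtrip check
    }
)

# The environment must be set before the module is imported, since the
# configuration above is read at module level.
from c2cwsgiutils.scripts import check_es  # noqa: E402  (removed in 5.2)

check_es.main()
```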
c2cwsgiutils/scripts/coverage_report.py
DELETED
@@ -1,36 +0,0 @@
-#!/usr/bin/env python3
-import logging
-import os
-import shutil
-import sys
-import warnings
-
-import coverage
-
-LOG = logging.getLogger(__name__)
-
-
-def deprecated() -> None:
-    """Run the command and print a deprecated notice."""
-    warnings.warn("c2cwsgiutils_coverage_report.py is deprecated; use c2cwsgiutils-coverage-report instead")
-    return main()
-
-
-def main() -> None:
-    """Run the command."""
-    sources = sys.argv[1:]
-    report_dir = "/reports/coverage/api"
-    dest_dir = "/tmp/coverage/api" # nosec
-    shutil.rmtree(dest_dir, ignore_errors=True)
-    shutil.copytree(report_dir, dest_dir)
-    cov = coverage.Coverage(
-        data_file=os.path.join(dest_dir, "coverage"), data_suffix=True, source=sources or None, branch=True
-    )
-    cov.combine([dest_dir], strict=True)
-    cov.html_report(directory=dest_dir, ignore_errors=True)
-    cov.xml_report(outfile=os.path.join(dest_dir, "coverage.xml"), ignore_errors=True)
-    cov.report(ignore_errors=True)
-
-
-if __name__ == "__main__":
-    main()
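
This helper was normally run as the `c2cwsgiutils-coverage-report` console script with the source packages as positional arguments. A sketch of the equivalent call from Python (the package name is a placeholder):

```python
# Equivalent of: c2cwsgiutils-coverage-report my_app
import sys

from c2cwsgiutils.scripts import coverage_report  # removed in 5.2

sys.argv = ["c2cwsgiutils-coverage-report", "my_app"]  # "my_app" is a placeholder source package
coverage_report.main()  # copies /reports/coverage/api to /tmp/coverage/api, then writes HTML, XML and text reports
```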
c2cwsgiutils/stats.py
DELETED
@@ -1,355 +0,0 @@
-"""Generate statsd metrics."""
-
-import contextlib
-import logging
-import os
-import re
-import socket
-import threading
-import time
-from abc import ABCMeta, abstractmethod
-from typing import ( # noqa # pylint: disable=unused-import
-    Any,
-    Callable,
-    Dict,
-    Generator,
-    List,
-    Mapping,
-    MutableMapping,
-    Optional,
-    Sequence,
-    Tuple,
-)
-
-import pyramid.request
-
-from c2cwsgiutils import config_utils
-
-LOG = logging.getLogger(__name__)
-USE_TAGS_ENV = "STATSD_USE_TAGS"
-TAG_PREFIX_ENV = "STATSD_TAG_"
-USE_TAGS = config_utils.config_bool(os.environ.get(USE_TAGS_ENV, "0"))
-TagType = Optional[Mapping[str, Any]]
-
-
-class _BaseBackend(metaclass=ABCMeta):
-    @abstractmethod
-    def timer(self, key: Sequence[Any], duration: float, tags: TagType = None) -> None:
-        pass
-
-    @abstractmethod
-    def gauge(self, key: Sequence[Any], value: float, tags: TagType = None) -> None:
-        pass
-
-    @abstractmethod
-    def counter(self, key: Sequence[Any], increment: int, tags: TagType = None) -> None:
-        pass
-
-
-BACKENDS: MutableMapping[str, _BaseBackend] = {}
-
-
-class Timer:
-    """Allow to measure the duration of some activity."""
-
-    def __init__(self, key: Optional[Sequence[Any]], tags: TagType) -> None:
-        self._key = key
-        self._tags = tags
-        self._start = time.monotonic()
-
-    def stop(self, key_final: Optional[Sequence[Any]] = None, tags_final: TagType = None) -> float:
-        duration = time.monotonic() - self._start
-        if key_final is not None:
-            self._key = key_final
-        if tags_final is not None:
-            self._tags = tags_final
-        assert self._key is not None
-        for backend in BACKENDS.values():
-            backend.timer(self._key, duration, self._tags)
-        return duration
-
-
-@contextlib.contextmanager
-def timer_context(key: Sequence[Any], tags: TagType = None) -> Generator[None, None, None]:
-    """
-    Add a duration measurement to the stats using the duration the context took to run.
-
-    Arguments:
-
-        key: The path of the key, given as a list.
-        tags: Some tags to attach to the metric.
-    """
-    measure = timer(key, tags)
-    yield
-    measure.stop()
-
-
-@contextlib.contextmanager
-def outcome_timer_context(key: List[Any], tags: TagType = None) -> Generator[None, None, None]:
-    """
-    Add a duration measurement to the stats using the duration the context took to run.
-
-    The given key is prepended with 'success' or 'failure' according to the context's outcome.
-
-    Arguments:
-
-        key: The path of the key, given as a list.
-        tags: Some tags to attach to the metric.
-    """
-    measure = timer()
-    try:
-        yield
-        if USE_TAGS:
-            opt_tags = dict(tags) if tags is not None else {}
-            opt_tags["success"] = 1
-            measure.stop(key, opt_tags)
-        else:
-            measure.stop(key + ["success"], tags)
-    except Exception:
-        if USE_TAGS:
-            opt_tags = dict(tags) if tags is not None else {}
-            opt_tags["success"] = 0
-            measure.stop(key, opt_tags)
-        else:
-            measure.stop(key + ["failure"], tags)
-        raise
-
-
-def timer(key: Optional[Sequence[Any]] = None, tags: TagType = None) -> Timer:
-    """
-    Create a timer for the given key.
-
-    The key can be omitted, but then need to be specified when stop is called.
-
-    Arguments:
-
-        key: The path of the key, given as a list.
-        tags: Some tags to attach to the metric.
-
-    Returns: An instance of _Timer
-    """
-    assert key is None or isinstance(key, list)
-    return Timer(key, tags)
-
-
-def set_gauge(key: Sequence[Any], value: float, tags: TagType = None) -> None:
-    """
-    Set a gauge value.
-
-    Arguments:
-
-        key: The path of the key, given as a list.
-        value: The new value of the gauge
-        tags: Some tags to attach to the metric.
-    """
-    for backend in BACKENDS.values():
-        backend.gauge(key, value, tags)
-
-
-def increment_counter(key: Sequence[Any], increment: int = 1, tags: TagType = None) -> None:
-    """
-    Increment a counter value.
-
-    Arguments:
-
-        key: The path of the key, given as a list.
-        increment: The increment
-        tags: Some tags to attach to the metric.
-    """
-    for backend in BACKENDS.values():
-        backend.counter(key, increment, tags)
-
-
-class MemoryBackend(_BaseBackend):
-    """Store stats in the memors."""
-
-    def __init__(self) -> None:
-        self._timers: MutableMapping[str, Tuple[int, float, float, float]] = {}
-        self._gauges: MutableMapping[str, float] = {}
-        self._counters: MutableMapping[str, int] = {}
-        self._stats_lock = threading.Lock()
-        LOG.info("Starting a MemoryBackend for stats")
-
-    @staticmethod
-    def _key_entry(key: str) -> str:
-        return str(key).replace("/", "_")
-
-    @staticmethod
-    def _key(key: Sequence[Any], tags: TagType) -> str:
-        result = "/".join(MemoryBackend._key_entry(v) for v in key)
-        result += _format_tags(
-            tags,
-            prefix="/",
-            tag_sep="/",
-            kv_sep="=",
-            key_formatter=MemoryBackend._key_entry,
-            value_formatter=MemoryBackend._key_entry,
-        )
-        return result
-
-    def timer(self, key: Sequence[Any], duration: float, tags: TagType = None) -> None:
-        """Add a duration measurement to the stats."""
-        the_key = self._key(key, tags)
-        with self._stats_lock:
-            cur = self._timers.get(the_key, None)
-            if cur is None:
-                self._timers[the_key] = (1, duration, duration, duration)
-            else:
-                self._timers[the_key] = (
-                    cur[0] + 1,
-                    cur[1] + duration,
-                    min(cur[2], duration),
-                    max(cur[3], duration),
-                )
-
-    def gauge(self, key: Sequence[Any], value: float, tags: TagType = None) -> None:
-        self._gauges[self._key(key, tags)] = value
-
-    def counter(self, key: Sequence[Any], increment: int, tags: TagType = None) -> None:
-        the_key = self._key(key, tags)
-        with self._stats_lock:
-            self._counters[the_key] = self._counters.get(the_key, 0) + increment
-
-    def get_stats(self, request: pyramid.request.Request) -> Mapping[str, Any]:
-        reset = request.params.get("reset", "0") == "1"
-        with self._stats_lock:
-            timers = {}
-            for key, value in self._timers.items():
-                timers[key] = {
-                    "nb": value[0],
-                    "avg_ms": int(round((value[1] / value[0]) * 1000.0)),
-                    "min_ms": int(round(value[2] * 1000.0)),
-                    "max_ms": int(round(value[3] * 1000.0)),
-                }
-            gauges = dict(self._gauges)
-            counters = dict(self._counters)
-
-            if reset:
-                self._timers.clear()
-                self._gauges.clear()
-                self._counters.clear()
-        return {"timers": timers, "gauges": gauges, "counters": counters}
-
-
-# https://github.com/prometheus/statsd_exporter/blob/master/mapper.go#L29
-INVALID_KEY_CHARS = re.compile(r"[^a-zA-Z0-9_]")
-INVALID_TAG_VALUE_CHARS = re.compile(r"[,#|]")
-
-
-class StatsDBackend(_BaseBackend): # pragma: nocover
-    """Abstraction of the statd backend to sent some metrics."""
-
-    def __init__(self, address: str, prefix: str, tags: Optional[Dict[str, str]] = None) -> None:
-        self._prefix = prefix
-        self._tags = tags
-        if self._prefix != "" and not self._prefix.endswith("."):
-            self._prefix += "."
-
-        host, port = address.rsplit(":")
-        host = host.strip("[]")
-        addrinfo = socket.getaddrinfo(host, port, 0, 0, socket.IPPROTO_UDP)
-        family, socktype, protocol, _canonname, sock_addr = addrinfo[0]
-        LOG.info("Starting a StatsDBackend for %s stats: %s -> %s", prefix, address, repr(sock_addr))
-
-        self._socket = socket.socket(family, socktype, protocol)
-        self._socket.setblocking(False)
-        self._socket.connect(sock_addr)
-
-    @staticmethod
-    def _key_entry(key_entry: Any) -> str:
-        return INVALID_KEY_CHARS.sub("_", str(key_entry))
-
-    @staticmethod
-    def _tag_value(tag_value: Any) -> str:
-        return INVALID_TAG_VALUE_CHARS.sub("_", str(tag_value))
-
-    def _key(self, key: Sequence[Any]) -> str:
-        return (self._prefix + ".".join(map(StatsDBackend._key_entry, key)))[:450]
-
-    def _merge_tags(self, tags: TagType) -> TagType:
-        if tags is None:
-            return self._tags
-        elif self._tags is None:
-            return tags
-        else:
-            tmp = dict(self._tags)
-            tmp.update(tags)
-            return tmp
-
-    def _send(self, message: str, tags: TagType) -> None:
-        tags = self._merge_tags(tags)
-        message += _format_tags(
-            tags,
-            prefix="|#",
-            tag_sep=",",
-            kv_sep=":",
-            key_formatter=StatsDBackend._key_entry,
-            value_formatter=StatsDBackend._tag_value,
-        )
-        try:
-            self._socket.send(message.encode("utf-8"))
-        except Exception: # nosec # pylint: disable=broad-except
-            pass # Ignore errors (must survive if stats cannot be sent)
-
-    def timer(self, key: Sequence[Any], duration: float, tags: TagType = None) -> None:
-        the_key = self._key(key)
-        ms_duration = int(round(duration * 1000.0))
-        ms_duration = max(ms_duration, 1) # collectd would ignore events with zero durations
-        message = f"{the_key}:{ms_duration}|ms"
-        self._send(message, tags)
-
-    def gauge(self, key: Sequence[Any], value: float, tags: TagType = None) -> None:
-        the_key = self._key(key)
-        message = f"{the_key}:{value}|g"
-        self._send(message, tags)
-
-    def counter(self, key: Sequence[Any], increment: int, tags: TagType = None) -> None:
-        the_key = self._key(key)
-        message = f"{the_key}:{increment}|c"
-        self._send(message, tags)
-
-
-def init_backends(settings: Optional[Mapping[str, str]] = None) -> None:
-    """
-    Initialize the backends according to the configuration.
-
-    Arguments:
-
-        settings: The Pyramid config
-    """
-    if config_utils.env_or_settings(settings, "STATS_VIEW", "c2c.stats_view", False): # pragma: nocover
-        BACKENDS["memory"] = MemoryBackend()
-
-    statsd_address = config_utils.env_or_settings(settings, "STATSD_ADDRESS", "c2c.statsd_address", None)
-    if statsd_address is not None: # pragma: nocover
-        statsd_prefix = config_utils.env_or_settings(settings, "STATSD_PREFIX", "c2c.statsd_prefix", "")
-        statsd_tags = get_env_tags()
-        try:
-            BACKENDS["statsd"] = StatsDBackend(statsd_address, statsd_prefix, statsd_tags)
-        except Exception: # pylint: disable=broad-except
-            LOG.error("Failed configuring the statsd backend. Will continue without it.", exc_info=True)
-
-
-def _format_tags(
-    tags: Optional[Mapping[str, Any]],
-    prefix: str,
-    tag_sep: str,
-    kv_sep: str,
-    key_formatter: Callable[[str], str],
-    value_formatter: Callable[[str], str],
-) -> str:
-    if tags:
-        return prefix + tag_sep.join(
-            key_formatter(k) + kv_sep + value_formatter(v) for k, v in sorted(tags.items())
-        )
-    else:
-        return ""
-
-
-def get_env_tags() -> Dict[str, str]:
-    """Get the tag from the environment variable."""
-    tags = {}
-    for name, value in os.environ.items():
-        if name.startswith(TAG_PREFIX_ENV):
-            tags[name[len(TAG_PREFIX_ENV) :].lower()] = value
-    return tags
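
The removed `c2cwsgiutils.stats` module above provided the StatsD-based metrics API. A minimal usage sketch, assuming a StatsD server at an example address:

```python
# Sketch of the removed 5.1.x statsd API; address, prefix and keys are examples.
import os
import time

from c2cwsgiutils import stats  # removed in 5.2

os.environ["STATSD_ADDRESS"] = "statsd.example.com:8125"
os.environ["STATSD_PREFIX"] = "myapp"
stats.init_backends()  # configures the statsd backend (and the memory one if STATS_VIEW is set)

with stats.timer_context(["sql", "query"], tags={"table": "users"}):
    time.sleep(0.1)  # stand-in for the timed work

stats.increment_counter(["requests"], 1)
stats.set_gauge(["queue", "length"], 42)
```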
c2cwsgiutils/stats_pyramid/_views.py
DELETED
@@ -1,16 +0,0 @@
-from typing import cast
-
-import pyramid.config
-
-from c2cwsgiutils import config_utils, stats
-
-
-def init(config: pyramid.config.Configurator) -> None:
-    """Initialize the statistic view."""
-    config.add_route(
-        "c2c_read_stats_json", config_utils.get_base_path(config) + r"/stats.json", request_method="GET"
-    )
-    memory_backend = cast(stats.MemoryBackend, stats.BACKENDS["memory"])
-    config.add_view(
-        memory_backend.get_stats, route_name="c2c_read_stats_json", renderer="fast_json", http_cache=0
-    )
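
The view registered above served the in-memory statistics as JSON. A sketch of reading it over HTTP, assuming a 5.1.x application on localhost with the usual /c2c base path and the memory backend enabled via STATS_VIEW:

```python
# Reading the removed stats view; host and base path are assumptions.
import requests

response = requests.get("http://localhost:8080/c2c/stats.json", params={"reset": "1"})
response.raise_for_status()
data = response.json()  # {"timers": {...}, "gauges": {...}, "counters": {...}}
print(data["timers"])  # each timer reports nb, avg_ms, min_ms and max_ms
```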
c2cwsgiutils-5.1.7.dev20230901073305.data/scripts/c2cwsgiutils-run
DELETED
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-echo "Not for production usage!"
-
-_term() {
-    echo "Caught SIGTERM signal!"
-    kill -TERM "$child" 2> /dev/null
-    wait "$child"
-    exit 1
-}
-
-_int() {
-    echo "Caught SIGINT signal!"
-    kill -INT "$child" 2> /dev/null
-    wait "$child"
-    exit 1
-}
-
-trap _term SIGTERM
-trap _int SIGINT
-
-while true; do
-    /usr/local/bin/gunicorn --paste=/app/production.ini &
-    child=$!
-    wait "$child"
-    exit_status=$?
-    if [ $exit_status -eq 0 ]; then
-        exit 0
-    fi
-    echo "gunicorn exited with an error ($exit_status), restarting in 1s"
-    sleep 1
-done