c2cwsgiutils 5.1.7.dev20230901073305__py3-none-any.whl → 5.2.1.dev197__py3-none-any.whl

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (69)
  1. c2cwsgiutils/__init__.py +13 -13
  2. c2cwsgiutils/acceptance/connection.py +5 -2
  3. c2cwsgiutils/acceptance/image.py +98 -4
  4. c2cwsgiutils/acceptance/package-lock.json +1933 -0
  5. c2cwsgiutils/acceptance/package.json +7 -0
  6. c2cwsgiutils/acceptance/print.py +4 -4
  7. c2cwsgiutils/acceptance/screenshot.js +62 -0
  8. c2cwsgiutils/acceptance/utils.py +14 -22
  9. c2cwsgiutils/auth.py +4 -4
  10. c2cwsgiutils/broadcast/__init__.py +15 -7
  11. c2cwsgiutils/broadcast/interface.py +3 -2
  12. c2cwsgiutils/broadcast/local.py +3 -2
  13. c2cwsgiutils/broadcast/redis.py +8 -7
  14. c2cwsgiutils/client_info.py +5 -5
  15. c2cwsgiutils/config_utils.py +2 -1
  16. c2cwsgiutils/coverage_setup.py +2 -2
  17. c2cwsgiutils/db.py +58 -37
  18. c2cwsgiutils/db_maintenance_view.py +2 -1
  19. c2cwsgiutils/debug/_listeners.py +10 -9
  20. c2cwsgiutils/debug/_views.py +12 -11
  21. c2cwsgiutils/debug/utils.py +5 -5
  22. c2cwsgiutils/errors.py +7 -6
  23. c2cwsgiutils/health_check.py +96 -85
  24. c2cwsgiutils/index.py +90 -105
  25. c2cwsgiutils/loader.py +3 -3
  26. c2cwsgiutils/logging_view.py +3 -2
  27. c2cwsgiutils/models_graph.py +8 -6
  28. c2cwsgiutils/prometheus.py +175 -57
  29. c2cwsgiutils/pyramid.py +4 -2
  30. c2cwsgiutils/pyramid_logging.py +2 -1
  31. c2cwsgiutils/redis_stats.py +13 -11
  32. c2cwsgiutils/redis_utils.py +15 -14
  33. c2cwsgiutils/request_tracking/__init__.py +36 -30
  34. c2cwsgiutils/request_tracking/_sql.py +3 -1
  35. c2cwsgiutils/scripts/genversion.py +4 -4
  36. c2cwsgiutils/scripts/stats_db.py +130 -68
  37. c2cwsgiutils/scripts/test_print.py +1 -1
  38. c2cwsgiutils/sentry.py +2 -1
  39. c2cwsgiutils/setup_process.py +13 -17
  40. c2cwsgiutils/sql_profiler/_impl.py +12 -5
  41. c2cwsgiutils/sqlalchemylogger/README.md +48 -0
  42. c2cwsgiutils/sqlalchemylogger/_models.py +7 -4
  43. c2cwsgiutils/sqlalchemylogger/examples/example.py +15 -0
  44. c2cwsgiutils/sqlalchemylogger/handlers.py +11 -8
  45. c2cwsgiutils/static/favicon-16x16.png +0 -0
  46. c2cwsgiutils/static/favicon-32x32.png +0 -0
  47. c2cwsgiutils/stats_pyramid/__init__.py +7 -11
  48. c2cwsgiutils/stats_pyramid/_db_spy.py +14 -11
  49. c2cwsgiutils/stats_pyramid/_pyramid_spy.py +29 -20
  50. c2cwsgiutils/templates/index.html.mako +50 -0
  51. c2cwsgiutils/version.py +49 -16
  52. c2cwsgiutils-5.2.1.dev197.dist-info/LICENSE +22 -0
  53. {c2cwsgiutils-5.1.7.dev20230901073305.dist-info → c2cwsgiutils-5.2.1.dev197.dist-info}/METADATA +187 -135
  54. c2cwsgiutils-5.2.1.dev197.dist-info/RECORD +67 -0
  55. {c2cwsgiutils-5.1.7.dev20230901073305.dist-info → c2cwsgiutils-5.2.1.dev197.dist-info}/WHEEL +1 -2
  56. c2cwsgiutils-5.2.1.dev197.dist-info/entry_points.txt +21 -0
  57. c2cwsgiutils/acceptance/composition.py +0 -129
  58. c2cwsgiutils/metrics.py +0 -110
  59. c2cwsgiutils/scripts/check_es.py +0 -130
  60. c2cwsgiutils/scripts/coverage_report.py +0 -36
  61. c2cwsgiutils/stats.py +0 -355
  62. c2cwsgiutils/stats_pyramid/_views.py +0 -16
  63. c2cwsgiutils-5.1.7.dev20230901073305.data/scripts/c2cwsgiutils-run +0 -32
  64. c2cwsgiutils-5.1.7.dev20230901073305.dist-info/LICENSE.txt +0 -28
  65. c2cwsgiutils-5.1.7.dev20230901073305.dist-info/RECORD +0 -69
  66. c2cwsgiutils-5.1.7.dev20230901073305.dist-info/entry_points.txt +0 -25
  67. c2cwsgiutils-5.1.7.dev20230901073305.dist-info/top_level.txt +0 -2
  68. tests/acceptance/__init__.py +0 -0
  69. tests/acceptance/test_utils.py +0 -13
c2cwsgiutils/prometheus.py CHANGED
@@ -1,66 +1,184 @@
-from typing import Any, Mapping, MutableMapping, Optional  # noqa # pylint: disable=unused-import
+"""Every thing we needs to have the metrics in Prometheus."""
 
-import requests
+import os
+import re
+from collections.abc import Generator, Iterable
+from typing import Any, Optional, TypedDict, cast
 
-LabelsType = Optional[Mapping[str, Any]]
+import prometheus_client
+import prometheus_client.core
+import prometheus_client.metrics_core
+import prometheus_client.multiprocess
+import prometheus_client.registry
+import pyramid.config
 
+from c2cwsgiutils import broadcast, redis_utils
+from c2cwsgiutils.debug.utils import dump_memory_maps
 
-class PushgatewayGroupPublisher:
-    """
-    Implement parts of the Prometheus Pushgateway protocol.
+_NUMBER_RE = re.compile(r"^[0-9]+$")
+MULTI_PROCESS_COLLECTOR_BROADCAST_CHANNELS = [
+    "c2cwsgiutils_prometheus_collector_gc",
+    "c2cwsgiutils_prometheus_collector_process",
+]
 
-    As defined here:
 
-    https://github.com/prometheus/pushgateway
-    """
+def start(registry: Optional[prometheus_client.CollectorRegistry] = None) -> None:
+    """Start separate HTTP server to provide the Prometheus metrics."""
 
-    def __init__(
-        self, base_url: str, job: str, instance: Optional[str] = None, labels: LabelsType = None
-    ) -> None:
-        if not base_url.endswith("/"):
-            base_url += "/"
-        self._url = f"{base_url}metrics/job/{job}"
-        if instance is not None:
-            self._url += "/instance/" + instance
-        self._labels = labels
-        self._reset()
+    if os.environ.get("C2C_PROMETHEUS_PORT") is not None:
+        broadcast.includeme()
 
-    def _merge_labels(self, labels: LabelsType) -> LabelsType:
-        if labels is None:
-            return self._labels
-        elif self._labels is None:
-            return labels
-        else:
-            tmp = dict(self._labels)
-            tmp.update(labels)
-            return tmp
-
-    def add(
-        self,
-        metric_name: str,
-        metric_value: Any,
-        metric_type: str = "gauge",
-        metric_labels: Optional[Mapping[str, str]] = None,
-    ) -> None:
-        if metric_name in self._types:
-            if self._types[metric_name] != metric_type:
-                raise ValueError("Cannot change the type of a given metric")
+        registry = prometheus_client.CollectorRegistry() if registry is None else registry
+        registry.register(MemoryMapCollector())
+        registry.register(prometheus_client.PLATFORM_COLLECTOR)
+        registry.register(MultiProcessCustomCollector())
+        prometheus_client.multiprocess.MultiProcessCollector(registry)  # type: ignore[no-untyped-call]
+        prometheus_client.start_http_server(int(os.environ["C2C_PROMETHEUS_PORT"]), registry=registry)
+
+
+def includeme(config: pyramid.config.Configurator) -> None:
+    """Initialize prometheus_client in pyramid context."""
+
+    broadcast.subscribe("c2cwsgiutils_prometheus_collector_gc", _broadcast_collector_gc)
+    broadcast.subscribe("c2cwsgiutils_prometheus_collector_process", _broadcast_collector_process)
+
+
+def build_metric_name(postfix: str) -> str:
+    """Build the metric name with the prefix from the environment variable."""
+
+    return os.environ.get("C2C_PROMETHEUS_PREFIX", "c2cwsgiutils_") + postfix
+
+
+def cleanup() -> None:
+    """Cleanup the prometheus_client registry."""
+
+    redis_utils.cleanup()
+    broadcast.cleanup()
+
+
+class SerializedSample(TypedDict):
+    """Represent the serialized sample."""
+
+    name: str
+    labels: dict[str, str]
+    value: float
+
+
+class SerializedMetric(TypedDict):
+    """Represent the serialized gauge."""
+
+    type: str
+    args: dict[str, Any]
+    samples: list[SerializedSample]
+
+
+def _broadcast_collector_gc() -> list[SerializedMetric]:
+    """Get the collected GC gauges."""
+
+    return serialize_collected_data(prometheus_client.GC_COLLECTOR)
+
+
+def _broadcast_collector_process() -> list[SerializedMetric]:
+    """Get the collected process gauges."""
+    return serialize_collected_data(prometheus_client.PROCESS_COLLECTOR)
+
+
+def serialize_collected_data(collector: prometheus_client.registry.Collector) -> list[SerializedMetric]:
+    """Serialize the data from the custom collector."""
+
+    gauges: list[SerializedMetric] = []
+    for process_gauge in collector.collect():
+        gauge: SerializedMetric = {
+            "type": "<to be defined>",
+            "args": {
+                "name": process_gauge.name,
+                "documentation": process_gauge.documentation,
+                "unit": process_gauge.unit,
+            },
+            "samples": [],
+        }
+
+        if isinstance(process_gauge, prometheus_client.core.GaugeMetricFamily):
+            gauge["type"] = "gauge"
+        elif isinstance(process_gauge, prometheus_client.core.CounterMetricFamily):
+            gauge["type"] = "counter"
         else:
-            self._types[metric_name] = metric_type
-        self._to_send += f"# TYPE {metric_name} {metric_type}\n"
-        self._to_send += metric_name
-        labels = self._merge_labels(metric_labels)
-        if labels is not None:
-            self._to_send += "{" + ", ".join(f'{k}="{v}"' for k, v in sorted(labels.items())) + "}"
-        self._to_send += f" {metric_value}\n"
-
-    def commit(self) -> None:
-        requests.put(self._url, data=self._to_send.encode("utf-8")).raise_for_status()
-        self._reset()
-
-    def _reset(self) -> None:
-        self._to_send = ""
-        self._types: MutableMapping[str, str] = {}
-
-    def __str__(self) -> str:
-        return self._url + " ->\n" + self._to_send
+            raise NotImplementedError()
+        for sample in process_gauge.samples:
+            gauge["samples"].append(
+                {
+                    "name": sample.name,
+                    "labels": {"pid": str(os.getpid()), **sample.labels},
+                    "value": sample.value,
+                },
+            )
+        gauges.append(gauge)
+    return gauges
+
+
+class MultiProcessCustomCollector(prometheus_client.registry.Collector):
+    """Get the metrics from the custom collectors."""
+
+    def collect(self) -> Generator[prometheus_client.core.Metric, None, None]:
+        results: list[list[SerializedMetric]] = []
+        for channel in MULTI_PROCESS_COLLECTOR_BROADCAST_CHANNELS:
+            result = broadcast.broadcast(channel, expect_answers=True)
+            if result is not None:
+                results.extend(cast(Iterable[list[SerializedMetric]], result))
+        return _deserialize_collected_data(results)
+
+
+def _deserialize_collected_data(
+    results: list[list[SerializedMetric]],
+) -> Generator[prometheus_client.core.Metric, None, None]:
+    for serialized_collection in results:
+        for serialized_metric in serialized_collection:
+            if serialized_metric is None:
+                continue
+
+            if serialized_metric["type"] == "gauge":
+                metric: prometheus_client.core.Metric = prometheus_client.core.GaugeMetricFamily(
+                    **serialized_metric["args"]
+                )
+            elif serialized_metric["type"] == "counter":
+                metric = prometheus_client.core.CounterMetricFamily(**serialized_metric["args"])
+            else:
+                raise NotImplementedError()
+            for sample in serialized_metric["samples"]:
+                metric.samples.append(
+                    prometheus_client.metrics_core.Sample(**sample),  # type: ignore[attr-defined]
+                )
+            yield metric
+
+
+class MemoryMapCollector(prometheus_client.registry.Collector):
+    """The Linux memory map provider."""
+
+    def __init__(self, memory_type: str = "pss", pids: Optional[list[str]] = None):
+        """
+        Initialize.
+
+        Arguments:
+
+            memory_type: can be rss, pss or size
+            pids: the list of pids or none
+        """
+        super().__init__()
+        self.memory_type = memory_type
+        self.pids = pids
+
+    def collect(self) -> Generator[prometheus_client.core.GaugeMetricFamily, None, None]:
+        """Get the gauge from smap file."""
+        gauge = prometheus_client.core.GaugeMetricFamily(
+            build_metric_name(f"process_smap_{self.memory_type}"),
+            f"Container smap used {self.memory_type.capitalize()}",
+            labels=["pid", "name"],
+            unit="bytes",
+        )
+
+        for pid in (
+            [p for p in os.listdir("/proc/") if _NUMBER_RE.match(p)] if self.pids is None else self.pids
+        ):
+            for e in dump_memory_maps(pid):
+                gauge.add_metric([pid, e["name"]], e[self.memory_type + "_kb"] * 1024)
+        yield gauge
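
The new module replaces the old Pushgateway publisher with standard prometheus_client collectors. For reference, a minimal sketch of how an application could use the functions added above; the port, prefix and metric name are illustrative values (only the `c2cwsgiutils_` prefix default comes from the code above), and `PROMETHEUS_MULTIPROC_DIR` is a prometheus_client requirement for its multi-process collector:

import os

import prometheus_client

from c2cwsgiutils import prometheus

# Illustrative values; in a real deployment these come from the environment.
os.environ.setdefault("C2C_PROMETHEUS_PORT", "9090")
os.environ.setdefault("C2C_PROMETHEUS_PREFIX", "myapp_")
os.environ.setdefault("PROMETHEUS_MULTIPROC_DIR", "/tmp/prometheus")  # needed by the multi-process collector

# Application metric carrying the shared prefix, i.e. "myapp_job_seconds".
_JOB_SUMMARY = prometheus_client.Summary(
    prometheus.build_metric_name("job"),
    "Duration of background jobs",
    ["kind"],
    unit="seconds",
)

# Spawn the side-car metrics HTTP server with the platform, multi-process and
# memory-map collectors registered (only done when C2C_PROMETHEUS_PORT is set).
prometheus.start()

with _JOB_SUMMARY.labels(kind="cleanup").time():
    ...  # do the actual work here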
c2cwsgiutils/pyramid.py CHANGED
@@ -12,8 +12,8 @@ from c2cwsgiutils import (
     errors,
     index,
     logging_view,
-    metrics,
     pretty_json,
+    prometheus,
     redis_stats,
     request_tracking,
     sentry,
@@ -22,6 +22,8 @@ from c2cwsgiutils import (
     version,
 )
 
+_LOG = logging.getLogger(__name__)
+
 
 def includeme(config: pyramid.config.Configurator) -> None:
     """
@@ -46,7 +48,7 @@ def includeme(config: pyramid.config.Configurator) -> None:
     config.include(logging_view.includeme)
     config.include(sql_profiler.includeme)
     config.include(version.includeme)
+    config.include(prometheus.includeme)
     config.include(debug.includeme)
-    config.include(metrics.includeme)
     config.include(errors.includeme)
     config.include(index.includeme)
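
The standard `includeme()` chain now wires `prometheus` instead of the removed `metrics` module, so an application that includes `c2cwsgiutils.pyramid` gets the Prometheus instrumentation with no extra code. A minimal sketch (settings and scan target are placeholders):

from pyramid.config import Configurator


def main(global_config, **settings):
    config = Configurator(settings=settings)
    # One include pulls in errors, index, prometheus, request tracking, etc.
    config.include("c2cwsgiutils.pyramid")
    config.scan()  # placeholder: scan your own views package
    return config.make_wsgi_app()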
c2cwsgiutils/pyramid_logging.py CHANGED
@@ -14,7 +14,8 @@ import json
 import logging
 import logging.config
 import socket
-from typing import TYPE_CHECKING, Any, Mapping, MutableMapping, Optional, TextIO
+from collections.abc import Mapping, MutableMapping
+from typing import TYPE_CHECKING, Any, Optional, TextIO
 
 import cee_syslog_handler
 from pyramid.threadlocal import get_current_request
c2cwsgiutils/redis_stats.py CHANGED
@@ -1,25 +1,27 @@
 import logging
 import warnings
-from typing import Any, Callable, Dict, Optional  # noqa # pylint: disable=unused-import
+from typing import Any, Callable, Optional
 
+import prometheus_client
 import pyramid.config
 
-from c2cwsgiutils import config_utils, stats
+from c2cwsgiutils import config_utils, prometheus
 
 LOG = logging.getLogger(__name__)
 ORIG: Optional[Callable[..., Any]] = None
 
+_PROMETHEUS_REDIS_SUMMARY = prometheus_client.Summary(
+    prometheus.build_metric_name("redis"),
+    "Number of redis commands",
+    ["command"],
+    unit="seconds",
+)
 
-def _execute_command_patch(self: Any, *args: Any, **options: Any) -> Any:
-    if stats.USE_TAGS:
-        key = ["redis"]
-        tags: Optional[Dict[str, str]] = dict(cmd=args[0])
-    else:
-        key = ["redis", args[0]]
-        tags = None
+
+def _execute_command_patch(self: Any, command: str, *args: Any, **options: Any) -> Any:
     assert ORIG is not None
-    with stats.outcome_timer_context(key, tags):
-        return ORIG(self, *args, **options)
+    with _PROMETHEUS_REDIS_SUMMARY.labels(command=command).time():
+        return ORIG(self, command, *args, **options)
 
 
 def init(config: Optional[pyramid.config.Configurator] = None) -> None:
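
The per-command timing previously sent through the removed `stats` module is now recorded with a `prometheus_client.Summary` labelled by command. A small self-contained sketch of the same pattern (metric and command names are made up for illustration):

import time

import prometheus_client

# One Summary, labelled by command, observed around the wrapped call.
_EXAMPLE_REDIS_SUMMARY = prometheus_client.Summary(
    "example_redis", "Duration of redis commands", ["command"], unit="seconds"
)


def timed_call(command, func, *args, **kwargs):
    # Equivalent to the labels(...).time() context manager used above.
    with _EXAMPLE_REDIS_SUMMARY.labels(command=command).time():
        return func(*args, **kwargs)


timed_call("GET", time.sleep, 0.01)  # stand-in for a real redis call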
c2cwsgiutils/redis_utils.py CHANGED
@@ -1,7 +1,8 @@
 import logging
 import threading
 import time
-from typing import Any, Mapping, Optional, Tuple
+from collections.abc import Mapping
+from typing import Any, Optional
 
 import redis.client
 import redis.exceptions
@@ -29,9 +30,17 @@ _slave: Optional["redis.client.Redis[str]"] = None
 _sentinel: Optional[redis.sentinel.Sentinel] = None
 
 
+def cleanup() -> None:
+    """Cleanup the redis connections."""
+    global _master, _slave, _sentinel
+    _master = None
+    _slave = None
+    _sentinel = None
+
+
 def get(
     settings: Optional[Mapping[str, bytes]] = None,
-) -> Tuple[
+) -> tuple[
     Optional["redis.client.Redis[str]"],
     Optional["redis.client.Redis[str]"],
     Optional[redis.sentinel.Sentinel],
@@ -73,19 +82,11 @@ def _init(settings: Optional[Mapping[str, Any]]) -> None:
             db=db,
             **redis_options,
         )
-
-        try:
-            LOG.info("Redis setup using: %s, %s, %s", sentinels, service_name, redis_options_)
-            _master = _sentinel.master_for(service_name)
-            _slave = _sentinel.slave_for(service_name)
-            return
-        except redis.sentinel.MasterNotFoundError as error:
-            print(_sentinel.sentinels[0].sentinel_masters())
-            raise Exception(_sentinel.sentinels[0].sentinel_masters()) from error
+        LOG.info("Redis setup using: %s, %s, %s", sentinels, service_name, redis_options_)
+        _master = _sentinel.master_for(service_name)
+        _slave = _sentinel.slave_for(service_name)
+        return
     if url:
-        if "://" not in url:
-            url = "redis://" + url
-
         LOG.info("Redis setup using: %s, with options: %s", url, redis_options_)
         _master = redis.client.Redis.from_url(url, decode_responses=True, **redis_options)
         _slave = _master
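
With the new `cleanup()` helper the cached connections can be dropped and re-created by the next `get()` call, which returns the `(master, slave, sentinel)` triple. A short usage sketch (key and value are placeholders):

from c2cwsgiutils import redis_utils

# Lazily initializes the connections from the environment/settings;
# each element may be None when Redis is not configured.
master, slave, sentinel = redis_utils.get()
if master is not None:
    master.set("example-key", "example-value")
    print(slave.get("example-key") if slave is not None else None)

# Drop the cached connections (also called from prometheus.cleanup()),
# so the next get() rebuilds them.
redis_utils.cleanup()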
c2cwsgiutils/request_tracking/__init__.py CHANGED
@@ -4,22 +4,31 @@ Allows to track the request_id in the logs, the DB and others.
 Adds a c2c_request_id attribute to the Pyramid Request class to access it.
 """
 import logging
+import time
 import urllib.parse
 import uuid
 import warnings
-from typing import Any, Dict, List, Optional, Sequence  # noqa # pylint: disable=unused-import
+from collections.abc import Mapping
+from typing import Optional, Union
 
+import prometheus_client
 import pyramid.request
 import requests.adapters
 import requests.models
 from pyramid.threadlocal import get_current_request
 
-from c2cwsgiutils import config_utils, stats
+from c2cwsgiutils import config_utils, prometheus
 
-ID_HEADERS: List[str] = []
+ID_HEADERS: list[str] = []
 _HTTPAdapter_send = requests.adapters.HTTPAdapter.send
 LOG = logging.getLogger(__name__)
 DEFAULT_TIMEOUT: Optional[float] = None
+_PROMETHEUS_REQUESTS_SUMMARY = prometheus_client.Summary(
+    prometheus.build_metric_name("requests"),
+    "Requests requests",
+    ["scheme", "hostname", "port", "method", "status", "group"],
+    unit="seconds",
+)
 
 
 def _gen_request_id(request: pyramid.request.Request) -> str:
@@ -33,8 +42,11 @@ def _patch_requests() -> None:
     def send_wrapper(
         self: requests.adapters.HTTPAdapter,
         request: requests.models.PreparedRequest,
-        timeout: Optional[float] = None,
-        **kwargs: Any,
+        stream: bool = False,
+        timeout: Union[None, float, tuple[float, float], tuple[float, None]] = None,
+        verify: Union[bool, str] = True,
+        cert: Union[None, bytes, str, tuple[Union[bytes, str], Union[bytes, str]]] = None,
+        proxies: Optional[Mapping[str, str]] = None,
     ) -> requests.Response:
         pyramid_request = get_current_request()
         header = ID_HEADERS[0]
@@ -47,31 +59,25 @@ def _patch_requests() -> None:
         else:
             LOG.warning("Doing a %s request without timeout to %s", request.method, request.url)
 
-        status = 999
-        timer = stats.timer()
-        try:
-            response = _HTTPAdapter_send(self, request, timeout=timeout, **kwargs)
-            status = response.status_code
-            return response
-        finally:
-            if request.url is not None:
-                parsed = urllib.parse.urlparse(request.url)
-                port = parsed.port or (80 if parsed.scheme == "http" else 443)
-                if stats.USE_TAGS:
-                    key: Sequence[Any] = ["requests"]
-                    tags: Optional[Dict[str, Any]] = dict(
-                        scheme=parsed.scheme,
-                        host=parsed.hostname,
-                        port=port,
-                        method=request.method,
-                        status=status,
-                    )
-                else:
-                    key = ["requests", parsed.scheme, parsed.hostname, port, request.method, status]
-                    tags = None
-                timer.stop(key, tags)
-
-    requests.adapters.HTTPAdapter.send = send_wrapper  # type: ignore
+        assert request.url
+        parsed = urllib.parse.urlparse(request.url)
+        port = parsed.port or (80 if parsed.scheme == "http" else 443)
+        start = time.perf_counter()
+        response = _HTTPAdapter_send(
+            self, request, timeout=timeout, stream=stream, verify=verify, cert=cert, proxies=proxies
+        )
+
+        _PROMETHEUS_REQUESTS_SUMMARY.labels(
+            scheme=parsed.scheme,
+            hostname=parsed.hostname,
+            port=str(port),
+            method=request.method,
+            status=str(response.status_code),
+            group=str(response.status_code // 100 * 100),
+        ).observe(time.perf_counter() - start)
+        return response
+
+    requests.adapters.HTTPAdapter.send = send_wrapper  # type: ignore[method-assign]
 
 
 def init(config: Optional[pyramid.config.Configurator] = None) -> None:
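
The patched `send_wrapper` now times every outgoing request with `time.perf_counter()` and records it on a Summary whose `group` label buckets the status code via `status // 100 * 100` (404 becomes 400, 503 becomes 500). A reduced sketch of that recording logic (the metric name is illustrative):

import time

import prometheus_client

_EXAMPLE_REQUESTS_SUMMARY = prometheus_client.Summary(
    "example_requests", "Outgoing HTTP requests", ["status", "group"], unit="seconds"
)


def observe(status_code, elapsed):
    # group buckets the status code: 404 // 100 * 100 == 400, 503 // 100 * 100 == 500.
    _EXAMPLE_REQUESTS_SUMMARY.labels(
        status=str(status_code),
        group=str(status_code // 100 * 100),
    ).observe(elapsed)


start = time.perf_counter()
# ... perform the request here ...
observe(404, time.perf_counter() - start)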
c2cwsgiutils/request_tracking/_sql.py CHANGED
@@ -8,7 +8,9 @@ from sqlalchemy.orm import Session
 def _add_session_id(session: Session, _transaction: Any, _connection: Any) -> None:
     request = get_current_request()
     if request is not None:
-        session.execute("set application_name=:session_id", params={"session_id": request.c2c_request_id})
+        session.execute(
+            sqlalchemy.text("set application_name=:session_id"), params={"session_id": request.c2c_request_id}
+        )
 
 
 def init() -> None:
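
Wrapping the statement in `sqlalchemy.text()` is what SQLAlchemy 2.x requires for textual SQL; passing a plain string to `Session.execute()` was removed there. A minimal sketch of the same call outside the event handler (connection URL and request id are placeholders):

import sqlalchemy
from sqlalchemy.orm import Session

engine = sqlalchemy.create_engine("postgresql://user:password@localhost/db")  # placeholder URL

with Session(engine) as session:
    session.execute(
        sqlalchemy.text("set application_name=:session_id"),
        params={"session_id": "example-request-id"},
    )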
c2cwsgiutils/scripts/genversion.py CHANGED
@@ -6,14 +6,14 @@ import re
 import subprocess  # nosec
 import sys
 import warnings
-from typing import Dict, Optional, Tuple, cast
+from typing import Optional, cast
 
 SRC_VERSION_RE = re.compile(r"^.*\(([^=]*)===?([^=]*)\)$")
 VERSION_RE = re.compile(r"^([^=]*)==([^=]*)$")
 LOG = logging.getLogger(__name__)
 
 
-def _get_package_version(comp: str) -> Tuple[Optional[str], Optional[str]]:
+def _get_package_version(comp: str) -> tuple[Optional[str], Optional[str]]:
     """
     Parse plain and editable versions.
 
@@ -22,14 +22,14 @@ def _get_package_version(comp: str) -> Tuple[Optional[str], Optional[str]]:
     src_matcher = SRC_VERSION_RE.match(comp)
     matcher = src_matcher or VERSION_RE.match(comp)
     if matcher:
-        return cast(Tuple[str, str], matcher.groups())
+        return cast(tuple[str, str], matcher.groups())
     else:
         if len(comp) > 0 and not comp[:3] == "-e ":
             print("Cannot parse package version: " + comp)
         return None, None
 
 
-def _get_packages_version() -> Dict[str, str]:
+def _get_packages_version() -> dict[str, str]:
     result = {}
     with open(os.devnull, "w", encoding="utf-8") as devnull:
         for comp in (
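
For reference, the two regular expressions above accept both plain `name==version` lines and editable-install lines ending in `(name==version)` from `pip freeze`; `_get_package_version()` returns the captured pair. A quick sketch with made-up input lines:

import re

SRC_VERSION_RE = re.compile(r"^.*\(([^=]*)===?([^=]*)\)$")
VERSION_RE = re.compile(r"^([^=]*)==([^=]*)$")

print(VERSION_RE.match("requests==2.31.0").groups())  # ('requests', '2.31.0')

line = "-e git+https://example.com/repo.git#egg=mypkg (mypkg==1.2.3)"
print(SRC_VERSION_RE.match(line).groups())  # ('mypkg', '1.2.3')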