c2cwsgiutils-6.1.0.dev105-py3-none-any.whl → c2cwsgiutils-6.1.7-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (56)
  1. c2cwsgiutils/__init__.py +14 -11
  2. c2cwsgiutils/acceptance/__init__.py +2 -3
  3. c2cwsgiutils/acceptance/connection.py +1 -2
  4. c2cwsgiutils/acceptance/image.py +17 -11
  5. c2cwsgiutils/acceptance/package-lock.json +306 -213
  6. c2cwsgiutils/acceptance/package.json +2 -2
  7. c2cwsgiutils/acceptance/print.py +7 -3
  8. c2cwsgiutils/acceptance/utils.py +1 -3
  9. c2cwsgiutils/auth.py +27 -25
  10. c2cwsgiutils/broadcast/__init__.py +15 -16
  11. c2cwsgiutils/broadcast/interface.py +3 -3
  12. c2cwsgiutils/broadcast/local.py +1 -0
  13. c2cwsgiutils/broadcast/redis.py +13 -12
  14. c2cwsgiutils/client_info.py +19 -1
  15. c2cwsgiutils/coverage_setup.py +4 -3
  16. c2cwsgiutils/db.py +35 -41
  17. c2cwsgiutils/db_maintenance_view.py +13 -13
  18. c2cwsgiutils/debug/__init__.py +2 -2
  19. c2cwsgiutils/debug/_listeners.py +2 -7
  20. c2cwsgiutils/debug/_views.py +20 -12
  21. c2cwsgiutils/debug/utils.py +9 -9
  22. c2cwsgiutils/errors.py +13 -15
  23. c2cwsgiutils/health_check.py +24 -30
  24. c2cwsgiutils/index.py +34 -13
  25. c2cwsgiutils/loader.py +21 -2
  26. c2cwsgiutils/logging_view.py +12 -12
  27. c2cwsgiutils/models_graph.py +0 -1
  28. c2cwsgiutils/pretty_json.py +0 -1
  29. c2cwsgiutils/prometheus.py +10 -10
  30. c2cwsgiutils/pyramid.py +0 -1
  31. c2cwsgiutils/pyramid_logging.py +1 -1
  32. c2cwsgiutils/redis_stats.py +9 -9
  33. c2cwsgiutils/redis_utils.py +19 -18
  34. c2cwsgiutils/request_tracking/__init__.py +13 -13
  35. c2cwsgiutils/request_tracking/_sql.py +0 -1
  36. c2cwsgiutils/scripts/genversion.py +5 -5
  37. c2cwsgiutils/scripts/stats_db.py +19 -17
  38. c2cwsgiutils/scripts/test_print.py +5 -5
  39. c2cwsgiutils/sentry.py +55 -20
  40. c2cwsgiutils/services.py +2 -2
  41. c2cwsgiutils/setup_process.py +0 -1
  42. c2cwsgiutils/sql_profiler/__init__.py +5 -6
  43. c2cwsgiutils/sql_profiler/_impl.py +18 -17
  44. c2cwsgiutils/sqlalchemylogger/README.md +30 -13
  45. c2cwsgiutils/sqlalchemylogger/handlers.py +12 -11
  46. c2cwsgiutils/stats_pyramid/__init__.py +1 -5
  47. c2cwsgiutils/stats_pyramid/_db_spy.py +2 -2
  48. c2cwsgiutils/stats_pyramid/_pyramid_spy.py +12 -1
  49. c2cwsgiutils/templates/index.html.mako +4 -1
  50. c2cwsgiutils/version.py +11 -5
  51. {c2cwsgiutils-6.1.0.dev105.dist-info → c2cwsgiutils-6.1.7.dist-info}/LICENSE +1 -1
  52. {c2cwsgiutils-6.1.0.dev105.dist-info → c2cwsgiutils-6.1.7.dist-info}/METADATA +18 -6
  53. c2cwsgiutils-6.1.7.dist-info/RECORD +67 -0
  54. {c2cwsgiutils-6.1.0.dev105.dist-info → c2cwsgiutils-6.1.7.dist-info}/WHEEL +1 -1
  55. c2cwsgiutils-6.1.0.dev105.dist-info/RECORD +0 -67
  56. {c2cwsgiutils-6.1.0.dev105.dist-info → c2cwsgiutils-6.1.7.dist-info}/entry_points.txt +0 -0
c2cwsgiutils/redis_utils.py CHANGED
@@ -11,19 +11,19 @@ import yaml
 
 import c2cwsgiutils.config_utils
 
-LOG = logging.getLogger(__name__)
+_LOG = logging.getLogger(__name__)
 
 REDIS_URL_KEY = "C2C_REDIS_URL"
-REDIS_OPTIONS_KEY = "C2C_REDIS_OPTIONS"
+_REDIS_OPTIONS_KEY = "C2C_REDIS_OPTIONS"
 REDIS_SENTINELS_KEY = "C2C_REDIS_SENTINELS"
 REDIS_SERVICENAME_KEY = "C2C_REDIS_SERVICENAME"
-REDIS_DB_KEY = "C2C_REDIS_DB"
+_REDIS_DB_KEY = "C2C_REDIS_DB"
 
 REDIS_URL_KEY_PROP = "c2c.redis_url"
-REDIS_OPTIONS_KEY_PROP = "c2c.redis_options"
+_REDIS_OPTIONS_KEY_PROP = "c2c.redis_options"
 REDIS_SENTINELS_KEY_PROP = "c2c.redis_sentinels"
 REDIS_SERVICENAME_KEY_PROP = "c2c.redis_servicename"
-REDIS_DB_KEY_PROP = "c2c.redis_db"
+_REDIS_DB_KEY_PROP = "c2c.redis_db"
 
 _master: Optional["redis.client.Redis[str]"] = None
 _slave: Optional["redis.client.Redis[str]"] = None
@@ -32,7 +32,7 @@ _sentinel: Optional[redis.sentinel.Sentinel] = None
 
 def cleanup() -> None:
     """Cleanup the redis connections."""
-    global _master, _slave, _sentinel
+    global _master, _slave, _sentinel  # pylint: disable=global-statement
     _master = None
     _slave = None
     _sentinel = None
@@ -52,17 +52,17 @@ def get(
 
 
 def _init(settings: Optional[Mapping[str, Any]]) -> None:
-    global _master, _slave, _sentinel
+    global _master, _slave, _sentinel  # pylint: disable=global-statement
     sentinels = c2cwsgiutils.config_utils.env_or_settings(
         settings, REDIS_SENTINELS_KEY, REDIS_SENTINELS_KEY_PROP
     )
     service_name = c2cwsgiutils.config_utils.env_or_settings(
         settings, REDIS_SERVICENAME_KEY, REDIS_SERVICENAME_KEY_PROP
     )
-    db = c2cwsgiutils.config_utils.env_or_settings(settings, REDIS_DB_KEY, REDIS_DB_KEY_PROP)
+    db = c2cwsgiutils.config_utils.env_or_settings(settings, _REDIS_DB_KEY, _REDIS_DB_KEY_PROP)
     url = c2cwsgiutils.config_utils.env_or_settings(settings, REDIS_URL_KEY, REDIS_URL_KEY_PROP)
     redis_options_ = c2cwsgiutils.config_utils.env_or_settings(
-        settings, REDIS_OPTIONS_KEY, REDIS_OPTIONS_KEY_PROP
+        settings, _REDIS_OPTIONS_KEY, _REDIS_OPTIONS_KEY_PROP
     )
 
     redis_options = (
@@ -82,16 +82,16 @@ def _init(settings: Optional[Mapping[str, Any]]) -> None:
             db=db,
             **redis_options,
         )
-        LOG.info("Redis setup using: %s, %s, %s", sentinels, service_name, redis_options_)
+        _LOG.info("Redis setup using: %s, %s, %s", sentinels, service_name, redis_options_)
         _master = _sentinel.master_for(service_name)
         _slave = _sentinel.slave_for(service_name)
         return
     if url:
-        LOG.info("Redis setup using: %s, with options: %s", url, redis_options_)
+        _LOG.info("Redis setup using: %s, with options: %s", url, redis_options_)
         _master = redis.client.Redis.from_url(url, decode_responses=True, **redis_options)
         _slave = _master
     else:
-        LOG.info(
+        _LOG.info(
             "No Redis configuration found, use %s or %s to configure it", REDIS_URL_KEY, REDIS_SENTINELS_KEY
         )
 
@@ -114,22 +114,23 @@ class PubSubWorkerThread(threading.Thread):
             try:
                 pubsub.get_message(ignore_subscribe_messages=True, timeout=1)
                 if not last_was_ok:
-                    LOG.info("Redis is back")
+                    _LOG.info("Redis is back")
                     last_was_ok = True
             except redis.exceptions.RedisError:
                 if last_was_ok:
-                    LOG.warning("Redis connection problem")
+                    _LOG.warning("Redis connection problem")
                     last_was_ok = False
                 time.sleep(0.5)
             except Exception:  # pylint: disable=broad-except
-                LOG.warning("Unexpected error", exc_info=True)
-        LOG.info("Redis subscription worker stopped")
+                _LOG.warning("Unexpected error", exc_info=True)
+        _LOG.info("Redis subscription worker stopped")
         pubsub.close()
         self._running = False
 
     def stop(self) -> None:
-        # stopping simply unsubscribes from all channels and patterns.
-        # the unsubscribe responses that are generated will short circuit
+        """Stop the worker."""
+        # Stopping simply unsubscribes from all channels and patterns.
+        # The unsubscribe responses that are generated will short circuit
         # the loop in run(), calling pubsub.close() to clean up the connection
         self.pubsub.unsubscribe()
         self.pubsub.punsubscribe()
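Note on the Redis changes above: the option and database constants become module-private, while the URL and Sentinel keys stay public, so configuration should go through the environment variable names themselves rather than imported constants. A minimal sketch, assuming the hostnames and the option syntax shown here (both are illustrative, not part of the package):

```python
import os

# Single-instance setup; the variable names are the public contract.
os.environ["C2C_REDIS_URL"] = "redis://redis:6379/0"  # hypothetical URL
os.environ["C2C_REDIS_OPTIONS"] = "socket_timeout=3"  # assumed option syntax

# Or a Sentinel deployment instead of a single URL:
# os.environ["C2C_REDIS_SENTINELS"] = "sentinel-1:26379,sentinel-2:26379"
# os.environ["C2C_REDIS_SERVICENAME"] = "mymaster"
```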
c2cwsgiutils/request_tracking/__init__.py CHANGED
@@ -20,10 +20,10 @@ from pyramid.threadlocal import get_current_request
 
 from c2cwsgiutils import config_utils, prometheus
 
-ID_HEADERS: list[str] = []
+_ID_HEADERS: list[str] = []
 _HTTPAdapter_send = requests.adapters.HTTPAdapter.send
-LOG = logging.getLogger(__name__)
-DEFAULT_TIMEOUT: Optional[float] = None
+_LOG = logging.getLogger(__name__)
+_DEFAULT_TIMEOUT: Optional[float] = None
 _PROMETHEUS_REQUESTS_SUMMARY = prometheus_client.Summary(
     prometheus.build_metric_name("requests"),
     "Requests requests",
@@ -33,7 +33,7 @@ _PROMETHEUS_REQUESTS_SUMMARY = prometheus_client.Summary(
 
 
 def _gen_request_id(request: pyramid.request.Request) -> str:
-    for id_header in ID_HEADERS:
+    for id_header in _ID_HEADERS:
         if id_header in request.headers:
             return request.headers[id_header]  # type: ignore
     return str(uuid.uuid4())
@@ -50,15 +50,15 @@ def _patch_requests() -> None:
         proxies: Optional[Mapping[str, str]] = None,
     ) -> requests.Response:
         pyramid_request = get_current_request()
-        header = ID_HEADERS[0]
+        header = _ID_HEADERS[0]
         if pyramid_request is not None and header not in request.headers:
             request.headers[header] = pyramid_request.c2c_request_id
 
         if timeout is None:
-            if DEFAULT_TIMEOUT is not None:
-                timeout = DEFAULT_TIMEOUT
+            if _DEFAULT_TIMEOUT is not None:
+                timeout = _DEFAULT_TIMEOUT
             else:
-                LOG.warning("Doing a %s request without timeout to %s", request.method, request.url)
+                _LOG.warning("Doing a %s request without timeout to %s", request.method, request.url)
 
         assert request.url
         parsed = urllib.parse.urlparse(request.url)
@@ -94,20 +94,20 @@ def includeme(config: Optional[pyramid.config.Configurator] = None) -> None:
     Use a X-Request-ID (or other) header to track all the logs related to a request
     including on the sub services.
     """
-    global ID_HEADERS, DEFAULT_TIMEOUT
-    ID_HEADERS = ["X-Request-ID", "X-Correlation-ID", "Request-ID", "X-Varnish", "X-Amzn-Trace-Id"]
+    global _ID_HEADERS, _DEFAULT_TIMEOUT  # pylint: disable=global-statement
+    _ID_HEADERS = ["X-Request-ID", "X-Correlation-ID", "Request-ID", "X-Varnish", "X-Amzn-Trace-Id"]
     if config is not None:
         extra_header = config_utils.env_or_config(config, "C2C_REQUEST_ID_HEADER", "c2c.request_id_header")
         if extra_header:
-            ID_HEADERS.insert(0, extra_header)
+            _ID_HEADERS.insert(0, extra_header)
         config.add_request_method(_gen_request_id, "c2c_request_id", reify=True)
 
-    DEFAULT_TIMEOUT = config_utils.env_or_config(
+    _DEFAULT_TIMEOUT = config_utils.env_or_config(
        config, "C2C_REQUESTS_DEFAULT_TIMEOUT", "c2c.requests_default_timeout", type_=float
     )
     _patch_requests()
 
     if config_utils.env_or_config(config, "C2C_SQL_REQUEST_ID", "c2c.sql_request_id", False):
-        from . import _sql
+        from . import _sql  # pylint: disable=import-outside-toplevel
 
         _sql.init()
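Note on the request-tracking changes above: the patched `HTTPAdapter.send()` forwards the request-ID header and injects a default timeout taken from `C2C_REQUESTS_DEFAULT_TIMEOUT`. A minimal standalone sketch of the same default-timeout idea, with a hard-coded 30 s value standing in for the configured setting:

```python
import logging

import requests.adapters

_original_send = requests.adapters.HTTPAdapter.send
_log = logging.getLogger(__name__)


def _send_with_default_timeout(self, request, **kwargs):
    """Apply a fallback timeout when the caller did not pass one (illustrative)."""
    if kwargs.get("timeout") is None:
        _log.warning("Doing a %s request without timeout to %s", request.method, request.url)
        kwargs["timeout"] = 30  # hypothetical default, in seconds
    return _original_send(self, request, **kwargs)


requests.adapters.HTTPAdapter.send = _send_with_default_timeout  # type: ignore[method-assign]
```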
c2cwsgiutils/request_tracking/_sql.py CHANGED
@@ -15,5 +15,4 @@ def _add_session_id(session: Session, _transaction: Any) -> None:
 
 def init() -> None:
     """Initialize the SQL alchemy session selector."""
-
     sqlalchemy.event.listen(Session, "after_transaction_create", _add_session_id)
c2cwsgiutils/scripts/genversion.py CHANGED
@@ -8,9 +8,9 @@ import sys
 import warnings
 from typing import Optional, cast
 
-SRC_VERSION_RE = re.compile(r"^.*\(([^=]*)===?([^=]*)\)$")
-VERSION_RE = re.compile(r"^([^=]*)==([^=]*)$")
-LOG = logging.getLogger(__name__)
+_SRC_VERSION_RE = re.compile(r"^.*\(([^=]*)===?([^=]*)\)$")
+_VERSION_RE = re.compile(r"^([^=]*)==([^=]*)$")
+_LOG = logging.getLogger(__name__)
 
 
 def _get_package_version(comp: str) -> tuple[Optional[str], Optional[str]]:
@@ -19,8 +19,8 @@ def _get_package_version(comp: str) -> tuple[Optional[str], Optional[str]]:
 
     See test_genversion.py for examples.
     """
-    src_matcher = SRC_VERSION_RE.match(comp)
-    matcher = src_matcher or VERSION_RE.match(comp)
+    src_matcher = _SRC_VERSION_RE.match(comp)
+    matcher = src_matcher or _VERSION_RE.match(comp)
     if matcher:
         return cast(tuple[str, str], matcher.groups())
     else:
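Note on the genversion change above: only the names were made private; the patterns are unchanged. A quick illustration of what they match (the input strings are made up):

```python
import re

SRC_VERSION_RE = re.compile(r"^.*\(([^=]*)===?([^=]*)\)$")
VERSION_RE = re.compile(r"^([^=]*)==([^=]*)$")

# Plain pinned requirement -> ('requests', '2.31.0')
print(VERSION_RE.match("requests==2.31.0").groups())

# "name (dist===version)" style line -> ('c2cwsgiutils', '6.1.7')
print(SRC_VERSION_RE.match("c2cwsgiutils (c2cwsgiutils===6.1.7)").groups())
```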
c2cwsgiutils/scripts/stats_db.py CHANGED
@@ -6,7 +6,7 @@ import logging
 import os
 import sys
 import time
-from typing import TYPE_CHECKING, Optional
+from typing import Optional
 from wsgiref.simple_server import make_server
 
 import sqlalchemy
@@ -20,12 +20,9 @@ from zope.sqlalchemy import register
 import c2cwsgiutils.setup_process
 from c2cwsgiutils import prometheus
 
-if TYPE_CHECKING:
-    scoped_session = sqlalchemy.orm.scoped_session[sqlalchemy.orm.Session]
-else:
-    scoped_session = sqlalchemy.orm.scoped_session
+scoped_session = sqlalchemy.orm.scoped_session[sqlalchemy.orm.Session]
 
-LOG = logging.getLogger(__name__)
+_LOG = logging.getLogger(__name__)
 
 
 def _parse_args() -> argparse.Namespace:
@@ -72,6 +69,7 @@ class Reporter:
         self.gauges: dict[str, Gauge] = {}
 
     def get_gauge(self, kind: str, kind_help: str, labels: list[str]) -> Gauge:
+        """Get a gauge."""
         if kind not in self.gauges:
             self.gauges[kind] = Gauge(
                 prometheus.build_metric_name(f"database_{kind}"),
@@ -84,25 +82,30 @@ class Reporter:
     def do_report(
         self, metric: list[str], value: int, kind: str, kind_help: str, tags: dict[str, str]
     ) -> None:
-        LOG.debug("%s.%s -> %d", kind, ".".join(metric), value)
+        """Report a metric."""
+        _LOG.debug("%s.%s -> %d", kind, ".".join(metric), value)
         gauge = self.get_gauge(kind, kind_help, list(tags.keys()))
         gauge.labels(**tags).set(value)
 
     def commit(self) -> None:
+        """Commit the metrics."""
         if self.prometheus_push:
             push_to_gateway(self.args.prometheus_url, job="db_counts", registry=self.registry)
         else:
             port = int(os.environ.get("C2C_PROMETHEUS_PORT", "9090"))
             app = make_wsgi_app(self.registry)
             with make_server("", port, app) as httpd:
-                LOG.info("Waiting that Prometheus get the metrics served on port %s...", port)
+                _LOG.info("Waiting that Prometheus get the metrics served on port %s...", port)
                 httpd.handle_request()
 
     def error(self, metric: list[str], error_: Exception) -> None:
+        """Report an error."""
+        del metric
         if self._error is None:
             self._error = error_
 
     def report_error(self) -> None:
+        """Raise the error if any."""
         if self._error is not None:
             raise self._error
 
@@ -225,7 +228,6 @@ def _do_table_count(
 
 def do_extra(session: scoped_session, sql: str, kind: str, gauge_help: str, reporter: Reporter) -> None:
     """Do an extra report."""
-
     for metric, count in session.execute(sqlalchemy.text(sql)):
         reporter.do_report(
             str(metric).split("."), count, kind=kind, kind_help=gauge_help, tags={"metric": metric}
@@ -253,29 +255,29 @@ def _do_dtats_db(args: argparse.Namespace) -> None:
         params={"schemas": tuple(args.schema)},
     ).fetchall()
     for schema, table in tables:
-        LOG.info("Process table %s.%s.", schema, table)
+        _LOG.info("Process table %s.%s.", schema, table)
         try:
             do_table(session, schema, table, reporter)
         except Exception as e:  # pylint: disable=broad-except
-            LOG.exception("Process table %s.%s error.", schema, table)
+            _LOG.exception("Process table %s.%s error.", schema, table)
             reporter.error([schema, table], e)
 
     if args.extra:
         for pos, extra in enumerate(args.extra):
-            LOG.info("Process extra %s.", extra)
+            _LOG.info("Process extra %s.", extra)
             try:
                 do_extra(session, extra, "extra", "Extra metric", reporter)
             except Exception as e:  # pylint: disable=broad-except
-                LOG.exception("Process extra %s error.", extra)
+                _LOG.exception("Process extra %s error.", extra)
                 reporter.error(["extra", str(pos + 1)], e)
     if args.extra_gauge:
         for pos, extra in enumerate(args.extra_gauge):
             sql, gauge, gauge_help = extra
-            LOG.info("Process extra %s.", extra)
+            _LOG.info("Process extra %s.", extra)
             try:
                 do_extra(session, sql, gauge, gauge_help, reporter)
             except Exception as e:  # pylint: disable=broad-except
-                LOG.exception("Process extra %s error.", extra)
+                _LOG.exception("Process extra %s error.", extra)
                 reporter.error(["extra", str(len(args.extra) + pos + 1)], e)
 
     reporter.commit()
@@ -294,11 +296,11 @@ def main() -> None:
             success = True
             break
         except:  # pylint: disable=bare-except
-            LOG.exception("Exception during run")
+            _LOG.exception("Exception during run")
             time.sleep(float(os.environ.get("C2CWSGIUTILS_STATS_DB_SLEEP", 1)))
 
     if not success:
-        LOG.error("Not in success, exiting")
+        _LOG.error("Not in success, exiting")
         sys.exit(1)
 
 
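Note on the stats_db changes above: `Reporter` keeps one labelled Gauge per "kind" and either pushes the registry to a gateway or serves it once over HTTP. A minimal sketch of the same reporting pattern, with a hypothetical gateway address and made-up values:

```python
from prometheus_client import CollectorRegistry, Gauge, push_to_gateway

registry = CollectorRegistry()
gauge = Gauge("database_table_size", "Size of the tables", ["schema", "table"], registry=registry)
gauge.labels(schema="public", table="users").set(12345)  # made-up value

# Equivalent of Reporter.commit() in push mode (gateway address is hypothetical).
push_to_gateway("pushgateway:9091", job="db_counts", registry=registry)
```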
c2cwsgiutils/scripts/test_print.py CHANGED
@@ -8,7 +8,7 @@ import warnings
 import c2cwsgiutils.setup_process
 from c2cwsgiutils.acceptance.print import PrintConnection
 
-LOG = logging.getLogger(__name__)
+_LOG = logging.getLogger(__name__)
 
 
 def _parse_args() -> argparse.Namespace:
@@ -38,7 +38,7 @@ def main() -> None:
     if args.app is None:
         for app in print_.get_apps():
             if app != "default":
-                LOG.info("\n\n%s=================", app)
+                _LOG.info("\n\n%s=================", app)
                 test_app(print_, app)
     else:
         test_app(print_, args.app)
@@ -47,13 +47,13 @@ def main() -> None:
 
 def test_app(print_: PrintConnection, app: str) -> None:
     """Test the application."""
     capabilities = print_.get_capabilities(app)
-    LOG.debug("Capabilities:\n%s", pprint.pformat(capabilities))
+    _LOG.debug("Capabilities:\n%s", pprint.pformat(capabilities))
     examples = print_.get_example_requests(app)
     for name, request in examples.items():
-        LOG.info("\n%s-----------------", name)
+        _LOG.info("\n%s-----------------", name)
         pdf = print_.get_pdf(app, request)
         size = len(pdf.content)
-        LOG.info("Size=%d", size)
+        _LOG.info("Size=%d", size)
 
 
 if __name__ == "__main__":
c2cwsgiutils/sentry.py CHANGED
@@ -6,7 +6,8 @@ from collections.abc import Generator, MutableMapping
 from typing import Any, Callable, Optional
 
 import pyramid.config
-import sentry_sdk
+import sentry_sdk.integrations
+from sentry_sdk.integrations.asyncio import AsyncioIntegration
 from sentry_sdk.integrations.logging import LoggingIntegration, ignore_logger
 from sentry_sdk.integrations.pyramid import PyramidIntegration
 from sentry_sdk.integrations.redis import RedisIntegration
@@ -15,14 +16,15 @@ from sentry_sdk.integrations.wsgi import SentryWsgiMiddleware
 
 from c2cwsgiutils import config_utils
 
-LOG = logging.getLogger(__name__)
-_client_setup = False
+_LOG = logging.getLogger(__name__)
+_CLIENT_SETUP = False
 
 
 def _create_before_send_filter(tags: MutableMapping[str, str]) -> Callable[[Any, Any], Any]:
     """Create a filter that adds tags to every events."""
 
     def do_filter(event: Any, hint: Any) -> Any:
+        del hint
         event.setdefault("tags", {}).update(tags)
         return event
 
@@ -31,17 +33,15 @@ def _create_before_send_filter(tags: MutableMapping[str, str]) -> Callable[[Any,
 
 def init(config: Optional[pyramid.config.Configurator] = None) -> None:
     """Initialize the Sentry integration, for backward compatibility."""
-
     warnings.warn("init function is deprecated; use includeme instead")
     includeme(config)
 
 
 def includeme(config: Optional[pyramid.config.Configurator] = None) -> None:
     """Initialize the Sentry integration."""
-
-    global _client_setup
+    global _CLIENT_SETUP  # pylint: disable=global-statement
     sentry_url = config_utils.env_or_config(config, "SENTRY_URL", "c2c.sentry.url")
-    if sentry_url is not None and not _client_setup:
+    if sentry_url is not None and not _CLIENT_SETUP:
         client_info: MutableMapping[str, Any] = {
             key[14:].lower(): value for key, value in os.environ.items() if key.startswith("SENTRY_CLIENT_")
         }
@@ -55,6 +55,7 @@ def includeme(config: Optional[pyramid.config.Configurator] = None) -> None:
             "propagate_traces",
             "auto_enabling_integrations",
             "auto_session_tracking",
+            "enable_tracing",
         ):
             if key in client_info:
                 client_info[key] = client_info[key].lower() in ("1", "t", "true")
@@ -73,25 +74,58 @@ def includeme(config: Optional[pyramid.config.Configurator] = None) -> None:
         client_info["ignore_errors"] = client_info.pop("ignore_exceptions", "SystemExit").split(",")
         tags = {key[11:].lower(): value for key, value in os.environ.items() if key.startswith("SENTRY_TAG_")}
 
-        sentry_logging = LoggingIntegration(
-            level=logging.DEBUG,
-            event_level=config_utils.env_or_config(
-                config, "SENTRY_LEVEL", "c2c.sentry_level", "ERROR"
-            ).upper(),
-        )
         traces_sample_rate = float(
             config_utils.env_or_config(
                 config, "SENTRY_TRACES_SAMPLE_RATE", "c2c.sentry_traces_sample_rate", "0.0"
             )
         )
+        integrations: list[sentry_sdk.integrations.Integration] = []
+        if config_utils.config_bool(
+            config_utils.env_or_config(
+                config, "SENTRY_INTEGRATION_LOGGING", "c2c.sentry_integration_logging", "true"
+            )
+        ):
+            integrations.append(
+                LoggingIntegration(
+                    level=logging.DEBUG,
+                    event_level=config_utils.env_or_config(
+                        config, "SENTRY_LEVEL", "c2c.sentry_level", "ERROR"
+                    ).upper(),
+                )
+            )
+        if config_utils.config_bool(
+            config_utils.env_or_config(
+                config, "SENTRY_INTEGRATION_PYRAMID", "c2c.sentry_integration_pyramid", "true"
+            )
+        ):
+            integrations.append(PyramidIntegration())
+        if config_utils.config_bool(
+            config_utils.env_or_config(
+                config, "SENTRY_INTEGRATION_SQLALCHEMY", "c2c.sentry_integration_sqlalchemy", "true"
+            )
+        ):
+            integrations.append(SqlalchemyIntegration())
+        if config_utils.config_bool(
+            config_utils.env_or_config(
+                config, "SENTRY_INTEGRATION_REDIS", "c2c.sentry_integration_redis", "true"
+            )
+        ):
+            integrations.append(RedisIntegration())
+        if config_utils.config_bool(
+            config_utils.env_or_config(
+                config, "SENTRY_INTEGRATION_ASYNCIO", "c2c.sentry_integration_asyncio", "true"
+            )
+        ):
+            integrations.append(AsyncioIntegration())
+
         sentry_sdk.init(
             dsn=sentry_url,
-            integrations=[sentry_logging, PyramidIntegration(), SqlalchemyIntegration(), RedisIntegration()],
+            integrations=integrations,
             traces_sample_rate=traces_sample_rate,
             before_send=_create_before_send_filter(tags),
             **client_info,
         )
-        _client_setup = True
+        _CLIENT_SETUP = True
 
         excludes = config_utils.env_or_config(
             config, "SENTRY_EXCLUDES", "c2c.sentry.excludes", "sentry_sdk"
@@ -99,7 +133,7 @@ def includeme(config: Optional[pyramid.config.Configurator] = None) -> None:
         for exclude in excludes:
             ignore_logger(exclude)
 
-        LOG.info("Configured sentry reporting with client=%s and tags=%s", repr(client_info), repr(tags))
+        _LOG.info("Configured sentry reporting with client=%s and tags=%s", repr(client_info), repr(tags))
 
 
 @contextlib.contextmanager
@@ -110,7 +144,7 @@ def capture_exceptions() -> Generator[None, None, None]:
     You don't need to use that for exception terminating the process (those not caught). Sentry does that
     already.
     """
-    if _client_setup:
+    if _CLIENT_SETUP:
         try:
             yield
         except Exception:
@@ -122,12 +156,12 @@ def capture_exceptions() -> Generator[None, None, None]:
 
 def filter_wsgi_app(application: Callable[..., Any]) -> Callable[..., Any]:
     """If sentry is configured, add a Sentry filter around the application."""
-    if _client_setup:
+    if _CLIENT_SETUP:
         try:
-            LOG.info("Enable WSGI filter for Sentry")
+            _LOG.info("Enable WSGI filter for Sentry")
             return SentryWsgiMiddleware(application)
         except Exception:  # pylint: disable=broad-except
-            LOG.error("Failed enabling sentry. Continuing without it.", exc_info=True)
+            _LOG.error("Failed enabling sentry. Continuing without it.", exc_info=True)
             return application
     else:
         return application
@@ -135,4 +169,5 @@ def filter_wsgi_app(application: Callable[..., Any]) -> Callable[..., Any]:
 
 def filter_factory(*args: Any, **kwargs: Any) -> Callable[..., Any]:
     """Get the filter."""
+    del args, kwargs
     return filter_wsgi_app
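Note on the Sentry changes above: each integration now has its own on/off switch, read through `config_utils.env_or_config()` and defaulting to "true". A minimal sketch of opting out of two of them before including the module, assuming the usual Pyramid include mechanism (the DSN is a placeholder):

```python
import os

os.environ["SENTRY_URL"] = "https://public-key@sentry.example.com/1"  # placeholder DSN
os.environ["SENTRY_INTEGRATION_SQLALCHEMY"] = "false"  # skip SQL spans
os.environ["SENTRY_INTEGRATION_ASYNCIO"] = "false"     # opt out of the new asyncio integration

from pyramid.config import Configurator

with Configurator() as config:
    config.include("c2cwsgiutils.sentry")  # runs the includeme() shown above
```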
c2cwsgiutils/services.py CHANGED
@@ -5,7 +5,7 @@ from cornice import Service
 from pyramid.request import Request
 from pyramid.response import Response
 
-LOG = logging.getLogger(__name__)
+_LOG = logging.getLogger(__name__)
 
 
 def create(name: str, path: str, *args: Any, **kwargs: Any) -> Service:
@@ -31,6 +31,6 @@ def _cache_cors(response: Response, request: Request) -> Response:
     except Exception:
         # cornice catches exceptions from filters, and tries call back the filter with only the request.
         # This leads to a useless message in case of error...
-        LOG.error("Failed fixing cache headers for CORS", exc_info=True)
+        _LOG.error("Failed fixing cache headers for CORS", exc_info=True)
         raise
     return response
c2cwsgiutils/setup_process.py CHANGED
@@ -24,7 +24,6 @@ def fill_arguments(
     default_config_uri: str = "c2c:///app/production.ini",
 ) -> None:
     """Add the needed arguments to the parser like it's done in pshell."""
-
     parser.add_argument(
         "--config-uri" if use_attribute else "config_uri",
         nargs="?",
c2cwsgiutils/sql_profiler/__init__.py CHANGED
@@ -11,10 +11,9 @@ import pyramid.request
 
 from c2cwsgiutils import auth
 
-ENV_KEY = "C2C_SQL_PROFILER_ENABLED"
-CONFIG_KEY = "c2c.sql_profiler_enabled"
-LOG = logging.getLogger(__name__)
-repository = None
+_ENV_KEY = "C2C_SQL_PROFILER_ENABLED"
+_CONFIG_KEY = "c2c.sql_profiler_enabled"
+_LOG = logging.getLogger(__name__)
 
 
 def init(config: pyramid.config.Configurator) -> None:
@@ -25,7 +24,7 @@ def init(config: pyramid.config.Configurator) -> None:
 
 def includeme(config: pyramid.config.Configurator) -> None:
     """Install a pyramid event handler that adds the request information."""
-    if auth.is_enabled(config, ENV_KEY, CONFIG_KEY):
-        from . import _impl
+    if auth.is_enabled(config, _ENV_KEY, _CONFIG_KEY):
+        from . import _impl  # pylint: disable=import-outside-toplevel
 
         _impl.init(config)
c2cwsgiutils/sql_profiler/_impl.py CHANGED
@@ -16,8 +16,8 @@ import sqlalchemy.event
 
 from c2cwsgiutils import auth, broadcast, config_utils
 
-LOG = logging.getLogger(__name__)
-repository = None
+_LOG = logging.getLogger(__name__)
+_REPOSITORY = None
 
 
 class _Repository:
@@ -35,7 +35,8 @@ class _Repository:
         _context: Any,
         _executemany: Any,
     ) -> None:
-        if statement.startswith("SELECT ") and LOG.isEnabledFor(logging.INFO):
+        """Profile the SQL statement."""
+        if statement.startswith("SELECT ") and _LOG.isEnabledFor(logging.INFO):
             do_it = False
             with self._lock:
                 if statement not in self._repo:
@@ -43,8 +44,8 @@ class _Repository:
                     self._repo.add(statement)
             if do_it:
                 try:
-                    LOG.info("statement:\n%s", _indent(_beautify_sql(statement)))
-                    LOG.info("parameters: %s", repr(parameters))
+                    _LOG.info("statement:\n%s", _indent(_beautify_sql(statement)))
+                    _LOG.info("parameters: %s", repr(parameters))
                     with conn.engine.begin() as c:
                         output = "\n ".join(
                             [
@@ -54,7 +55,7 @@ class _Repository:
                                 )
                             ]
                         )
-                    LOG.info(output)
+                    _LOG.info(output)
                 except Exception:  # nosec # pylint: disable=broad-except
                     pass
 
@@ -64,21 +65,21 @@ def _sql_profiler_view(request: pyramid.request.Request) -> Mapping[str, Any]:
     enable = request.params.get("enable")
     if enable is not None:
         broadcast.broadcast("c2c_sql_profiler", params={"enable": enable}, expect_answers=True)
-    return {"status": 200, "enabled": repository is not None}
+    return {"status": 200, "enabled": _REPOSITORY is not None}
 
 
 def _setup_profiler(enable: str) -> None:
-    global repository
+    global _REPOSITORY  # pylint: disable=global-statement
     if config_utils.config_bool(enable):
-        if repository is None:
-            LOG.info("Enabling the SQL profiler")
-            repository = _Repository()
-            sqlalchemy.event.listen(sqlalchemy.engine.Engine, "before_cursor_execute", repository.profile)
+        if _REPOSITORY is None:
+            _LOG.info("Enabling the SQL profiler")
+            _REPOSITORY = _Repository()
+            sqlalchemy.event.listen(sqlalchemy.engine.Engine, "before_cursor_execute", _REPOSITORY.profile)
     else:
-        if repository is not None:
-            LOG.info("Disabling the SQL profiler")
-            sqlalchemy.event.remove(sqlalchemy.engine.Engine, "before_cursor_execute", repository.profile)
-            repository = None
+        if _REPOSITORY is not None:
+            _LOG.info("Disabling the SQL profiler")
+            sqlalchemy.event.remove(sqlalchemy.engine.Engine, "before_cursor_execute", _REPOSITORY.profile)
+            _REPOSITORY = None
 
 
 def _beautify_sql(statement: str) -> str:
@@ -102,4 +103,4 @@ def init(config: pyramid.config.Configurator) -> None:
         "c2c_sql_profiler", config_utils.get_base_path(config) + r"/sql_profiler", request_method="GET"
     )
     config.add_view(_sql_profiler_view, route_name="c2c_sql_profiler", renderer="fast_json", http_cache=0)
-    LOG.info("Enabled the /sql_profiler API")
+    _LOG.info("Enabled the /sql_profiler API")
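Note on the SQL profiler changes above: enabling and disabling works by attaching and detaching a `before_cursor_execute` listener on the SQLAlchemy `Engine` class. A minimal sketch of that wiring, with an illustrative listener body rather than the package's full EXPLAIN output:

```python
import logging

import sqlalchemy
import sqlalchemy.event

_log = logging.getLogger(__name__)


def _profile(conn, cursor, statement, parameters, context, executemany):
    """Log SELECT statements before the cursor executes them (illustrative)."""
    if statement.startswith("SELECT "):
        _log.info("statement: %s, parameters: %r", statement, parameters)


# Enable:
sqlalchemy.event.listen(sqlalchemy.engine.Engine, "before_cursor_execute", _profile)
# Disable again:
sqlalchemy.event.remove(sqlalchemy.engine.Engine, "before_cursor_execute", _profile)
```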