kinto 20.0.0__py3-none-any.whl → 20.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


kinto/config/kinto.tpl CHANGED
@@ -245,7 +245,7 @@ keys = root, kinto
 keys = console
 
 [formatters]
-keys = color
+keys = color, json
 
 [logger_root]
 level = INFO
@@ -258,10 +258,13 @@ qualname = kinto
 propagate = 0
 
 [handler_console]
-class = StreamHandler
+class = kinto.core.StreamHandlerWithRequestID
 args = (sys.stderr,)
 level = NOTSET
 formatter = color
 
 [formatter_color]
 class = logging_color_formatter.ColorFormatter
+
+[formatter_json]
+class = kinto.core.JsonLogFormatter
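
The template change registers a second formatter and points the console handler at a custom handler class. A minimal sketch of how the rendered configuration is consumed, assuming the template has been written out to config/kinto.ini (the path is illustrative):

    import logging
    import logging.config

    # fileConfig() registers both formatters declared in "keys = color, json";
    # the console handler uses the one named by its own "formatter =" option.
    logging.config.fileConfig("config/kinto.ini", disable_existing_loggers=False)
    logging.getLogger("kinto").info("server started")

Switching the handler's "formatter =" option from "color" to "json" would route records through kinto.core.JsonLogFormatter instead of the color formatter.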
kinto/core/__init__.py CHANGED
@@ -151,6 +151,20 @@ class JsonLogFormatter(dockerflow_logging.JsonLogFormatter):
         self.logger_name = logger_name
 
 
+class StreamHandlerWithRequestID(logging.StreamHandler):
+    """
+    A custom StreamHandler that adds the Dockerflow's `RequestIdLogFilter`.
+
+    Defining a custom handler seems to be the only way to bypass the fact that
+    ``logging.config.fileConfig()`` does not load filters from ``.ini`` files.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        filter_ = dockerflow_logging.RequestIdLogFilter()
+        self.addFilter(filter_)
+
+
 def get_user_info(request):
     # Default user info (shown in hello view for example).
     user_info = {"id": request.prefixed_userid, "principals": request.prefixed_principals}
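
The docstring above names the constraint this class works around: logging.config.fileConfig() has no section for declaring filters in an .ini file. A rough programmatic equivalent of what the handler bakes into its constructor, assuming dockerflow is installed, would be:

    import logging
    import sys
    from dockerflow.logging import RequestIdLogFilter

    # Equivalent of StreamHandlerWithRequestID: a stderr handler with the
    # request-id filter attached by hand.
    handler = logging.StreamHandler(sys.stderr)
    handler.addFilter(RequestIdLogFilter())
    logging.getLogger().addHandler(handler)

Subclassing StreamHandler lets the .ini file pick up the filter simply by naming kinto.core.StreamHandlerWithRequestID as the handler class.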
@@ -4,9 +4,9 @@ import re
 import urllib.parse
 import warnings
 from datetime import datetime
-from secrets import token_hex
 
 from dateutil import parser as dateparser
+from dockerflow.logging import get_or_generate_request_id, request_id_context
 from pyramid.events import ApplicationCreated, NewRequest, NewResponse
 from pyramid.exceptions import ConfigurationError
 from pyramid.httpexceptions import (
@@ -374,12 +374,15 @@ def setup_logging(config):
                 message="Invalid URL path.",
             )
 
+        rid = get_or_generate_request_id(headers=request.headers)
+        request_id_context.set(rid)
+
         request.log_context(
             agent=request.headers.get("User-Agent"),
             path=request_path,
             method=request.method,
             lang=request.headers.get("Accept-Language"),
-            rid=request.headers.get("X-Request-Id", token_hex(16)),
+            rid=rid,
             errno=0,
         )
         qs = dict(errors.request_GET(request))
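
Together with the handler above, the request id now flows through dockerflow's contextvar instead of being drawn ad hoc with token_hex(): the NewRequest handler reads (or generates) an id, stores it in request_id_context, and RequestIdLogFilter can copy it onto every record logged while that request is processed. A hedged sketch of the same calls outside Pyramid, using only the dockerflow names imported in the diff:

    from dockerflow.logging import get_or_generate_request_id, request_id_context

    headers = {"X-Request-Id": "abc123"}  # stand-in for Pyramid's request.headers
    rid = get_or_generate_request_id(headers=headers)
    request_id_context.set(rid)  # later log records can pick the id up via the filter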
@@ -87,6 +87,21 @@ class StorageBase:
         """
         raise NotImplementedError
 
+    def all_resources_timestamps(self, resource_name):
+        """Get the highest timestamp of every objects in this `resource_name` for
+        each `parent_id`.
+
+        .. note::
+
+            This should take deleted objects into account.
+
+        :param str resource_name: the resource name.
+
+        :returns: the latest timestamp of the resource by `parent_id`.
+        :rtype: dict[str, int]
+        """
+        raise NotImplementedError
+
     def create(
         self,
         resource_name,
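
The base class only declares the contract; each backend supplies its own query. A hedged usage sketch against the in-memory backend (the import path and bare constructor are assumptions here), just to show the dict[str, int] shape the docstring promises:

    from kinto.core.storage.memory import Storage

    storage = Storage()
    storage.create(obj={"id": "abc"}, resource_name="record", parent_id="/main/cid1")

    # Maps each parent_id to the highest timestamp seen for that resource,
    # tombstones included (per the base-class note).
    print(storage.all_resources_timestamps(resource_name="record"))
    # e.g. {"/main/cid1": 1700000000000}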
@@ -153,6 +153,10 @@ class Storage(MemoryBasedStorage):
             raise exceptions.ReadonlyError(message=error_msg)
         return self.bump_and_store_timestamp(resource_name, parent_id)
 
+    @synchronized
+    def all_resources_timestamps(self, resource_name):
+        return {k: v[resource_name] for k, v in self._timestamps.items() if resource_name in v}
+
     def bump_and_store_timestamp(
         self, resource_name, parent_id, obj=None, modified_field=None, last_modified=None
     ):
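
The in-memory backend already tracks timestamps as a nested dict keyed by parent_id and then by resource_name, so the new method is a single comprehension over that structure. An illustration of the shape it assumes (values made up):

    _timestamps = {
        "/main": {"collection": 1700000000001},
        "/main/cid2": {"record": 1700000000002},
    }
    # all_resources_timestamps("record") reduces to:
    {k: v["record"] for k, v in _timestamps.items() if "record" in v}
    # -> {"/main/cid2": 1700000000002}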
@@ -247,6 +247,36 @@ class Storage(StorageBase, MigratorMixin):
 
         return obj.last_epoch
 
+    def all_resources_timestamps(self, resource_name):
+        query = """
+        WITH existing_timestamps AS (
+            -- Timestamp of latest object by parent_id.
+            (
+                SELECT parent_id, MAX(last_modified) AS last_modified
+                FROM objects
+                WHERE resource_name = :resource_name
+                GROUP BY parent_id
+            )
+            -- Timestamp of resources without sub-objects.
+            UNION
+            (
+                SELECT parent_id, last_modified
+                FROM timestamps
+                WHERE resource_name = :resource_name
+            )
+        )
+        SELECT parent_id, MAX(as_epoch(last_modified)) AS last_modified
+        FROM existing_timestamps
+        GROUP BY parent_id
+        ORDER BY last_modified DESC
+        """
+        with self.client.connect(readonly=True) as conn:
+            result = conn.execute(sa.text(query), dict(resource_name=resource_name))
+            rows = result.fetchmany(self._max_fetch_size + 1)
+
+        results = {r[0]: r[1] for r in rows}
+        return results
+
     @deprecate_kwargs({"collection_id": "resource_name", "record": "obj"})
     def create(
         self,
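
The PostgreSQL version merges two sources: the highest last_modified per parent_id among stored objects (tombstones included, per the base-class note), and the standalone timestamps table for parents without sub-objects, then takes the maximum per parent. A hedged Python rendering of what the query computes, with made-up rows standing in for the two tables:

    objects = [      # (parent_id, last_modified) rows for the requested resource_name
        ("/main/cid2", 1700000000002),
        ("/main/cid2", 1700000000001),
    ]
    timestamps = [   # parents whose timestamp was bumped without remaining objects
        ("/main/cid1", 1700000000003),
    ]

    per_parent = {}
    for parent_id, last_modified in objects + timestamps:
        per_parent[parent_id] = max(per_parent.get(parent_id, 0), last_modified)
    # -> {"/main/cid2": 1700000000002, "/main/cid1": 1700000000003}

The final dict comprehension in the method then maps the (parent_id, epoch) rows onto the dict[str, int] shape declared by the base class, capped by fetchmany(self._max_fetch_size + 1).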
@@ -783,6 +783,39 @@ class TimestampsTest:
         after = self.storage.resource_timestamp(**self.storage_kw)
         self.assertTrue(before < after)
 
+    def test_all_timestamps_by_parent_id(self):
+        self.storage.create(obj={"id": "main"}, resource_name="bucket", parent_id="")
+        self.storage.create(obj={"id": "cid1"}, resource_name="collection", parent_id="/main")
+        self.storage.create(obj={"id": "cid2"}, resource_name="collection", parent_id="/main")
+        self.storage.create(obj={}, resource_name="record", parent_id="/main/cid2")
+        self.storage.create(obj={}, resource_name="record", parent_id="/main/cid2")
+
+        self.assertEqual(
+            {
+                "": self.storage.resource_timestamp(resource_name="bucket", parent_id=""),
+            },
+            self.storage.all_resources_timestamps(resource_name="bucket"),
+        )
+        self.assertEqual(
+            {
+                "/main": self.storage.resource_timestamp(
+                    resource_name="collection", parent_id="/main"
+                ),
+            },
+            self.storage.all_resources_timestamps(resource_name="collection"),
+        )
+        self.assertEqual(
+            {
+                "/main/cid1": self.storage.resource_timestamp(
+                    resource_name="record", parent_id="/main/cid1"
+                ),
+                "/main/cid2": self.storage.resource_timestamp(
+                    resource_name="record", parent_id="/main/cid2"
+                ),
+            },
+            self.storage.all_resources_timestamps(resource_name="record"),
+        )
+
     @skip_if_ci
     def test_timestamps_are_unique(self):  # pragma: no cover
         obtained = []
@@ -1 +1 @@
-3.6.0
+3.7.0
@@ -1 +1 @@
-3.6.0
+3.7.0