kinto 20.0.0__py3-none-any.whl → 20.4.0__py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as published to their public registry, and is provided for informational purposes only.
Potentially problematic release.
- kinto/config/kinto.tpl +5 -2
- kinto/core/__init__.py +14 -0
- kinto/core/initialization.py +5 -2
- kinto/core/storage/__init__.py +15 -0
- kinto/core/storage/memory.py +20 -3
- kinto/core/storage/postgresql/__init__.py +31 -1
- kinto/core/storage/postgresql/migrations/migration_022_023.sql +5 -0
- kinto/core/storage/postgresql/schema.sql +3 -2
- kinto/core/storage/testing.py +41 -1
- kinto/plugins/admin/VERSION +1 -1
- kinto/plugins/admin/build/VERSION +1 -1
- kinto/plugins/admin/build/assets/{index-Bq62Gei8.js → index-CylsivYB.js} +66 -66
- kinto/plugins/admin/build/index.html +1 -1
- kinto/plugins/history/__init__.py +10 -0
- kinto/plugins/history/listener.py +68 -5
- {kinto-20.0.0.dist-info → kinto-20.4.0.dist-info}/METADATA +3 -2
- {kinto-20.0.0.dist-info → kinto-20.4.0.dist-info}/RECORD +21 -20
- {kinto-20.0.0.dist-info → kinto-20.4.0.dist-info}/WHEEL +1 -1
- {kinto-20.0.0.dist-info → kinto-20.4.0.dist-info}/entry_points.txt +0 -0
- {kinto-20.0.0.dist-info → kinto-20.4.0.dist-info/licenses}/LICENSE +0 -0
- {kinto-20.0.0.dist-info → kinto-20.4.0.dist-info}/top_level.txt +0 -0
kinto/config/kinto.tpl
CHANGED
@@ -245,7 +245,7 @@ keys = root, kinto
 keys = console
 
 [formatters]
-keys = color
+keys = color, json
 
 [logger_root]
 level = INFO
@@ -258,10 +258,13 @@ qualname = kinto
 propagate = 0
 
 [handler_console]
-class =
+class = kinto.core.StreamHandlerWithRequestID
 args = (sys.stderr,)
 level = NOTSET
 formatter = color
 
 [formatter_color]
 class = logging_color_formatter.ColorFormatter
+
+[formatter_json]
+class = kinto.core.JsonLogFormatter
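The template change above only touches the .ini wiring. As a minimal sketch, the same wiring could be expressed with logging.config.dictConfig(), assuming kinto 20.4.0 is installed; the logger and handler names here are illustrative, not part of the template:

import logging.config

logging.config.dictConfig({
    "version": 1,
    "formatters": {
        # kinto.core.JsonLogFormatter is the formatter registered as [formatter_json] above.
        "json": {"()": "kinto.core.JsonLogFormatter"},
    },
    "handlers": {
        # kinto.core.StreamHandlerWithRequestID replaces the plain StreamHandler.
        "console": {
            "class": "kinto.core.StreamHandlerWithRequestID",
            "stream": "ext://sys.stderr",
            "formatter": "json",
        },
    },
    "root": {"level": "INFO", "handlers": ["console"]},
})

logging.getLogger("kinto").info("structured log line, tagged with the current request id")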
kinto/core/__init__.py
CHANGED
@@ -151,6 +151,20 @@ class JsonLogFormatter(dockerflow_logging.JsonLogFormatter):
         self.logger_name = logger_name
 
 
+class StreamHandlerWithRequestID(logging.StreamHandler):
+    """
+    A custom StreamHandler that adds the Dockerflow's `RequestIdLogFilter`.
+
+    Defining a custom handler seems to be the only way to bypass the fact that
+    ``logging.config.fileConfig()`` does not load filters from ``.ini`` files.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        filter_ = dockerflow_logging.RequestIdLogFilter()
+        self.addFilter(filter_)
+
+
 def get_user_info(request):
     # Default user info (shown in hello view for example).
     user_info = {"id": request.prefixed_userid, "principals": request.prefixed_principals}
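A hedged usage sketch of the new handler outside of Pyramid, assuming kinto 20.4.0 and dockerflow are installed; the logger name and request id are made up:

import logging
import sys

from dockerflow.logging import request_id_context
from kinto.core import StreamHandlerWithRequestID

logger = logging.getLogger("kinto.example")            # illustrative logger name
logger.setLevel(logging.INFO)
logger.addHandler(StreamHandlerWithRequestID(sys.stderr))

request_id_context.set("abc-123")                      # normally set once per request
logger.info("hello")                                   # the attached RequestIdLogFilter tags this record with the current request id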
kinto/core/initialization.py
CHANGED
@@ -4,9 +4,9 @@ import re
 import urllib.parse
 import warnings
 from datetime import datetime
-from secrets import token_hex
 
 from dateutil import parser as dateparser
+from dockerflow.logging import get_or_generate_request_id, request_id_context
 from pyramid.events import ApplicationCreated, NewRequest, NewResponse
 from pyramid.exceptions import ConfigurationError
 from pyramid.httpexceptions import (
@@ -374,12 +374,15 @@ def setup_logging(config):
                 message="Invalid URL path.",
             )
 
+        rid = get_or_generate_request_id(headers=request.headers)
+        request_id_context.set(rid)
+
         request.log_context(
            agent=request.headers.get("User-Agent"),
            path=request_path,
            method=request.method,
            lang=request.headers.get("Accept-Language"),
-           rid=
+           rid=rid,
            errno=0,
        )
        qs = dict(errors.request_GET(request))
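The dropped secrets.token_hex import suggests the request id was previously generated locally; it now comes from Dockerflow's context variable, so the same id appears in the summary log context and in every record that passes through StreamHandlerWithRequestID. A minimal sketch of that flow, assuming only dockerflow is installed (the helper name and the headers dict are illustrative):

from dockerflow.logging import get_or_generate_request_id, request_id_context


def bind_request_id(headers):
    # Reuse the id sent by the client or load balancer when present,
    # otherwise generate a new one, then expose it to the log filter.
    rid = get_or_generate_request_id(headers=headers)
    request_id_context.set(rid)
    return rid


rid = bind_request_id({})   # no incoming header: a fresh id is generated
print(rid)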
kinto/core/storage/__init__.py
CHANGED
@@ -87,6 +87,21 @@ class StorageBase:
         """
         raise NotImplementedError
 
+    def all_resources_timestamps(self, resource_name):
+        """Get the highest timestamp of every objects in this `resource_name` for
+        each `parent_id`.
+
+        .. note::
+
+            This should take deleted objects into account.
+
+        :param str resource_name: the resource name.
+
+        :returns: the latest timestamp of the resource by `parent_id`.
+        :rtype: dict[str, int]
+        """
+        raise NotImplementedError
+
     def create(
         self,
         resource_name,
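To make the contract concrete: implementations return one entry per parent_id, mapping it to the newest epoch-milliseconds timestamp of any object (including deleted ones) of the given resource under that parent. A toy, self-contained sketch of that shape, which is not the Kinto memory or PostgreSQL implementation:

class ToyTimestamps:
    """Tracks one timestamp per (parent_id, resource_name) pair."""

    def __init__(self):
        self._timestamps = {}  # {parent_id: {resource_name: epoch_ms}}

    def bump(self, resource_name, parent_id, epoch_ms):
        self._timestamps.setdefault(parent_id, {})[resource_name] = epoch_ms

    def all_resources_timestamps(self, resource_name):
        # Same shape as StorageBase.all_resources_timestamps(): dict[str, int].
        return {
            parent_id: per_resource[resource_name]
            for parent_id, per_resource in self._timestamps.items()
            if resource_name in per_resource
        }


toy = ToyTimestamps()
toy.bump("collection", "/buckets/main", 1718000000000)
toy.bump("record", "/buckets/main/collections/cid1", 1718000000123)
print(toy.all_resources_timestamps("collection"))  # {'/buckets/main': 1718000000000}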
kinto/core/storage/memory.py
CHANGED
@@ -153,6 +153,10 @@ class Storage(MemoryBasedStorage):
             raise exceptions.ReadonlyError(message=error_msg)
         return self.bump_and_store_timestamp(resource_name, parent_id)
 
+    @synchronized
+    def all_resources_timestamps(self, resource_name):
+        return {k: v[resource_name] for k, v in self._timestamps.items() if resource_name in v}
+
     def bump_and_store_timestamp(
         self, resource_name, parent_id, obj=None, modified_field=None, last_modified=None
     ):
@@ -284,13 +288,26 @@ class Storage(MemoryBasedStorage):
         modified_field=DEFAULT_MODIFIED_FIELD,
     ):
         parent_id_match = re.compile(parent_id.replace("*", ".*"))
-
+
+        timestamps_by_parent_id = {
             pid: resources
-            for pid, resources in self.
+            for pid, resources in self._timestamps.items()
             if parent_id_match.match(pid)
         }
+        if resource_name is not None:
+            for pid, resources in timestamps_by_parent_id.items():
+                del self._timestamps[pid][resource_name]
+        else:
+            for pid, resources in timestamps_by_parent_id.items():
+                del self._timestamps[pid]
+
         num_deleted = 0
-
+        tombstones_by_parent_id = {
+            pid: resources
+            for pid, resources in self._cemetery.items()
+            if parent_id_match.match(pid)
+        }
+        for pid, resources in tombstones_by_parent_id.items():
             if resource_name is not None:
                 resources = {resource_name: resources[resource_name]}
             for resource, resource_objects in resources.items():
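A hedged usage sketch of the new method on the built-in memory backend, assuming kinto 20.4.0 and the keyword signatures exercised in testing.py further down; the bucket and collection ids are made up:

from kinto.core.storage.memory import Storage

storage = Storage()
storage.create(resource_name="collection", parent_id="/buckets/main", obj={"id": "cid1"})
storage.create(resource_name="record", parent_id="/buckets/main/collections/cid1", obj={"id": "r1"})

# One entry per parent_id holding objects of the requested resource.
print(storage.all_resources_timestamps(resource_name="record"))
# e.g. {'/buckets/main/collections/cid1': 1718000000123}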
kinto/core/storage/postgresql/__init__.py
CHANGED
@@ -79,7 +79,7 @@ class Storage(StorageBase, MigratorMixin):
 
     # MigratorMixin attributes.
     name = "storage"
-    schema_version = 22
+    schema_version = 23
     schema_file = os.path.join(HERE, "schema.sql")
     migrations_directory = os.path.join(HERE, "migrations")
 
@@ -247,6 +247,36 @@ class Storage(StorageBase, MigratorMixin):
 
         return obj.last_epoch
 
+    def all_resources_timestamps(self, resource_name):
+        query = """
+        WITH existing_timestamps AS (
+            -- Timestamp of latest object by parent_id.
+            (
+                SELECT parent_id, MAX(last_modified) AS last_modified
+                FROM objects
+                WHERE resource_name = :resource_name
+                GROUP BY parent_id
+            )
+            -- Timestamp of resources without sub-objects.
+            UNION
+            (
+                SELECT parent_id, last_modified
+                FROM timestamps
+                WHERE resource_name = :resource_name
+            )
+        )
+        SELECT parent_id, MAX(as_epoch(last_modified)) AS last_modified
+        FROM existing_timestamps
+        GROUP BY parent_id
+        ORDER BY last_modified DESC
+        """
+        with self.client.connect(readonly=True) as conn:
+            result = conn.execute(sa.text(query), dict(resource_name=resource_name))
+            rows = result.fetchmany(self._max_fetch_size + 1)
+
+        results = {r[0]: r[1] for r in rows}
+        return results
+
     @deprecate_kwargs({"collection_id": "resource_name", "record": "obj"})
     def create(
         self,
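For ad-hoc inspection, roughly the same aggregate can be run directly against a Kinto PostgreSQL storage database with SQLAlchemy. This simplified sketch only reads the objects table (it skips the UNION with the timestamps table used above), and the DSN is a placeholder:

import sqlalchemy as sa

engine = sa.create_engine("postgresql://kinto:kinto@localhost/kinto")  # placeholder DSN

query = sa.text("""
    SELECT parent_id, MAX(as_epoch(last_modified)) AS last_modified
    FROM objects
    WHERE resource_name = :resource_name
    GROUP BY parent_id
""")

with engine.connect() as conn:
    rows = conn.execute(query, {"resource_name": "record"}).fetchall()

# Same shape as Storage.all_resources_timestamps(): {parent_id: epoch_ms}
print({row[0]: row[1] for row in rows})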
kinto/core/storage/postgresql/schema.sql
CHANGED
@@ -47,7 +47,8 @@ CREATE UNIQUE INDEX IF NOT EXISTS idx_objects_parent_id_resource_name_last_modif
 ON objects(parent_id, resource_name, last_modified DESC);
 CREATE INDEX IF NOT EXISTS idx_objects_last_modified_epoch
 ON objects(as_epoch(last_modified));
-
+CREATE INDEX IF NOT EXISTS idx_objects_resource_name_parent_id_deleted
+ON objects(resource_name, parent_id, deleted);
 
 CREATE TABLE IF NOT EXISTS timestamps (
   parent_id TEXT NOT NULL COLLATE "C",
@@ -131,4 +132,4 @@ INSERT INTO metadata (name, value) VALUES ('created_at', NOW()::TEXT);
 
 -- Set storage schema version.
 -- Should match ``kinto.core.storage.postgresql.PostgreSQL.schema_version``
-INSERT INTO metadata (name, value) VALUES ('storage_schema_version', '22');
+INSERT INTO metadata (name, value) VALUES ('storage_schema_version', '23');
kinto/core/storage/testing.py
CHANGED
@@ -783,6 +783,39 @@ class TimestampsTest:
         after = self.storage.resource_timestamp(**self.storage_kw)
         self.assertTrue(before < after)
 
+    def test_all_timestamps_by_parent_id(self):
+        self.storage.create(obj={"id": "main"}, resource_name="bucket", parent_id="")
+        self.storage.create(obj={"id": "cid1"}, resource_name="collection", parent_id="/main")
+        self.storage.create(obj={"id": "cid2"}, resource_name="collection", parent_id="/main")
+        self.storage.create(obj={}, resource_name="record", parent_id="/main/cid2")
+        self.storage.create(obj={}, resource_name="record", parent_id="/main/cid2")
+
+        self.assertEqual(
+            {
+                "": self.storage.resource_timestamp(resource_name="bucket", parent_id=""),
+            },
+            self.storage.all_resources_timestamps(resource_name="bucket"),
+        )
+        self.assertEqual(
+            {
+                "/main": self.storage.resource_timestamp(
+                    resource_name="collection", parent_id="/main"
+                ),
+            },
+            self.storage.all_resources_timestamps(resource_name="collection"),
+        )
+        self.assertEqual(
+            {
+                "/main/cid1": self.storage.resource_timestamp(
+                    resource_name="record", parent_id="/main/cid1"
+                ),
+                "/main/cid2": self.storage.resource_timestamp(
+                    resource_name="record", parent_id="/main/cid2"
+                ),
+            },
+            self.storage.all_resources_timestamps(resource_name="record"),
+        )
+
     @skip_if_ci
     def test_timestamps_are_unique(self):  # pragma: no cover
         obtained = []
@@ -1263,6 +1296,9 @@ class DeletedObjectsTest:
         self.create_object(parent_id="/abc/a", resource_name="c")
         self.create_object(parent_id="/efg", resource_name="c")
 
+        all_timestamps = self.storage.all_resources_timestamps(resource_name="c")
+        self.assertEqual(set(all_timestamps.keys()), {"/abc/a", "/efg"})
+
         before1 = self.storage.resource_timestamp(parent_id="/abc/a", resource_name="c")
         # Different parent_id with object.
         before2 = self.storage.resource_timestamp(parent_id="/efg", resource_name="c")
@@ -1272,11 +1308,15 @@ class DeletedObjectsTest:
         self.storage.delete_all(parent_id="/abc/*", resource_name=None, with_deleted=False)
         self.storage.purge_deleted(parent_id="/abc/*", resource_name=None)
 
+        all_timestamps = self.storage.all_resources_timestamps(resource_name="c")
+        self.assertEqual(set(all_timestamps.keys()), {"/efg", "/ijk"})
+
+        time.sleep(0.002)  # make sure we don't recreate timestamps at same msec.
         after1 = self.storage.resource_timestamp(parent_id="/abc/a", resource_name="c")
         after2 = self.storage.resource_timestamp(parent_id="/efg", resource_name="c")
         after3 = self.storage.resource_timestamp(parent_id="/ijk", resource_name="c")
 
-        self.assertNotEqual(before1, after1)
+        self.assertNotEqual(before1, after1)  # timestamp was removed, it will differ.
         self.assertEqual(before2, after2)
         self.assertEqual(before3, after3)
 
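The DeletedObjectsTest additions rely on deleted objects still contributing a timestamp for their parent. A hedged standalone sketch of that behaviour on the memory backend, assuming the create/delete keyword signatures used in these tests (obj=, resource_name=, parent_id=, object_id=):

from kinto.core.storage.memory import Storage

storage = Storage()
storage.create(resource_name="c", parent_id="/abc/a", obj={"id": "x"})
storage.delete(resource_name="c", parent_id="/abc/a", object_id="x")

# The parent whose only object is now a tombstone is still listed.
print(storage.all_resources_timestamps(resource_name="c"))
# e.g. {'/abc/a': 1718000000456}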
kinto/plugins/admin/VERSION
CHANGED
@@ -1 +1 @@
-3.
+3.7.1

kinto/plugins/admin/build/VERSION
CHANGED
@@ -1 +1 @@
-3.
+3.7.1