autonomous-app 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autonomous/__init__.py +1 -1
- autonomous/ai/audioagent.py +1 -1
- autonomous/ai/imageagent.py +1 -1
- autonomous/ai/jsonagent.py +1 -1
- autonomous/ai/models/openai.py +81 -53
- autonomous/ai/oaiagent.py +1 -14
- autonomous/ai/textagent.py +1 -1
- autonomous/auth/autoauth.py +10 -10
- autonomous/auth/user.py +17 -2
- autonomous/db/__init__.py +42 -0
- autonomous/db/base/__init__.py +33 -0
- autonomous/db/base/common.py +62 -0
- autonomous/db/base/datastructures.py +476 -0
- autonomous/db/base/document.py +1230 -0
- autonomous/db/base/fields.py +767 -0
- autonomous/db/base/metaclasses.py +468 -0
- autonomous/db/base/utils.py +22 -0
- autonomous/db/common.py +79 -0
- autonomous/db/connection.py +472 -0
- autonomous/db/context_managers.py +313 -0
- autonomous/db/dereference.py +291 -0
- autonomous/db/document.py +1141 -0
- autonomous/db/errors.py +165 -0
- autonomous/db/fields.py +2732 -0
- autonomous/db/mongodb_support.py +24 -0
- autonomous/db/pymongo_support.py +80 -0
- autonomous/db/queryset/__init__.py +28 -0
- autonomous/db/queryset/base.py +2033 -0
- autonomous/db/queryset/field_list.py +88 -0
- autonomous/db/queryset/manager.py +58 -0
- autonomous/db/queryset/queryset.py +189 -0
- autonomous/db/queryset/transform.py +527 -0
- autonomous/db/queryset/visitor.py +189 -0
- autonomous/db/signals.py +59 -0
- autonomous/logger.py +3 -0
- autonomous/model/autoattr.py +56 -41
- autonomous/model/automodel.py +95 -34
- autonomous/storage/imagestorage.py +49 -8
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.2.dist-info}/METADATA +2 -2
- autonomous_app-0.3.2.dist-info/RECORD +60 -0
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.2.dist-info}/WHEEL +1 -1
- autonomous_app-0.3.0.dist-info/RECORD +0 -35
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.2.dist-info}/LICENSE +0 -0
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.2.dist-info}/top_level.txt +0 -0
autonomous/db/fields.py
ADDED
|
@@ -0,0 +1,2732 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
import decimal
|
|
3
|
+
import inspect
|
|
4
|
+
import itertools
|
|
5
|
+
import re
|
|
6
|
+
import socket
|
|
7
|
+
import time
|
|
8
|
+
import uuid
|
|
9
|
+
from inspect import isclass
|
|
10
|
+
from io import BytesIO
|
|
11
|
+
from operator import itemgetter
|
|
12
|
+
|
|
13
|
+
import gridfs
|
|
14
|
+
import pymongo
|
|
15
|
+
from bson import SON, Binary, DBRef, ObjectId
|
|
16
|
+
from bson.decimal128 import Decimal128, create_decimal128_context
|
|
17
|
+
from bson.int64 import Int64
|
|
18
|
+
from pymongo import ReturnDocument
|
|
19
|
+
|
|
20
|
+
from autonomous import log
|
|
21
|
+
|
|
22
|
+
try:
|
|
23
|
+
import dateutil
|
|
24
|
+
except ImportError:
|
|
25
|
+
dateutil = None
|
|
26
|
+
else:
|
|
27
|
+
import dateutil.parser
|
|
28
|
+
|
|
29
|
+
from autonomous.db.base import (
|
|
30
|
+
BaseDocument,
|
|
31
|
+
BaseField,
|
|
32
|
+
ComplexBaseField,
|
|
33
|
+
GeoJsonBaseField,
|
|
34
|
+
LazyReference,
|
|
35
|
+
ObjectIdField,
|
|
36
|
+
get_document,
|
|
37
|
+
)
|
|
38
|
+
from autonomous.db.base.utils import LazyRegexCompiler
|
|
39
|
+
from autonomous.db.common import _import_class
|
|
40
|
+
from autonomous.db.connection import DEFAULT_CONNECTION_NAME, get_db
|
|
41
|
+
from autonomous.db.document import Document, EmbeddedDocument
|
|
42
|
+
from autonomous.db.errors import (
|
|
43
|
+
DoesNotExist,
|
|
44
|
+
InvalidQueryError,
|
|
45
|
+
ValidationError,
|
|
46
|
+
)
|
|
47
|
+
from autonomous.db.queryset import DO_NOTHING
|
|
48
|
+
from autonomous.db.queryset.base import BaseQuerySet
|
|
49
|
+
from autonomous.db.queryset.transform import STRING_OPERATORS
|
|
50
|
+
|
|
51
|
+
try:
|
|
52
|
+
from PIL import Image, ImageOps
|
|
53
|
+
|
|
54
|
+
if hasattr(Image, "Resampling"):
|
|
55
|
+
LANCZOS = Image.Resampling.LANCZOS
|
|
56
|
+
else:
|
|
57
|
+
LANCZOS = Image.LANCZOS
|
|
58
|
+
except ImportError:
|
|
59
|
+
# pillow is optional so may not be installed
|
|
60
|
+
Image = None
|
|
61
|
+
ImageOps = None
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# Public API of this module: every field class (and GridFS helper) that
# callers are expected to import from autonomous.db.fields.
__all__ = (
    "StringField",
    "URLField",
    "EmailField",
    "IntField",
    "LongField",
    "FloatField",
    "DecimalField",
    "BooleanField",
    "DateTimeField",
    "DateField",
    "ComplexDateTimeField",
    "EmbeddedDocumentField",
    "ObjectIdField",
    "GenericEmbeddedDocumentField",
    "DynamicField",
    "ListField",
    "SortedListField",
    "EmbeddedDocumentListField",
    "DictField",
    "MapField",
    "ReferenceField",
    "CachedReferenceField",
    "LazyReferenceField",
    "GenericLazyReferenceField",
    "GenericReferenceField",
    "BinaryField",
    "GridFSError",
    "GridFSProxy",
    "FileField",
    "ImageGridFsProxy",
    "ImproperlyConfigured",
    "ImageField",
    "GeoPointField",
    "PointField",
    "LineStringField",
    "PolygonField",
    "SequenceField",
    "UUIDField",
    "EnumField",
    "MultiPointField",
    "MultiLineStringField",
    "MultiPolygonField",
    "GeoJsonBaseField",
    "Decimal128Field",
)

# Sentinel document-type name letting a field reference its own owner
# document, e.g. EmbeddedDocumentField("self") for recursive embedding.
RECURSIVE_REFERENCE_CONSTANT = "self"
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
class StringField(BaseField):
    """A field holding unicode text, with optional length and regex checks."""

    def __init__(self, regex=None, max_length=None, min_length=None, **kwargs):
        """
        :param regex: (optional) pattern string enforced during validation
        :param max_length: (optional) maximum length enforced during validation
        :param min_length: (optional) minimum length enforced during validation
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.BaseField`
        """
        self.regex = re.compile(regex) if regex else None
        self.max_length = max_length
        self.min_length = min_length
        super().__init__(**kwargs)

    def to_python(self, value):
        """Best-effort coercion to ``str``; bytes are decoded as UTF-8,
        anything unconvertible passes through unchanged."""
        if isinstance(value, str):
            return value
        try:
            return value.decode("utf-8")
        except Exception:
            return value

    def validate(self, value):
        """Reject non-strings and strings violating length/regex constraints."""
        if not isinstance(value, str):
            self.error("StringField only accepts string values")

        length = len(value)
        if self.max_length is not None and length > self.max_length:
            self.error("String value is too long")

        if self.min_length is not None and length < self.min_length:
            self.error("String value is too short")

        if self.regex is not None and self.regex.match(value) is None:
            self.error("String value did not match validation regex")

    def lookup_member(self, member_name):
        # Plain strings expose no queryable sub-members.
        return None

    def prepare_query_value(self, op, value):
        """Translate string operators (contains, startswith, ...) into the
        equivalent compiled regex for the MongoDB query."""
        if not isinstance(op, str):
            return value

        if op in STRING_OPERATORS:
            ignore_case = op.startswith("i")
            op = op.lstrip("i")

            flags = re.IGNORECASE if ignore_case else 0

            # Anchoring templates per operator; anything else (e.g. contains)
            # matches anywhere in the string.
            templates = {
                "startswith": r"^%s",
                "endswith": r"%s$",
                "exact": r"^%s$",
                "wholeword": r"\b%s\b",
            }

            if op == "regex":
                # Caller supplied a raw pattern; compile it verbatim.
                value = re.compile(value, flags)
            else:
                # Escape unsafe characters which could lead to a re.error.
                template = templates.get(op, r"%s")
                value = re.compile(template % re.escape(value), flags)
        return super().prepare_query_value(op, value)
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
class URLField(StringField):
    """A field that validates input as an URL."""

    _URL_REGEX = LazyRegexCompiler(
        r"^(?:[a-z0-9\.\-]*)://"  # scheme is validated separately
        r"(?:(?:[A-Z0-9](?:[A-Z0-9-_]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}(?<!-)\.?)|"  # domain...
        r"localhost|"  # localhost...
        r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}|"  # ...or ipv4
        r"\[?[A-F0-9]*:[A-F0-9:]+\]?)"  # ...or ipv6
        r"(?::\d+)?"  # optional port
        r"(?:/?|[/?]\S+)$",
        re.IGNORECASE,
    )
    _URL_SCHEMES = ["http", "https", "ftp", "ftps"]

    def __init__(self, url_regex=None, schemes=None, **kwargs):
        """
        :param url_regex: (optional) Overwrite the default regex used for validation
        :param schemes: (optional) Overwrite the default URL schemes that are allowed
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.StringField`
        """
        self.url_regex = url_regex or self._URL_REGEX
        self.schemes = schemes or self._URL_SCHEMES
        super().__init__(**kwargs)

    def validate(self, value):
        """Check the scheme against the allowed list, then the overall URL shape."""
        scheme = value.partition("://")[0].lower()
        if scheme not in self.schemes:
            self.error(f"Invalid scheme {scheme} in URL: {value}")

        if not self.url_regex.match(value):
            self.error(f"Invalid URL: {value}")
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
class EmailField(StringField):
    """A field that validates input as an email address."""

    USER_REGEX = LazyRegexCompiler(
        # `dot-atom` defined in RFC 5322 Section 3.2.3.
        r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*\Z"
        # `quoted-string` defined in RFC 5322 Section 3.2.4.
        r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)',
        re.IGNORECASE,
    )

    UTF8_USER_REGEX = LazyRegexCompiler(
        (
            # RFC 6531 Section 3.3 extends `atext` (used by dot-atom) to
            # include `UTF8-non-ascii`.
            r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z\u0080-\U0010FFFF]+)*\Z"
            # `quoted-string`
            r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"\Z)'
        ),
        re.IGNORECASE | re.UNICODE,
    )

    DOMAIN_REGEX = LazyRegexCompiler(
        r"((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+)(?:[A-Z0-9-]{2,63}(?<!-))\Z",
        re.IGNORECASE,
    )

    error_msg = "Invalid email address: %s"

    def __init__(
        self,
        domain_whitelist=None,
        allow_utf8_user=False,
        allow_ip_domain=False,
        *args,
        **kwargs,
    ):
        """
        :param domain_whitelist: (optional) list of valid domain names applied during validation
        :param allow_utf8_user: Allow user part of the email to contain utf8 char
        :param allow_ip_domain: Allow domain part of the email to be an IPv4 or IPv6 address
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.StringField`
        """
        self.domain_whitelist = domain_whitelist or []
        self.allow_utf8_user = allow_utf8_user
        self.allow_ip_domain = allow_ip_domain
        super().__init__(*args, **kwargs)

    def validate_user_part(self, user_part):
        """Validate the user (local) part of the address.

        Returns a truthy match object when valid, None otherwise.
        """
        pattern = self.UTF8_USER_REGEX if self.allow_utf8_user else self.USER_REGEX
        return pattern.match(user_part)

    def validate_domain_part(self, domain_part):
        """Validate the domain part of the address. Return True if valid
        (whitelisted, a well-formed hostname, or an allowed bracketed IP
        literal), False otherwise.
        """
        # Skip domain validation if it's in the whitelist.
        if domain_part in self.domain_whitelist:
            return True

        if self.DOMAIN_REGEX.match(domain_part):
            return True

        # Validate IPv4/IPv6, e.g. user@[192.168.0.1]
        is_bracketed = (
            self.allow_ip_domain
            and domain_part[0] == "["
            and domain_part[-1] == "]"
        )
        if is_bracketed:
            literal = domain_part[1:-1]
            for addr_family in (socket.AF_INET, socket.AF_INET6):
                try:
                    socket.inet_pton(addr_family, literal)
                except (OSError, UnicodeEncodeError):
                    continue
                return True

        return False

    def validate(self, value):
        super().validate(value)

        # self.error raises, so execution stops at the first failure.
        if "@" not in value:
            self.error(self.error_msg % value)

        user_part, domain_part = value.rsplit("@", 1)

        if not self.validate_user_part(user_part):
            self.error(self.error_msg % value)

        if self.validate_domain_part(domain_part):
            return

        # Domain failed as-is: retry with its IDN (punycode) encoding.
        try:
            domain_part = domain_part.encode("idna").decode("ascii")
        except UnicodeError:
            self.error(
                "{} {}".format(
                    self.error_msg % value, "(domain failed IDN encoding)"
                )
            )
        else:
            if not self.validate_domain_part(domain_part):
                self.error(
                    "{} {}".format(
                        self.error_msg % value, "(domain validation failed)"
                    )
                )
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
class IntField(BaseField):
    """32-bit integer field."""

    def __init__(self, min_value=None, max_value=None, **kwargs):
        """
        :param min_value: (optional) lower bound enforced during validation
        :param max_value: (optional) upper bound enforced during validation
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.BaseField`
        """
        self.min_value, self.max_value = min_value, max_value
        super().__init__(**kwargs)

    def to_python(self, value):
        """Best-effort coercion to ``int``; unconvertible values pass through."""
        try:
            return int(value)
        except (TypeError, ValueError):
            return value

    def validate(self, value):
        """Ensure the value is int-convertible and within the configured bounds."""
        try:
            value = int(value)
        except (TypeError, ValueError):
            self.error("%s could not be converted to int" % value)

        if self.min_value is not None and value < self.min_value:
            self.error("Integer value is too small")
        if self.max_value is not None and value > self.max_value:
            self.error("Integer value is too large")

    def prepare_query_value(self, op, value):
        if value is None:
            return value
        return super().prepare_query_value(op, int(value))
|
|
367
|
+
|
|
368
|
+
|
|
369
|
+
class LongField(IntField):
    """64-bit integer field. (Equivalent to IntField since the support to Python2 was dropped)"""

    def to_mongo(self, value):
        # Wrap in bson.Int64 so MongoDB stores the value as a 64-bit integer
        # even when it would otherwise fit in 32 bits.
        return Int64(value)
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
class FloatField(BaseField):
    """Floating point number field."""

    def __init__(self, min_value=None, max_value=None, **kwargs):
        """
        :param min_value: (optional) A min value that will be applied during validation
        :param max_value: (optional) A max value that will be applied during validation
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.BaseField`
        """
        self.min_value, self.max_value = min_value, max_value
        super().__init__(**kwargs)

    def to_python(self, value):
        """Best-effort coercion to ``float``.

        Unconvertible values are returned unchanged so that ``validate``
        can report them, instead of this method raising.
        """
        try:
            value = float(value)
        except (TypeError, ValueError):
            # TypeError covers None/containers, ValueError covers bad strings;
            # catching both keeps this consistent with IntField.to_python.
            pass
        return value

    def validate(self, value):
        """Ensure the value is a float (ints are upcast) within the bounds."""
        if isinstance(value, int):
            try:
                value = float(value)
            except OverflowError:
                self.error("The value is too large to be converted to float")

        if not isinstance(value, float):
            self.error("FloatField only accepts float and integer values")

        if self.min_value is not None and value < self.min_value:
            self.error("Float value is too small")

        if self.max_value is not None and value > self.max_value:
            self.error("Float value is too large")

    def prepare_query_value(self, op, value):
        if value is None:
            return value

        return super().prepare_query_value(op, float(value))
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
class DecimalField(BaseField):
    """Disclaimer: This field is kept for historical reason but since it converts the values to float, it
    is not suitable for true decimal storage. Consider using :class:`~autonomous.db.fields.Decimal128Field`.

    Fixed-point decimal number field. Stores the value as a float by default unless `force_string` is used.
    If using floats, beware of Decimal to float conversion (potential precision loss)
    """

    def __init__(
        self,
        min_value=None,
        max_value=None,
        force_string=False,
        precision=2,
        rounding=decimal.ROUND_HALF_UP,
        **kwargs,
    ):
        """
        :param min_value: (optional) A min value that will be applied during validation
        :param max_value: (optional) A max value that will be applied during validation
        :param force_string: Store the value as a string (instead of a float).
            Be aware that this affects query sorting and operation like lte, gte (as string comparison is applied)
            and some query operator won't work (e.g. inc, dec)
        :param precision: Number of decimal places to store.
        :param rounding: The rounding rule from the python decimal library:

            - decimal.ROUND_CEILING (towards Infinity)
            - decimal.ROUND_DOWN (towards zero)
            - decimal.ROUND_FLOOR (towards -Infinity)
            - decimal.ROUND_HALF_DOWN (to nearest with ties going towards zero)
            - decimal.ROUND_HALF_EVEN (to nearest with ties going to nearest even integer)
            - decimal.ROUND_HALF_UP (to nearest with ties going away from zero)
            - decimal.ROUND_UP (away from zero)
            - decimal.ROUND_05UP (away from zero if last digit after rounding towards zero would have been 0 or 5; otherwise towards zero)

            Defaults to: ``decimal.ROUND_HALF_UP``
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.BaseField`
        """
        self.min_value = min_value
        self.max_value = max_value
        self.force_string = force_string

        # Check the type BEFORE comparing: comparing a non-numeric precision
        # (e.g. a string) against 0 would raise a TypeError instead of
        # reporting a proper field error.
        if not isinstance(precision, int) or precision < 0:
            self.error("precision must be a positive integer")

        self.precision = precision
        self.rounding = rounding

        super().__init__(**kwargs)

    def to_python(self, value):
        """Coerce to a quantized ``decimal.Decimal``; unconvertible values
        are returned unchanged."""
        # Convert to string first so floats round-trip via their repr.
        try:
            value = decimal.Decimal("%s" % value)
        except (TypeError, ValueError, decimal.InvalidOperation):
            return value
        if self.precision > 0:
            return value.quantize(
                decimal.Decimal(".%s" % ("0" * self.precision)), rounding=self.rounding
            )
        else:
            # precision == 0: quantize to an integral Decimal.
            return value.quantize(decimal.Decimal(), rounding=self.rounding)

    def to_mongo(self, value):
        # Stored either as a string (lexicographic queries) or a float.
        if self.force_string:
            return str(self.to_python(value))
        return float(self.to_python(value))

    def validate(self, value):
        """Ensure the value is Decimal-convertible and within the bounds."""
        if not isinstance(value, decimal.Decimal):
            if not isinstance(value, str):
                value = str(value)
            try:
                value = decimal.Decimal(value)
            except (TypeError, ValueError, decimal.InvalidOperation) as exc:
                self.error("Could not convert value to decimal: %s" % exc)

        if self.min_value is not None and value < self.min_value:
            self.error("Decimal value is too small")

        if self.max_value is not None and value > self.max_value:
            self.error("Decimal value is too large")

    def prepare_query_value(self, op, value):
        if value is None:
            return value
        return super().prepare_query_value(op, self.to_mongo(value))
|
|
505
|
+
|
|
506
|
+
|
|
507
|
+
class BooleanField(BaseField):
    """Boolean field type."""

    def to_python(self, value):
        """Best-effort coercion to ``bool``; unconvertible values pass through."""
        try:
            return bool(value)
        except (ValueError, TypeError):
            return value

    def validate(self, value):
        """Accept only genuine ``bool`` instances (no truthy ints/strings)."""
        if not isinstance(value, bool):
            self.error("BooleanField only accepts boolean values")
|
|
520
|
+
|
|
521
|
+
|
|
522
|
+
class DateTimeField(BaseField):
    """Datetime field.

    Uses the python-dateutil library if available alternatively use time.strptime
    to parse the dates. Note: python-dateutil's parser is fully featured and when
    installed you can utilise it to convert varying types of date formats into valid
    python datetime objects.

    Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)

    Note: Microseconds are rounded to the nearest millisecond.
    Pre UTC microsecond support is effectively broken.
    Use :class:`~autonomous.db.fields.ComplexDateTimeField` if you
    need accurate microsecond support.
    """

    def validate(self, value):
        """Reject anything to_mongo cannot turn into a datetime/date."""
        if not isinstance(self.to_mongo(value), (datetime.datetime, datetime.date)):
            self.error('cannot parse date "%s"' % value)

    def to_mongo(self, value):
        """Normalize None/datetime/date/callable/str inputs to a datetime
        (or None when the input cannot be interpreted)."""
        if value is None or isinstance(value, datetime.datetime):
            return value
        if isinstance(value, datetime.date):
            return datetime.datetime(value.year, value.month, value.day)
        if callable(value):
            # e.g. a default such as datetime.utcnow
            return value()
        if isinstance(value, str):
            return self._parse_datetime(value)
        return None

    @staticmethod
    def _parse_datetime(value):
        """Attempt to parse a datetime from a string; return None on failure."""
        value = value.strip()
        if not value:
            return None

        if dateutil:
            try:
                return dateutil.parser.parse(value)
            except (TypeError, ValueError, OverflowError):
                return None

        # strptime does not understand fractional seconds, so split them off.
        usecs = 0
        if "." in value:
            try:
                value, frac = value.split(".")
                usecs = int(frac)
            except ValueError:
                return None

        kwargs = {"microsecond": usecs}
        # Try the most specific format first; time and seconds are optional.
        for fmt, n_fields in (
            ("%Y-%m-%d %H:%M:%S", 6),
            ("%Y-%m-%d %H:%M", 5),
            ("%Y-%m-%d", 3),
        ):
            try:
                return datetime.datetime(
                    *time.strptime(value, fmt)[:n_fields], **kwargs
                )
            except ValueError:
                continue
        return None

    def prepare_query_value(self, op, value):
        return super().prepare_query_value(op, self.to_mongo(value))
|
|
600
|
+
|
|
601
|
+
|
|
602
|
+
class DateField(DateTimeField):
    """Date field: stores midnight datetimes, exposes ``datetime.date`` values."""

    def to_mongo(self, value):
        value = super().to_mongo(value)
        if isinstance(value, datetime.datetime):
            # Truncate the time component before storage.
            value = datetime.datetime(value.year, value.month, value.day)
        return value

    def to_python(self, value):
        value = super().to_python(value)
        if isinstance(value, datetime.datetime):
            # Callers see a plain date, never a datetime.
            value = value.date()
        return value
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
class ComplexDateTimeField(StringField):
    """
    ComplexDateTimeField handles microseconds exactly instead of rounding
    like DateTimeField does.

    Derives from a StringField so you can do `gte` and `lte` filtering by
    using lexicographical comparison when filtering / sorting strings.

    The stored string has the following format:

        YYYY,MM,DD,HH,MM,SS,NNNNNN

    Where NNNNNN is the number of microseconds of the represented `datetime`.
    The `,` as the separator can be easily modified by passing the `separator`
    keyword when initializing the field.

    Note: To default the field to the current datetime, use: DateTimeField(default=datetime.utcnow)
    """

    def __init__(self, separator=",", **kwargs):
        """
        :param separator: Allows to customize the separator used for storage (default ``,``)
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.StringField`
        """
        self.separator = separator
        # strftime/strptime template, e.g. "%Y,%m,%d,%H,%M,%S,%f".
        self.format = separator.join(["%Y", "%m", "%d", "%H", "%M", "%S", "%f"])
        super().__init__(**kwargs)

    def _convert_from_datetime(self, val):
        """
        Convert a `datetime` object to a string representation (which will be
        stored in MongoDB). This is the reverse function of
        `_convert_from_string`.

        >>> a = datetime(2011, 6, 8, 20, 26, 24, 92284)
        >>> ComplexDateTimeField()._convert_from_datetime(a)
        '2011,06,08,20,26,24,092284'
        """
        return val.strftime(self.format)

    def _convert_from_string(self, data):
        """
        Convert a string representation to a `datetime` object (the object you
        will manipulate). This is the reverse function of
        `_convert_from_datetime`.

        >>> a = '2011,06,08,20,26,24,092284'
        >>> ComplexDateTimeField()._convert_from_string(a)
        datetime.datetime(2011, 6, 8, 20, 26, 24, 92284)
        """
        # Zero-padding in the stored format guarantees each piece parses
        # cleanly as an int; order matches the datetime constructor.
        values = [int(d) for d in data.split(self.separator)]
        return datetime.datetime(*values)

    def __get__(self, instance, owner):
        # Accessed on the class itself: return the field descriptor.
        if instance is None:
            return self

        data = super().__get__(instance, owner)

        # _data may hold either the already-converted datetime or the raw
        # stored string; only strings need inflating.
        if isinstance(data, datetime.datetime) or data is None:
            return data
        return self._convert_from_string(data)

    def __set__(self, instance, value):
        super().__set__(instance, value)
        # Re-read what the parent stored, then normalize datetimes to the
        # lexicographically sortable string form.
        value = instance._data[self.name]
        if value is not None:
            if isinstance(value, datetime.datetime):
                instance._data[self.name] = self._convert_from_datetime(value)
            else:
                instance._data[self.name] = value

    def validate(self, value):
        value = self.to_python(value)
        if not isinstance(value, datetime.datetime):
            self.error("Only datetime objects may used in a ComplexDateTimeField")

    def to_python(self, value):
        # Strings in the stored format become datetimes; anything else
        # (including already-converted datetimes) is returned unchanged.
        original_value = value
        try:
            return self._convert_from_string(value)
        except Exception:
            return original_value

    def to_mongo(self, value):
        value = self.to_python(value)
        return self._convert_from_datetime(value)

    def prepare_query_value(self, op, value):
        if value is None:
            return value
        return super().prepare_query_value(op, self._convert_from_datetime(value))
|
|
710
|
+
|
|
711
|
+
|
|
712
|
+
class EmbeddedDocumentField(BaseField):
    """An embedded document field - with a declared document_type.
    Only valid values are subclasses of :class:`~autonomous.db.EmbeddedDocument`.
    """

    def __init__(self, document_type, **kwargs):
        # document_type may be a class, a string class name, or the sentinel
        # "self" for recursive embedding; strings are resolved lazily by the
        # `document_type` property below.
        if not (
            isinstance(document_type, str)
            or issubclass(document_type, EmbeddedDocument)
        ):
            self.error(
                "Invalid embedded document class provided to an "
                "EmbeddedDocumentField"
            )

        self.document_type_obj = document_type
        super().__init__(**kwargs)

    @property
    def document_type(self):
        # Resolve string references on first access and cache the resolved
        # class back into document_type_obj.
        if isinstance(self.document_type_obj, str):
            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
                resolved_document_type = self.owner_document
            else:
                resolved_document_type = get_document(self.document_type_obj)

            if not issubclass(resolved_document_type, EmbeddedDocument):
                # Due to the late resolution of the document_type
                # There is a chance that it won't be an EmbeddedDocument (#1661)
                self.error(
                    "Invalid embedded document class provided to an "
                    "EmbeddedDocumentField"
                )
            self.document_type_obj = resolved_document_type

        return self.document_type_obj

    def to_python(self, value):
        # Inflate raw SON/dict data from the DB into a document instance;
        # already-inflated instances pass through.
        if not isinstance(value, self.document_type):
            return self.document_type._from_son(
                value, _auto_dereference=self._auto_dereference
            )
        return value

    def to_mongo(self, value, use_db_field=True, fields=None):
        # Non-instances (e.g. raw dicts or None) are stored as-is.
        if not isinstance(value, self.document_type):
            return value
        return self.document_type.to_mongo(value, use_db_field, fields)

    def validate(self, value, clean=True):
        """Make sure that the document instance is an instance of the
        EmbeddedDocument subclass provided when the document was defined.
        """
        # Using isinstance also works for subclasses of self.document
        if not isinstance(value, self.document_type):
            self.error(
                "Invalid embedded document instance provided to an "
                "EmbeddedDocumentField"
            )
        # Delegate field-level validation to the embedded document itself.
        value.validate(clean=clean)

    def lookup_member(self, member_name):
        # Search the declared type and all of its subclasses for the named
        # field (subclasses may add fields); returns None when absent.
        doc_and_subclasses = [self.document_type] + self.document_type.__subclasses__()
        for doc_type in doc_and_subclasses:
            field = doc_type._fields.get(member_name)
            if field:
                return field

    def prepare_query_value(self, op, value):
        if value is not None and not isinstance(value, self.document_type):
            # Short circuit for special operators, returning them as is
            if isinstance(value, dict) and all(k.startswith("$") for k in value.keys()):
                return value
            try:
                value = self.document_type._from_son(value)
            except ValueError:
                raise InvalidQueryError(
                    "Querying the embedded document '%s' failed, due to an invalid query value"
                    % (self.document_type._class_name,)
                )
        super().prepare_query_value(op, value)
        return self.to_mongo(value)
|
|
794
|
+
|
|
795
|
+
|
|
796
|
+
class GenericEmbeddedDocumentField(BaseField):
    """A generic embedded document field - allows any
    :class:`~autonomous.db.EmbeddedDocument` to be stored.

    Only valid values are subclasses of :class:`~autonomous.db.EmbeddedDocument`.

    .. note ::
        You can use the choices param to limit the acceptable
        EmbeddedDocument types
    """

    def prepare_query_value(self, op, value):
        # Serialise first so queries compare against the stored form.
        return super().prepare_query_value(op, self.to_mongo(value))

    def to_python(self, value):
        """Rebuild the concrete EmbeddedDocument from its stored dict,
        using the persisted ``_cls`` marker to pick the class.
        """
        if isinstance(value, dict):
            doc_cls = get_document(value["_cls"])
            value = doc_cls._from_son(value)

        return value

    def validate(self, value, clean=True):
        """Accept any EmbeddedDocument instance (or, when choices are set,
        raw SON whose ``_cls`` matches an allowed choice).
        """
        # Raw SON matching an allowed choice is accepted without
        # instantiating the document.
        if self.choices and isinstance(value, SON):
            for choice in self.choices:
                if value["_cls"] == choice._class_name:
                    return True

        if not isinstance(value, EmbeddedDocument):
            self.error(
                "Invalid embedded document instance provided to an "
                "GenericEmbeddedDocumentField"
            )

        value.validate(clean=clean)

    def lookup_member(self, member_name):
        """Search every allowed choice class (and its subclasses) for a
        field named *member_name*; implicitly None when absent or when no
        choices were declared.
        """
        document_choices = self.choices or []
        for document_choice in document_choices:
            doc_and_subclasses = [document_choice] + document_choice.__subclasses__()
            for doc_type in doc_and_subclasses:
                field = doc_type._fields.get(member_name)
                if field:
                    return field

    def to_mongo(self, document, use_db_field=True, fields=None):
        """Serialise *document*, ensuring a ``_cls`` marker is stored so
        ``to_python`` can recover the concrete class later.
        """
        if document is None:
            return None
        data = document.to_mongo(use_db_field, fields)
        if "_cls" not in data:
            data["_cls"] = document._class_name
        return data
|
|
847
|
+
|
|
848
|
+
|
|
849
|
+
class DynamicField(BaseField):
    """A truly dynamic field type capable of handling different and varying
    types of data.

    Used by :class:`~autonomous.db.DynamicDocument` to handle dynamic data"""

    def to_mongo(self, value, use_db_field=True, fields=None):
        """Convert a Python type to a MongoDB compatible type."""

        if isinstance(value, str):
            return value

        if hasattr(value, "to_mongo"):
            cls = value.__class__
            val = value.to_mongo(use_db_field, fields)
            # Top-level Documents are stored as a DBRef plus the class
            # name; embedded documents keep their data and get a _cls tag.
            if isinstance(value, Document):
                val = {"_ref": value.to_dbref(), "_cls": cls.__name__}
            if isinstance(value, EmbeddedDocument):
                val["_cls"] = cls.__name__
            return val

        if not isinstance(value, (dict, list, tuple)):
            return value

        # Treat sequences as dicts keyed by index so the single recursive
        # loop below handles both, then rebuild the list at the end.
        is_list = False
        if not hasattr(value, "items"):
            is_list = True
            value = {k: v for k, v in enumerate(value)}

        data = {}
        for k, v in value.items():
            data[k] = self.to_mongo(v, use_db_field, fields)

        value = data
        if is_list:  # Convert back to a list
            value = [v for k, v in sorted(data.items(), key=itemgetter(0))]
        return value

    def to_python(self, value):
        """Rebuild Documents/EmbeddedDocuments from their stored form."""
        if isinstance(value, dict) and "_cls" in value:
            doc_cls = get_document(value["_cls"])
            if "_ref" in value:
                # Stored as a reference - fetch the referenced document.
                value = doc_cls._get_db().dereference(value["_ref"])
            return doc_cls._from_son(value)

        return super().to_python(value)

    def lookup_member(self, member_name):
        # Dynamic data has no schema; any member name is accepted verbatim.
        return member_name

    def prepare_query_value(self, op, value):
        if isinstance(value, str):
            # Delegate so string query operators behave as for StringField.
            return StringField().prepare_query_value(op, value)
        return super().prepare_query_value(op, self.to_mongo(value))

    def validate(self, value, clean=True):
        # Only values that know how to validate themselves are checked.
        if hasattr(value, "validate"):
            value.validate(clean=clean)
|
|
908
|
+
|
|
909
|
+
|
|
910
|
+
class ListField(ComplexBaseField):
    """A list field that wraps a standard field, allowing multiple instances
    of the field to be used as a list in the database.

    If using with ReferenceFields see: :ref:`many-to-many-with-listfields`

    .. note::
        Required means it cannot be empty - as the default for ListFields is []
    """

    def __init__(self, field=None, *, max_length=None, **kwargs):
        """
        :param field: the field type every list element must satisfy
            (None allows arbitrary elements).
        :param max_length: optional cap on list length, enforced by
            ``validate`` and by ``set`` query preparation.
        """
        self.max_length = max_length
        # Default to an empty list unless the caller supplied a default.
        if not kwargs.get("default"):
            kwargs["default"] = list
        super().__init__(field=field, **kwargs)

    def __get__(self, instance, owner):
        """Descriptor access; wraps stored lazy references on first read."""
        if instance is None:
            # Document class being used rather than a document object
            return self
        value = instance._data.get(self.name)
        LazyReferenceField = _import_class("LazyReferenceField")
        GenericLazyReferenceField = _import_class("GenericLazyReferenceField")
        # Raw stored references become LazyReference objects on access.
        if (
            isinstance(self.field, (LazyReferenceField, GenericLazyReferenceField))
            and value
        ):
            instance._data[self.name] = [self.field.build_lazyref(x) for x in value]

        return super().__get__(instance, owner)

    def validate(self, value):
        """Make sure that a list of valid fields is being used."""
        if not isinstance(value, (list, tuple, BaseQuerySet)):
            self.error("Only lists and tuples may be used in a list field")

        # Validate that max_length is not exceeded.
        # NOTE It's still possible to bypass this enforcement by using $push.
        # However, if the document is reloaded after $push and then re-saved,
        # the validation error will be raised.
        if self.max_length is not None and len(value) > self.max_length:
            self.error("List is too long")

        super().validate(value)

    def prepare_query_value(self, op, value):
        """Prepare *value* for a query, delegating per-element work to the
        wrapped field when one was declared.
        """
        # Validate that the `set` operator doesn't contain more items than `max_length`.
        if op == "set" and self.max_length is not None and len(value) > self.max_length:
            self.error("List is too long")

        if self.field:
            # If the value is iterable and it's not a string nor a
            # BaseDocument, call prepare_query_value for each of its items.
            is_iter = hasattr(value, "__iter__")
            eligible_iter = is_iter and not isinstance(value, (str, BaseDocument))
            if (
                op in ("set", "unset", "gt", "gte", "lt", "lte", "ne", None)
                and eligible_iter
            ):
                return [self.field.prepare_query_value(op, v) for v in value]

            return self.field.prepare_query_value(op, value)

        return super().prepare_query_value(op, value)
|
|
975
|
+
|
|
976
|
+
|
|
977
|
+
class EmbeddedDocumentListField(ListField):
    """A :class:`~autonomous.db.ListField` specialised for embedded
    documents, exposing the extra query helpers that come with them.

    .. note::
        Every element must be an instance of a
        :class:`~autonomous.db.EmbeddedDocument` subclass.
    """

    def __init__(self, document_type, **kwargs):
        """
        :param document_type: the :class:`~autonomous.db.EmbeddedDocument`
            subclass (or its name) each list element will hold.
        :param kwargs: forwarded to the parent
            :class:`~autonomous.db.ListField`.
        """
        # Wrap the declared type so the parent ListField validates and
        # converts each element as an embedded document.
        inner_field = EmbeddedDocumentField(document_type)
        super().__init__(field=inner_field, **kwargs)
|
|
993
|
+
|
|
994
|
+
|
|
995
|
+
class SortedListField(ListField):
    """A ListField whose contents are sorted before every write, so the
    database always holds (and returns) an ordered list.

    .. warning::
        Lists are subject to a race: two processes setting and saving the
        whole list can overwrite each other's changes. Appending via a
        push operation is the safest pattern.
    """

    def __init__(self, field, **kwargs):
        # Optional item key to sort by, plus the sort direction.
        self._ordering = kwargs.pop("ordering", None)
        self._order_reverse = kwargs.pop("reverse", False)
        super().__init__(field, **kwargs)

    def to_mongo(self, value, use_db_field=True, fields=None):
        """Serialise via the parent, then sort the result for storage."""
        converted = super().to_mongo(value, use_db_field, fields)
        # sorted(key=None) is natural ordering, so both configurations
        # collapse into a single call.
        sort_key = itemgetter(self._ordering) if self._ordering is not None else None
        return sorted(converted, key=sort_key, reverse=self._order_reverse)
|
|
1019
|
+
|
|
1020
|
+
|
|
1021
|
+
def key_not_string(d):
    """Helper function to recursively determine if any key in a
    dictionary is not a string.

    :param d: the dictionary to inspect (nested dict values are checked too)
    :return: True if a non-string key exists anywhere, False otherwise
    """
    # any() yields an explicit False on the clean path; the previous
    # implementation fell off the end and returned None (truthiness-
    # compatible, but inconsistent returns).
    return any(
        not isinstance(k, str) or (isinstance(v, dict) and key_not_string(v))
        for k, v in d.items()
    )
|
|
1028
|
+
|
|
1029
|
+
|
|
1030
|
+
def key_starts_with_dollar(d):
    """Helper function to recursively determine if any key in a
    dictionary starts with a dollar.

    :param d: the dictionary to inspect; keys must already be strings
        (callers run ``key_not_string`` first)
    :return: True if any key anywhere starts with "$", False otherwise
    """
    # any() yields an explicit False on the clean path; the previous
    # implementation fell off the end and returned None.
    return any(
        k.startswith("$") or (isinstance(v, dict) and key_starts_with_dollar(v))
        for k, v in d.items()
    )
|
|
1037
|
+
|
|
1038
|
+
|
|
1039
|
+
class DictField(ComplexBaseField):
    """A dictionary field that wraps a standard Python dictionary. This is
    similar to an embedded document, but the structure is not defined.

    .. note::
        Required means it cannot be empty - as the default for DictFields is {}
    """

    def __init__(self, field=None, *args, **kwargs):
        """
        :param field: optional field type that every dict value must satisfy.
        """
        kwargs.setdefault("default", dict)
        super().__init__(*args, field=field, **kwargs)
        # Dict values are treated as opaque data; never auto-dereference.
        self.set_auto_dereferencing(False)

    def validate(self, value):
        """Make sure that a list of valid fields is being used."""
        if not isinstance(value, dict):
            self.error("Only dictionaries may be used in a DictField")

        if key_not_string(value):
            msg = "Invalid dictionary key - documents must have only string keys"
            self.error(msg)

        # Following condition applies to MongoDB >= 3.6
        # older Mongo has stricter constraints but
        # it will be rejected upon insertion anyway
        # Having a validation that depends on the MongoDB version
        # is not straightforward as the field isn't aware of the connected Mongo
        if key_starts_with_dollar(value):
            self.error(
                'Invalid dictionary key name - keys may not startswith "$" characters'
            )
        super().validate(value)

    def lookup_member(self, member_name):
        # Any key is addressable; expose it as a nested DictField.
        return DictField(db_field=member_name)

    def prepare_query_value(self, op, value):
        """Prepare *value* for a query, delegating string-operator queries
        to StringField and per-value work to the wrapped field.
        """
        match_operators = [*STRING_OPERATORS]

        if op in match_operators and isinstance(value, str):
            return StringField().prepare_query_value(op, value)

        if hasattr(
            self.field, "field"
        ):  # Used for instance when using DictField(ListField(IntField()))
            if op in ("set", "unset") and isinstance(value, dict):
                return {
                    k: self.field.prepare_query_value(op, v) for k, v in value.items()
                }
            return self.field.prepare_query_value(op, value)

        return super().prepare_query_value(op, value)
|
|
1091
|
+
|
|
1092
|
+
|
|
1093
|
+
class MapField(DictField):
    """A field that maps a name to a specified field type. Similar to
    a DictField, except the 'value' of each item must match the specified
    field type.
    """

    def __init__(self, field=None, *args, **kwargs):
        """
        :param field: the field instance that every mapped value must
            satisfy; required, despite the ``None`` default kept for
            signature compatibility with :class:`DictField`.
        :param kwargs: keyword arguments forwarded to :class:`DictField`.
        """
        # XXX ValidationError raised outside the "validate" method.
        if not isinstance(field, BaseField):
            self.error("Argument to MapField constructor must be a valid field")
        # Positional args before the keyword: the original passed
        # `field=field, *args`, which is legal but confusing (bugbear
        # B026); this matches DictField's own call convention. Behavior
        # is identical.
        super().__init__(*args, field=field, **kwargs)
|
|
1104
|
+
|
|
1105
|
+
|
|
1106
|
+
class ReferenceField(BaseField):
    """A reference to a document that will be automatically dereferenced on
    access (lazily).

    Note this means you will get a database I/O access everytime you access
    this field. This is necessary because the field returns a :class:`~autonomous.db.Document`
    which precise type can depend of the value of the `_cls` field present in the
    document in database.
    In short, using this type of field can lead to poor performances (especially
    if you access this field only to retrieve it `pk` field which is already
    known before dereference). To solve this you should consider using the
    :class:`~autonomous.db.fields.LazyReferenceField`.

    Use the `reverse_delete_rule` to handle what should happen if the document
    the field is referencing is deleted. EmbeddedDocuments, DictFields and
    MapFields does not support reverse_delete_rule and an `InvalidDocumentError`
    will be raised if trying to set on one of these Document / Field types.

    The options are:

      * DO_NOTHING (0) - don't do anything (default).
      * NULLIFY (1) - Updates the reference to null.
      * CASCADE (2) - Deletes the documents associated with the reference.
      * DENY (3) - Prevent the deletion of the reference object.
      * PULL (4) - Pull the reference from a :class:`~autonomous.db.fields.ListField` of references

    Alternative syntax for registering delete rules (useful when implementing
    bi-directional delete rules)

    .. code-block:: python

        class Org(Document):
            owner = ReferenceField('User')

        class User(Document):
            org = ReferenceField('Org', reverse_delete_rule=CASCADE)

        User.register_delete_rule(Org, 'owner', DENY)
    """

    def __init__(
        self, document_type, dbref=False, reverse_delete_rule=DO_NOTHING, **kwargs
    ):
        """Initialises the Reference Field.

        :param document_type: The type of Document that will be referenced
        :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
          or as the :class:`~pymongo.objectid.ObjectId`.
        :param reverse_delete_rule: Determines what to do when the referring
          object is deleted
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.BaseField`

        .. note ::
            A reference to an abstract document type is always stored as a
            :class:`~pymongo.dbref.DBRef`, regardless of the value of `dbref`.
        """
        # XXX ValidationError raised outside of the "validate" method.
        if not (
            isinstance(document_type, str)
            or (isclass(document_type) and issubclass(document_type, Document))
        ):
            self.error(
                "Argument to ReferenceField constructor must be a "
                "document class or a string"
            )

        self.dbref = dbref
        self.document_type_obj = document_type
        self.reverse_delete_rule = reverse_delete_rule
        super().__init__(**kwargs)

    @property
    def document_type(self):
        """Resolve (and cache) the referenced document class; string
        references, including the recursive 'self' constant, are resolved
        on first access.
        """
        if isinstance(self.document_type_obj, str):
            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
                self.document_type_obj = self.owner_document
            else:
                self.document_type_obj = get_document(self.document_type_obj)
        return self.document_type_obj

    @staticmethod
    def _lazy_load_ref(ref_cls, dbref):
        # Fetch the referenced document; a dangling reference raises
        # DoesNotExist rather than silently returning None.
        dereferenced_son = ref_cls._get_db().dereference(dbref)
        if dereferenced_son is None:
            raise DoesNotExist(f"Trying to dereference unknown document {dbref}")

        return ref_cls._from_son(dereferenced_son)

    def __get__(self, instance, owner):
        """Descriptor to allow lazy dereferencing."""
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Get value from document instance if available
        ref_value = instance._data.get(self.name)
        auto_dereference = instance._fields[self.name]._auto_dereference
        # Dereference DBRefs
        if auto_dereference and isinstance(ref_value, DBRef):
            if hasattr(ref_value, "cls"):
                # Dereference using the class type specified in the reference
                cls = get_document(ref_value.cls)
            else:
                cls = self.document_type

            # Cache the dereferenced document so later reads skip the I/O.
            instance._data[self.name] = self._lazy_load_ref(cls, ref_value)

        return super().__get__(instance, owner)

    def to_mongo(self, document):
        """Serialise the reference to an ObjectId-like value or a DBRef,
        depending on ``self.dbref`` and the target's abstractness.
        """
        if isinstance(document, DBRef):
            if not self.dbref:
                return document.id
            return document

        if isinstance(document, Document):
            # We need the id from the saved object to create the DBRef
            id_ = document.pk

            # XXX ValidationError raised outside of the "validate" method.
            if id_ is None:
                self.error(
                    "You can only reference documents once they have"
                    " been saved to the database"
                )

            # Use the attributes from the document instance, so that they
            # override the attributes of this field's document type
            cls = document
        else:
            id_ = document
            cls = self.document_type

        id_field_name = cls._meta["id_field"]
        id_field = cls._fields[id_field_name]

        id_ = id_field.to_mongo(id_)
        # Abstract targets always store a class-annotated DBRef so the
        # concrete class can be recovered on load.
        if self.document_type._meta.get("abstract"):
            collection = cls._get_collection_name()
            return DBRef(collection, id_, cls=cls._class_name)
        elif self.dbref:
            collection = cls._get_collection_name()
            return DBRef(collection, id_)

        return id_

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type."""
        # Bare ids are normalised to DBRefs so __get__ can dereference them.
        if not self.dbref and not isinstance(
            value, (DBRef, Document, EmbeddedDocument)
        ):
            collection = self.document_type._get_collection_name()
            value = DBRef(collection, self.document_type.id.to_python(value))
        return value

    def prepare_query_value(self, op, value):
        """Prepare *value* for a query; None passes through unchanged."""
        if value is None:
            return None
        super().prepare_query_value(op, value)
        return self.to_mongo(value)

    def validate(self, value):
        """Reject anything that is not a document, DBRef, LazyReference or
        ObjectId, and unsaved documents (no id yet).
        """
        if not isinstance(value, (self.document_type, LazyReference, DBRef, ObjectId)):
            self.error(
                "A ReferenceField only accepts DBRef, LazyReference, ObjectId or documents"
            )

        if isinstance(value, Document) and value.id is None:
            self.error(
                "You can only reference documents once they have been "
                "saved to the database"
            )

    def lookup_member(self, member_name):
        return self.document_type._fields.get(member_name)
|
|
1281
|
+
|
|
1282
|
+
|
|
1283
|
+
class CachedReferenceField(BaseField):
    """A referencefield with cache fields to purpose pseudo-joins"""

    def __init__(self, document_type, fields=None, auto_sync=True, **kwargs):
        """Initialises the Cached Reference Field.

        :param document_type: The type of Document that will be referenced
        :param fields: A list of fields to be cached in document
        :param auto_sync: if True documents are auto updated
        :param kwargs: Keyword arguments passed into the parent :class:`~autonomous.db.BaseField`
        """
        if fields is None:
            fields = []

        # XXX ValidationError raised outside of the "validate" method.
        if not isinstance(document_type, str) and not (
            inspect.isclass(document_type) and issubclass(document_type, Document)
        ):
            self.error(
                "Argument to CachedReferenceField constructor must be a"
                " document class or a string"
            )

        self.auto_sync = auto_sync
        self.document_type_obj = document_type
        self.fields = fields
        super().__init__(**kwargs)

    def start_listener(self):
        """Connect the cache-sync handler to the referenced class's
        post_save signal.

        NOTE(review): the handler is named ``on_document_pre_save`` but is
        wired to ``post_save`` here - the name is historical.
        """
        from autonomous.db import signals

        signals.post_save.connect(self.on_document_pre_save, sender=self.document_type)

    def on_document_pre_save(self, sender, document, created, **kwargs):
        """Propagate changed cached fields of *document* into every
        document holding a cached copy of it.
        """
        if created:
            # A brand-new document cannot be referenced anywhere yet.
            return None

        # Only the cached fields that actually changed need pushing.
        update_kwargs = {
            f"set__{self.name}__{key}": val
            for key, val in document._delta()[0].items()
            if key in self.fields
        }
        if update_kwargs:
            filter_kwargs = {}
            filter_kwargs[self.name] = document

            self.owner_document.objects(**filter_kwargs).update(**update_kwargs)

    def to_python(self, value):
        """Convert the cached dict form back into a full document (this
        performs a database round-trip via the stored ``_id``)."""
        if isinstance(value, dict):
            collection = self.document_type._get_collection_name()
            value = DBRef(collection, self.document_type.id.to_python(value["_id"]))
            return self.document_type._from_son(
                self.document_type._get_db().dereference(value)
            )

        return value

    @property
    def document_type(self):
        """Resolve (and cache) the referenced document class; string
        references, including the recursive 'self' constant, are resolved
        on first access.
        """
        if isinstance(self.document_type_obj, str):
            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
                self.document_type_obj = self.owner_document
            else:
                self.document_type_obj = get_document(self.document_type_obj)
        return self.document_type_obj

    @staticmethod
    def _lazy_load_ref(ref_cls, dbref):
        # Fetch the referenced document; a dangling reference raises
        # DoesNotExist rather than silently returning None.
        dereferenced_son = ref_cls._get_db().dereference(dbref)
        if dereferenced_son is None:
            raise DoesNotExist(f"Trying to dereference unknown document {dbref}")

        return ref_cls._from_son(dereferenced_son)

    def __get__(self, instance, owner):
        """Descriptor access with lazy dereferencing of stored DBRefs."""
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Get value from document instance if available
        value = instance._data.get(self.name)
        auto_dereference = instance._fields[self.name]._auto_dereference

        # Dereference DBRefs
        if auto_dereference and isinstance(value, DBRef):
            instance._data[self.name] = self._lazy_load_ref(self.document_type, value)

        return super().__get__(instance, owner)

    def to_mongo(self, document, use_db_field=True, fields=None):
        """Serialise the reference as SON of ``{_id, <cached fields>}``."""
        id_field_name = self.document_type._meta["id_field"]
        id_field = self.document_type._fields[id_field_name]

        # XXX ValidationError raised outside of the "validate" method.
        if isinstance(document, Document):
            # We need the id from the saved object to create the DBRef
            id_ = document.pk
            if id_ is None:
                self.error(
                    "You can only reference documents once they have"
                    " been saved to the database"
                )
        else:
            self.error("Only accept a document object")

        value = SON((("_id", id_field.to_mongo(id_)),))

        # Restrict the cached copy to the requested subset of fields.
        if fields:
            new_fields = [f for f in self.fields if f in fields]
        else:
            new_fields = self.fields

        value.update(dict(document.to_mongo(use_db_field, fields=new_fields)))
        return value

    def prepare_query_value(self, op, value):
        """Prepare a query value; only None and saved Document instances
        are supported (anything else raises NotImplementedError).
        """
        if value is None:
            return None

        # XXX ValidationError raised outside of the "validate" method.
        if isinstance(value, Document):
            if value.pk is None:
                self.error(
                    "You can only reference documents once they have"
                    " been saved to the database"
                )
            value_dict = {"_id": value.pk}
            for field in self.fields:
                value_dict.update({field: value[field]})

            return value_dict

        raise NotImplementedError

    def validate(self, value):
        """Only saved instances of the referenced type are accepted."""
        if not isinstance(value, self.document_type):
            self.error("A CachedReferenceField only accepts documents")

        if isinstance(value, Document) and value.id is None:
            self.error(
                "You can only reference documents once they have been "
                "saved to the database"
            )

    def lookup_member(self, member_name):
        return self.document_type._fields.get(member_name)

    def sync_all(self):
        """
        Sync all cached fields on demand.
        Caution: this operation may be slower.
        """
        update_key = "set__%s" % self.name

        # Re-save the cached copy for every referenced document; one
        # update query per referenced document.
        for doc in self.document_type.objects:
            filter_kwargs = {}
            filter_kwargs[self.name] = doc

            update_kwargs = {}
            update_kwargs[update_key] = doc

            self.owner_document.objects(**filter_kwargs).update(**update_kwargs)
|
|
1446
|
+
|
|
1447
|
+
|
|
1448
|
+
class GenericReferenceField(BaseField):
    """A reference to *any* :class:`~autonomous.db.document.Document` subclass
    that will be automatically dereferenced on access (lazily).

    Note this field works the same way as :class:`~autonomous.db.document.ReferenceField`,
    doing database I/O access the first time it is accessed (even if it's to access
    its ``pk`` or ``id`` field).
    To solve this you should consider using the
    :class:`~autonomous.db.fields.GenericLazyReferenceField`.

    .. note ::
        * Any documents used as a generic reference must be registered in the
          document registry. Importing the model will automatically register
          it.

        * You can use the choices param to limit the acceptable Document types
    """

    def __init__(self, *args, **kwargs):
        choices = kwargs.pop("choices", None)
        super().__init__(*args, **kwargs)
        # Choices are normalized to a list of allowed Document class *names*
        # so that validation can match against a document's MRO.
        self.choices = []
        if choices:
            for choice in choices:
                if isinstance(choice, str):
                    self.choices.append(choice)
                elif isinstance(choice, type) and issubclass(choice, Document):
                    self.choices.append(choice.__name__)
                else:
                    # XXX ValidationError raised outside of the "validate"
                    # method.
                    self.error(
                        "Invalid choices provided: must be a list of "
                        "Document subclasses and/or str"
                    )

    def _validate_choices(self, value):
        """Normalize *value* to a class name before delegating to the base check."""
        if not value and self.null:
            return
        elif isinstance(value, dict):
            # If the field has not been dereferenced, it is still a dict
            # of class and DBRef
            value = value.get("_cls")
        elif isinstance(value, Document):
            # Accept any class in the document's MRO so subclasses of an
            # allowed choice validate as well.
            mro = [cls.__name__ for cls in value.__class__.mro()]
            base_value = None
            for choice in self.choices:
                if choice in mro:
                    base_value = choice
                    break
            if base_value:
                value = base_value
            else:
                raise ValidationError(
                    f"Invalid Model Type. Must be or derive from one of: {self.choices}, not: {value} for attribute {self.name}"
                )
        else:
            value = value.__class__.__name__
        super()._validate_choices(value)

    @staticmethod
    def _lazy_load_ref(ref_cls, dbref):
        """Fetch and deserialize the referenced document; raise if it is gone."""
        dereferenced_son = ref_cls._get_db().dereference(dbref)
        if dereferenced_son is None:
            raise DoesNotExist(f"Trying to dereference unknown document {dbref}")

        return ref_cls._from_son(dereferenced_son)

    def __get__(self, instance, owner):
        if instance is None:
            return self

        value = instance._data.get(self.name)

        auto_dereference = instance._fields[self.name]._auto_dereference
        # An un-dereferenced value is a {"_cls": ..., "_ref": DBRef} mapping.
        if auto_dereference and isinstance(value, dict):
            doc_cls = get_document(value["_cls"])
            instance._data[self.name] = self._lazy_load_ref(doc_cls, value["_ref"])

        return super().__get__(instance, owner)

    def validate(self, value):
        if not value and self.null:
            return
        if not isinstance(value, (Document, DBRef, dict, SON)):
            self.error("GenericReferences can only contain documents")

        if isinstance(value, (dict, SON)):
            if "_ref" not in value or "_cls" not in value:
                self.error("GenericReferences can only contain documents")

        # We need the id from the saved object to create the DBRef
        elif isinstance(value, Document) and value.id is None:
            self.error(
                "You can only reference documents once they have been"
                " saved to the database"
            )

    def to_mongo(self, document):
        """Serialize to a ``SON`` of {"_cls", "_ref"}; pass raw ids/refs through."""
        if document is None:
            return None

        if isinstance(document, (dict, SON, ObjectId, DBRef)):
            return document

        id_field_name = document.__class__._meta["id_field"]
        id_field = document.__class__._fields[id_field_name]

        if isinstance(document, Document):
            # We need the id from the saved object to create the DBRef
            id_ = document.id
            if id_ is None:
                # XXX ValidationError raised outside of the "validate" method.
                self.error(
                    "You can only reference documents once they have"
                    " been saved to the database"
                )
        else:
            id_ = document

        id_ = id_field.to_mongo(id_)
        collection = document._get_collection_name()
        ref = DBRef(collection, id_)
        return SON((("_cls", document._class_name), ("_ref", ref)))

    def prepare_query_value(self, op, value):
        if value is None:
            return None

        return self.to_mongo(value)
|
|
1599
|
+
|
|
1600
|
+
|
|
1601
|
+
class BinaryField(BaseField):
    """A binary data field.

    Accepts ``bytes`` (and ``bytearray`` on assignment, which is coerced)
    and stores values as BSON ``Binary``.

    :param max_bytes: optional maximum length enforced by :meth:`validate`.
    """

    def __init__(self, max_bytes=None, **kwargs):
        self.max_bytes = max_bytes
        super().__init__(**kwargs)

    def __set__(self, instance, value):
        """Handle bytearrays in python 3.1"""
        if isinstance(value, bytearray):
            value = bytes(value)
        return super().__set__(instance, value)

    def to_mongo(self, value):
        return Binary(value)

    def validate(self, value):
        if not isinstance(value, (bytes, Binary)):
            self.error(
                "BinaryField only accepts instances of (%s, %s, Binary)"
                % (bytes.__name__, Binary.__name__)
            )

        too_long = self.max_bytes is not None and len(value) > self.max_bytes
        if too_long:
            self.error("Binary value is too long")

    def prepare_query_value(self, op, value):
        if value is None:
            return value
        return super().prepare_query_value(op, self.to_mongo(value))
|
|
1631
|
+
|
|
1632
|
+
|
|
1633
|
+
class EnumField(BaseField):
    """Enumeration Field. Values are stored underneath as is,
    so it will only work with simple types (str, int, etc) that
    are bson encodable

    Example usage:

    .. code-block:: python

        class Status(Enum):
            NEW = 'new'
            ONGOING = 'ongoing'
            DONE = 'done'

        class ModelWithEnum(Document):
            status = EnumField(Status, default=Status.NEW)

        ModelWithEnum(status='done')
        ModelWithEnum(status=Status.DONE)

    Enum fields can be searched using enum or its value:

    .. code-block:: python

        ModelWithEnum.objects(status='new').count()
        ModelWithEnum.objects(status=Status.NEW).count()

    The values can be restricted to a subset of the enum by using the ``choices`` parameter:

    .. code-block:: python

        class ModelWithEnum(Document):
            status = EnumField(Status, choices=[Status.NEW, Status.DONE])
    """

    def __init__(self, enum, **kwargs):
        self._enum_cls = enum
        if kwargs.get("choices"):
            # Every explicit choice must be a member of the enum itself.
            invalid_choices = [
                choice for choice in kwargs["choices"]
                if not isinstance(choice, enum)
            ]
            if invalid_choices:
                raise ValueError("Invalid choices: %r" % invalid_choices)
        else:
            kwargs["choices"] = list(self._enum_cls)  # Implicit validator
        super().__init__(**kwargs)

    def validate(self, value):
        if isinstance(value, self._enum_cls):
            return super().validate(value)
        # A raw value is acceptable if the enum can construct a member from it.
        try:
            self._enum_cls(value)
        except ValueError:
            self.error(f"{value} is not a valid {self._enum_cls}")

    def to_python(self, value):
        value = super().to_python(value)
        if isinstance(value, self._enum_cls):
            return value
        try:
            return self._enum_cls(value)
        except ValueError:
            # Leave unconvertible values alone; validate() reports them.
            return value

    def __set__(self, instance, value):
        return super().__set__(instance, self.to_python(value))

    def to_mongo(self, value):
        # Persist the underlying value, never the Enum wrapper.
        return value.value if isinstance(value, self._enum_cls) else value

    def prepare_query_value(self, op, value):
        if value is None:
            return value
        return super().prepare_query_value(op, self.to_mongo(value))
|
|
1710
|
+
|
|
1711
|
+
|
|
1712
|
+
class GridFSError(Exception):
    """Raised on invalid GridFS operations (e.g. overwriting an existing file)."""
|
|
1714
|
+
|
|
1715
|
+
|
|
1716
|
+
class GridFSProxy:
    """Proxy object to handle writing and reading of files to and from GridFS"""

    _fs = None  # lazily-created gridfs.GridFS handle (see the ``fs`` property)

    def __init__(
        self,
        grid_id=None,
        key=None,
        instance=None,
        db_alias=DEFAULT_CONNECTION_NAME,
        collection_name="fs",
    ):
        self.grid_id = grid_id  # Store GridFS id for file
        self.key = key  # name of the owning FileField on the document
        self.instance = instance  # owning document, used to mark changes
        self.db_alias = db_alias
        self.collection_name = collection_name
        self.newfile = None  # Used for partial writes
        self.gridout = None  # cached GridOut from the last get()

    def __getattr__(self, name):
        # Own attributes resolve normally; anything else is delegated to the
        # underlying GridOut so the proxy behaves like a file object.
        attrs = (
            "_fs",
            "grid_id",
            "key",
            "instance",
            "db_alias",
            "collection_name",
            "newfile",
            "gridout",
        )
        if name in attrs:
            return self.__getattribute__(name)
        obj = self.get()
        if hasattr(obj, name):
            return getattr(obj, name)
        raise AttributeError

    def __get__(self, instance, value):
        return self

    def __bool__(self):
        return bool(self.grid_id)

    def __getstate__(self):
        # NOTE(review): this clears the cached GridFS handle on the *live*
        # object's __dict__ (not a copy) so the pickled state carries no
        # connection; the ``fs`` property recreates it lazily.
        self_dict = self.__dict__
        self_dict["_fs"] = None
        return self_dict

    def __copy__(self):
        copied = GridFSProxy()
        copied.__dict__.update(self.__getstate__())
        return copied

    def __deepcopy__(self, memo):
        return self.__copy__()

    def __repr__(self):
        return f"<{self.__class__.__name__}: {self.grid_id}>"

    def __str__(self):
        # BUG FIX: the computed filename was previously discarded and the
        # literal "(unknown)" emitted instead; show the real filename.
        gridout = self.get()
        filename = gridout.filename if gridout else "<no file>"
        return f"<{self.__class__.__name__}: {filename} ({self.grid_id})>"

    def __eq__(self, other):
        if isinstance(other, GridFSProxy):
            return (
                (self.grid_id == other.grid_id)
                and (self.collection_name == other.collection_name)
                and (self.db_alias == other.db_alias)
            )
        else:
            return False

    def __ne__(self, other):
        return not self == other

    @property
    def fs(self):
        """Lazily create and cache the gridfs.GridFS handle."""
        if not self._fs:
            self._fs = gridfs.GridFS(get_db(self.db_alias), self.collection_name)
        return self._fs

    def get(self, grid_id=None):
        """Return the cached/fetched GridOut, or None if absent or deleted."""
        if grid_id:
            self.grid_id = grid_id

        if self.grid_id is None:
            return None

        try:
            if self.gridout is None:
                self.gridout = self.fs.get(self.grid_id)
            return self.gridout
        except Exception:
            # File has been deleted
            return None

    def new_file(self, **kwargs):
        """Open a new GridFS file for incremental writes."""
        self.newfile = self.fs.new_file(**kwargs)
        self.grid_id = self.newfile._id
        self._mark_as_changed()

    def put(self, file_obj, **kwargs):
        """Store ``file_obj`` in one shot; refuses to overwrite an existing file."""
        if self.grid_id:
            raise GridFSError(
                "This document already has a file. Either delete "
                "it or call replace to overwrite it"
            )
        self.grid_id = self.fs.put(file_obj, **kwargs)
        self._mark_as_changed()

    def write(self, string):
        if self.grid_id:
            if not self.newfile:
                raise GridFSError(
                    "This document already has a file. Either "
                    "delete it or call replace to overwrite it"
                )
        else:
            self.new_file()
        self.newfile.write(string)

    def writelines(self, lines):
        if not self.newfile:
            self.new_file()
            self.grid_id = self.newfile._id
        self.newfile.writelines(lines)

    def read(self, size=-1):
        gridout = self.get()
        if gridout is None:
            return None
        else:
            try:
                return gridout.read(size)
            except Exception:
                return ""

    def delete(self):
        # Delete file from GridFS, FileField still remains
        self.fs.delete(self.grid_id)
        self.grid_id = None
        self.gridout = None
        self._mark_as_changed()

    def replace(self, file_obj, **kwargs):
        """Delete the current file (if any) and store ``file_obj`` in its place."""
        self.delete()
        self.put(file_obj, **kwargs)

    def close(self):
        if self.newfile:
            self.newfile.close()

    def _mark_as_changed(self):
        """Inform the instance that `self.key` has been changed"""
        if self.instance:
            self.instance._mark_as_changed(self.key)
|
|
1876
|
+
|
|
1877
|
+
|
|
1878
|
+
class FileField(BaseField):
    """A GridFS storage field.

    Values are exposed as :class:`GridFSProxy` objects; assigning a
    file-like object, ``bytes`` or ``str`` replaces the stored file.
    """

    proxy_class = GridFSProxy

    def __init__(
        self, db_alias=DEFAULT_CONNECTION_NAME, collection_name="fs", **kwargs
    ):
        super().__init__(**kwargs)
        self.collection_name = collection_name
        self.db_alias = db_alias

    def __get__(self, instance, owner):
        if instance is None:
            return self

        # Ensure the stored value is a proxy bound to this field/instance.
        grid_file = instance._data.get(self.name)
        if not isinstance(grid_file, self.proxy_class):
            grid_file = self.get_proxy_obj(key=self.name, instance=instance)
            instance._data[self.name] = grid_file

        if not grid_file.key:
            grid_file.key = self.name
            grid_file.instance = instance
        return grid_file

    def __set__(self, instance, value):
        key = self.name
        is_raw_content = (
            hasattr(value, "read") and not isinstance(value, GridFSProxy)
        ) or isinstance(value, (bytes, str))
        if is_raw_content:
            # using "FileField() = file/string" notation
            existing = instance._data.get(self.name)
            # If a file already exists, delete it
            if existing:
                try:
                    existing.delete()
                except Exception:
                    pass

            # Create a new proxy object as we don't already have one
            instance._data[key] = self.get_proxy_obj(key=key, instance=instance)
            instance._data[key].put(value)
        else:
            instance._data[key] = value

        instance._mark_as_changed(key)

    def get_proxy_obj(self, key, instance, db_alias=None, collection_name=None):
        """Build a proxy for this field, defaulting alias/collection from the field."""
        return self.proxy_class(
            key=key,
            instance=instance,
            db_alias=self.db_alias if db_alias is None else db_alias,
            collection_name=(
                self.collection_name if collection_name is None else collection_name
            ),
        )

    def to_mongo(self, value):
        # Store the GridFS file id in MongoDB
        if isinstance(value, self.proxy_class) and value.grid_id is not None:
            return value.grid_id
        return None

    def to_python(self, value):
        if value is not None:
            return self.proxy_class(
                value, collection_name=self.collection_name, db_alias=self.db_alias
            )

    def validate(self, value):
        if value.grid_id is not None:
            if not isinstance(value, self.proxy_class):
                self.error("FileField only accepts GridFSProxy values")
            if not isinstance(value.grid_id, ObjectId):
                self.error("Invalid GridFSProxy value")
|
|
1958
|
+
|
|
1959
|
+
|
|
1960
|
+
class ImageGridFsProxy(GridFSProxy):
    """Proxy for ImageField"""

    def put(self, file_obj, **kwargs):
        """
        Insert a image in database
        applying field properties (size, thumbnail_size)
        """
        field = self.instance._fields[self.key]
        # Handle nested fields
        if hasattr(field, "field") and isinstance(field.field, FileField):
            field = field.field

        try:
            img = Image.open(file_obj)
            img_format = img.format
        except Exception as e:
            raise ValidationError("Invalid image: %s" % e)

        # Progressive JPEG: only honored when explicitly requested via a
        # boolean kwarg AND the source image is a JPEG.
        # (The previous read of img.info["progressive"] was a dead store —
        # it was unconditionally overwritten below — and has been removed.)
        if (
            kwargs.get("progressive")
            and isinstance(kwargs.get("progressive"), bool)
            and img_format == "JPEG"
        ):
            progressive = True
        else:
            progressive = False

        if field.size and (
            img.size[0] > field.size["width"] or img.size[1] > field.size["height"]
        ):
            size = field.size

            if size["force"]:
                # Crop-and-resize to the exact dimensions.
                img = ImageOps.fit(img, (size["width"], size["height"]), LANCZOS)
            else:
                # Shrink in place preserving aspect ratio.
                img.thumbnail((size["width"], size["height"]), LANCZOS)

        thumbnail = None
        if field.thumbnail_size:
            size = field.thumbnail_size

            if size["force"]:
                thumbnail = ImageOps.fit(img, (size["width"], size["height"]), LANCZOS)
            else:
                thumbnail = img.copy()
                thumbnail.thumbnail((size["width"], size["height"]), LANCZOS)

        if thumbnail:
            thumb_id = self._put_thumbnail(thumbnail, img_format, progressive)
        else:
            thumb_id = None

        w, h = img.size

        io = BytesIO()
        img.save(io, img_format, progressive=progressive)
        io.seek(0)

        return super().put(
            io, width=w, height=h, format=img_format, thumbnail_id=thumb_id, **kwargs
        )

    def delete(self, *args, **kwargs):
        # deletes thumbnail
        out = self.get()
        if out and out.thumbnail_id:
            self.fs.delete(out.thumbnail_id)

        return super().delete()

    def _put_thumbnail(self, thumbnail, format, progressive, **kwargs):
        """Store the thumbnail image in GridFS and return its id."""
        w, h = thumbnail.size

        io = BytesIO()
        thumbnail.save(io, format, progressive=progressive)
        io.seek(0)

        return self.fs.put(io, width=w, height=h, format=format, **kwargs)

    @property
    def size(self):
        """
        return a width, height of image
        """
        out = self.get()
        if out:
            return out.width, out.height

    @property
    def format(self):
        """
        return format of image
        ex: PNG, JPEG, GIF, etc
        """
        out = self.get()
        if out:
            return out.format

    @property
    def thumbnail(self):
        """
        return a gridfs.grid_file.GridOut
        representing a thumbnail of Image
        """
        out = self.get()
        if out and out.thumbnail_id:
            return self.fs.get(out.thumbnail_id)

    def write(self, *args, **kwargs):
        raise RuntimeError('Please use "put" method instead')

    def writelines(self, *args, **kwargs):
        raise RuntimeError('Please use "put" method instead')
|
|
2078
|
+
|
|
2079
|
+
|
|
2080
|
+
class ImproperlyConfigured(Exception):
    """Raised when a required optional dependency or setting is missing."""
|
|
2082
|
+
|
|
2083
|
+
|
|
2084
|
+
class ImageField(FileField):
    """
    A Image File storage field.

    :param size: max size to store images, provided as (width, height, force)
        if larger, it will be automatically resized (ex: size=(800, 600, True))
    :param thumbnail_size: size to generate a thumbnail, provided as (width, height, force)
    """

    proxy_class = ImageGridFsProxy

    def __init__(
        self, size=None, thumbnail_size=None, collection_name="images", **kwargs
    ):
        if not Image:
            raise ImproperlyConfigured("PIL library was not found")

        # Normalize each (width, height, force) tuple into a dict; missing
        # entries are padded with None via zip_longest.
        params_size = ("width", "height", "force")
        for att_name, att in (("size", size), ("thumbnail_size", thumbnail_size)):
            if isinstance(att, (tuple, list)):
                value = dict(itertools.zip_longest(params_size, att, fillvalue=None))
            else:
                value = None
            setattr(self, att_name, value)

        super().__init__(collection_name=collection_name, **kwargs)
|
|
2111
|
+
|
|
2112
|
+
|
|
2113
|
+
class SequenceField(BaseField):
    """Provides a sequential counter see:
    https://www.mongodb.com/docs/manual/reference/method/ObjectId/#ObjectIDs-SequenceNumbers

    .. note::

        Although traditional databases often use increasing sequence
        numbers for primary keys. In MongoDB, the preferred approach is to
        use Object IDs instead. The concept is that in a very large
        cluster of machines, it is easier to create an object ID than have
        global, uniformly increasing sequence numbers.

    :param collection_name: Name of the counter collection (default 'autonomous.db.counters')
    :param sequence_name: Name of the sequence in the collection (default 'ClassName.counter')
    :param value_decorator: Any callable to use as a counter (default int)

    Use any callable as `value_decorator` to transform calculated counter into
    any value suitable for your needs, e.g. string or hexadecimal
    representation of the default integer counter value.

    .. note::

        In case the counter is defined in the abstract document, it will be
        common to all inherited documents and the default sequence name will
        be the class name of the abstract document.
    """

    _auto_gen = True
    COLLECTION_NAME = "autonomous.db.counters"
    VALUE_DECORATOR = int

    def __init__(
        self,
        collection_name=None,
        db_alias=None,
        sequence_name=None,
        value_decorator=None,
        *args,
        **kwargs,
    ):
        self.collection_name = collection_name or self.COLLECTION_NAME
        self.db_alias = db_alias or DEFAULT_CONNECTION_NAME
        self.sequence_name = sequence_name
        # Fall back to the class default when the decorator isn't callable.
        if callable(value_decorator):
            self.value_decorator = value_decorator
        else:
            self.value_decorator = self.VALUE_DECORATOR
        super().__init__(*args, **kwargs)

    def _counter_collection(self):
        """Return the counter collection and this field's sequence id."""
        sequence_id = f"{self.get_sequence_name()}.{self.name}"
        return get_db(alias=self.db_alias)[self.collection_name], sequence_id

    def generate(self):
        """
        Generate and Increment the counter
        """
        collection, sequence_id = self._counter_collection()
        counter = collection.find_one_and_update(
            filter={"_id": sequence_id},
            update={"$inc": {"next": 1}},
            return_document=ReturnDocument.AFTER,
            upsert=True,
        )
        return self.value_decorator(counter["next"])

    def set_next_value(self, value):
        """Helper method to set the next sequence value"""
        collection, sequence_id = self._counter_collection()
        counter = collection.find_one_and_update(
            filter={"_id": sequence_id},
            update={"$set": {"next": value}},
            return_document=ReturnDocument.AFTER,
            upsert=True,
        )
        return self.value_decorator(counter["next"])

    def get_next_value(self):
        """Helper method to get the next value for previewing.

        .. warning:: There is no guarantee this will be the next value
        as it is only fixed on set.
        """
        collection, sequence_id = self._counter_collection()
        data = collection.find_one({"_id": sequence_id})
        if data:
            return self.value_decorator(data["next"] + 1)
        return self.value_decorator(1)

    def get_sequence_name(self):
        if self.sequence_name:
            return self.sequence_name
        owner = self.owner_document
        if issubclass(owner, Document) and not owner._meta.get("abstract"):
            return owner._get_collection_name()
        # Abstract owner: derive a snake_case name from the class name.
        return (
            "".join("_%s" % c if c.isupper() else c for c in owner._class_name)
            .strip("_")
            .lower()
        )

    def __get__(self, instance, owner):
        value = super().__get__(instance, owner)
        if value is None and instance._initialised:
            # Lazily allocate a counter value on first access.
            value = self.generate()
            instance._data[self.name] = value
            instance._mark_as_changed(self.name)

        return value

    def __set__(self, instance, value):
        if value is None and instance._initialised:
            value = self.generate()

        return super().__set__(instance, value)

    def prepare_query_value(self, op, value):
        """
        This method is overridden in order to convert the query value into to required
        type. We need to do this in order to be able to successfully compare query
        values passed as string, the base implementation returns the value as is.
        """
        return self.value_decorator(value)

    def to_python(self, value):
        if value is None:
            value = self.generate()
        return value
|
|
2246
|
+
|
|
2247
|
+
|
|
2248
|
+
class UUIDField(BaseField):
    """A UUID field."""

    _binary = None  # True => store as BSON binary UUID, False => as string

    def __init__(self, binary=True, **kwargs):
        """
        Store UUID data in the database

        :param binary: if False store as a string.
        """
        self._binary = binary
        super().__init__(**kwargs)

    def to_python(self, value):
        if self._binary:
            return value
        # String storage: try to rebuild a uuid.UUID, keeping the raw
        # value when conversion is impossible.
        original_value = value
        try:
            return uuid.UUID(value if isinstance(value, str) else str(value))
        except (ValueError, TypeError, AttributeError):
            return original_value

    def to_mongo(self, value):
        if not self._binary:
            return str(value)
        if isinstance(value, str):
            return uuid.UUID(value)
        return value

    def prepare_query_value(self, op, value):
        if value is None:
            return None
        return self.to_mongo(value)

    def validate(self, value):
        if isinstance(value, uuid.UUID):
            return
        try:
            uuid.UUID(value if isinstance(value, str) else str(value))
        except (ValueError, TypeError, AttributeError) as exc:
            self.error("Could not convert to UUID: %s" % exc)
|
|
2293
|
+
|
|
2294
|
+
|
|
2295
|
+
class GeoPointField(BaseField):
    """A list storing a longitude and latitude coordinate.

    .. note:: this represents a generic point in a 2D plane and a legacy way of
        representing a geo point. It admits 2d indexes but not "2dsphere" indexes
        in MongoDB > 2.4 which are more natural for modeling geospatial points.
        See :ref:`geospatial-indexes`
    """

    _geo_index = pymongo.GEO2D

    def validate(self, value):
        """Make sure that a geo-value is of type (x, y)"""
        if not isinstance(value, (list, tuple)):
            self.error("GeoPointField can only accept tuples or lists of (x, y)")

        if len(value) != 2:
            self.error("Value (%s) must be a two-dimensional point" % repr(value))
        elif not (
            isinstance(value[0], (float, int)) and isinstance(value[1], (float, int))
        ):
            self.error("Both values (%s) in point must be float or int" % repr(value))
|
|
2317
|
+
|
|
2318
|
+
|
|
2319
|
+
class PointField(GeoJsonBaseField):
    """A GeoJSON field storing a longitude and latitude coordinate.

    The data is represented as:

    .. code-block:: js

        {'type' : 'Point' ,
         'coordinates' : [x, y]}

    You can either pass a dict with the full information or a list
    to set the value.

    Requires mongodb >= 2.4
    """

    # GeoJSON geometry type enforced by the base class validator.
    _type = "Point"
|
|
2336
|
+
|
|
2337
|
+
|
|
2338
|
+
class LineStringField(GeoJsonBaseField):
    """A GeoJSON field storing a line of longitude and latitude coordinates.

    The data is represented as:

    .. code-block:: js

        {'type' : 'LineString' ,
         'coordinates' : [[x1, y1], [x2, y2] ... [xn, yn]]}

    You can either pass a dict with the full information or a list of points.

    Requires mongodb >= 2.4
    """

    # GeoJSON geometry type enforced by the base class validator.
    _type = "LineString"
|
|
2354
|
+
|
|
2355
|
+
|
|
2356
|
+
class PolygonField(GeoJsonBaseField):
    """GeoJSON ``Polygon`` field: closed rings of lon/lat coordinates.

    Stored in MongoDB as:

    .. code-block:: js

        {'type' : 'Polygon' ,
         'coordinates' : [[[x1, y1], [x2, y2] ... [xn, yn]],
                          [[x1, y1], [x2, y2] ... [xn, yn]]]}

    The value may be assigned either as a complete GeoJSON dict or as a
    list of LineStrings: the first ring is the exterior boundary, any
    further rings are holes.

    Requires mongodb >= 2.4
    """

    _type = "Polygon"
|
|
2375
|
+
|
|
2376
|
+
|
|
2377
|
+
class MultiPointField(GeoJsonBaseField):
    """GeoJSON ``MultiPoint`` field: a collection of Points.

    Stored in MongoDB as:

    .. code-block:: js

        {'type' : 'MultiPoint' ,
         'coordinates' : [[x1, y1], [x2, y2]]}

    The value may be assigned either as a complete GeoJSON dict or as a
    bare list of points.

    Requires mongodb >= 2.6
    """

    _type = "MultiPoint"
|
|
2394
|
+
|
|
2395
|
+
|
|
2396
|
+
class MultiLineStringField(GeoJsonBaseField):
    """GeoJSON ``MultiLineString`` field: a collection of LineStrings.

    Stored in MongoDB as:

    .. code-block:: js

        {'type' : 'MultiLineString' ,
         'coordinates' : [[[x1, y1], [x2, y2] ... [xn, yn]],
                          [[x1, y1], [x2, y2] ... [xn, yn]]]}

    The value may be assigned either as a complete GeoJSON dict or as a
    bare list of line strings (lists of points).

    Requires mongodb >= 2.6
    """

    _type = "MultiLineString"
|
|
2413
|
+
|
|
2414
|
+
|
|
2415
|
+
class MultiPolygonField(GeoJsonBaseField):
    """GeoJSON ``MultiPolygon`` field: a collection of Polygons.

    Stored in MongoDB as:

    .. code-block:: js

        {'type' : 'MultiPolygon' ,
         'coordinates' : [[
             [[x1, y1], [x2, y2] ... [xn, yn]],
             [[x1, y1], [x2, y2] ... [xn, yn]]
         ], [
             [[x1, y1], [x2, y2] ... [xn, yn]],
             [[x1, y1], [x2, y2] ... [xn, yn]]
         ]]}

    The value may be assigned either as a complete GeoJSON dict or as a
    list of Polygons.

    Requires mongodb >= 2.6
    """

    _type = "MultiPolygon"
|
|
2439
|
+
|
|
2440
|
+
|
|
2441
|
+
class LazyReferenceField(BaseField):
    """A really lazy reference to a document.
    Unlike the :class:`~autonomous.db.fields.ReferenceField` it will
    **not** be automatically (lazily) dereferenced on access.
    Instead, access will return a :class:`~autonomous.db.base.LazyReference` class
    instance, allowing access to `pk` or manual dereference by using
    ``fetch()`` method.
    """

    def __init__(
        self,
        document_type,
        passthrough=False,
        dbref=False,
        reverse_delete_rule=DO_NOTHING,
        **kwargs,
    ):
        """Initialises the Reference Field.

        :param document_type: the referenced Document class, or its name as a
            string (resolved lazily through the document registry).
        :param dbref: Store the reference as :class:`~pymongo.dbref.DBRef`
            or as the :class:`~pymongo.objectid.ObjectId`.id .
        :param reverse_delete_rule: Determines what to do when the referring
            object is deleted
        :param passthrough: When trying to access unknown fields, the
            :class:`~autonomous.db.base.datastructure.LazyReference` instance will
            automatically call `fetch()` and try to retrieve the field on the fetched
            document. Note this only work getting field (not setting or deleting).
        """
        # XXX ValidationError raised outside of the "validate" method.
        if not isinstance(document_type, str) and not issubclass(
            document_type, Document
        ):
            self.error(
                "Argument to LazyReferenceField constructor must be a "
                "document class or a string"
            )

        self.dbref = dbref
        self.passthrough = passthrough
        self.document_type_obj = document_type
        self.reverse_delete_rule = reverse_delete_rule
        super().__init__(**kwargs)

    @property
    def document_type(self):
        """Resolve and cache the referenced Document class.

        A string ``document_type_obj`` is replaced in place on first access:
        either by the owner document (recursive reference) or by a document
        registry lookup.
        """
        if isinstance(self.document_type_obj, str):
            if self.document_type_obj == RECURSIVE_REFERENCE_CONSTANT:
                self.document_type_obj = self.owner_document
            else:
                self.document_type_obj = get_document(self.document_type_obj)
        return self.document_type_obj

    def build_lazyref(self, value):
        """Wrap *value* (document instance, DBRef or raw pk) in a LazyReference.

        An existing LazyReference is rebuilt only when its ``passthrough``
        flag differs from this field's; ``None`` is returned unchanged.
        """
        # log("build_lazyref", value)
        if isinstance(value, LazyReference):
            if value.passthrough != self.passthrough:
                value = LazyReference(
                    value.document_type, value.pk, passthrough=self.passthrough
                )
        elif value is not None:
            if isinstance(value, self.document_type):
                value = LazyReference(
                    self.document_type, value.pk, passthrough=self.passthrough
                )
            elif isinstance(value, DBRef):
                value = LazyReference(
                    self.document_type, value.id, passthrough=self.passthrough
                )
            else:
                # value is the primary key of the referenced document
                value = LazyReference(
                    self.document_type, value, passthrough=self.passthrough
                )
        return value

    def __get__(self, instance, owner):
        """Descriptor to allow lazy dereferencing."""
        # log("__get__", instance, owner)
        if instance is None:
            # Document class being used rather than a document object
            return self

        # Normalise the stored raw value to a LazyReference and cache it back
        # on the instance so later accesses skip the rebuild.
        value = self.build_lazyref(instance._data.get(self.name))
        if value:
            instance._data[self.name] = value
        # log("get", instance, self.name, value)
        return super().__get__(instance, owner)

    def to_mongo(self, value):
        """Convert *value* to its stored form: a DBRef or a bare primary key.

        The pk is first extracted from whatever shape *value* has, then run
        through the referenced document's id field serializer.
        """
        if isinstance(value, LazyReference):
            pk = value.pk
        elif isinstance(value, self.document_type):
            pk = value.pk
        elif isinstance(value, DBRef):
            pk = value.id
        else:
            # value is the primary key of the referenced document
            pk = value
        id_field_name = self.document_type._meta["id_field"]
        id_field = self.document_type._fields[id_field_name]
        pk = id_field.to_mongo(pk)
        if self.dbref:
            return DBRef(self.document_type._get_collection_name(), pk)
        else:
            return pk

    def to_python(self, value):
        """Convert a MongoDB-compatible type to a Python type."""
        if not isinstance(value, (DBRef, Document, EmbeddedDocument)):
            # Raw pk coming back from the database: wrap it in a DBRef first.
            collection = self.document_type._get_collection_name()
            value = DBRef(collection, self.document_type.id.to_python(value))
        value = self.build_lazyref(value)
        return value

    def validate(self, value):
        """Check *value* references a saved document of the right collection.

        Accepts a LazyReference, a document instance, a DBRef or a raw
        primary key; anything else (or an unsaved document) is rejected via
        ``self.error``.
        """
        if isinstance(value, LazyReference):
            if value.collection != self.document_type._get_collection_name():
                self.error("Reference must be on a `%s` document." % self.document_type)
            pk = value.pk
        elif isinstance(value, self.document_type):
            pk = value.pk
        elif isinstance(value, DBRef):
            # TODO: check collection ?
            collection = self.document_type._get_collection_name()
            if value.collection != collection:
                self.error("DBRef on bad collection (must be on `%s`)" % collection)
            pk = value.id
        else:
            # value is the primary key of the referenced document
            id_field_name = self.document_type._meta["id_field"]
            id_field = getattr(self.document_type, id_field_name)
            pk = value
            try:
                id_field.validate(pk)
            except ValidationError:
                self.error(
                    "value should be `{0}` document, LazyReference or DBRef on `{0}` "
                    "or `{0}`'s primary key (i.e. `{1}`)".format(
                        self.document_type.__name__, type(id_field).__name__
                    )
                )

        if pk is None:
            self.error(
                "You can only reference documents once they have been "
                "saved to the database"
            )

    def prepare_query_value(self, op, value):
        """Serialize *value* for use in a query against operator *op*."""
        if value is None:
            return None
        super().prepare_query_value(op, value)
        return self.to_mongo(value)

    def lookup_member(self, member_name):
        """Return the referenced document's field named *member_name*, if any."""
        return self.document_type._fields.get(member_name)
|
|
2597
|
+
|
|
2598
|
+
|
|
2599
|
+
class GenericLazyReferenceField(GenericReferenceField):
    """A reference to *any* :class:`~autonomous.db.document.Document` subclass.
    Unlike the :class:`~autonomous.db.fields.GenericReferenceField` it will
    **not** be automatically (lazily) dereferenced on access.
    Instead, access will return a :class:`~autonomous.db.base.LazyReference` class
    instance, allowing access to `pk` or manual dereference by using
    ``fetch()`` method.

    .. note ::
        * Any documents used as a generic reference must be registered in the
          document registry. Importing the model will automatically register
          it.

        * You can use the choices param to limit the acceptable Document types
    """

    def __init__(self, *args, **kwargs):
        # `passthrough` mirrors LazyReferenceField: when True, accessing an
        # unknown attribute on the LazyReference implicitly calls fetch().
        self.passthrough = kwargs.pop("passthrough", False)
        super().__init__(*args, **kwargs)

    def _validate_choices(self, value):
        """Validate *value* against ``self.choices``, unwrapping LazyReferences.

        Fix: removed a stray unconditional debug call
        ``log(value, value.document_type, "!!!! Need to CHange ...")`` that
        raised AttributeError whenever *value* was not a LazyReference
        (e.g. a dict/SON or Document instance has no ``document_type``).
        """
        if isinstance(value, LazyReference):
            # The generic choices check compares class names, so substitute
            # the first configured choice found in the referenced class' MRO.
            mro = [cls.__name__ for cls in value.document_type.mro()]
            for choice in self.choices:
                if choice in mro:
                    value = choice
                    break
        super()._validate_choices(value)

    def build_lazyref(self, value):
        """Wrap a stored SON/dict or a Document instance in a LazyReference.

        An existing LazyReference is rebuilt only when its ``passthrough``
        flag differs from this field's; ``None`` is returned unchanged.
        """
        if isinstance(value, LazyReference):
            if value.passthrough != self.passthrough:
                value = LazyReference(
                    value.document_type, value.pk, passthrough=self.passthrough
                )
        elif value is not None:
            if isinstance(value, (dict, SON)):
                # Stored form: {'_cls': <class name>, '_ref': DBRef(...)}
                value = LazyReference(
                    get_document(value["_cls"]),
                    value["_ref"].id,
                    passthrough=self.passthrough,
                )
            elif isinstance(value, Document):
                value = LazyReference(
                    type(value), value.pk, passthrough=self.passthrough
                )
        return value

    def __get__(self, instance, owner):
        """Descriptor access: normalise the stored value to a LazyReference."""
        if instance is None:
            # Accessed on the class rather than an instance.
            return self

        value = self.build_lazyref(instance._data.get(self.name))
        if value:
            # Cache the wrapped value so later accesses skip the rebuild.
            instance._data[self.name] = value

        return super().__get__(instance, owner)

    def validate(self, value):
        """Reject references to unsaved documents (LazyReference without a pk)."""
        if isinstance(value, LazyReference) and value.pk is None:
            self.error(
                "You can only reference documents once they have been"
                " saved to the database"
            )
        return super().validate(value)

    def to_mongo(self, document):
        """Serialize to ``{'_cls': ..., '_ref': DBRef(...)}`` (or delegate)."""
        if document is None:
            return None

        if isinstance(document, LazyReference):
            return SON(
                (
                    ("_cls", document.document_type._class_name),
                    (
                        "_ref",
                        DBRef(
                            document.document_type._get_collection_name(), document.pk
                        ),
                    ),
                )
            )
        else:
            return super().to_mongo(document)
|
|
2687
|
+
|
|
2688
|
+
|
|
2689
|
+
class Decimal128Field(BaseField):
    """128-bit decimal floating-point field with exact decimal rounding.

    Values are exposed to Python code as :class:`decimal.Decimal` but are
    persisted as :class:`bson.Decimal128` behind the scenes. Intended for
    monetary data, scientific computations, and similar precision-sensitive
    use cases.
    """

    # IEEE-754 decimal128 arithmetic context used to normalise inputs.
    DECIMAL_CONTEXT = create_decimal128_context()

    def __init__(self, min_value=None, max_value=None, **kwargs):
        """:param min_value: smallest accepted value (inclusive), if given.
        :param max_value: largest accepted value (inclusive), if given.
        """
        self.min_value = min_value
        self.max_value = max_value
        super().__init__(**kwargs)

    def to_mongo(self, value):
        """Coerce *value* to ``Decimal128`` for storage; ``None`` passes through."""
        if value is None or isinstance(value, Decimal128):
            return value
        dec = value
        if not isinstance(dec, decimal.Decimal):
            # Strings/ints/floats are normalised through the decimal128 context.
            with decimal.localcontext(self.DECIMAL_CONTEXT) as ctx:
                dec = ctx.create_decimal(dec)
        return Decimal128(dec)

    def to_python(self, value):
        """Convert the stored value back to a :class:`decimal.Decimal`."""
        return None if value is None else self.to_mongo(value).to_decimal()

    def validate(self, value):
        """Check *value* is Decimal128-convertible and within configured bounds."""
        if not isinstance(value, Decimal128):
            try:
                value = Decimal128(value)
            except (TypeError, ValueError, decimal.InvalidOperation) as exc:
                self.error("Could not convert value to Decimal128: %s" % exc)

        if self.min_value is not None and value.to_decimal() < self.min_value:
            self.error("Decimal value is too small")

        if self.max_value is not None and value.to_decimal() > self.max_value:
            self.error("Decimal value is too large")

    def prepare_query_value(self, op, value):
        """Serialize *value* before handing it to the generic query machinery."""
        return super().prepare_query_value(op, self.to_mongo(value))
|