autonomous-app 0.3.0__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autonomous/__init__.py +1 -1
- autonomous/ai/audioagent.py +1 -1
- autonomous/ai/imageagent.py +1 -1
- autonomous/ai/jsonagent.py +1 -1
- autonomous/ai/models/openai.py +81 -53
- autonomous/ai/oaiagent.py +1 -14
- autonomous/ai/textagent.py +1 -1
- autonomous/auth/autoauth.py +10 -10
- autonomous/auth/user.py +17 -2
- autonomous/db/__init__.py +42 -0
- autonomous/db/base/__init__.py +33 -0
- autonomous/db/base/common.py +62 -0
- autonomous/db/base/datastructures.py +476 -0
- autonomous/db/base/document.py +1230 -0
- autonomous/db/base/fields.py +767 -0
- autonomous/db/base/metaclasses.py +468 -0
- autonomous/db/base/utils.py +22 -0
- autonomous/db/common.py +79 -0
- autonomous/db/connection.py +472 -0
- autonomous/db/context_managers.py +313 -0
- autonomous/db/dereference.py +291 -0
- autonomous/db/document.py +1141 -0
- autonomous/db/errors.py +165 -0
- autonomous/db/fields.py +2732 -0
- autonomous/db/mongodb_support.py +24 -0
- autonomous/db/pymongo_support.py +80 -0
- autonomous/db/queryset/__init__.py +28 -0
- autonomous/db/queryset/base.py +2033 -0
- autonomous/db/queryset/field_list.py +88 -0
- autonomous/db/queryset/manager.py +58 -0
- autonomous/db/queryset/queryset.py +189 -0
- autonomous/db/queryset/transform.py +527 -0
- autonomous/db/queryset/visitor.py +189 -0
- autonomous/db/signals.py +59 -0
- autonomous/logger.py +3 -0
- autonomous/model/autoattr.py +56 -41
- autonomous/model/automodel.py +88 -34
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/METADATA +2 -2
- autonomous_app-0.3.1.dist-info/RECORD +60 -0
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/WHEEL +1 -1
- autonomous_app-0.3.0.dist-info/RECORD +0 -35
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/LICENSE +0 -0
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/top_level.txt +0 -0
autonomous/db/base/document.py (new in 0.3.1)
@@ -0,0 +1,1230 @@
+import copy
+import numbers
+import warnings
+from functools import partial
+
+import pymongo
+from bson import SON, DBRef, ObjectId, json_util
+
+from autonomous import log
+from autonomous.db import signals
+from autonomous.db.base.common import get_document
+from autonomous.db.base.datastructures import (
+    BaseDict,
+    BaseList,
+    EmbeddedDocumentList,
+    LazyReference,
+    StrictDict,
+)
+from autonomous.db.base.fields import ComplexBaseField
+from autonomous.db.common import _import_class
+from autonomous.db.errors import (
+    FieldDoesNotExist,
+    InvalidDocumentError,
+    LookUpError,
+    OperationError,
+    ValidationError,
+)
+from autonomous.db.pymongo_support import LEGACY_JSON_OPTIONS
+
+__all__ = ("BaseDocument", "NON_FIELD_ERRORS")
+
+NON_FIELD_ERRORS = "__all__"
+
+try:
+    GEOHAYSTACK = pymongo.GEOHAYSTACK
+except AttributeError:
+    GEOHAYSTACK = None
+
+
+class BaseDocument:
+    # TODO simplify how `_changed_fields` is used.
+    # Currently, handling of `_changed_fields` seems unnecessarily convoluted:
+    # 1. `BaseDocument` defines `_changed_fields` in its `__slots__`, yet it's
+    #    not setting it to `[]` (or any other value) in `__init__`.
+    # 2. `EmbeddedDocument` sets `_changed_fields` to `[]` in its overloaded
+    #    `__init__`.
+    # 3. `Document` does NOT set `_changed_fields` upon initialization. The
+    #    field is primarily set via `_from_son` or `_clear_changed_fields`,
+    #    though there are also other methods that manipulate it.
+    # 4. The codebase is littered with `hasattr` calls for `_changed_fields`.
+    __slots__ = (
+        "_changed_fields",
+        "_initialised",
+        "_created",
+        "_data",
+        "_dynamic_fields",
+        "_auto_id_field",
+        "_db_field_map",
+        "__weakref__",
+    )
+
+    _dynamic = False
+    _dynamic_lock = True
+    STRICT = False
+
+    def __init__(self, *args, **values):
+        """
+        Initialise a document or an embedded document.
+
+        :param values: A dictionary of keys and values for the document.
+            It may contain additional reserved keywords, e.g. "__auto_convert".
+        :param __auto_convert: If True, supplied values will be converted
+            to Python-type values via each field's `to_python` method.
+        :param _created: Indicates whether this is a brand new document
+            or whether it's already been persisted before. Defaults to true.
+        """
+        self._initialised = False
+        self._created = True
+
+        if args:
+            raise TypeError(
+                "Instantiating a document with positional arguments is not "
+                "supported. Please use `field_name=value` keyword arguments."
+            )
+
+        __auto_convert = values.pop("__auto_convert", True)
+
+        _created = values.pop("_created", True)
+
+        signals.pre_init.send(self.__class__, document=self, values=values)
+
+        # Check if there are undefined fields supplied to the constructor,
+        # if so raise an Exception.
+        if not self._dynamic and (self._meta.get("strict", True) or _created):
+            _undefined_fields = set(values.keys()) - set(
+                list(self._fields.keys()) + ["id", "pk", "_cls", "_text_score"]
+            )
+            if _undefined_fields:
+                msg = f'The fields "{_undefined_fields}" do not exist on the document "{self._class_name}"'
+                raise FieldDoesNotExist(msg)
+
+        if self.STRICT and not self._dynamic:
+            self._data = StrictDict.create(allowed_keys=self._fields_ordered)()
+        else:
+            self._data = {}
+
+        self._dynamic_fields = SON()
+
+        # Assign default values for fields
+        # not set in the constructor
+        for field_name in self._fields:
+            if field_name in values:
+                continue
+            value = getattr(self, field_name, None)
+            setattr(self, field_name, value)
+
+        if "_cls" not in values:
+            self._cls = self._class_name
+
+        # Set actual values
+        dynamic_data = {}
+        FileField = _import_class("FileField")
+        for key, value in values.items():
+            field = self._fields.get(key)
+            if field or key in ("id", "pk", "_cls"):
+                if __auto_convert and value is not None:
+                    if field and not isinstance(field, FileField):
+                        value = field.to_python(value)
+                setattr(self, key, value)
+            else:
+                if self._dynamic:
+                    dynamic_data[key] = value
+                else:
+                    # For strict Document
+                    self._data[key] = value
+
+        # Set any get_<field>_display methods
+        self.__set_field_display()
+
+        if self._dynamic:
+            self._dynamic_lock = False
+            for key, value in dynamic_data.items():
+                setattr(self, key, value)
+
+        # Flag initialised
+        self._initialised = True
+        self._created = _created
+
+        signals.post_init.send(self.__class__, document=self)
+
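A minimal sketch of the constructor contract above, assuming a MongoEngine-style `Document` subclass from this vendored module (the `User` model and its field are illustrative, not part of the package):

    # Sketch only: assumes Document/StringField behave as in MongoEngine,
    # from which autonomous.db appears to be vendored.
    from autonomous.db.document import Document
    from autonomous.db.fields import StringField

    class User(Document):
        name = StringField()

    user = User(name="Ada")      # keyword arguments only
    try:
        User("Ada")              # positional args raise TypeError (see __init__)
    except TypeError:
        pass
    try:
        User(nickname="Ada")     # undefined field raises FieldDoesNotExist
    except Exception:
        pass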
+    def __delattr__(self, *args, **kwargs):
+        """Handle deletions of fields"""
+        field_name = args[0]
+        if field_name in self._fields:
+            default = self._fields[field_name].default
+            if callable(default):
+                default = default()
+            setattr(self, field_name, default)
+        else:
+            super().__delattr__(*args, **kwargs)
+
+    def __setattr__(self, name, value):
+        # Handle dynamic data only if an initialised dynamic document
+        if self._dynamic and not self._dynamic_lock:
+            if name not in self._fields_ordered and not name.startswith("_"):
+                DynamicField = _import_class("DynamicField")
+                field = DynamicField(db_field=name, null=True)
+                field.name = name
+                self._dynamic_fields[name] = field
+                self._fields_ordered += (name,)
+
+            if not name.startswith("_"):
+                value = self.__expand_dynamic_values(name, value)
+
+            # Handle marking data as changed
+            if name in self._dynamic_fields:
+                self._data[name] = value
+                if hasattr(self, "_changed_fields"):
+                    self._mark_as_changed(name)
+        try:
+            self__created = self._created
+        except AttributeError:
+            self__created = True
+
+        if (
+            self._is_document
+            and not self__created
+            and name in self._meta.get("shard_key", tuple())
+            and self._data.get(name) != value
+        ):
+            msg = "Shard Keys are immutable. Tried to update %s" % name
+            raise OperationError(msg)
+
+        try:
+            self__initialised = self._initialised
+        except AttributeError:
+            self__initialised = False
+
+        # Check if the user has created a new instance of a class
+        if (
+            self._is_document
+            and self__initialised
+            and self__created
+            and name == self._meta.get("id_field")
+        ):
+            # When setting the ID field of an instance already instantiated and that was user-created (i.e., not saved in db yet)
+            # Typically this is when calling .save()
+            super().__setattr__("_created", False)
+
+        super().__setattr__(name, value)
+
+    def __getstate__(self):
+        data = {}
+        for k in (
+            "_changed_fields",
+            "_initialised",
+            "_created",
+            "_dynamic_fields",
+            "_fields_ordered",
+        ):
+            if hasattr(self, k):
+                data[k] = getattr(self, k)
+        data["_data"] = self.to_mongo()
+        return data
+
+    def __setstate__(self, data):
+        if isinstance(data["_data"], SON):
+            data["_data"] = self.__class__._from_son(data["_data"])._data
+        for k in (
+            "_changed_fields",
+            "_initialised",
+            "_created",
+            "_data",
+            "_dynamic_fields",
+        ):
+            if k in data:
+                setattr(self, k, data[k])
+        if "_fields_ordered" in data:
+            if self._dynamic:
+                self._fields_ordered = data["_fields_ordered"]
+            else:
+                _super_fields_ordered = type(self)._fields_ordered
+                self._fields_ordered = _super_fields_ordered
+
+        dynamic_fields = data.get("_dynamic_fields") or SON()
+        for k in dynamic_fields.keys():
+            setattr(self, k, data["_data"].get(k))
+
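Since `__getstate__` snapshots the document via `to_mongo()` and `__setstate__` rebuilds `_data` through `_from_son`, instances survive a pickle round trip. A small sketch, reusing the hypothetical `User` model from above:

    import pickle

    user = User(name="Ada")
    restored = pickle.loads(pickle.dumps(user))  # exercises __getstate__/__setstate__
    assert restored.name == "Ada"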
+    def __iter__(self):
+        return iter(self._fields_ordered)
+
+    def __getitem__(self, name):
+        """Dictionary-style field access, return a field's value if present."""
+        try:
+            if name in self._fields_ordered:
+                return getattr(self, name)
+        except AttributeError:
+            pass
+        raise KeyError(name)
+
+    def __setitem__(self, name, value):
+        """Dictionary-style field access, set a field's value."""
+        # Ensure that the field exists before setting its value
+        if not self._dynamic and name not in self._fields:
+            raise KeyError(name)
+        return setattr(self, name, value)
+
+    def __contains__(self, name):
+        try:
+            val = getattr(self, name)
+            return val is not None
+        except AttributeError:
+            return False
+
+    def __len__(self):
+        return len(self._data)
+
+    def __repr__(self):
+        try:
+            u = self.__str__()
+        except (UnicodeEncodeError, UnicodeDecodeError):
+            u = "[Bad Unicode data]"
+        repr_type = str if u is None else type(u)
+        return repr_type(f"<{self.__class__.__name__}: {u}>")
+
+    def __str__(self):
+        # TODO this could be simpler?
+        if hasattr(self, "__unicode__"):
+            return self.__unicode__()
+        return "%s object" % self.__class__.__name__
+
+    def __eq__(self, other):
+        if (
+            isinstance(other, self.__class__)
+            and hasattr(other, "id")
+            and other.id is not None
+        ):
+            return self.id == other.id
+        if isinstance(other, DBRef):
+            return (
+                self._get_collection_name() == other.collection and self.id == other.id
+            )
+        if self.id is None:
+            return self is other
+        return False
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def clean(self):
+        """
+        Hook for doing document level data cleaning (usually validation or assignment)
+        before validation is run.
+
+        Any ValidationError raised by this method will not be associated with
+        a particular field; it will have a special-case association with the
+        field defined by NON_FIELD_ERRORS.
+        """
+        pass
+
+    def get_text_score(self):
+        """
+        Get text score from text query
+        """
+
+        if "_text_score" not in self._data:
+            raise InvalidDocumentError(
+                "This document is not originally built from a text query (or text_score was not set on search_text() call)"
+            )
+
+        return self._data["_text_score"]
+
+    def to_mongo(self, use_db_field=True, fields=None):
+        """
+        Return as SON data ready for use with MongoDB.
+        """
+        fields = fields or []
+
+        data = SON()
+        data["_id"] = None
+        data["_cls"] = self._class_name
+
+        # only root fields ['test1.a', 'test2'] => ['test1', 'test2']
+        root_fields = {f.split(".")[0] for f in fields}
+
+        for field_name in self:
+            if root_fields and field_name not in root_fields:
+                continue
+
+            value = self._data.get(field_name, None)
+            field = self._fields.get(field_name)
+
+            if field is None and self._dynamic:
+                field = self._dynamic_fields.get(field_name)
+
+            if value is not None:
+                f_inputs = field.to_mongo.__code__.co_varnames
+                ex_vars = {}
+                if fields and "fields" in f_inputs:
+                    key = "%s." % field_name
+                    embedded_fields = [
+                        i.replace(key, "") for i in fields if i.startswith(key)
+                    ]
+
+                    ex_vars["fields"] = embedded_fields
+
+                if "use_db_field" in f_inputs:
+                    ex_vars["use_db_field"] = use_db_field
+
+                value = field.to_mongo(value, **ex_vars)
+
+            # Handle self generating fields
+            if value is None and field._auto_gen:
+                value = field.generate()
+                self._data[field_name] = value
+
+            if value is not None or field.null:
+                if use_db_field:
+                    data[field.db_field] = value
+                else:
+                    data[field.name] = value
+
+        # Only add _cls if allow_inheritance is True
+        if not self._meta.get("allow_inheritance"):
+            data.pop("_cls")
+
+        return data
+
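Illustration of what `to_mongo()` produces: a `bson.SON` keyed by db field names, with `_id` pre-seeded and `_cls` popped again unless `allow_inheritance` is set. A sketch; the exact keys depend on the model's meta and field defaults:

    son = User(name="Ada").to_mongo()
    # e.g. SON([('_id', None), ('name', 'Ada')]) -- '_id' stays None until save,
    # and '_cls' was removed because allow_inheritance is off by default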
+    def validate(self, clean=True):
+        """Ensure that all fields' values are valid and that required fields
+        are present.
+
+        Raises :class:`ValidationError` if any of the fields' values are found
+        to be invalid.
+        """
+        # Ensure that each field is matched to a valid value
+        errors = {}
+        if clean:
+            try:
+                self.clean()
+            except ValidationError as error:
+                errors[NON_FIELD_ERRORS] = error
+
+        # Get a list of tuples of field names and their current values
+        fields = [
+            (
+                self._fields.get(name, self._dynamic_fields.get(name)),
+                self._data.get(name),
+            )
+            for name in self._fields_ordered
+        ]
+
+        # EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
+        # GenericEmbeddedDocumentField = _import_class("GenericEmbeddedDocumentField")
+
+        for field, value in fields:
+            if value is not None:
+                try:
+                    # if isinstance(
+                    #     field, (EmbeddedDocumentField, GenericEmbeddedDocumentField)
+                    # ):
+                    #     field._validate(value, clean=clean)
+                    # else:
+                    #     log(f"Validating {field}:{field.name} with value {value}")
+                    field._validate(value)
+                except ValidationError as error:
+                    errors[field.name] = error.errors or error
+                except (ValueError, AttributeError, AssertionError) as error:
+                    errors[field.name] = error
+            elif field.required and not getattr(field, "_auto_gen", False):
+                errors[field.name] = ValidationError(
+                    "Field is required", field_name=field.name
+                )
+
+        if errors:
+            pk = "None"
+            if hasattr(self, "pk"):
+                pk = self.pk
+            elif self._instance and hasattr(self._instance, "pk"):
+                pk = self._instance.pk
+            message = f"ValidationError ({self._class_name}:{pk}) "
+            raise ValidationError(message, errors=errors)
+
+    def to_json(self, *args, **kwargs):
+        """Convert this document to JSON.
+
+        :param use_db_field: Serialize field names as they appear in
+            MongoDB (as opposed to attribute names on this document).
+            Defaults to True.
+        """
+        use_db_field = kwargs.pop("use_db_field", True)
+        if "json_options" not in kwargs:
+            warnings.warn(
+                "No 'json_options' are specified! Falling back to "
+                "LEGACY_JSON_OPTIONS with uuid_representation=PYTHON_LEGACY. "
+                "For use with other MongoDB drivers specify the UUID "
+                "representation to use. This will be changed to "
+                "uuid_representation=UNSPECIFIED in a future release.",
+                DeprecationWarning,
+            )
+            kwargs["json_options"] = LEGACY_JSON_OPTIONS
+        return json_util.dumps(self.to_mongo(use_db_field), *args, **kwargs)
+
+    @classmethod
+    def from_json(cls, json_data, created=False, **kwargs):
+        """Converts json data to a Document instance.
+
+        :param str json_data: The json data to load into the Document.
+        :param bool created: Boolean defining whether to consider the newly
+            instantiated document as brand new or as persisted already:
+            * If True, consider the document as brand new, no matter what data
+              it's loaded with (i.e., even if an ID is loaded).
+            * If False and an ID is NOT provided, consider the document as
+              brand new.
+            * If False and an ID is provided, assume that the object has
+              already been persisted (this has an impact on the subsequent
+              call to .save()).
+            * Defaults to ``False``.
+        """
+        # TODO should `created` default to False? If the object already exists
+        # in the DB, you would likely retrieve it from MongoDB itself through
+        # a query, not load it from JSON data.
+        if "json_options" not in kwargs:
+            warnings.warn(
+                "No 'json_options' are specified! Falling back to "
+                "LEGACY_JSON_OPTIONS with uuid_representation=PYTHON_LEGACY. "
+                "For use with other MongoDB drivers specify the UUID "
+                "representation to use. This will be changed to "
+                "uuid_representation=UNSPECIFIED in a future release.",
+                DeprecationWarning,
+            )
+            kwargs["json_options"] = LEGACY_JSON_OPTIONS
+        return cls._from_son(json_util.loads(json_data, **kwargs), created=created)
+
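Both JSON helpers fall back to LEGACY_JSON_OPTIONS with a DeprecationWarning when no `json_options` is given. Passing one of `bson.json_util`'s standard options objects silences the warning and pins the UUID representation:

    from bson import json_util

    payload = user.to_json(json_options=json_util.CANONICAL_JSON_OPTIONS)
    same = User.from_json(payload, json_options=json_util.CANONICAL_JSON_OPTIONS)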
+    def __expand_dynamic_values(self, name, value):
+        """Expand any dynamic values to their correct types / values."""
+        if not isinstance(value, (dict, list, tuple)):
+            return value
+
+        # If the value is a dict with '_cls' in it, turn it into a document
+        is_dict = isinstance(value, dict)
+        if is_dict and "_cls" in value:
+            cls = get_document(value["_cls"])
+            return cls(**value)
+
+        if is_dict:
+            value = {k: self.__expand_dynamic_values(k, v) for k, v in value.items()}
+        else:
+            value = [self.__expand_dynamic_values(name, v) for v in value]
+
+        # Convert lists / values so we can watch for any changes on them
+        EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
+        if isinstance(value, (list, tuple)) and not isinstance(value, BaseList):
+            if issubclass(type(self), EmbeddedDocumentListField):
+                value = EmbeddedDocumentList(value, self, name)
+            else:
+                value = BaseList(value, self, name)
+        elif isinstance(value, dict) and not isinstance(value, BaseDict):
+            value = BaseDict(value, self, name)
+
+        return value
+
+    def _mark_as_changed(self, key):
+        """Mark a key as explicitly changed by the user."""
+        if not hasattr(self, "_changed_fields"):
+            return
+
+        if "." in key:
+            key, rest = key.split(".", 1)
+            key = self._db_field_map.get(key, key)
+            key = f"{key}.{rest}"
+        else:
+            key = self._db_field_map.get(key, key)
+
+        if key not in self._changed_fields:
+            levels, idx = key.split("."), 1
+            while idx <= len(levels):
+                if ".".join(levels[:idx]) in self._changed_fields:
+                    break
+                idx += 1
+            else:
+                self._changed_fields.append(key)
+                # remove lower level changed fields
+                level = ".".join(levels[:idx]) + "."
+                remove = self._changed_fields.remove
+                for field in self._changed_fields[:]:
+                    if field.startswith(level):
+                        remove(field)
+
+    def _clear_changed_fields(self):
+        """Using _get_changed_fields iterate and remove any fields that
+        are marked as changed.
+        """
+        ReferenceField = _import_class("ReferenceField")
+        GenericReferenceField = _import_class("GenericReferenceField")
+
+        for changed in self._get_changed_fields():
+            parts = changed.split(".")
+            data = self
+            for part in parts:
+                if isinstance(data, list):
+                    try:
+                        data = data[int(part)]
+                    except IndexError:
+                        data = None
+                elif isinstance(data, dict):
+                    data = data.get(part, None)
+                else:
+                    field_name = data._reverse_db_field_map.get(part, part)
+                    data = getattr(data, field_name, None)
+
+            if not isinstance(data, LazyReference) and hasattr(
+                data, "_changed_fields"
+            ):
+                if getattr(data, "_is_document", False):
+                    continue
+
+                data._changed_fields = []
+            elif isinstance(data, (list, tuple, dict)):
+                if hasattr(data, "field") and isinstance(
+                    data.field, (ReferenceField, GenericReferenceField)
+                ):
+                    continue
+                BaseDocument._nestable_types_clear_changed_fields(data)
+
+        self._changed_fields = []
+
+    @staticmethod
+    def _nestable_types_clear_changed_fields(data):
+        """Inspect nested data for changed fields
+
+        :param data: data to inspect for changes
+        """
+        Document = _import_class("Document")
+
+        # Loop list / dict fields as they contain documents
+        # Determine the iterator to use
+        if not hasattr(data, "items"):
+            iterator = enumerate(data)
+        else:
+            iterator = data.items()
+
+        for _index_or_key, value in iterator:
+            if hasattr(value, "_get_changed_fields") and not isinstance(
+                value, Document
+            ):  # don't follow references
+                value._clear_changed_fields()
+            elif isinstance(value, (list, tuple, dict)):
+                BaseDocument._nestable_types_clear_changed_fields(value)
+
+    @staticmethod
+    def _nestable_types_changed_fields(changed_fields, base_key, data):
+        """Inspect nested data for changed fields
+
+        :param changed_fields: Previously collected changed fields
+        :param base_key: The base key that must be used to prepend changes to this data
+        :param data: data to inspect for changes
+        """
+        # Loop list / dict fields as they contain documents
+        # Determine the iterator to use
+        if not hasattr(data, "items"):
+            iterator = enumerate(data)
+        else:
+            iterator = data.items()
+
+        for index_or_key, value in iterator:
+            item_key = f"{base_key}{index_or_key}."
+            # don't check anything lower if this key is already marked
+            # as changed.
+            if item_key[:-1] in changed_fields:
+                continue
+
+            if hasattr(value, "_get_changed_fields"):
+                changed = value._get_changed_fields()
+                changed_fields += [f"{item_key}{k}" for k in changed if k]
+            elif isinstance(value, (list, tuple, dict)):
+                BaseDocument._nestable_types_changed_fields(
+                    changed_fields, item_key, value
+                )
+
+    def _get_changed_fields(self):
+        """Return a list of all fields that have explicitly been changed."""
+        EmbeddedDocument = _import_class("EmbeddedDocument")
+        LazyReferenceField = _import_class("LazyReferenceField")
+        ReferenceField = _import_class("ReferenceField")
+        GenericLazyReferenceField = _import_class("GenericLazyReferenceField")
+        GenericReferenceField = _import_class("GenericReferenceField")
+        SortedListField = _import_class("SortedListField")
+
+        changed_fields = []
+        changed_fields += getattr(self, "_changed_fields", [])
+
+        for field_name in self._fields_ordered:
+            db_field_name = self._db_field_map.get(field_name, field_name)
+            key = "%s." % db_field_name
+            data = self._data.get(field_name, None)
+            field = self._fields.get(field_name)
+
+            if db_field_name in changed_fields:
+                # Whole field already marked as changed, no need to go further
+                continue
+
+            if isinstance(field, ReferenceField):  # Don't follow referenced documents
+                continue
+
+            if isinstance(data, EmbeddedDocument):
+                # Find all embedded fields that have been changed
+                changed = data._get_changed_fields()
+                changed_fields += [f"{key}{k}" for k in changed if k]
+            elif isinstance(data, (list, tuple, dict)):
+                if hasattr(field, "field") and isinstance(
+                    field.field,
+                    (
+                        LazyReferenceField,
+                        ReferenceField,
+                        GenericLazyReferenceField,
+                        GenericReferenceField,
+                    ),
+                ):
+                    continue
+                elif isinstance(field, SortedListField) and field._ordering:
+                    # if ordering is affected whole list is changed
+                    if any(field._ordering in d._changed_fields for d in data):
+                        changed_fields.append(db_field_name)
+                        continue
+
+                self._nestable_types_changed_fields(changed_fields, key, data)
+        return changed_fields
+
+    def _delta(self):
+        """Returns the delta (set, unset) of the changes for a document.
+        Gets any values that have been explicitly changed.
+        """
+        # Handles cases where not loaded from_son but has _id
+        doc = self.to_mongo()
+
+        set_fields = self._get_changed_fields()
+        unset_data = {}
+        if hasattr(self, "_changed_fields"):
+            set_data = {}
+            # Fetch each set item from its path
+            for path in set_fields:
+                parts = path.split(".")
+                d = doc
+                new_path = []
+                for p in parts:
+                    if isinstance(d, (ObjectId, DBRef)):
+                        # Don't dig in the references
+                        break
+                    elif isinstance(d, list) and p.isdigit():
+                        # An item of a list (identified by its index) is updated
+                        d = d[int(p)]
+                    elif hasattr(d, "get"):
+                        # dict-like (dict, embedded document)
+                        d = d.get(p)
+                    new_path.append(p)
+                path = ".".join(new_path)
+                set_data[path] = d
+        else:
+            set_data = doc
+            if "_id" in set_data:
+                del set_data["_id"]
+
+        # Determine if any changed items were actually unset.
+        for path, value in list(set_data.items()):
+            if value or isinstance(
+                value, (numbers.Number, bool)
+            ):  # Account for 0 and True that are truthy
+                continue
+
+            parts = path.split(".")
+
+            if self._dynamic and len(parts) and parts[0] in self._dynamic_fields:
+                del set_data[path]
+                unset_data[path] = 1
+                continue
+
+            # If we've set a value that ain't the default value, don't unset it.
+            default = None
+            if path in self._fields:
+                default = self._fields[path].default
+            else:  # Perform a full lookup for lists / embedded lookups
+                d = self
+                db_field_name = parts.pop()
+                for p in parts:
+                    if isinstance(d, list) and p.isdigit():
+                        d = d[int(p)]
+                    elif hasattr(d, "__getattribute__") and not isinstance(d, dict):
+                        real_path = d._reverse_db_field_map.get(p, p)
+                        d = getattr(d, real_path)
+                    else:
+                        d = d.get(p)
+
+                if hasattr(d, "_fields"):
+                    field_name = d._reverse_db_field_map.get(
+                        db_field_name, db_field_name
+                    )
+                    if field_name in d._fields:
+                        default = d._fields.get(field_name).default
+                    else:
+                        default = None
+
+            if default is not None:
+                default = default() if callable(default) else default
+
+            if value != default:
+                continue
+
+            del set_data[path]
+            unset_data[path] = 1
+        return set_data, unset_data
+
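Taken together, `_mark_as_changed`, `_get_changed_fields`, and `_delta` drive partial updates on save: only explicitly changed paths are considered, truthy values land in the $set document, and falsy values equal to the field default land in $unset. A hedged sketch of the observable behavior (assumes a document already persisted, so `_changed_fields` exists and starts empty):

    user = User.objects.first()    # loaded via _from_son -> _changed_fields == []
    user.name = "Grace"            # __setattr__ -> _mark_as_changed("name")
    set_data, unset_data = user._delta()
    # set_data == {'name': 'Grace'}, unset_data == {}
    user.name = None               # falsy and equal to the field default ...
    # ... so a subsequent _delta() reports {'name': 1} under unset_data instead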
+    @classmethod
+    def _get_collection_name(cls):
+        """Return the collection name for this class. None for abstract
+        class.
+        """
+        return cls._meta.get("collection", None)
+
+    @classmethod
+    def _from_son(cls, son, _auto_dereference=True, created=False):
+        """Create an instance of a Document (subclass) from a PyMongo SON (dict)"""
+        if son and not isinstance(son, dict):
+            raise ValueError(
+                "The source SON object needs to be of type 'dict' but a '%s' was found"
+                % type(son)
+            )
+
+        # Get the class name from the document, falling back to the given
+        # class if unavailable
+        class_name = son.get("_cls", cls._class_name)
+
+        # Convert SON to a data dict, making sure each key is a string and
+        # corresponds to the right db field.
+        # This is needed as _from_son is currently called both from BaseDocument.__init__
+        # and from EmbeddedDocumentField.to_python
+        data = {}
+        for key, value in son.items():
+            key = str(key)
+            key = cls._db_field_map.get(key, key)
+            data[key] = value
+
+        # Return correct subclass for document type
+        if class_name != cls._class_name:
+            cls = get_document(class_name)
+
+        errors_dict = {}
+
+        fields = cls._fields
+        if not _auto_dereference:
+            # if auto_deref is turned off, we copy the fields so
+            # we can mutate the auto_dereference of the fields
+            fields = copy.deepcopy(fields)
+
+        # Apply field-name / db-field conversion
+        for field_name, field in fields.items():
+            field.set_auto_dereferencing(
+                _auto_dereference
+            )  # align the field's auto-dereferencing with the document's
+            if field.db_field in data:
+                value = data[field.db_field]
+                try:
+                    data[field_name] = (
+                        value if value is None else field.to_python(value)
+                    )
+                    if field_name != field.db_field:
+                        del data[field.db_field]
+                except (AttributeError, ValueError) as e:
+                    errors_dict[field_name] = e
+
+        if errors_dict:
+            errors = "\n".join([f"Field '{k}' - {v}" for k, v in errors_dict.items()])
+            msg = "Invalid data to create a `{}` instance.\n{}".format(
+                cls._class_name,
+                errors,
+            )
+            raise InvalidDocumentError(msg)
+
+        # In STRICT documents, remove any keys that aren't in cls._fields
+        if cls.STRICT:
+            data = {k: v for k, v in data.items() if k in cls._fields}
+
+        obj = cls(__auto_convert=False, _created=created, **data)
+        obj._changed_fields = []
+        if not _auto_dereference:
+            obj._fields = fields
+
+        return obj
+
+    @classmethod
+    def _build_index_specs(cls, meta_indexes):
+        """Generate and merge the full index specs."""
+        geo_indices = cls._geo_indices()
+        unique_indices = cls._unique_with_indexes()
+        index_specs = [cls._build_index_spec(spec) for spec in meta_indexes]
+
+        def merge_index_specs(index_specs, indices):
+            """Helper method for merging index specs."""
+            if not indices:
+                return index_specs
+
+            # Create a map of index fields to index spec. We're converting
+            # the fields from a list to a tuple so that it's hashable.
+            spec_fields = {tuple(index["fields"]): index for index in index_specs}
+
+            # For each new index, if there's an existing index with the same
+            # fields list, update the existing spec with all data from the
+            # new spec.
+            for new_index in indices:
+                candidate = spec_fields.get(tuple(new_index["fields"]))
+                if candidate is None:
+                    index_specs.append(new_index)
+                else:
+                    candidate.update(new_index)
+
+            return index_specs
+
+        # Merge geo indexes and unique_with indexes into the meta index specs.
+        index_specs = merge_index_specs(index_specs, geo_indices)
+        index_specs = merge_index_specs(index_specs, unique_indices)
+        return index_specs
+
+    @classmethod
+    def _build_index_spec(cls, spec):
+        """Build a PyMongo index spec from a MongoEngine index spec."""
+        if isinstance(spec, str):
+            spec = {"fields": [spec]}
+        elif isinstance(spec, (list, tuple)):
+            spec = {"fields": list(spec)}
+        elif isinstance(spec, dict):
+            spec = dict(spec)
+
+        index_list = []
+        direction = None
+
+        # Check to see if we need to include _cls
+        allow_inheritance = cls._meta.get("allow_inheritance")
+        include_cls = (
+            allow_inheritance
+            and not spec.get("sparse", False)
+            and spec.get("cls", True)
+            and "_cls" not in spec["fields"]
+        )
+
+        # 733: don't include cls if index_cls is False unless there is an explicit cls with the index
+        include_cls = include_cls and (
+            spec.get("cls", False) or cls._meta.get("index_cls", True)
+        )
+        if "cls" in spec:
+            spec.pop("cls")
+        for key in spec["fields"]:
+            # If inherited spec continue
+            if isinstance(key, (list, tuple)):
+                continue
+
+            # ASCENDING from +
+            # DESCENDING from -
+            # TEXT from $
+            # HASHED from #
+            # GEOSPHERE from (
+            # GEOHAYSTACK from )
+            # GEO2D from *
+            direction = pymongo.ASCENDING
+            if key.startswith("-"):
+                direction = pymongo.DESCENDING
+            elif key.startswith("$"):
+                direction = pymongo.TEXT
+            elif key.startswith("#"):
+                direction = pymongo.HASHED
+            elif key.startswith("("):
+                direction = pymongo.GEOSPHERE
+            elif key.startswith(")"):
+                try:
+                    direction = pymongo.GEOHAYSTACK
+                except AttributeError:
+                    raise NotImplementedError
+            elif key.startswith("*"):
+                direction = pymongo.GEO2D
+            if key.startswith(("+", "-", "*", "$", "#", "(", ")")):
+                key = key[1:]
+
+            # Use real field name, do it manually because we need field
+            # objects for the next part (list field checking)
+            parts = key.split(".")
+            if parts in (["pk"], ["id"], ["_id"]):
+                key = "_id"
+            else:
+                fields = cls._lookup_field(parts)
+                parts = []
+                for field in fields:
+                    try:
+                        if field != "_id":
+                            field = field.db_field
+                    except AttributeError:
+                        pass
+                    parts.append(field)
+                key = ".".join(parts)
+            index_list.append((key, direction))
+
+        # Don't add cls to a geo index
+        if (
+            include_cls
+            and direction not in (pymongo.GEO2D, pymongo.GEOSPHERE)
+            and (GEOHAYSTACK is None or direction != GEOHAYSTACK)
+        ):
+            index_list.insert(0, ("_cls", 1))
+
+        if index_list:
+            spec["fields"] = index_list
+
+        return spec
+
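The prefix characters decoded in `_build_index_spec` follow MongoEngine's index-spec syntax, so a meta `indexes` list can mix directions and index types. An illustrative sketch (hypothetical `Article` model; the field names are made up):

    from autonomous.db.fields import DateTimeField, StringField

    class Article(Document):
        title = StringField()
        slug = StringField()
        category = StringField()
        created_at = DateTimeField()
        meta = {
            "indexes": [
                "-created_at",                # pymongo.DESCENDING
                "$title",                     # pymongo.TEXT
                "#slug",                      # pymongo.HASHED
                ("category", "-created_at"),  # compound index, mixed direction
            ]
        }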
+    @classmethod
+    def _unique_with_indexes(cls, namespace=""):
+        """Find unique indexes in the document schema and return them."""
+        unique_indexes = []
+        for field_name, field in cls._fields.items():
+            sparse = field.sparse
+
+            # Generate a list of indexes needed by uniqueness constraints
+            if field.unique:
+                unique_fields = [field.db_field]
+
+                # Add any unique_with fields to the back of the index spec
+                if field.unique_with:
+                    if isinstance(field.unique_with, str):
+                        field.unique_with = [field.unique_with]
+
+                    # Convert unique_with field names to real field names
+                    unique_with = []
+                    for other_name in field.unique_with:
+                        parts = other_name.split(".")
+
+                        # Lookup real name
+                        parts = cls._lookup_field(parts)
+                        name_parts = [part.db_field for part in parts]
+                        unique_with.append(".".join(name_parts))
+
+                        # Unique field should be required
+                        parts[-1].required = True
+                        sparse = not sparse and parts[-1].name not in cls.__dict__
+
+                    unique_fields += unique_with
+
+                # Add the new index to the list
+                fields = [(f"{namespace}{f}", pymongo.ASCENDING) for f in unique_fields]
+                index = {"fields": fields, "unique": True, "sparse": sparse}
+                unique_indexes.append(index)
+
+            if field.__class__.__name__ in {
+                "EmbeddedDocumentListField",
+                "ListField",
+                "SortedListField",
+            }:
+                field = field.field
+
+            # Grab any embedded document field unique indexes
+            if (
+                field.__class__.__name__ == "EmbeddedDocumentField"
+                and field.document_type != cls
+            ):
+                field_namespace = "%s." % field_name
+                doc_cls = field.document_type
+                unique_indexes += doc_cls._unique_with_indexes(field_namespace)
+
+        return unique_indexes
+
+    @classmethod
+    def _geo_indices(cls, inspected=None, parent_field=None):
+        inspected = inspected or []
+        geo_indices = []
+        inspected.append(cls)
+
+        geo_field_type_names = (
+            "EmbeddedDocumentField",
+            "GeoPointField",
+            "PointField",
+            "LineStringField",
+            "PolygonField",
+        )
+
+        geo_field_types = tuple(_import_class(field) for field in geo_field_type_names)
+
+        for field in cls._fields.values():
+            if not isinstance(field, geo_field_types):
+                continue
+
+            if hasattr(field, "document_type"):
+                field_cls = field.document_type
+                if field_cls in inspected:
+                    continue
+
+                if hasattr(field_cls, "_geo_indices"):
+                    geo_indices += field_cls._geo_indices(
+                        inspected, parent_field=field.db_field
+                    )
+            elif field._geo_index:
+                field_name = field.db_field
+                if parent_field:
+                    field_name = f"{parent_field}.{field_name}"
+                geo_indices.append({"fields": [(field_name, field._geo_index)]})
+
+        return geo_indices
+
+    @classmethod
+    def _lookup_field(cls, parts):
+        """Given the path to a given field, return a list containing
+        the Field object associated with that field and all of its parent
+        Field objects.
+
+        Args:
+            parts (str, list, or tuple) - path to the field. Should be a
+            string for simple fields existing on this document or a list
+            of strings for a field that exists deeper in embedded documents.
+
+        Returns:
+            A list of Field instances for fields that were found or
+            strings for sub-fields that weren't.
+
+        Example:
+            >>> user._lookup_field('name')
+            [<autonomous.db.fields.StringField at 0x1119bff50>]
+
+            >>> user._lookup_field('roles')
+            [<autonomous.db.fields.EmbeddedDocumentListField at 0x1119ec250>]
+
+            >>> user._lookup_field(['roles', 'role'])
+            [<autonomous.db.fields.EmbeddedDocumentListField at 0x1119ec250>,
+            <autonomous.db.fields.StringField at 0x1119ec050>]
+
+            >>> user._lookup_field('doesnt_exist')
+            raises LookUpError
+
+            >>> user._lookup_field(['roles', 'doesnt_exist'])
+            [<autonomous.db.fields.EmbeddedDocumentListField at 0x1119ec250>,
+            'doesnt_exist']
+
+        """
+        # TODO this method is WAY too complicated. Simplify it.
+        # TODO don't think returning a string for embedded non-existent fields is desired
+
+        ListField = _import_class("ListField")
+        DynamicField = _import_class("DynamicField")
+
+        if not isinstance(parts, (list, tuple)):
+            parts = [parts]
+
+        fields = []
+        field = None
+
+        for field_name in parts:
+            # Handle ListField indexing:
+            if field_name.isdigit() and isinstance(field, ListField):
+                fields.append(field_name)
+                continue
+
+            # Look up first field from the document
+            if field is None:
+                if field_name == "pk":
+                    # Deal with "primary key" alias
+                    field_name = cls._meta["id_field"]
+
+                if field_name in cls._fields:
+                    field = cls._fields[field_name]
+                elif cls._dynamic:
+                    field = DynamicField(db_field=field_name)
+                elif cls._meta.get("allow_inheritance") or cls._meta.get(
+                    "abstract", False
+                ):
+                    # 744: in case the field is defined in a subclass
+                    for subcls in cls.__subclasses__():
+                        try:
+                            field = subcls._lookup_field([field_name])[0]
+                        except LookUpError:
+                            continue
+
+                        if field is not None:
+                            break
+                    else:
+                        raise LookUpError('Cannot resolve field "%s"' % field_name)
+                else:
+                    raise LookUpError('Cannot resolve field "%s"' % field_name)
+            else:
+                ReferenceField = _import_class("ReferenceField")
+                GenericReferenceField = _import_class("GenericReferenceField")
+
+                # If previous field was a reference, throw an error (we
+                # cannot look up fields that are on references).
+                if isinstance(field, (ReferenceField, GenericReferenceField)):
+                    raise LookUpError(
+                        "Cannot perform join in mongoDB: %s" % "__".join(parts)
+                    )
+
+                # If the parent field has a "field" attribute which has a
+                # lookup_member method, call it to find the field
+                # corresponding to this iteration.
+                if hasattr(getattr(field, "field", None), "lookup_member"):
+                    new_field = field.field.lookup_member(field_name)
+
+                # If the parent field is a DynamicField or if it's part of
+                # a DynamicDocument, mark current field as a DynamicField
+                # with db_name equal to the field name.
+                elif cls._dynamic and (
+                    isinstance(field, DynamicField)
+                    or getattr(getattr(field, "document_type", None), "_dynamic", None)
+                ):
+                    new_field = DynamicField(db_field=field_name)
+
+                # Else, try to use the parent field's lookup_member method
+                # to find the subfield.
+                elif hasattr(field, "lookup_member"):
+                    new_field = field.lookup_member(field_name)
+
+                # Raise a LookUpError if all the other conditions failed.
+                else:
+                    raise LookUpError(
+                        "Cannot resolve subfield or operator {} "
+                        "on the field {}".format(field_name, field.name)
+                    )
+
+                # If current field still wasn't found and the parent field
+                # is a ComplexBaseField, add the current field name and
+                # move on.
+                if not new_field and isinstance(field, ComplexBaseField):
+                    fields.append(field_name)
+                    continue
+                elif not new_field:
+                    raise LookUpError('Cannot resolve field "%s"' % field_name)
+
+                field = new_field  # update field to the new field type
+
+            fields.append(field)
+
+        return fields
+
+    @classmethod
+    def _translate_field_name(cls, field, sep="."):
+        """Translate a field attribute name to a database field name."""
+        parts = field.split(sep)
+        parts = [f.db_field for f in cls._lookup_field(parts)]
+        return ".".join(parts)
+
+    def __set_field_display(self):
+        """For each field that specifies choices, create a
+        get_<field>_display method.
+        """
+        fields_with_choices = [(n, f) for n, f in self._fields.items() if f.choices]
+        for attr_name, field in fields_with_choices:
+            setattr(
+                self,
+                "get_%s_display" % attr_name,
+                partial(self.__get_field_display, field=field),
+            )
+
+    def __get_field_display(self, field):
+        """Return the display value for a choice field"""
+        value = getattr(self, field.name)
+        if field.choices and isinstance(field.choices[0], (list, tuple)):
+            if value is None:
+                return None
+            sep = getattr(field, "display_sep", " ")
+            values = (
+                value
+                if field.__class__.__name__ in ("ListField", "SortedListField")
+                else [value]
+            )
+            return sep.join(
+                [str(dict(field.choices).get(val, val)) for val in values or []]
+            )
+        return value
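The last two methods give every field that defines `choices` a Django-style `get_<field>_display` accessor, wired up per instance in `__init__`. A short sketch:

    class Ticket(Document):
        status = StringField(choices=[("o", "Open"), ("c", "Closed")])

    ticket = Ticket(status="o")
    ticket.get_status_display()    # "Open", resolved via __get_field_display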