autonomous-app 0.3.0-py3-none-any.whl → 0.3.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autonomous/__init__.py +1 -1
- autonomous/ai/audioagent.py +1 -1
- autonomous/ai/imageagent.py +1 -1
- autonomous/ai/jsonagent.py +1 -1
- autonomous/ai/models/openai.py +81 -53
- autonomous/ai/oaiagent.py +1 -14
- autonomous/ai/textagent.py +1 -1
- autonomous/auth/autoauth.py +10 -10
- autonomous/auth/user.py +17 -2
- autonomous/db/__init__.py +42 -0
- autonomous/db/base/__init__.py +33 -0
- autonomous/db/base/common.py +62 -0
- autonomous/db/base/datastructures.py +476 -0
- autonomous/db/base/document.py +1230 -0
- autonomous/db/base/fields.py +767 -0
- autonomous/db/base/metaclasses.py +468 -0
- autonomous/db/base/utils.py +22 -0
- autonomous/db/common.py +79 -0
- autonomous/db/connection.py +472 -0
- autonomous/db/context_managers.py +313 -0
- autonomous/db/dereference.py +291 -0
- autonomous/db/document.py +1141 -0
- autonomous/db/errors.py +165 -0
- autonomous/db/fields.py +2732 -0
- autonomous/db/mongodb_support.py +24 -0
- autonomous/db/pymongo_support.py +80 -0
- autonomous/db/queryset/__init__.py +28 -0
- autonomous/db/queryset/base.py +2033 -0
- autonomous/db/queryset/field_list.py +88 -0
- autonomous/db/queryset/manager.py +58 -0
- autonomous/db/queryset/queryset.py +189 -0
- autonomous/db/queryset/transform.py +527 -0
- autonomous/db/queryset/visitor.py +189 -0
- autonomous/db/signals.py +59 -0
- autonomous/logger.py +3 -0
- autonomous/model/autoattr.py +56 -41
- autonomous/model/automodel.py +88 -34
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/METADATA +2 -2
- autonomous_app-0.3.1.dist-info/RECORD +60 -0
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/WHEEL +1 -1
- autonomous_app-0.3.0.dist-info/RECORD +0 -35
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/LICENSE +0 -0
- {autonomous_app-0.3.0.dist-info → autonomous_app-0.3.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1141 @@
+import re
+
+import pymongo
+from bson.dbref import DBRef
+from pymongo.read_preferences import ReadPreference
+
+from autonomous import log
+from autonomous.db import signals
+from autonomous.db.base import (
+    BaseDict,
+    BaseDocument,
+    BaseList,
+    DocumentMetaclass,
+    EmbeddedDocumentList,
+    TopLevelDocumentMetaclass,
+    get_document,
+)
+from autonomous.db.common import _import_class
+from autonomous.db.connection import DEFAULT_CONNECTION_NAME, get_db
+from autonomous.db.context_managers import (
+    set_write_concern,
+    switch_collection,
+    switch_db,
+)
+from autonomous.db.errors import (
+    InvalidDocumentError,
+    InvalidQueryError,
+    SaveConditionError,
+)
+from autonomous.db.pymongo_support import list_collection_names
+from autonomous.db.queryset import (
+    NotUniqueError,
+    OperationError,
+    QuerySet,
+    transform,
+)
+
+__all__ = (
+    "Document",
+    "EmbeddedDocument",
+    "DynamicDocument",
+    "DynamicEmbeddedDocument",
+    "OperationError",
+    "InvalidCollectionError",
+    "NotUniqueError",
+    "MapReduceDocument",
+)
+
+
+def includes_cls(fields):
+    """Helper function used for ensuring and comparing indexes."""
+    first_field = None
+    if len(fields):
+        if isinstance(fields[0], str):
+            first_field = fields[0]
+        elif isinstance(fields[0], (list, tuple)) and len(fields[0]):
+            first_field = fields[0][0]
+    return first_field == "_cls"
+
+
+class InvalidCollectionError(Exception):
+    pass
+
+
+class EmbeddedDocument(BaseDocument, metaclass=DocumentMetaclass):
+    r"""A :class:`~autonomous.db.Document` that isn't stored in its own
+    collection. :class:`~autonomous.db.EmbeddedDocument`\ s should be used as
+    fields on :class:`~autonomous.db.Document`\ s through the
+    :class:`~autonomous.db.EmbeddedDocumentField` field type.
+
+    A :class:`~autonomous.db.EmbeddedDocument` subclass may be itself subclassed,
+    to create a specialised version of the embedded document that will be
+    stored in the same collection. To facilitate this behaviour a `_cls`
+    field is added to documents (hidden through the MongoEngine interface).
+    To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
+    :attr:`meta` dictionary.
+    """
+
+    __slots__ = ("_instance",)
+
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
+
+    # A generic embedded document doesn't have any immutable properties
+    # that describe it uniquely, hence it shouldn't be hashable. You can
+    # define your own __hash__ method on a subclass if you need your
+    # embedded documents to be hashable.
+    __hash__ = None
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._instance = None
+        self._changed_fields = []
+
+    def __eq__(self, other):
+        if isinstance(other, self.__class__):
+            return self._data == other._data
+        return False
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def __getstate__(self):
+        data = super().__getstate__()
+        data["_instance"] = None
+        return data
+
+    def __setstate__(self, state):
+        super().__setstate__(state)
+        self._instance = state["_instance"]
+
+    def to_mongo(self, *args, **kwargs):
+        data = super().to_mongo(*args, **kwargs)
+
+        # remove _id from the SON if it's in it and it's None
+        if "_id" in data and data["_id"] is None:
+            del data["_id"]
+
+        return data
+
+
+class Document(BaseDocument, metaclass=TopLevelDocumentMetaclass):
+    """The base class used for defining the structure and properties of
+    collections of documents stored in MongoDB. Inherit from this class, and
+    add fields as class attributes to define a document's structure.
+    Individual documents may then be created by making instances of the
+    :class:`~autonomous.db.Document` subclass.
+
+    By default, the MongoDB collection used to store documents created using a
+    :class:`~autonomous.db.Document` subclass will be the name of the subclass
+    converted to snake_case. A different collection may be specified by
+    providing :attr:`collection` to the :attr:`meta` dictionary in the class
+    definition.
+
+    A :class:`~autonomous.db.Document` subclass may be itself subclassed, to
+    create a specialised version of the document that will be stored in the
+    same collection. To facilitate this behaviour a `_cls`
+    field is added to documents (hidden through the MongoEngine interface).
+    To enable this behaviour set :attr:`allow_inheritance` to ``True`` in the
+    :attr:`meta` dictionary.
+
+    A :class:`~autonomous.db.Document` may use a **Capped Collection** by
+    specifying :attr:`max_documents` and :attr:`max_size` in the :attr:`meta`
+    dictionary. :attr:`max_documents` is the maximum number of documents that
+    is allowed to be stored in the collection, and :attr:`max_size` is the
+    maximum size of the collection in bytes. :attr:`max_size` is rounded up
+    to the next multiple of 256 by MongoDB internally and autonomous.db before.
+    Use a multiple of 256 yourself to avoid confusion. If :attr:`max_size` is not
+    specified and :attr:`max_documents` is, :attr:`max_size` defaults to
+    10485760 bytes (10MB).
+
+    Indexes may be created by specifying :attr:`indexes` in the :attr:`meta`
+    dictionary. The value should be a list of field names or tuples of field
+    names. Index direction may be specified by prefixing the field names with
+    a **+** or **-** sign.
+
+    Automatic index creation can be disabled by specifying
+    :attr:`auto_create_index` in the :attr:`meta` dictionary. If this is set to
+    False then indexes will not be created by MongoEngine. This is useful in
+    production systems where index creation is performed as part of a
+    deployment system.
+
+    By default, _cls will be added to the start of every index (that
+    doesn't contain a list) if allow_inheritance is True. This can be
+    disabled by either setting cls to False on the specific index or
+    by setting index_cls to False on the meta dictionary for the document.
+
+    By default, any extra attribute existing in stored data but not declared
+    in your model will raise a :class:`~autonomous.db.FieldDoesNotExist` error.
+    This can be disabled by setting :attr:`strict` to ``False``
+    in the :attr:`meta` dictionary.
+    """
+
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = TopLevelDocumentMetaclass
+
+    __slots__ = ("__objects",)
+
+    @property
+    def pk(self):
+        """Get the primary key."""
+        if "id_field" not in self._meta:
+            return None
+        return getattr(self, self._meta["id_field"])
+
+    @pk.setter
+    def pk(self, value):
+        """Set the primary key."""
+        return setattr(self, self._meta["id_field"], value)
+
+    def __hash__(self):
+        """Return the hash based on the PK of this document. If it's new
+        and doesn't have a PK yet, return the default object hash instead.
+        """
+        if self.pk is None:
+            return super(BaseDocument, self).__hash__()
+
+        return hash(self.pk)
+
+    @classmethod
+    def _get_db(cls):
+        """Some Model using other db_alias"""
+        return get_db(cls._meta.get("db_alias", DEFAULT_CONNECTION_NAME))
+
+    @classmethod
+    def _disconnect(cls):
+        """Detach the Document class from the (cached) database collection"""
+        cls._collection = None
+
+    @classmethod
+    def _get_collection(cls):
+        """Return the PyMongo collection corresponding to this document.
+
+        Upon first call, this method:
+        1. Initializes a :class:`~pymongo.collection.Collection` corresponding
+           to this document.
+        2. Creates indexes defined in this document's :attr:`meta` dictionary.
+           This happens only if `auto_create_index` is True.
+        """
+        if not hasattr(cls, "_collection") or cls._collection is None:
+            # Get the collection, either capped or regular.
+            if cls._meta.get("max_size") or cls._meta.get("max_documents"):
+                cls._collection = cls._get_capped_collection()
+            elif cls._meta.get("timeseries"):
+                cls._collection = cls._get_timeseries_collection()
+            else:
+                db = cls._get_db()
+                collection_name = cls._get_collection_name()
+                cls._collection = db[collection_name]
+
+            # Ensure indexes on the collection unless auto_create_index was
+            # set to False. Plus, there is no need to ensure indexes on slave.
+            db = cls._get_db()
+            if cls._meta.get("auto_create_index", True) and db.client.is_primary:
+                cls.ensure_indexes()
+
+        return cls._collection
+
+    @classmethod
+    def _get_capped_collection(cls):
+        """Create a new or get an existing capped PyMongo collection."""
+        db = cls._get_db()
+        collection_name = cls._get_collection_name()
+
+        # Get max document limit and max byte size from meta.
+        max_size = cls._meta.get("max_size") or 10 * 2**20  # 10MB default
+        max_documents = cls._meta.get("max_documents")
+
+        # MongoDB will automatically raise the size to make it a multiple of
+        # 256 bytes. We raise it here ourselves to be able to reliably compare
+        # the options below.
+        if max_size % 256:
+            max_size = (max_size // 256 + 1) * 256
+
+        # If the collection already exists and has different options
+        # (i.e. isn't capped or has different max/size), raise an error.
+        if collection_name in list_collection_names(
+            db, include_system_collections=True
+        ):
+            collection = db[collection_name]
+            options = collection.options()
+            if options.get("max") != max_documents or options.get("size") != max_size:
+                raise InvalidCollectionError(
+                    'Cannot create collection "{}" as a capped '
+                    "collection as it already exists".format(cls._collection)
+                )
+
+            return collection
+
+        # Create a new capped collection.
+        opts = {"capped": True, "size": max_size}
+        if max_documents:
+            opts["max"] = max_documents
+
+        return db.create_collection(collection_name, **opts)
+
+    @classmethod
+    def _get_timeseries_collection(cls):
+        """Create a new or get an existing timeseries PyMongo collection."""
+        db = cls._get_db()
+        collection_name = cls._get_collection_name()
+        timeseries_opts = cls._meta.get("timeseries")
+
+        if collection_name in list_collection_names(
+            db, include_system_collections=True
+        ):
+            collection = db[collection_name]
+            collection.options()
+            return collection
+
+        opts = {"expireAfterSeconds": timeseries_opts.pop("expireAfterSeconds", None)}
+        return db.create_collection(
+            name=collection_name,
+            timeseries=timeseries_opts,
+            **opts,
+        )
+
+    def to_mongo(self, *args, **kwargs):
+        data = super().to_mongo(*args, **kwargs)
+
+        # If '_id' is None, try and set it from self._data. If that
+        # doesn't exist either, remove '_id' from the SON completely.
+        if data["_id"] is None:
+            if self._data.get("id") is None:
+                del data["_id"]
+            else:
+                data["_id"] = self._data["id"]
+
+        return data
+
+    def modify(self, query=None, **update):
+        """Perform an atomic update of the document in the database and reload
+        the document object using updated version.
+
+        Returns True if the document has been updated or False if the document
+        in the database doesn't match the query.
+
+        .. note:: All unsaved changes that have been made to the document are
+            rejected if the method returns True.
+
+        :param query: the update will be performed only if the document in the
+            database matches the query
+        :param update: Django-style update keyword arguments
+        """
+        if query is None:
+            query = {}
+
+        if self.pk is None:
+            raise InvalidDocumentError("The document does not have a primary key.")
+
+        id_field = self._meta["id_field"]
+        query = query.copy() if isinstance(query, dict) else query.to_query(self)
+
+        if id_field not in query:
+            query[id_field] = self.pk
+        elif query[id_field] != self.pk:
+            raise InvalidQueryError(
+                "Invalid document modify query: it must modify only this document."
+            )
+
+        # Need to add shard key to query, or you get an error
+        query.update(self._object_key)
+
+        updated = self._qs(**query).modify(new=True, **update)
+        if updated is None:
+            return False
+
+        for field in self._fields_ordered:
+            setattr(self, field, self._reload(field, updated[field]))
+
+        self._changed_fields = updated._changed_fields
+        self._created = False
+
+        return True
+
+    def save(
+        self,
+        force_insert=False,
+        validate=True,
+        clean=True,
+        write_concern=None,
+        cascade=None,
+        cascade_kwargs=None,
+        _refs=None,
+        save_condition=None,
+        signal_kwargs=None,
+        **kwargs,
+    ):
+        """Save the :class:`~autonomous.db.Document` to the database. If the
+        document already exists, it will be updated, otherwise it will be
+        created. Returns the saved object instance.
+
+        :param force_insert: only try to create a new document, don't allow
+            updates of existing documents.
+        :param validate: validates the document; set to ``False`` to skip.
+        :param clean: call the document clean method, requires `validate` to be
+            True.
+        :param write_concern: Extra keyword arguments are passed down to
+            :meth:`~pymongo.collection.Collection.save` OR
+            :meth:`~pymongo.collection.Collection.insert`
+            which will be used as options for the resultant
+            ``getLastError`` command. For example,
+            ``save(..., write_concern={w: 2, fsync: True}, ...)`` will
+            wait until at least two servers have recorded the write and
+            will force an fsync on the primary server.
+        :param cascade: Sets the flag for cascading saves. You can set a
+            default by setting "cascade" in the document __meta__
+        :param cascade_kwargs: (optional) kwargs dictionary to be passed through
+            to cascading saves. Implies ``cascade=True``.
+        :param _refs: A list of processed references used in cascading saves
+        :param save_condition: only perform save if matching record in db
+            satisfies condition(s) (e.g. version number).
+            Raises :class:`OperationError` if the conditions are not satisfied
+        :param signal_kwargs: (optional) kwargs dictionary to be passed to
+            the signal calls.
+
+        .. versionchanged:: 0.5
+            In existing documents it only saves changed fields using
+            set / unset. Saves are cascaded and any
+            :class:`~bson.dbref.DBRef` objects that have changes are
+            saved as well.
+        .. versionchanged:: 0.6
+            Added cascading saves
+        .. versionchanged:: 0.8
+            Cascade saves are optional and default to False. If you want
+            fine-grained control then you can turn off using document
+            meta['cascade'] = True. Also you can pass different kwargs to
+            the cascade save using cascade_kwargs which overwrites the
+            existing kwargs with custom values.
+        .. versionchanged:: 0.26
+            save() no longer calls :meth:`~autonomous.db.Document.ensure_indexes`
+            unless ``meta['auto_create_index_on_save']`` is set to True.
+
+        """
+        signal_kwargs = signal_kwargs or {}
+
+        if self._meta.get("abstract"):
+            raise InvalidDocumentError("Cannot save an abstract document.")
+        signals.pre_save.send(self.__class__, document=self, **signal_kwargs)
+
+        if validate:
+            self.validate(clean=clean)
+
+        if write_concern is None:
+            write_concern = {}
+
+        doc_id = self.to_mongo(fields=[self._meta["id_field"]])
+        created = "_id" not in doc_id or self._created or force_insert
+
+        signals.pre_save_post_validation.send(
+            self.__class__, document=self, created=created, **signal_kwargs
+        )
+        # it might be refreshed by the pre_save_post_validation hook, e.g., for etag generation
+        doc = self.to_mongo()
+
+        # Initialize the Document's underlying pymongo.Collection (+create indexes) if not already initialized
+        # Important to do this here to avoid that the index creation gets wrapped in the try/except block below
+        # and turned into autonomous.db.OperationError
+        if self._collection is None:
+            _ = self._get_collection()
+        elif self._meta.get("auto_create_index_on_save", False):
+            # ensure_indexes is called as part of _get_collection so no need to re-call it again here
+            self.ensure_indexes()
+
+        try:
+            # Save a new document or update an existing one
+            if created:
+                object_id = self._save_create(
+                    doc=doc, force_insert=force_insert, write_concern=write_concern
+                )
+            else:
+                object_id, created = self._save_update(
+                    doc, save_condition, write_concern
+                )
+
+            if cascade is None:
+                cascade = self._meta.get("cascade", False) or cascade_kwargs is not None
+
+            if cascade:
+                kwargs = {
+                    "force_insert": force_insert,
+                    "validate": validate,
+                    "write_concern": write_concern,
+                    "cascade": cascade,
+                }
+                if cascade_kwargs:  # Allow granular control over cascades
+                    kwargs.update(cascade_kwargs)
+                kwargs["_refs"] = _refs
+                self.cascade_save(**kwargs)
+
+        except pymongo.errors.DuplicateKeyError as err:
+            message = "Tried to save duplicate unique keys (%s)"
+            raise NotUniqueError(message % err)
+        except pymongo.errors.OperationFailure as err:
+            message = "Could not save document (%s)"
+            if re.match("^E1100[01] duplicate key", str(err)):
+                # E11000 - duplicate key error index
+                # E11001 - duplicate key on update
+                message = "Tried to save duplicate unique keys (%s)"
+                raise NotUniqueError(message % err)
+            raise OperationError(message % err)
+
+        # Make sure we store the PK on this document now that it's saved
+        id_field = self._meta["id_field"]
+        if created or id_field not in self._meta.get("shard_key", []):
+            self[id_field] = self._fields[id_field].to_python(object_id)
+
+        signals.post_save.send(
+            self.__class__, document=self, created=created, **signal_kwargs
+        )
+
+        self._clear_changed_fields()
+        self._created = False
+
+        return self
+
+    def _save_create(self, doc, force_insert, write_concern):
+        """Save a new document.
+
+        Helper method, should only be used inside save().
+        """
+        collection = self._get_collection()
+        with set_write_concern(collection, write_concern) as wc_collection:
+            if force_insert:
+                return wc_collection.insert_one(doc).inserted_id
+            # Unlike the old save(), insert_one raises DuplicateKeyError for an
+            # existing _id, so such documents go through find_one_and_replace.
+            if "_id" in doc:
+                select_dict = {"_id": doc["_id"]}
+                select_dict = self._integrate_shard_key(doc, select_dict)
+                raw_object = wc_collection.find_one_and_replace(select_dict, doc)
+                if raw_object:
+                    return doc["_id"]
+
+            object_id = wc_collection.insert_one(doc).inserted_id
+
+        return object_id
+
+    def _get_update_doc(self):
+        """Return a dict containing all the $set and $unset operations
+        that should be sent to MongoDB based on the changes made to this
+        Document.
+        """
+        updates, removals = self._delta()
+
+        update_doc = {}
+        if updates:
+            update_doc["$set"] = updates
+        if removals:
+            update_doc["$unset"] = removals
+
+        return update_doc
+
+    def _integrate_shard_key(self, doc, select_dict):
+        """Integrates the collection's shard key to the `select_dict`, which will be used for the query.
+        The value from the shard key is taken from the `doc` and finally the select_dict is returned.
+        """
+
+        # Need to add shard key to query, or you get an error
+        shard_key = self._meta.get("shard_key", tuple())
+        for k in shard_key:
+            path = self._lookup_field(k.split("."))
+            actual_key = [p.db_field for p in path]
+            val = doc
+            for ak in actual_key:
+                val = val[ak]
+            select_dict[".".join(actual_key)] = val
+
+        return select_dict
+
+    def _save_update(self, doc, save_condition, write_concern):
+        """Update an existing document.
+
+        Helper method, should only be used inside save().
+        """
+        collection = self._get_collection()
+        object_id = doc["_id"]
+        created = False
+
+        select_dict = {}
+        if save_condition is not None:
+            select_dict = transform.query(self.__class__, **save_condition)
+
+        select_dict["_id"] = object_id
+
+        select_dict = self._integrate_shard_key(doc, select_dict)
+
+        update_doc = self._get_update_doc()
+        if update_doc:
+            upsert = save_condition is None
+            with set_write_concern(collection, write_concern) as wc_collection:
+                last_error = wc_collection.update_one(
+                    select_dict, update_doc, upsert=upsert
+                ).raw_result
+                if not upsert and last_error["n"] == 0:
+                    raise SaveConditionError(
+                        "Race condition preventing document update detected"
+                    )
+                if last_error is not None:
+                    updated_existing = last_error.get("updatedExisting")
+                    if updated_existing is False:
+                        created = True
+                        # !!! This is bad, means we accidentally created a new,
+                        # potentially corrupted document. See
+                        # https://github.com/MongoEngine/autonomous.db/issues/564
+
+        return object_id, created
+
+    def cascade_save(self, **kwargs):
+        """Recursively save any references and generic references on the
+        document.
+        """
+        _refs = kwargs.get("_refs") or []
+
+        ReferenceField = _import_class("ReferenceField")
+        GenericReferenceField = _import_class("GenericReferenceField")
+
+        for name, cls in self._fields.items():
+            if not isinstance(cls, (ReferenceField, GenericReferenceField)):
+                continue
+
+            ref = self._data.get(name)
+            if not ref or isinstance(ref, DBRef):
+                continue
+
+            if not getattr(ref, "_changed_fields", True):
+                continue
+
+            ref_id = f"{ref.__class__.__name__},{str(ref._data)}"
+            if ref and ref_id not in _refs:
+                _refs.append(ref_id)
+                kwargs["_refs"] = _refs
+                ref.save(**kwargs)
+                ref._changed_fields = []
+
+    @property
+    def _qs(self):
+        """Return the default queryset corresponding to this document."""
+        if not hasattr(self, "__objects"):
+            queryset_class = self._meta.get("queryset_class", QuerySet)
+            self.__objects = queryset_class(self.__class__, self._get_collection())
+        return self.__objects
+
+    @property
+    def _object_key(self):
+        """Return a query dict that can be used to fetch this document.
+
+        Most of the time the dict is a simple PK lookup, but in case of
+        a sharded collection with a compound shard key, it can contain a more
+        complex query.
+
+        Note that the dict returned by this method uses MongoEngine field
+        names instead of PyMongo field names (e.g. "pk" instead of "_id",
+        "some__nested__field" instead of "some.nested.field", etc.).
+        """
+        select_dict = {"pk": self.pk}
+        shard_key = self.__class__._meta.get("shard_key", tuple())
+        for k in shard_key:
+            val = self
+            field_parts = k.split(".")
+            for part in field_parts:
+                val = getattr(val, part)
+            select_dict["__".join(field_parts)] = val
+        return select_dict
+
+    def update(self, **kwargs):
+        """Performs an update on the :class:`~autonomous.db.Document`
+        A convenience wrapper to :meth:`~autonomous.db.QuerySet.update`.
+
+        Raises :class:`OperationError` if called on an object that has not yet
+        been saved.
+        """
+        if self.pk is None:
+            if kwargs.get("upsert", False):
+                query = self.to_mongo()
+                if "_cls" in query:
+                    del query["_cls"]
+                return self._qs.filter(**query).update_one(**kwargs)
+            else:
+                raise OperationError("attempt to update a document not yet saved")
+
+        # Need to add shard key to query, or you get an error
+        return self._qs.filter(**self._object_key).update_one(**kwargs)
+
+    def delete(self, signal_kwargs=None, **write_concern):
+        """Delete the :class:`~autonomous.db.Document` from the database. This
+        will only take effect if the document has been previously saved.
+
+        :param signal_kwargs: (optional) kwargs dictionary to be passed to
+            the signal calls.
+        :param write_concern: Extra keyword arguments are passed down which
+            will be used as options for the resultant ``getLastError`` command.
+            For example, ``save(..., w: 2, fsync: True)`` will
+            wait until at least two servers have recorded the write and
+            will force an fsync on the primary server.
+        """
+        signal_kwargs = signal_kwargs or {}
+        signals.pre_delete.send(self.__class__, document=self, **signal_kwargs)
+
+        # Delete FileFields separately
+        FileField = _import_class("FileField")
+        for name, field in self._fields.items():
+            if isinstance(field, FileField):
+                getattr(self, name).delete()
+
+        try:
+            self._qs.filter(**self._object_key).delete(
+                write_concern=write_concern, _from_doc_delete=True
+            )
+        except pymongo.errors.OperationFailure as err:
+            message = "Could not delete document (%s)" % err.args
+            raise OperationError(message)
+        signals.post_delete.send(self.__class__, document=self, **signal_kwargs)
+
+    def switch_db(self, db_alias, keep_created=True):
+        """
+        Temporarily switch the database for a document instance.
+
+        Only really useful for archiving off data and calling `save()`::
+
+            user = User.objects.get(id=user_id)
+            user.switch_db('archive-db')
+            user.save()
+
+        :param str db_alias: The database alias to use for saving the document
+
+        :param bool keep_created: keep self._created value after switching db, else is reset to True
+
+
+        .. seealso::
+            Use :class:`~autonomous.db.context_managers.switch_collection`
+            if you need to read from another collection
+        """
+        with switch_db(self.__class__, db_alias) as cls:
+            collection = cls._get_collection()
+            db = cls._get_db()
+        self._get_collection = lambda: collection
+        self._get_db = lambda: db
+        self._collection = collection
+        self._created = True if not keep_created else self._created
+        self.__objects = self._qs
+        self.__objects._collection_obj = collection
+        return self
+
+    def switch_collection(self, collection_name, keep_created=True):
+        """
+        Temporarily switch the collection for a document instance.
+
+        Only really useful for archiving off data and calling `save()`::
+
+            user = User.objects.get(id=user_id)
+            user.switch_collection('old-users')
+            user.save()
+
+        :param str collection_name: The collection name to use for saving the
+            document
+
+        :param bool keep_created: keep self._created value after switching collection, else is reset to True
+
+
+        .. seealso::
+            Use :class:`~autonomous.db.context_managers.switch_db`
+            if you need to read from another database
+        """
+        with switch_collection(self.__class__, collection_name) as cls:
+            collection = cls._get_collection()
+        self._get_collection = lambda: collection
+        self._collection = collection
+        self._created = True if not keep_created else self._created
+        self.__objects = self._qs
+        self.__objects._collection_obj = collection
+        return self
+
+    def select_related(self, max_depth=1):
+        """Handles dereferencing of :class:`~bson.dbref.DBRef` objects to
+        a maximum depth in order to cut down the number of queries to MongoDB.
+        """
+        DeReference = _import_class("DeReference")
+        DeReference()([self], max_depth + 1)
+        return self
+
+    def reload(self, *fields, **kwargs):
+        """Reloads all attributes from the database.
+
+        :param fields: (optional) args list of fields to reload
+        :param max_depth: (optional) depth of dereferencing to follow
+        """
+        max_depth = 1
+        if fields and isinstance(fields[0], int):
+            max_depth = fields[0]
+            fields = fields[1:]
+        elif "max_depth" in kwargs:
+            max_depth = kwargs["max_depth"]
+
+        if self.pk is None:
+            raise self.DoesNotExist("Document does not exist")
+
+        obj = (
+            self._qs.read_preference(ReadPreference.PRIMARY)
+            .filter(**self._object_key)
+            .only(*fields)
+            .limit(1)
+            .select_related(max_depth=max_depth)
+        )
+
+        if obj:
+            obj = obj[0]
+        else:
+            raise self.DoesNotExist("Document does not exist")
+        for field in obj._data:
+            if not fields or field in fields:
+                try:
+                    setattr(self, field, self._reload(field, obj[field]))
+                except (KeyError, AttributeError):
+                    try:
+                        # If field is a special field, e.g. items is stored as _reserved_items,
+                        # a KeyError is thrown. So try to retrieve the field from _data
+                        setattr(self, field, self._reload(field, obj._data.get(field)))
+                    except KeyError:
+                        # If field is removed from the database while the object
+                        # is in memory, a reload would cause a KeyError
+                        # i.e. obj.update(unset__field=1) followed by obj.reload()
+                        delattr(self, field)
+
+        self._changed_fields = (
+            list(set(self._changed_fields) - set(fields))
+            if fields
+            else obj._changed_fields
+        )
+        self._created = False
+        return self
+
+    def _reload(self, key, value):
+        """Used by :meth:`~autonomous.db.Document.reload` to ensure the
+        correct instance is linked to self.
+        """
+        if isinstance(value, BaseDict):
+            value = [(k, self._reload(k, v)) for k, v in value.items()]
+            value = BaseDict(value, self, key)
+        elif isinstance(value, EmbeddedDocumentList):
+            value = [self._reload(key, v) for v in value]
+            value = EmbeddedDocumentList(value, self, key)
+        elif isinstance(value, BaseList):
+            value = [self._reload(key, v) for v in value]
+            value = BaseList(value, self, key)
+        elif isinstance(value, (EmbeddedDocument, DynamicEmbeddedDocument)):
+            value._instance = None
+            value._changed_fields = []
+        return value
+
+    def to_dbref(self):
+        """Returns an instance of :class:`~bson.dbref.DBRef` useful in
+        `__raw__` queries."""
+        if self.pk is None:
+            msg = "Only saved documents can have a valid dbref"
+            raise OperationError(msg)
+        return DBRef(self.__class__._get_collection_name(), self.pk)
+
+    @classmethod
+    def register_delete_rule(cls, document_cls, field_name, rule):
+        """This method registers the delete rules to apply when removing this
+        object.
+        """
+        classes = [
+            get_document(class_name)
+            for class_name in cls._subclasses
+            if class_name != cls.__name__
+        ] + [cls]
+        documents = [
+            get_document(class_name)
+            for class_name in document_cls._subclasses
+            if class_name != document_cls.__name__
+        ] + [document_cls]
+
+        for klass in classes:
+            for document_cls in documents:
+                delete_rules = klass._meta.get("delete_rules") or {}
+                delete_rules[(document_cls, field_name)] = rule
+                klass._meta["delete_rules"] = delete_rules
+
+    @classmethod
+    def drop_collection(cls):
+        """Drops the entire collection associated with this
+        :class:`~autonomous.db.Document` type from the database.
+
+        Raises :class:`OperationError` if the document has no collection set
+        (e.g. if it is `abstract`)
+        """
+        coll_name = cls._get_collection_name()
+        if not coll_name:
+            raise OperationError(
+                "Document %s has no collection defined (is it abstract?)" % cls
+            )
+        cls._collection = None
+        db = cls._get_db()
+        db.drop_collection(coll_name)
+
+    @classmethod
+    def create_index(cls, keys, background=False, **kwargs):
+        """Creates the given indexes if required.
+
+        :param keys: a single index key or a list of index keys (to
+            construct a multi-field index); keys may be prefixed with a **+**
+            or a **-** to determine the index ordering
+        :param background: Allows index creation in the background
+        """
+        index_spec = cls._build_index_spec(keys)
+        index_spec = index_spec.copy()
+        fields = index_spec.pop("fields")
+        index_spec["background"] = background
+        index_spec.update(kwargs)
+
+        return cls._get_collection().create_index(fields, **index_spec)
+
+    @classmethod
+    def ensure_indexes(cls):
+        """Checks the document meta data and ensures all the indexes exist.
+
+        Global defaults can be set in the meta - see :doc:`guide/defining-documents`
+
+        By default, this will get called automatically upon first interaction with the
+        Document collection (query, save, etc) so unless you disabled `auto_create_index`, you
+        shouldn't have to call this manually.
+
+        This also gets called upon every call to Document.save if `auto_create_index_on_save` is set to True
+
+        If called multiple times, MongoDB will not re-create indexes if they exist already
+
+        .. note:: You can disable automatic index creation by setting
+            `auto_create_index` to False in the documents meta data
+        """
+        background = cls._meta.get("index_background", False)
+        index_opts = cls._meta.get("index_opts") or {}
+        index_cls = cls._meta.get("index_cls", True)
+
+        collection = cls._get_collection()
+
+        # determine if an index which we are creating includes
+        # _cls as its first field; if so, we can avoid creating
+        # an extra index on _cls, as mongodb will use the existing
+        # index to service queries against _cls
+        cls_indexed = False
+
+        # Ensure document-defined indexes are created
+        if cls._meta["index_specs"]:
+            index_spec = cls._meta["index_specs"]
+            for spec in index_spec:
+                spec = spec.copy()
+                fields = spec.pop("fields")
+                cls_indexed = cls_indexed or includes_cls(fields)
+                opts = index_opts.copy()
+                opts.update(spec)
+
+                # we shouldn't pass 'cls' to the collection.ensureIndex options
+                # because of https://jira.mongodb.org/browse/SERVER-769
+                if "cls" in opts:
+                    del opts["cls"]
+
+                collection.create_index(fields, background=background, **opts)
+
+        # If _cls is being used (for polymorphism), it needs an index,
+        # only if another index doesn't begin with _cls
+        if index_cls and not cls_indexed and cls._meta.get("allow_inheritance"):
+            # we shouldn't pass 'cls' to the collection.ensureIndex options
+            # because of https://jira.mongodb.org/browse/SERVER-769
+            if "cls" in index_opts:
+                del index_opts["cls"]
+
+            collection.create_index("_cls", background=background, **index_opts)
+
+    @classmethod
+    def list_indexes(cls):
+        """Lists all indexes that should be created for the Document collection.
+        It includes all the indexes from super- and sub-classes.
+
+        Note that it will only return the indexes' fields, not the indexes' options
+        """
+        if cls._meta.get("abstract"):
+            return []
+
+        # get all the base classes, subclasses and siblings
+        classes = []
+
+        def get_classes(cls):
+            if cls not in classes and isinstance(cls, TopLevelDocumentMetaclass):
+                classes.append(cls)
+
+            for base_cls in cls.__bases__:
+                if (
+                    isinstance(base_cls, TopLevelDocumentMetaclass)
+                    and base_cls != Document
+                    and not base_cls._meta.get("abstract")
+                    and base_cls._get_collection().full_name
+                    == cls._get_collection().full_name
+                    and base_cls not in classes
+                ):
+                    classes.append(base_cls)
+                    get_classes(base_cls)
+            for subclass in cls.__subclasses__():
+                if (
+                    isinstance(subclass, TopLevelDocumentMetaclass)
+                    and subclass._get_collection().full_name
+                    == cls._get_collection().full_name
+                    and subclass not in classes
+                ):
+                    classes.append(subclass)
+                    get_classes(subclass)
+
+        get_classes(cls)
+
+        # get the indexes spec for all the gathered classes
+        def get_indexes_spec(cls):
+            indexes = []
+
+            if cls._meta["index_specs"]:
+                index_spec = cls._meta["index_specs"]
+                for spec in index_spec:
+                    spec = spec.copy()
+                    fields = spec.pop("fields")
+                    indexes.append(fields)
+            return indexes
+
+        indexes = []
+        for klass in classes:
+            for index in get_indexes_spec(klass):
+                if index not in indexes:
+                    indexes.append(index)
+
+        # finish up by appending { '_id': 1 } and { '_cls': 1 }, if needed
+        if [("_id", 1)] not in indexes:
+            indexes.append([("_id", 1)])
+        if cls._meta.get("index_cls", True) and cls._meta.get("allow_inheritance"):
+            indexes.append([("_cls", 1)])
+
+        return indexes
+
+    @classmethod
+    def compare_indexes(cls):
+        """Compares the indexes defined in MongoEngine with the ones
+        existing in the database. Returns any missing/extra indexes.
+        """
+
+        required = cls.list_indexes()
+
+        existing = []
+        collection = cls._get_collection()
+        for info in collection.index_information().values():
+            if "_fts" in info["key"][0]:
+                # Useful for text indexes (but not only)
+                index_type = info["key"][0][1]
+                text_index_fields = info.get("weights").keys()
+                existing.append([(key, index_type) for key in text_index_fields])
+            else:
+                existing.append(info["key"])
+        missing = [index for index in required if index not in existing]
+        extra = [index for index in existing if index not in required]
+
+        # if { _cls: 1 } is missing, make sure it's *really* necessary
+        if [("_cls", 1)] in missing:
+            cls_obsolete = False
+            for index in existing:
+                if includes_cls(index) and index not in extra:
+                    cls_obsolete = True
+                    break
+            if cls_obsolete:
+                missing.remove([("_cls", 1)])
+
+        return {"missing": missing, "extra": extra}
+
+
+class DynamicDocument(Document, metaclass=TopLevelDocumentMetaclass):
+    """A Dynamic Document class allowing flexible, expandable and uncontrolled
+    schemas. As a :class:`~autonomous.db.Document` subclass, acts in the same
+    way as an ordinary document but has expanded style properties. Any data
+    passed or set against the :class:`~autonomous.db.DynamicDocument` that is
+    not a field is automatically converted into a
+    :class:`~autonomous.db.fields.DynamicField` and data can be attributed to that
+    field.
+
+    .. note::
+
+        There is one caveat on Dynamic Documents: undeclared fields cannot start with `_`
+    """
+
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = TopLevelDocumentMetaclass
+
+    _dynamic = True
+
+    def __delattr__(self, *args, **kwargs):
+        """Delete the attribute by setting to None and allowing _delta
+        to unset it.
+        """
+        field_name = args[0]
+        if field_name in self._dynamic_fields:
+            setattr(self, field_name, None)
+            self._dynamic_fields[field_name].null = False
+        else:
+            super().__delattr__(*args, **kwargs)
+
+
+class DynamicEmbeddedDocument(EmbeddedDocument, metaclass=DocumentMetaclass):
+    """A Dynamic Embedded Document class allowing flexible, expandable and
+    uncontrolled schemas. See :class:`~autonomous.db.DynamicDocument` for more
+    information about dynamic documents.
+    """
+
+    # my_metaclass is defined so that metaclass can be queried in Python 2 & 3
+    my_metaclass = DocumentMetaclass
+
+    _dynamic = True
+
+    def __delattr__(self, *args, **kwargs):
+        """Delete the attribute by setting to None and allowing _delta
+        to unset it.
+        """
+        field_name = args[0]
+        if field_name in self._fields:
+            default = self._fields[field_name].default
+            if callable(default):
+                default = default()
+            setattr(self, field_name, default)
+        else:
+            setattr(self, field_name, None)
+
+
+class MapReduceDocument:
+    """A document returned from a map/reduce query.
+
+    :param collection: An instance of :class:`~pymongo.Collection`
+    :param key: Document/result key, often an instance of
+        :class:`~bson.objectid.ObjectId`. If supplied as
+        an ``ObjectId`` found in the given ``collection``,
+        the object can be accessed via the ``object`` property.
+    :param value: The result(s) for this key.
+    """
+
+    def __init__(self, document, collection, key, value):
+        self._document = document
+        self._collection = collection
+        self.key = key
+        self.value = value
+
+    @property
+    def object(self):
+        """Lazy-load the object referenced by ``self.key``. ``self.key``
+        should be the ``primary_key``.
+        """
+        id_field = self._document()._meta["id_field"]
+        id_field_type = type(id_field)
+
+        if not isinstance(self.key, id_field_type):
+            try:
+                self.key = id_field_type(self.key)
+            except Exception:
+                raise Exception("Could not cast key as %s" % id_field_type.__name__)
+
+        if not hasattr(self, "_key_object"):
+            self._key_object = self._document.objects.with_id(self.key)
+            return self._key_object
+        return self._key_object
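
The `Document` class added above is the heart of the MongoEngine-style ODM vendored into `autonomous/db/` in this release, which `autonomous/model/automodel.py` builds on. A minimal sketch of how such a class is typically defined and saved; the `Record` model, its fields, and the `connect` call are illustrative assumptions rather than code from the package, and presume `autonomous.db` re-exports `connect` and the field classes the way MongoEngine does::

    from autonomous.db import connect
    from autonomous.db.document import Document
    from autonomous.db.fields import IntField, StringField

    connect(db="example")  # assumes a reachable MongoDB instance

    class Record(Document):  # hypothetical model, for illustration only
        name = StringField(required=True)
        version = IntField(default=0)
        meta = {
            "collection": "records",  # overrides the snake_case default name
            "indexes": ["name"],      # +/- prefixes would set index direction
        }

    Record(name="first").save()  # inserts, or updates if the pk already exists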
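`save(save_condition=...)` and `modify()` give two routes to optimistic concurrency on top of the machinery shown in `_save_update()`: the former raises `SaveConditionError` when the stored document no longer matches the condition, while the latter runs an atomic find-and-modify against the pk (plus shard key) and refreshes the instance in place, returning `False` on a miss. A sketch using the hypothetical `Record` model from above::

    from autonomous.db.errors import SaveConditionError

    doc = Record.objects.get(name="first")
    expected = doc.version
    doc.version = expected + 1
    try:
        doc.save(save_condition={"version": expected})
    except SaveConditionError:
        doc.reload()  # another writer won the race; retry or surface the conflict

    # Equivalent check-and-update as one atomic server-side operation:
    if not doc.modify(query={"version": doc.version}, inc__version=1):
        ...  # document changed underneath us; modify() left it untouched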
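Because `ensure_indexes()` runs implicitly on first collection access (and on every `save()` when `meta['auto_create_index_on_save']` is True), deployments that manage indexes out-of-band typically set `auto_create_index` to False and audit drift with `compare_indexes()`, which returns the missing/extra index specs seen at the end of that method. A sketch, again using the hypothetical `Record` model::

    diff = Record.compare_indexes()  # e.g. {"missing": [[("name", 1)]], "extra": []}
    if diff["missing"]:
        Record.ensure_indexes()  # or hand the specs to your migration tooling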