autonomous-app 0.2.25__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- autonomous/__init__.py +5 -2
- autonomous/ai/audioagent.py +32 -0
- autonomous/ai/imageagent.py +31 -0
- autonomous/ai/jsonagent.py +40 -0
- autonomous/ai/models/__init__.py +0 -0
- autonomous/ai/models/openai.py +308 -0
- autonomous/ai/oaiagent.py +20 -194
- autonomous/ai/textagent.py +35 -0
- autonomous/auth/autoauth.py +11 -11
- autonomous/auth/user.py +24 -11
- autonomous/db/__init__.py +41 -0
- autonomous/db/base/__init__.py +33 -0
- autonomous/db/base/common.py +62 -0
- autonomous/db/base/datastructures.py +476 -0
- autonomous/db/base/document.py +1230 -0
- autonomous/db/base/fields.py +767 -0
- autonomous/db/base/metaclasses.py +468 -0
- autonomous/db/base/utils.py +22 -0
- autonomous/db/common.py +79 -0
- autonomous/db/connection.py +472 -0
- autonomous/db/context_managers.py +313 -0
- autonomous/db/dereference.py +291 -0
- autonomous/db/document.py +1141 -0
- autonomous/db/errors.py +165 -0
- autonomous/db/fields.py +2732 -0
- autonomous/db/mongodb_support.py +24 -0
- autonomous/db/pymongo_support.py +80 -0
- autonomous/db/queryset/__init__.py +28 -0
- autonomous/db/queryset/base.py +2033 -0
- autonomous/db/queryset/field_list.py +88 -0
- autonomous/db/queryset/manager.py +58 -0
- autonomous/db/queryset/queryset.py +189 -0
- autonomous/db/queryset/transform.py +527 -0
- autonomous/db/queryset/visitor.py +189 -0
- autonomous/db/signals.py +59 -0
- autonomous/logger.py +3 -0
- autonomous/model/autoattr.py +120 -0
- autonomous/model/automodel.py +121 -308
- autonomous/storage/imagestorage.py +9 -54
- autonomous/tasks/autotask.py +0 -25
- {autonomous_app-0.2.25.dist-info → autonomous_app-0.3.1.dist-info}/METADATA +7 -8
- autonomous_app-0.3.1.dist-info/RECORD +60 -0
- {autonomous_app-0.2.25.dist-info → autonomous_app-0.3.1.dist-info}/WHEEL +1 -1
- autonomous/db/autodb.py +0 -86
- autonomous/db/table.py +0 -156
- autonomous/errors/__init__.py +0 -1
- autonomous/errors/danglingreferenceerror.py +0 -8
- autonomous/model/autoattribute.py +0 -20
- autonomous/model/orm.py +0 -86
- autonomous/model/serializer.py +0 -110
- autonomous_app-0.2.25.dist-info/RECORD +0 -36
- /autonomous/{storage → apis}/version_control/GHCallbacks.py +0 -0
- /autonomous/{storage → apis}/version_control/GHOrganization.py +0 -0
- /autonomous/{storage → apis}/version_control/GHRepo.py +0 -0
- /autonomous/{storage → apis}/version_control/GHVersionControl.py +0 -0
- /autonomous/{storage → apis}/version_control/__init__.py +0 -0
- /autonomous/{storage → utils}/markdown.py +0 -0
- {autonomous_app-0.2.25.dist-info → autonomous_app-0.3.1.dist-info}/LICENSE +0 -0
- {autonomous_app-0.2.25.dist-info → autonomous_app-0.3.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,527 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
|
|
3
|
+
import pymongo
|
|
4
|
+
from bson import SON, ObjectId
|
|
5
|
+
from bson.dbref import DBRef
|
|
6
|
+
|
|
7
|
+
from autonomous.db.base import UPDATE_OPERATORS
|
|
8
|
+
from autonomous.db.common import _import_class
|
|
9
|
+
from autonomous.db.errors import InvalidQueryError
|
|
10
|
+
|
|
11
|
+
__all__ = ("query", "update", "STRING_OPERATORS")
|
|
12
|
+
|
|
13
|
+
COMPARISON_OPERATORS = (
|
|
14
|
+
"ne",
|
|
15
|
+
"gt",
|
|
16
|
+
"gte",
|
|
17
|
+
"lt",
|
|
18
|
+
"lte",
|
|
19
|
+
"in",
|
|
20
|
+
"nin",
|
|
21
|
+
"mod",
|
|
22
|
+
"all",
|
|
23
|
+
"size",
|
|
24
|
+
"exists",
|
|
25
|
+
"not",
|
|
26
|
+
"elemMatch",
|
|
27
|
+
"type",
|
|
28
|
+
)
|
|
29
|
+
GEO_OPERATORS = (
|
|
30
|
+
"within_distance",
|
|
31
|
+
"within_spherical_distance",
|
|
32
|
+
"within_box",
|
|
33
|
+
"within_polygon",
|
|
34
|
+
"near",
|
|
35
|
+
"near_sphere",
|
|
36
|
+
"max_distance",
|
|
37
|
+
"min_distance",
|
|
38
|
+
"geo_within",
|
|
39
|
+
"geo_within_box",
|
|
40
|
+
"geo_within_polygon",
|
|
41
|
+
"geo_within_center",
|
|
42
|
+
"geo_within_sphere",
|
|
43
|
+
"geo_intersects",
|
|
44
|
+
)
|
|
45
|
+
STRING_OPERATORS = (
|
|
46
|
+
"contains",
|
|
47
|
+
"icontains",
|
|
48
|
+
"startswith",
|
|
49
|
+
"istartswith",
|
|
50
|
+
"endswith",
|
|
51
|
+
"iendswith",
|
|
52
|
+
"exact",
|
|
53
|
+
"iexact",
|
|
54
|
+
"regex",
|
|
55
|
+
"iregex",
|
|
56
|
+
"wholeword",
|
|
57
|
+
"iwholeword",
|
|
58
|
+
)
|
|
59
|
+
CUSTOM_OPERATORS = ("match",)
|
|
60
|
+
MATCH_OPERATORS = (
|
|
61
|
+
COMPARISON_OPERATORS + GEO_OPERATORS + STRING_OPERATORS + CUSTOM_OPERATORS
|
|
62
|
+
)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def handle_raw_query(value, mongo_query):
    """Combine a raw query with an existing one.

    Entries from *value* are merged into *mongo_query* in place: a brand-new
    operator key is copied over, an existing dict-valued entry is updated,
    and an existing non-dict entry is left untouched.
    """
    for operator, raw in value.items():
        if operator not in mongo_query:
            mongo_query[operator] = raw
        elif isinstance(mongo_query[operator], dict):
            mongo_query[operator].update(raw)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
# TODO make this less complex
def query(_doc_cls=None, **kwargs):
    """Transform a query from Django-style format to Mongo format.

    :param _doc_cls: optional document class; when given, field names are
        resolved through ``_doc_cls._lookup_field`` and values are prepared
        by each field's ``prepare_query_value``.
    :param kwargs: Django-style query keyword arguments, e.g. ``age__gte=18``.
    :return: a dict suitable for use as a pymongo filter document.
    :raises InvalidQueryError: if a field lookup fails.
    """
    mongo_query = {}
    merge_query = defaultdict(list)
    # Sort keys so the generated query document is deterministic.
    for key, value in sorted(kwargs.items()):
        if key == "__raw__":
            handle_raw_query(value, mongo_query)
            continue

        parts = key.rsplit("__")
        # Numeric parts address list indices; strip them here and re-insert
        # them at the same positions after field-name resolution.
        indices = [(i, p) for i, p in enumerate(parts) if p.isdigit()]
        parts = [part for part in parts if not part.isdigit()]
        # Check for an operator and transform to mongo-style if there is
        op = None
        if len(parts) > 1 and parts[-1] in MATCH_OPERATORS:
            op = parts.pop()

        # Allow to escape operator-like field name by __
        if len(parts) > 1 and parts[-1] == "":
            parts.pop()

        negate = False
        if len(parts) > 1 and parts[-1] == "not":
            parts.pop()
            negate = True

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            CachedReferenceField = _import_class("CachedReferenceField")
            GenericReferenceField = _import_class("GenericReferenceField")

            cleaned_fields = []
            for field in fields:
                append_field = True
                if isinstance(field, str):
                    parts.append(field)
                    append_field = False
                # is last and CachedReferenceField
                elif isinstance(field, CachedReferenceField) and fields[-1] == field:
                    parts.append("%s._id" % field.db_field)
                else:
                    parts.append(field.db_field)

                if append_field:
                    cleaned_fields.append(field)

            # Convert value to proper value
            field = cleaned_fields[-1]

            singular_ops = [None, "ne", "gt", "gte", "lt", "lte", "not"]
            singular_ops += STRING_OPERATORS
            if op in singular_ops:
                value = field.prepare_query_value(op, value)

                # Cached references are matched on their stored _id.
                if isinstance(field, CachedReferenceField) and value:
                    value = value["_id"]

            elif op in ("in", "nin", "all", "near") and not isinstance(value, dict):
                # Raise an error if the in/nin/all/near param is not iterable.
                value = _prepare_query_for_iterable(field, op, value)

            # If we're querying a GenericReferenceField, we need to alter the
            # key depending on the value:
            # * If the value is a DBRef, the key should be "field_name._ref".
            # * If the value is an ObjectId, the key should be "field_name._ref.$id".
            if isinstance(field, GenericReferenceField):
                if isinstance(value, DBRef):
                    parts[-1] += "._ref"
                elif isinstance(value, ObjectId):
                    parts[-1] += "._ref.$id"

        if op:
            # NOTE(review): `field` is only bound inside the `if _doc_cls:`
            # branch above, so geo/match operators used without a document
            # class raise NameError here (pre-existing behavior).
            if op in GEO_OPERATORS:
                value = _geo_operator(field, op, value)
            elif op in ("match", "elemMatch"):
                ListField = _import_class("ListField")
                EmbeddedDocumentField = _import_class("EmbeddedDocumentField")
                if (
                    isinstance(value, dict)
                    and isinstance(field, ListField)
                    and isinstance(field.field, EmbeddedDocumentField)
                ):
                    # Recurse so nested Django-style keys are transformed too.
                    value = query(field.field.document_type, **value)
                else:
                    value = field.prepare_query_value(op, value)
                value = {"$elemMatch": value}
            elif op in CUSTOM_OPERATORS:
                # Fix: the exception was previously instantiated but never
                # raised, so the raw value silently fell through unchanged.
                # (Currently unreachable because "match" is handled above,
                # but keep the branch correct for future custom operators.)
                raise NotImplementedError(
                    'Custom method "%s" has not ' "been implemented" % op
                )
            elif op not in STRING_OPERATORS:
                value = {"$" + op: value}

        if negate:
            value = {"$not": value}

        # Re-insert the numeric (list index) parts stripped earlier.
        for i, part in indices:
            parts.insert(i, part)

        key = ".".join(parts)

        if key not in mongo_query:
            mongo_query[key] = value
        else:
            if isinstance(mongo_query[key], dict) and isinstance(value, dict):
                mongo_query[key].update(value)
                # $max/minDistance needs to come last - convert to SON
                value_dict = mongo_query[key]
                if ("$maxDistance" in value_dict or "$minDistance" in value_dict) and (
                    "$near" in value_dict or "$nearSphere" in value_dict
                ):
                    value_son = SON()
                    for k, v in value_dict.items():
                        if k == "$maxDistance" or k == "$minDistance":
                            continue
                        value_son[k] = v
                    # Required for MongoDB >= 2.6, may fail when combining
                    # PyMongo 3+ and MongoDB < 2.6
                    near_embedded = False
                    for near_op in ("$near", "$nearSphere"):
                        if isinstance(value_dict.get(near_op), dict):
                            value_son[near_op] = SON(value_son[near_op])
                            if "$maxDistance" in value_dict:
                                value_son[near_op]["$maxDistance"] = value_dict[
                                    "$maxDistance"
                                ]
                            if "$minDistance" in value_dict:
                                value_son[near_op]["$minDistance"] = value_dict[
                                    "$minDistance"
                                ]
                            near_embedded = True

                    if not near_embedded:
                        if "$maxDistance" in value_dict:
                            value_son["$maxDistance"] = value_dict["$maxDistance"]
                        if "$minDistance" in value_dict:
                            value_son["$minDistance"] = value_dict["$minDistance"]
                    mongo_query[key] = value_son
            else:
                # Store for manually merging later
                merge_query[key].append(value)

    # The queryset has been filtered in such a way we must manually merge
    for k, v in merge_query.items():
        merge_query[k].append(mongo_query[k])
        del mongo_query[k]
        if isinstance(v, list):
            value = [{k: val} for val in v]
            if "$and" in mongo_query.keys():
                mongo_query["$and"].extend(value)
            else:
                mongo_query["$and"] = value

    return mongo_query
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def update(_doc_cls=None, **update):
    """Transform an update spec from Django-style format to Mongo
    format.

    Keys look like ``<operator>__<field>[__<sub_field>...][__<match>]``,
    e.g. ``set__name``, ``inc__count``, ``pull__tags__in``.  When no
    operator prefix is given, ``set`` is assumed.

    :param _doc_cls: optional document class used to resolve field names
        via ``_doc_cls._lookup_field`` and prepare values per field.
    :param update: Django-style update keyword arguments.
    :return: a dict of Mongo update operators, e.g. ``{"$set": {...}}``.
    :raises InvalidQueryError: if a field lookup fails or a multi-depth
        ``pullAll`` is attempted.
    """
    mongo_update = {}

    for key, value in update.items():
        # A raw spec is merged verbatim, bypassing all transformation.
        if key == "__raw__":
            handle_raw_query(value, mongo_update)
            continue

        parts = key.split("__")

        # if there is no operator, default to 'set'
        if len(parts) < 3 and parts[0] not in UPDATE_OPERATORS:
            parts.insert(0, "set")

        # Check for an operator and transform to mongo-style if there is
        op = None
        if parts[0] in UPDATE_OPERATORS:
            op = parts.pop(0)
            # Convert Pythonic names to Mongo equivalents
            operator_map = {
                "push_all": "pushAll",
                "pull_all": "pullAll",
                "dec": "inc",
                "add_to_set": "addToSet",
                "set_on_insert": "setOnInsert",
            }
            if op == "dec":
                # Support decrement by flipping a positive value's sign
                # and using 'inc'
                value = -value
            # If the operator doesn't found from operator map, the op value
            # will stay unchanged
            op = operator_map.get(op, op)

        # `match` holds a trailing comparison suffix (e.g. 'in' in
        # pull__foo__in) to be wrapped as {"$in": value} later.
        match = None

        if len(parts) == 1:
            # typical update like set__field
            # but also allows to update a field named like a comparison operator
            # like set__type = "something" (without clashing with the 'type' operator)
            pass
        elif len(parts) > 1:
            # can be either an embedded field like set__foo__bar
            # or a comparison operator as in pull__foo__in
            if parts[-1] in COMPARISON_OPERATORS:
                match = parts.pop()  # e.g. pop 'in' from pull__foo__in

            # Allow to escape operator-like field name by __
            # e.g. in the case of an embedded foo.type field
            # Doc.objects().update(set__foo__type="bar")
            # see https://github.com/MongoEngine/autonomous.db/pull/1351
            if parts[-1] == "":
                match = parts.pop()  # e.g. pop last '__' from set__foo__type__

        if _doc_cls:
            # Switch field names to proper names [set in Field(name='foo')]
            try:
                fields = _doc_cls._lookup_field(parts)
            except Exception as e:
                raise InvalidQueryError(e)
            parts = []

            cleaned_fields = []
            # Tracks whether the last appended entry was a container's
            # inner field (so value preparation uses the container below).
            appended_sub_field = False
            for field in fields:
                append_field = True
                if isinstance(field, str):
                    # Convert the S operator to $
                    if field == "S":
                        field = "$"
                    parts.append(field)
                    append_field = False
                else:
                    parts.append(field.db_field)
                if append_field:
                    appended_sub_field = False
                    cleaned_fields.append(field)
                    if hasattr(field, "field"):
                        cleaned_fields.append(field.field)
                        appended_sub_field = True

            # Convert value to proper value
            if appended_sub_field:
                field = cleaned_fields[-2]
            else:
                field = cleaned_fields[-1]

            GeoJsonBaseField = _import_class("GeoJsonBaseField")
            if isinstance(field, GeoJsonBaseField):
                value = field.to_mongo(value)

            if op == "pull":
                if field.required or value is not None:
                    if match in ("in", "nin") and not isinstance(value, dict):
                        value = _prepare_query_for_iterable(field, op, value)
                    else:
                        value = field.prepare_query_value(op, value)
            elif op == "push" and isinstance(value, (list, tuple, set)):
                # Pushing multiple values: prepare each element.
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in (None, "set", "push"):
                if field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op in ("pushAll", "pullAll"):
                value = [field.prepare_query_value(op, v) for v in value]
            elif op in ("addToSet", "setOnInsert"):
                if isinstance(value, (list, tuple, set)):
                    value = [field.prepare_query_value(op, v) for v in value]
                elif field.required or value is not None:
                    value = field.prepare_query_value(op, value)
            elif op == "unset":
                # $unset ignores its value; Mongo convention is 1.
                value = 1
            elif op == "inc":
                value = field.prepare_query_value(op, value)

        if match:
            match = "$" + match
            value = {match: value}

        key = ".".join(parts)

        # NOTE(review): `op` can still be None here when a 3+ part key has
        # no operator prefix; `"pull" in None` (and `"$" + None` below)
        # would raise TypeError — confirm callers always supply an operator
        # for nested keys.
        if "pull" in op and "." in key:
            # Dot operators don't work on pull operations
            # unless they point to a list field
            # Otherwise it uses nested dict syntax
            if op == "pullAll":
                raise InvalidQueryError(
                    "pullAll operations only support a single field depth"
                )

            # Look for the last list field and use dot notation until there
            field_classes = [c.__class__ for c in cleaned_fields]
            field_classes.reverse()
            ListField = _import_class("ListField")
            EmbeddedDocumentListField = _import_class("EmbeddedDocumentListField")
            if ListField in field_classes or EmbeddedDocumentListField in field_classes:
                # Join all fields via dot notation to the last ListField or EmbeddedDocumentListField
                # Then process as normal
                if ListField in field_classes:
                    _check_field = ListField
                else:
                    _check_field = EmbeddedDocumentListField

                last_listField = len(cleaned_fields) - field_classes.index(_check_field)
                key = ".".join(parts[:last_listField])
                parts = parts[last_listField:]
                parts.insert(0, key)

                # Build the nested-dict form innermost-first.
                parts.reverse()
                for key in parts:
                    value = {key: value}
        elif op == "addToSet" and isinstance(value, list):
            value = {key: {"$each": value}}
        elif op in ("push", "pushAll"):
            if parts[-1].isdigit():
                key = ".".join(parts[0:-1])
                position = int(parts[-1])
                # $position expects an iterable. If pushing a single value,
                # wrap it in a list.
                if not isinstance(value, (set, tuple, list)):
                    value = [value]
                value = {key: {"$each": value, "$position": position}}
            else:
                if op == "pushAll":
                    op = "push"  # convert to non-deprecated keyword
                    if not isinstance(value, (set, tuple, list)):
                        value = [value]
                    value = {key: {"$each": value}}
                else:
                    value = {key: value}
        else:
            value = {key: value}

        key = "$" + op
        # Merge with any earlier spec for the same operator.
        if key not in mongo_update:
            mongo_update[key] = value
        elif key in mongo_update and isinstance(mongo_update[key], dict):
            mongo_update[key].update(value)

    return mongo_update
|
|
419
|
+
|
|
420
|
+
|
|
421
|
+
def _geo_operator(field, op, value):
|
|
422
|
+
"""Helper to return the query for a given geo query."""
|
|
423
|
+
if op == "max_distance":
|
|
424
|
+
value = {"$maxDistance": value}
|
|
425
|
+
elif op == "min_distance":
|
|
426
|
+
value = {"$minDistance": value}
|
|
427
|
+
elif field._geo_index == pymongo.GEO2D:
|
|
428
|
+
if op == "within_distance":
|
|
429
|
+
value = {"$within": {"$center": value}}
|
|
430
|
+
elif op == "within_spherical_distance":
|
|
431
|
+
value = {"$within": {"$centerSphere": value}}
|
|
432
|
+
elif op == "within_polygon":
|
|
433
|
+
value = {"$within": {"$polygon": value}}
|
|
434
|
+
elif op == "near":
|
|
435
|
+
value = {"$near": value}
|
|
436
|
+
elif op == "near_sphere":
|
|
437
|
+
value = {"$nearSphere": value}
|
|
438
|
+
elif op == "within_box":
|
|
439
|
+
value = {"$within": {"$box": value}}
|
|
440
|
+
else:
|
|
441
|
+
raise NotImplementedError(
|
|
442
|
+
'Geo method "%s" has not been ' "implemented for a GeoPointField" % op
|
|
443
|
+
)
|
|
444
|
+
else:
|
|
445
|
+
if op == "geo_within":
|
|
446
|
+
value = {"$geoWithin": _infer_geometry(value)}
|
|
447
|
+
elif op == "geo_within_box":
|
|
448
|
+
value = {"$geoWithin": {"$box": value}}
|
|
449
|
+
elif op == "geo_within_polygon":
|
|
450
|
+
value = {"$geoWithin": {"$polygon": value}}
|
|
451
|
+
elif op == "geo_within_center":
|
|
452
|
+
value = {"$geoWithin": {"$center": value}}
|
|
453
|
+
elif op == "geo_within_sphere":
|
|
454
|
+
value = {"$geoWithin": {"$centerSphere": value}}
|
|
455
|
+
elif op == "geo_intersects":
|
|
456
|
+
value = {"$geoIntersects": _infer_geometry(value)}
|
|
457
|
+
elif op == "near":
|
|
458
|
+
value = {"$near": _infer_geometry(value)}
|
|
459
|
+
else:
|
|
460
|
+
raise NotImplementedError(
|
|
461
|
+
'Geo method "{}" has not been implemented for a {} '.format(
|
|
462
|
+
op, field._name
|
|
463
|
+
)
|
|
464
|
+
)
|
|
465
|
+
return value
|
|
466
|
+
|
|
467
|
+
|
|
468
|
+
def _infer_geometry(value):
|
|
469
|
+
"""Helper method that tries to infer the $geometry shape for a
|
|
470
|
+
given value.
|
|
471
|
+
"""
|
|
472
|
+
if isinstance(value, dict):
|
|
473
|
+
if "$geometry" in value:
|
|
474
|
+
return value
|
|
475
|
+
elif "coordinates" in value and "type" in value:
|
|
476
|
+
return {"$geometry": value}
|
|
477
|
+
raise InvalidQueryError(
|
|
478
|
+
"Invalid $geometry dictionary should have type and coordinates keys"
|
|
479
|
+
)
|
|
480
|
+
elif isinstance(value, (list, set)):
|
|
481
|
+
# TODO: shouldn't we test value[0][0][0][0] to see if it is MultiPolygon?
|
|
482
|
+
|
|
483
|
+
try:
|
|
484
|
+
value[0][0][0]
|
|
485
|
+
return {"$geometry": {"type": "Polygon", "coordinates": value}}
|
|
486
|
+
except (TypeError, IndexError):
|
|
487
|
+
pass
|
|
488
|
+
|
|
489
|
+
try:
|
|
490
|
+
value[0][0]
|
|
491
|
+
return {"$geometry": {"type": "LineString", "coordinates": value}}
|
|
492
|
+
except (TypeError, IndexError):
|
|
493
|
+
pass
|
|
494
|
+
|
|
495
|
+
try:
|
|
496
|
+
value[0]
|
|
497
|
+
return {"$geometry": {"type": "Point", "coordinates": value}}
|
|
498
|
+
except (TypeError, IndexError):
|
|
499
|
+
pass
|
|
500
|
+
|
|
501
|
+
raise InvalidQueryError(
|
|
502
|
+
"Invalid $geometry data. Can be either a "
|
|
503
|
+
"dictionary or (nested) lists of coordinate(s)"
|
|
504
|
+
)
|
|
505
|
+
|
|
506
|
+
|
|
507
|
+
def _prepare_query_for_iterable(field, op, value):
    """Validate that *value* is a usable iterable for the in/nin/all/near
    operators and prepare each of its items via the field.
    """
    # We need a special check for BaseDocument, because - although it's iterable - using
    # it as such in the context of this method is most definitely a mistake.
    BaseDocument = _import_class("BaseDocument")
    if isinstance(value, BaseDocument):
        raise TypeError(
            "When using the `in`, `nin`, `all`, or `near`-operators "
            "you can't use a `Document`, you must wrap your object "
            "in a list (object -> [object])."
        )

    # Reject plain scalars before trying to iterate them.
    if not hasattr(value, "__iter__"):
        raise TypeError(
            "The `in`, `nin`, `all`, or `near`-operators "
            "must be applied to an iterable (e.g. a list)."
        )

    return [field.prepare_query_value(op, item) for item in value]
|