sensu-plugins-mongodb-mrtrotl 1.4.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/CHANGELOG.md +1 -0
- data/LICENSE +22 -0
- data/README.md +27 -0
- data/bin/check-mongodb-metric.rb +144 -0
- data/bin/check-mongodb-query-count.rb +267 -0
- data/bin/check-mongodb.py +1644 -0
- data/bin/check-mongodb.rb +5 -0
- data/bin/metrics-mongodb-replication.rb +254 -0
- data/bin/metrics-mongodb.rb +133 -0
- data/lib/bson/__init__.py +1347 -0
- data/lib/bson/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/_helpers.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/binary.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/code.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/codec_options.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/dbref.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/decimal128.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/errors.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/int64.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/json_util.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/max_key.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/min_key.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/objectid.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/raw_bson.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/regex.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/son.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/timestamp.cpython-310.pyc +0 -0
- data/lib/bson/__pycache__/tz_util.cpython-310.pyc +0 -0
- data/lib/bson/_cbson.cpython-310-x86_64-linux-gnu.so +0 -0
- data/lib/bson/_helpers.py +41 -0
- data/lib/bson/binary.py +364 -0
- data/lib/bson/code.py +101 -0
- data/lib/bson/codec_options.py +414 -0
- data/lib/bson/codec_options.pyi +100 -0
- data/lib/bson/dbref.py +133 -0
- data/lib/bson/decimal128.py +314 -0
- data/lib/bson/errors.py +35 -0
- data/lib/bson/int64.py +39 -0
- data/lib/bson/json_util.py +874 -0
- data/lib/bson/max_key.py +55 -0
- data/lib/bson/min_key.py +55 -0
- data/lib/bson/objectid.py +286 -0
- data/lib/bson/py.typed +2 -0
- data/lib/bson/raw_bson.py +175 -0
- data/lib/bson/regex.py +135 -0
- data/lib/bson/son.py +208 -0
- data/lib/bson/timestamp.py +124 -0
- data/lib/bson/tz_util.py +52 -0
- data/lib/gridfs/__init__.py +1015 -0
- data/lib/gridfs/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/gridfs/__pycache__/errors.cpython-310.pyc +0 -0
- data/lib/gridfs/__pycache__/grid_file.cpython-310.pyc +0 -0
- data/lib/gridfs/errors.py +33 -0
- data/lib/gridfs/grid_file.py +907 -0
- data/lib/gridfs/py.typed +2 -0
- data/lib/pymongo/__init__.py +185 -0
- data/lib/pymongo/__pycache__/__init__.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/_csot.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/aggregation.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/auth.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/auth_aws.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/bulk.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/change_stream.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/client_options.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/client_session.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/collation.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/collection.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/command_cursor.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/common.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/compression_support.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/cursor.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/daemon.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/database.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/driver_info.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/encryption.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/encryption_options.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/errors.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/event_loggers.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/hello.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/helpers.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/max_staleness_selectors.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/message.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/mongo_client.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/monitor.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/monitoring.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/network.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/ocsp_cache.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/ocsp_support.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/operations.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/periodic_executor.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/pool.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/pyopenssl_context.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/read_concern.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/read_preferences.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/response.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/results.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/saslprep.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/server.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/server_api.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/server_description.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/server_selectors.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/server_type.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/settings.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/socket_checker.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/srv_resolver.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/ssl_context.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/ssl_support.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/topology.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/topology_description.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/typings.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/uri_parser.cpython-310.pyc +0 -0
- data/lib/pymongo/__pycache__/write_concern.cpython-310.pyc +0 -0
- data/lib/pymongo/_cmessage.cpython-310-x86_64-linux-gnu.so +0 -0
- data/lib/pymongo/_csot.py +118 -0
- data/lib/pymongo/aggregation.py +229 -0
- data/lib/pymongo/auth.py +549 -0
- data/lib/pymongo/auth_aws.py +94 -0
- data/lib/pymongo/bulk.py +513 -0
- data/lib/pymongo/change_stream.py +457 -0
- data/lib/pymongo/client_options.py +302 -0
- data/lib/pymongo/client_session.py +1112 -0
- data/lib/pymongo/collation.py +224 -0
- data/lib/pymongo/collection.py +3204 -0
- data/lib/pymongo/command_cursor.py +353 -0
- data/lib/pymongo/common.py +984 -0
- data/lib/pymongo/compression_support.py +149 -0
- data/lib/pymongo/cursor.py +1345 -0
- data/lib/pymongo/daemon.py +141 -0
- data/lib/pymongo/database.py +1202 -0
- data/lib/pymongo/driver_info.py +42 -0
- data/lib/pymongo/encryption.py +884 -0
- data/lib/pymongo/encryption_options.py +221 -0
- data/lib/pymongo/errors.py +365 -0
- data/lib/pymongo/event_loggers.py +221 -0
- data/lib/pymongo/hello.py +219 -0
- data/lib/pymongo/helpers.py +259 -0
- data/lib/pymongo/max_staleness_selectors.py +114 -0
- data/lib/pymongo/message.py +1440 -0
- data/lib/pymongo/mongo_client.py +2144 -0
- data/lib/pymongo/monitor.py +440 -0
- data/lib/pymongo/monitoring.py +1801 -0
- data/lib/pymongo/network.py +311 -0
- data/lib/pymongo/ocsp_cache.py +87 -0
- data/lib/pymongo/ocsp_support.py +372 -0
- data/lib/pymongo/operations.py +507 -0
- data/lib/pymongo/periodic_executor.py +183 -0
- data/lib/pymongo/pool.py +1660 -0
- data/lib/pymongo/py.typed +2 -0
- data/lib/pymongo/pyopenssl_context.py +383 -0
- data/lib/pymongo/read_concern.py +75 -0
- data/lib/pymongo/read_preferences.py +609 -0
- data/lib/pymongo/response.py +109 -0
- data/lib/pymongo/results.py +217 -0
- data/lib/pymongo/saslprep.py +113 -0
- data/lib/pymongo/server.py +247 -0
- data/lib/pymongo/server_api.py +170 -0
- data/lib/pymongo/server_description.py +285 -0
- data/lib/pymongo/server_selectors.py +153 -0
- data/lib/pymongo/server_type.py +32 -0
- data/lib/pymongo/settings.py +159 -0
- data/lib/pymongo/socket_checker.py +104 -0
- data/lib/pymongo/srv_resolver.py +126 -0
- data/lib/pymongo/ssl_context.py +39 -0
- data/lib/pymongo/ssl_support.py +99 -0
- data/lib/pymongo/topology.py +890 -0
- data/lib/pymongo/topology_description.py +639 -0
- data/lib/pymongo/typings.py +39 -0
- data/lib/pymongo/uri_parser.py +624 -0
- data/lib/pymongo/write_concern.py +129 -0
- data/lib/pymongo-4.2.0.dist-info/INSTALLER +1 -0
- data/lib/pymongo-4.2.0.dist-info/LICENSE +201 -0
- data/lib/pymongo-4.2.0.dist-info/METADATA +250 -0
- data/lib/pymongo-4.2.0.dist-info/RECORD +167 -0
- data/lib/pymongo-4.2.0.dist-info/REQUESTED +0 -0
- data/lib/pymongo-4.2.0.dist-info/WHEEL +6 -0
- data/lib/pymongo-4.2.0.dist-info/top_level.txt +3 -0
- data/lib/sensu-plugins-mongodb/metrics.rb +391 -0
- data/lib/sensu-plugins-mongodb/version.rb +9 -0
- data/lib/sensu-plugins-mongodb.rb +1 -0
- metadata +407 -0
@@ -0,0 +1,457 @@
|
|
1
|
+
# Copyright 2017 MongoDB, Inc.
|
2
|
+
#
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License"); you
|
4
|
+
# may not use this file except in compliance with the License. You
|
5
|
+
# may obtain a copy of the License at
|
6
|
+
#
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8
|
+
#
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
12
|
+
# implied. See the License for the specific language governing
|
13
|
+
# permissions and limitations under the License.
|
14
|
+
|
15
|
+
"""Watch changes on a collection, a database, or the entire cluster."""
|
16
|
+
|
17
|
+
import copy
|
18
|
+
from typing import TYPE_CHECKING, Any, Dict, Generic, Mapping, Optional, Union
|
19
|
+
|
20
|
+
from bson import _bson_to_dict
|
21
|
+
from bson.raw_bson import RawBSONDocument
|
22
|
+
from bson.timestamp import Timestamp
|
23
|
+
from pymongo import _csot, common
|
24
|
+
from pymongo.aggregation import (
|
25
|
+
_CollectionAggregationCommand,
|
26
|
+
_DatabaseAggregationCommand,
|
27
|
+
)
|
28
|
+
from pymongo.collation import validate_collation_or_none
|
29
|
+
from pymongo.command_cursor import CommandCursor
|
30
|
+
from pymongo.errors import (
|
31
|
+
ConnectionFailure,
|
32
|
+
CursorNotFound,
|
33
|
+
InvalidOperation,
|
34
|
+
OperationFailure,
|
35
|
+
PyMongoError,
|
36
|
+
)
|
37
|
+
from pymongo.typings import _CollationIn, _DocumentType, _Pipeline
|
38
|
+
|
39
|
+
# The change streams spec considers the following server errors from the
|
40
|
+
# getMore command non-resumable. All other getMore errors are resumable.
|
41
|
+
_RESUMABLE_GETMORE_ERRORS = frozenset(
|
42
|
+
[
|
43
|
+
6, # HostUnreachable
|
44
|
+
7, # HostNotFound
|
45
|
+
89, # NetworkTimeout
|
46
|
+
91, # ShutdownInProgress
|
47
|
+
189, # PrimarySteppedDown
|
48
|
+
262, # ExceededTimeLimit
|
49
|
+
9001, # SocketException
|
50
|
+
10107, # NotWritablePrimary
|
51
|
+
11600, # InterruptedAtShutdown
|
52
|
+
11602, # InterruptedDueToReplStateChange
|
53
|
+
13435, # NotPrimaryNoSecondaryOk
|
54
|
+
13436, # NotPrimaryOrSecondary
|
55
|
+
63, # StaleShardVersion
|
56
|
+
150, # StaleEpoch
|
57
|
+
13388, # StaleConfig
|
58
|
+
234, # RetryChangeStream
|
59
|
+
133, # FailedToSatisfyReadPreference
|
60
|
+
]
|
61
|
+
)
|
62
|
+
|
63
|
+
|
64
|
+
if TYPE_CHECKING:
|
65
|
+
from pymongo.client_session import ClientSession
|
66
|
+
from pymongo.collection import Collection
|
67
|
+
from pymongo.database import Database
|
68
|
+
from pymongo.mongo_client import MongoClient
|
69
|
+
|
70
|
+
|
71
|
+
class ChangeStream(Generic[_DocumentType]):
    """The internal abstract base class for change stream cursors.

    Should not be called directly by application developers. Use
    :meth:`pymongo.collection.Collection.watch`,
    :meth:`pymongo.database.Database.watch`, or
    :meth:`pymongo.mongo_client.MongoClient.watch` instead.

    .. versionadded:: 3.6
    .. seealso:: The MongoDB documentation on `changeStreams <https://mongodb.com/docs/manual/changeStreams/>`_.
    """

    def __init__(
        self,
        target: Union[
            "MongoClient[_DocumentType]", "Database[_DocumentType]", "Collection[_DocumentType]"
        ],
        pipeline: Optional[_Pipeline],
        full_document: Optional[str],
        resume_after: Optional[Mapping[str, Any]],
        max_await_time_ms: Optional[int],
        batch_size: Optional[int],
        collation: Optional[_CollationIn],
        start_at_operation_time: Optional[Timestamp],
        session: Optional["ClientSession"],
        start_after: Optional[Mapping[str, Any]],
        comment: Optional[Any] = None,
        full_document_before_change: Optional[str] = None,
    ) -> None:
        if pipeline is None:
            pipeline = []
        # Validate user-supplied options up front so a bad watch() call
        # fails fast, before any server round trip.
        pipeline = common.validate_list("pipeline", pipeline)
        common.validate_string_or_none("full_document", full_document)
        validate_collation_or_none(collation)
        common.validate_non_negative_integer_or_none("batchSize", batch_size)

        self._decode_custom = False
        self._orig_codec_options = target.codec_options
        # When custom decoders are registered, fetch raw BSON instead and
        # decode lazily in try_next with the original codec options.
        if target.codec_options.type_registry._decoder_map:
            self._decode_custom = True
            # Keep the type registry so that we support encoding custom types
            # in the pipeline.
            self._target = target.with_options(  # type: ignore
                codec_options=target.codec_options.with_options(document_class=RawBSONDocument)
            )
        else:
            self._target = target

        self._pipeline = copy.deepcopy(pipeline)
        self._full_document = full_document
        self._full_document_before_change = full_document_before_change
        self._uses_start_after = start_after is not None
        self._uses_resume_after = resume_after is not None
        # startAfter takes precedence over resumeAfter when both are given.
        self._resume_token = copy.deepcopy(start_after or resume_after)
        self._max_await_time_ms = max_await_time_ms
        self._batch_size = batch_size
        self._collation = collation
        self._start_at_operation_time = start_at_operation_time
        self._session = session
        self._comment = comment
        self._closed = False
        self._timeout = self._target._timeout
        # Initialize cursor.
        self._cursor = self._create_cursor()

    @property
    def _aggregation_command_class(self):
        """The aggregation command class to be used."""
        raise NotImplementedError

    @property
    def _client(self):
        """The client against which the aggregation commands for
        this ChangeStream will be run."""
        raise NotImplementedError

    def _change_stream_options(self):
        """Return the options dict for the $changeStream pipeline stage."""
        options: Dict[str, Any] = {}
        if self._full_document is not None:
            options["fullDocument"] = self._full_document

        if self._full_document_before_change is not None:
            options["fullDocumentBeforeChange"] = self._full_document_before_change

        resume_token = self.resume_token
        if resume_token is not None:
            # _uses_start_after is flipped off after the first change is
            # returned (see try_next), so resumes use resumeAfter.
            if self._uses_start_after:
                options["startAfter"] = resume_token
            else:
                options["resumeAfter"] = resume_token

        if self._start_at_operation_time is not None:
            options["startAtOperationTime"] = self._start_at_operation_time
        return options

    def _command_options(self):
        """Return the options dict for the aggregation command."""
        options = {}
        if self._max_await_time_ms is not None:
            options["maxAwaitTimeMS"] = self._max_await_time_ms
        if self._batch_size is not None:
            options["batchSize"] = self._batch_size
        return options

    def _aggregation_pipeline(self):
        """Return the full aggregation pipeline for this ChangeStream."""
        options = self._change_stream_options()
        # The $changeStream stage must come first; user stages follow it.
        full_pipeline: list = [{"$changeStream": options}]
        full_pipeline.extend(self._pipeline)
        return full_pipeline

    def _process_result(self, result, sock_info):
        """Callback that caches the postBatchResumeToken or
        startAtOperationTime from a changeStream aggregate command response
        containing an empty batch of change documents.

        This is implemented as a callback because we need access to the wire
        version in order to determine whether to cache this value.
        """
        if not result["cursor"]["firstBatch"]:
            if "postBatchResumeToken" in result["cursor"]:
                self._resume_token = result["cursor"]["postBatchResumeToken"]
            elif (
                self._start_at_operation_time is None
                and self._uses_resume_after is False
                and self._uses_start_after is False
                and sock_info.max_wire_version >= 7
            ):
                self._start_at_operation_time = result.get("operationTime")
                # PYTHON-2181: informative error on missing operationTime.
                if self._start_at_operation_time is None:
                    raise OperationFailure(
                        "Expected field 'operationTime' missing from command "
                        "response : %r" % (result,)
                    )

    def _run_aggregation_cmd(self, session, explicit_session):
        """Run the full aggregation pipeline for this ChangeStream and return
        the corresponding CommandCursor.
        """
        cmd = self._aggregation_command_class(
            self._target,
            CommandCursor,
            self._aggregation_pipeline(),
            self._command_options(),
            explicit_session,
            result_processor=self._process_result,
            comment=self._comment,
        )
        # Aggregate is a read command, so it participates in retryable reads.
        return self._client._retryable_read(
            cmd.get_cursor, self._target._read_preference_for(session), session
        )

    def _create_cursor(self):
        # close=False: an implicit session must outlive this call because the
        # returned cursor continues to use it for getMore commands.
        with self._client._tmp_session(self._session, close=False) as s:
            return self._run_aggregation_cmd(session=s, explicit_session=self._session is not None)

    def _resume(self):
        """Reestablish this change stream after a resumable error."""
        try:
            self._cursor.close()
        except PyMongoError:
            # Best-effort close; the old cursor may already be dead.
            pass
        self._cursor = self._create_cursor()

    def close(self) -> None:
        """Close this ChangeStream."""
        self._closed = True
        self._cursor.close()

    def __iter__(self) -> "ChangeStream[_DocumentType]":
        return self

    @property
    def resume_token(self) -> Optional[Mapping[str, Any]]:
        """The cached resume token that will be used to resume after the most
        recently returned change.

        .. versionadded:: 3.9
        """
        # Deep copy so callers cannot mutate our cached token.
        return copy.deepcopy(self._resume_token)

    @_csot.apply
    def next(self) -> _DocumentType:
        """Advance the cursor.

        This method blocks until the next change document is returned or an
        unrecoverable error is raised. This method is used when iterating over
        all changes in the cursor. For example::

            try:
                resume_token = None
                pipeline = [{'$match': {'operationType': 'insert'}}]
                with db.collection.watch(pipeline) as stream:
                    for insert_change in stream:
                        print(insert_change)
                        resume_token = stream.resume_token
            except pymongo.errors.PyMongoError:
                # The ChangeStream encountered an unrecoverable error or the
                # resume attempt failed to recreate the cursor.
                if resume_token is None:
                    # There is no usable resume token because there was a
                    # failure during ChangeStream initialization.
                    logging.error('...')
                else:
                    # Use the interrupted ChangeStream's resume token to create
                    # a new ChangeStream. The new stream will continue from the
                    # last seen insert change without missing any events.
                    with db.collection.watch(
                            pipeline, resume_after=resume_token) as stream:
                        for insert_change in stream:
                            print(insert_change)

        Raises :exc:`StopIteration` if this ChangeStream is closed.
        """
        while self.alive:
            doc = self.try_next()
            if doc is not None:
                return doc

        raise StopIteration

    __next__ = next

    @property
    def alive(self) -> bool:
        """Does this cursor have the potential to return more data?

        .. note:: Even if :attr:`alive` is ``True``, :meth:`next` can raise
          :exc:`StopIteration` and :meth:`try_next` can return ``None``.

        .. versionadded:: 3.8
        """
        return not self._closed

    @_csot.apply
    def try_next(self) -> Optional[_DocumentType]:
        """Advance the cursor without blocking indefinitely.

        This method returns the next change document without waiting
        indefinitely for the next change. For example::

            with db.collection.watch() as stream:
                while stream.alive:
                    change = stream.try_next()
                    # Note that the ChangeStream's resume token may be updated
                    # even when no changes are returned.
                    print("Current resume token: %r" % (stream.resume_token,))
                    if change is not None:
                        print("Change document: %r" % (change,))
                        continue
                    # We end up here when there are no recent changes.
                    # Sleep for a while before trying again to avoid flooding
                    # the server with getMore requests when no changes are
                    # available.
                    time.sleep(10)

        If no change document is cached locally then this method runs a single
        getMore command. If the getMore yields any documents, the next
        document is returned, otherwise, if the getMore returns no documents
        (because there have been no changes) then ``None`` is returned.

        :Returns:
          The next change document or ``None`` when no document is available
          after running a single getMore or when the cursor is closed.

        .. versionadded:: 3.8
        """
        if not self._closed and not self._cursor.alive:
            self._resume()

        # Attempt to get the next change with at most one getMore and at most
        # one resume attempt.
        try:
            change = self._cursor._try_next(True)
        except (ConnectionFailure, CursorNotFound):
            self._resume()
            change = self._cursor._try_next(False)
        except OperationFailure as exc:
            if exc._max_wire_version is None:
                raise
            # Wire version >= 9 servers label resumable errors explicitly;
            # older servers require the spec's code allow-list.
            is_resumable = (
                exc._max_wire_version >= 9 and exc.has_error_label("ResumableChangeStreamError")
            ) or (exc._max_wire_version < 9 and exc.code in _RESUMABLE_GETMORE_ERRORS)
            if not is_resumable:
                raise
            self._resume()
            change = self._cursor._try_next(False)

        # Check if the cursor was invalidated.
        if not self._cursor.alive:
            self._closed = True

        # If no changes are available.
        if change is None:
            # We have either iterated over all documents in the cursor,
            # OR the most-recently returned batch is empty. In either case,
            # update the cached resume token with the postBatchResumeToken if
            # one was returned. We also clear the startAtOperationTime.
            if self._cursor._post_batch_resume_token is not None:
                self._resume_token = self._cursor._post_batch_resume_token
                self._start_at_operation_time = None
            return change

        # Else, changes are available.
        try:
            resume_token = change["_id"]
        except KeyError:
            self.close()
            raise InvalidOperation(
                "Cannot provide resume functionality when the resume token is missing."
            )

        # If this is the last change document from the current batch, cache the
        # postBatchResumeToken.
        if not self._cursor._has_next() and self._cursor._post_batch_resume_token:
            resume_token = self._cursor._post_batch_resume_token

        # Hereafter, don't use startAfter; instead use resumeAfter.
        self._uses_start_after = False
        self._uses_resume_after = True

        # Cache the resume token and clear startAtOperationTime.
        self._resume_token = resume_token
        self._start_at_operation_time = None

        if self._decode_custom:
            # Re-decode the raw BSON with the caller's original codec options
            # so custom type decoders are applied (see __init__).
            return _bson_to_dict(change.raw, self._orig_codec_options)
        return change

    def __enter__(self) -> "ChangeStream":
        return self

    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
        self.close()
|
407
|
+
|
408
|
+
|
409
|
+
class CollectionChangeStream(ChangeStream, Generic[_DocumentType]):
    """Change stream cursor scoped to a single collection.

    Not part of the public API; application code should obtain one via the
    helper method :meth:`pymongo.collection.Collection.watch` instead.

    .. versionadded:: 3.7
    """

    @property
    def _aggregation_command_class(self):
        # Collection-level aggregation backs this stream.
        return _CollectionAggregationCommand

    @property
    def _client(self):
        # self._target is a Collection here, so reach the client through
        # its owning database.
        return self._target.database.client
|
425
|
+
|
426
|
+
|
427
|
+
class DatabaseChangeStream(ChangeStream, Generic[_DocumentType]):
    """Change stream cursor watching every collection of one database.

    Not part of the public API; application code should obtain one via the
    helper method :meth:`pymongo.database.Database.watch` instead.

    .. versionadded:: 3.7
    """

    @property
    def _aggregation_command_class(self):
        # Database-level aggregation backs this stream.
        return _DatabaseAggregationCommand

    @property
    def _client(self):
        # self._target is a Database here, which exposes its client directly.
        return self._target.client
|
443
|
+
|
444
|
+
|
445
|
+
class ClusterChangeStream(DatabaseChangeStream, Generic[_DocumentType]):
    """Change stream cursor watching every collection in the whole cluster.

    Not part of the public API; application code should obtain one via the
    helper method :meth:`pymongo.mongo_client.MongoClient.watch` instead.

    .. versionadded:: 3.7
    """

    def _change_stream_options(self):
        # Identical to the database-level options, plus the flag that widens
        # the $changeStream stage's scope to the entire deployment.
        options = super()._change_stream_options()
        options["allChangesForCluster"] = True
        return options
|