bson 5.0.2 → 5.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Rakefile +2 -0
- data/ext/bson/extconf.rb +1 -1
- data/lib/bson/binary.rb +126 -4
- data/lib/bson/document.rb +8 -0
- data/lib/bson/object_id.rb +1 -1
- data/lib/bson/regexp.rb +3 -3
- data/lib/bson/vector.rb +44 -0
- data/lib/bson/version.rb +5 -16
- data/lib/bson.rb +1 -0
- data/spec/bson/document_as_spec.rb +14 -0
- data/spec/bson/vector_spec.rb +33 -0
- data/spec/runners/binary_vector.rb +78 -0
- data/spec/shared/LICENSE +20 -0
- data/spec/shared/bin/get-mongodb-download-url +17 -0
- data/spec/shared/bin/s3-copy +45 -0
- data/spec/shared/bin/s3-upload +69 -0
- data/spec/shared/lib/mrss/child_process_helper.rb +80 -0
- data/spec/shared/lib/mrss/cluster_config.rb +231 -0
- data/spec/shared/lib/mrss/constraints.rb +378 -0
- data/spec/shared/lib/mrss/docker_runner.rb +298 -0
- data/spec/shared/lib/mrss/eg_config_utils.rb +51 -0
- data/spec/shared/lib/mrss/event_subscriber.rb +210 -0
- data/spec/shared/lib/mrss/lite_constraints.rb +238 -0
- data/spec/shared/lib/mrss/release/candidate.rb +284 -0
- data/spec/shared/lib/mrss/release/product_data.rb +144 -0
- data/spec/shared/lib/mrss/server_version_registry.rb +113 -0
- data/spec/shared/lib/mrss/session_registry.rb +69 -0
- data/spec/shared/lib/mrss/session_registry_legacy.rb +60 -0
- data/spec/shared/lib/mrss/spec_organizer.rb +179 -0
- data/spec/shared/lib/mrss/utils.rb +37 -0
- data/spec/shared/lib/tasks/candidate.rake +64 -0
- data/spec/shared/share/Dockerfile.erb +251 -0
- data/spec/shared/share/haproxy-1.conf +16 -0
- data/spec/shared/share/haproxy-2.conf +17 -0
- data/spec/shared/shlib/config.sh +27 -0
- data/spec/shared/shlib/distro.sh +84 -0
- data/spec/shared/shlib/server.sh +423 -0
- data/spec/shared/shlib/set_env.sh +110 -0
- data/spec/spec_helper.rb +2 -0
- data/spec/spec_tests/binary_vector_spec.rb +82 -0
- data/spec/spec_tests/data/binary_vector/README.md +61 -0
- data/spec/spec_tests/data/binary_vector/float32.json +65 -0
- data/spec/spec_tests/data/binary_vector/int8.json +57 -0
- data/spec/spec_tests/data/binary_vector/packed_bit.json +83 -0
- data/spec/spec_tests/data/corpus/binary.json +30 -0
- metadata +70 -6
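The headline change in this release is support for the BSON binary vector subtype (subtype 9): data/lib/bson/binary.rb grows by 126 lines, a new data/lib/bson/vector.rb is added, and most of the remaining additions are spec-test plumbing for it. The sketch below is a hypothetical orientation example, not code from the diff: it relies only on behaviour the specs further down exercise (Binary#type returning :vector, Binary#as_vector, and the dtype/padding accessors), while the :vector constructor argument and the payload layout (a dtype byte and a padding byte followed by packed little-endian element bytes) are assumptions drawn from the vector spec README at the end of this diff.

# --- illustrative sketch, not part of the package diff ---
# Assumptions: BSON::Binary.new accepts the :vector subtype symbol, and the
# payload is <dtype byte><padding byte> followed by packed element bytes
# (float32 here, dtype 0x27, little-endian). Consult lib/bson/binary.rb and
# lib/bson/vector.rb from 5.1.0 for the authoritative API.
require 'bson'

payload = [0x27, 0x00].pack('C2') + [1.5, -2.5].pack('e2')
binary  = BSON::Binary.new(payload, :vector)

bytes   = { 'vector' => binary }.to_bson.to_s
decoded = Hash.from_bson(BSON::ByteBuffer.new(bytes))['vector']

decoded.type               # => :vector
vector = decoded.as_vector # per the specs below, exposes dtype and padding
vector.dtype               # assumed to map dtype 0x27 to :float32
vector.padding             # => 0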
data/spec/shared/shlib/server.sh
ADDED
@@ -0,0 +1,423 @@
+# This file contains functions pertaining to downloading, starting and
+# configuring a MongoDB server.
+
+# Note that mlaunch is executed with (and therefore installed with) Python 2.
+# The reason for this is that in the past, some of the distros we tested on
+# had an ancient version of Python 3 that was unusable (e.g. it couldn't
+# install anything from PyPI due to outdated TLS/SSL implementation).
+# It is likely that all of the current distros we use have a recent enough
+# and working Python 3 implementation, such that we could use Python 3 for
+# everything.
+#
+# Note that some distros (e.g. ubuntu2004) do not contain a `python' binary
+# at all, thus python2 or python3 must be explicitly specified depending on
+# the desired version.
+
+set_fcv() {
+  if test -n "$FCV"; then
+    mongo --eval 'assert.commandWorked(db.adminCommand( { setFeatureCompatibilityVersion: "'"$FCV"'" } ));' "$MONGODB_URI"
+    mongo --quiet --eval 'db.adminCommand( { getParameter: 1, featureCompatibilityVersion: 1 } )' |grep "version.*$FCV"
+  fi
+}
+
+add_uri_option() {
+  opt=$1
+
+  if ! echo $MONGODB_URI |sed -e s,//,, |grep -q /; then
+    MONGODB_URI="$MONGODB_URI/"
+  fi
+
+  if ! echo $MONGODB_URI |grep -q '?'; then
+    MONGODB_URI="$MONGODB_URI?"
+  fi
+
+  MONGODB_URI="$MONGODB_URI&$opt"
+}
+
+prepare_server() {
+  if test -n "$USE_OPT_MONGODB"; then
+    export BINDIR=/opt/mongodb/bin
+    export PATH=$BINDIR:$PATH
+    return
+  fi
+
+  . $PROJECT_DIRECTORY/.mod/drivers-evergreen-tools/.evergreen/download-mongodb.sh
+
+  get_distro
+  arch="${1:-$DISTRO}"
+
+  get_mongodb_download_url_for "$arch" "$MONGODB_VERSION"
+  prepare_server_from_url "$MONGODB_DOWNLOAD_URL" "$MONGOSH_DOWNLOAD_URL"
+}
+
+prepare_server_from_url() {
+  server_url=$1
+  mongosh_url=$2
+
+  dirname=`basename $server_url |sed -e s/.tgz//`
+  mongodb_dir="$MONGO_ORCHESTRATION_HOME"/mdb/"$dirname"
+  mkdir -p "$mongodb_dir"
+  curl --retry 3 $server_url | tar xz -C "$mongodb_dir" --strip-components 1 -f -
+
+  if test -n "$mongosh_url"; then
+    curl --retry 3 $mongosh_url | tar xz -C "$mongodb_dir" --strip-components 1 -f -
+  fi
+
+  BINDIR="$mongodb_dir"/bin
+  export PATH="$BINDIR":$PATH
+}
+
+install_mlaunch_venv() {
+  python3 -V || true
+  if ! python3 -m venv -h >/dev/null; then
+    # Current virtualenv fails with
+    # https://github.com/pypa/virtualenv/issues/1630
+    python3 -m pip install venv --user
+  fi
+  if ! python3 -m ensurepip -h > /dev/null; then
+    # Debian11/Ubuntu2204 have venv installed, but it is nonfunctional unless
+    # the python3-venv package is also installed (it lacks the ensurepip
+    # module).
+    sudo apt-get install --yes python3-venv
+  fi
+  if test "$USE_SYSTEM_PYTHON_PACKAGES" = 1 &&
+    python3 -m pip list |grep mtools
+  then
+    # Use the existing mtools-legacy
+    :
+  else
+    # Spawn a virtual environment, but only if one is not already
+    # active...
+    if test -z "$VIRTUAL_ENV"; then
+      venvpath="$MONGO_ORCHESTRATION_HOME"/venv
+      python3 -m venv $venvpath
+      . $venvpath/bin/activate
+    fi
+
+    # [mlaunch] does not work:
+    # https://github.com/rueckstiess/mtools/issues/856
+    # dateutil dependency is missing in mtools: https://github.com/rueckstiess/mtools/issues/864
+    #pip install 'mtools==1.7' 'pymongo==4.1' python-dateutil psutil
+
+    # dateutil dependency is missing in mtools: https://github.com/rueckstiess/mtools/issues/864
+    pip install --upgrade setuptools
+    pip install 'mtools-legacy[mlaunch]' 'pymongo<4' python-dateutil
+  fi
+}
+
+install_mlaunch_pip() {
+  if test -n "$USE_OPT_MONGODB" && which mlaunch >/dev/null 2>&1; then
+    # mlaunch is preinstalled in the docker image, do not install it here
+    return
+  fi
+
+  python -V || true
+  python3 -V || true
+  pythonpath="$MONGO_ORCHESTRATION_HOME"/python
+  # dateutil dependency is missing in mtools: https://github.com/rueckstiess/mtools/issues/864
+  pip install -t "$pythonpath" 'mtools-legacy[mlaunch]' 'pymongo<4' python-dateutil
+  export PATH="$pythonpath/bin":$PATH
+  export PYTHONPATH="$pythonpath"
+}
+
+install_mlaunch_git() {
+  repo=$1
+  branch=$2
+  python -V || true
+  python3 -V || true
+  which pip || true
+  which pip3 || true
+
+  if false; then
+    if ! virtualenv --version; then
+      python3 `which pip3` install --user virtualenv
+      export PATH=$HOME/.local/bin:$PATH
+      virtualenv --version
+    fi
+
+    venvpath="$MONGO_ORCHESTRATION_HOME"/venv
+    virtualenv -p python3 $venvpath
+    . $venvpath/bin/activate
+
+    # dateutil dependency is missing in mtools: https://github.com/rueckstiess/mtools/issues/864
+    pip3 install psutil pymongo python-dateutil
+
+    git clone $repo mlaunch
+    cd mlaunch
+    git checkout origin/$branch
+    python3 setup.py install
+    cd ..
+  else
+    pip install --user 'virtualenv==13'
+    export PATH=$HOME/.local/bin:$PATH
+
+    venvpath="$MONGO_ORCHESTRATION_HOME"/venv
+    virtualenv $venvpath
+    . $venvpath/bin/activate
+
+    # dateutil dependency is missing in mtools: https://github.com/rueckstiess/mtools/issues/864
+    pip install psutil pymongo python-dateutil
+
+    git clone $repo mlaunch
+    (cd mlaunch &&
+      git checkout origin/$branch &&
+      python2 setup.py install
+    )
+  fi
+}
+
+install_haproxy() {
+  if ! command -v haproxy &> /dev/null; then
+    if ! command -v apt-get &> /dev/null; then
+      # no apt-get; assume RHEL
+      sudo yum -y install haproxy
+    else
+      sudo apt-get update && sudo apt-get install --yes haproxy
+    fi
+  else
+    echo 'haproxy is present'
+  fi
+}
+
+install_cmake() {
+  if ! command -v cmake &> /dev/null; then
+    if ! command -v apt-get &> /dev/null; then
+      # no apt-get; assume RHEL
+      sudo yum -y install cmake libarchive
+    else
+      sudo apt-get update && sudo apt-get install --yes cmake
+    fi
+  else
+    echo 'cmake is present'
+  fi
+}
+
+# This function sets the following global variables:
+# server_cert_path
+# server_ca_path
+# server_client_cert_path
+#
+# These variables are used later to connect to processes via mongo client.
+calculate_server_args() {
+  local mongo_version=`echo $MONGODB_VERSION |tr -d .`
+
+  if test -z "$mongo_version"; then
+    echo "$MONGODB_VERSION must be set and not contain only dots" 1>&2
+    exit 3
+  fi
+
+  if test $mongo_version = latest; then
+    mongo_version=70
+  fi
+
+  local args="--setParameter enableTestCommands=1"
+
+  if test $mongo_version -ge 50; then
+    args="$args --setParameter acceptApiVersion2=1"
+  elif test $mongo_version -ge 47; then
+    args="$args --setParameter acceptAPIVersion2=1"
+  fi
+
+  args="$args --setParameter diagnosticDataCollectionEnabled=false"
+
+  local uri_options=
+  if test "$TOPOLOGY" = replica-set; then
+    args="$args --replicaset --name test-rs --nodes 2 --arbiter"
+    export HAVE_ARBITER=1
+  elif test "$TOPOLOGY" = replica-set-single-node; then
+    args="$args --replicaset --name test-rs --nodes 1"
+  elif test "$TOPOLOGY" = sharded-cluster; then
+    args="$args --replicaset --nodes 2 --sharded 1 --name test-rs"
+    if test -z "$SINGLE_MONGOS"; then
+      args="$args --mongos 2"
+    fi
+  elif test "$TOPOLOGY" = standalone; then
+    args="$args --single"
+  elif test "$TOPOLOGY" = load-balanced; then
+    args="$args --replicaset --nodes 2 --sharded 1 --name test-rs --port 27117"
+    if test -z "$MRSS_ROOT"; then
+      echo "Please set MRSS_ROOT" 1>&2
+      exit 2
+    fi
+    if test -n "$SINGLE_MONGOS"; then
+      haproxy_config=$MRSS_ROOT/share/haproxy-1.conf
+    else
+      args="$args --mongos 2"
+      haproxy_config=$MRSS_ROOT/share/haproxy-2.conf
+    fi
+    uri_options="$uri_options&loadBalanced=true"
+  else
+    echo "Unknown topology: $TOPOLOGY" 1>&2
+    exit 1
+  fi
+  if test -n "$MMAPV1"; then
+    args="$args --storageEngine mmapv1 --smallfiles --noprealloc"
+    uri_options="$uri_options&retryReads=false&retryWrites=false"
+  fi
+  if test "$AUTH" = auth; then
+    args="$args --auth --username bob --password pwd123"
+  elif test "$AUTH" = x509; then
+    args="$args --auth --username bootstrap --password bootstrap"
+  elif echo "$AUTH" |grep -q ^aws; then
+    args="$args --auth --username bootstrap --password bootstrap"
+    args="$args --setParameter authenticationMechanisms=MONGODB-AWS,SCRAM-SHA-1,SCRAM-SHA-256"
+    uri_options="$uri_options&authMechanism=MONGODB-AWS&authSource=\$external"
+  fi
+
+  if test -n "$OCSP"; then
+    if test -z "$OCSP_ALGORITHM"; then
+      echo "OCSP_ALGORITHM must be set if OCSP is set" 1>&2
+      exit 1
+    fi
+  fi
+
+  if test "$SSL" = ssl || test -n "$OCSP_ALGORITHM"; then
+    if test -n "$OCSP_ALGORITHM"; then
+      if test "$OCSP_MUST_STAPLE" = 1; then
+        server_cert_path=spec/support/ocsp/$OCSP_ALGORITHM/server-mustStaple.pem
+      else
+        server_cert_path=spec/support/ocsp/$OCSP_ALGORITHM/server.pem
+      fi
+      server_ca_path=spec/support/ocsp/$OCSP_ALGORITHM/ca.crt
+      server_client_cert_path=spec/support/ocsp/$OCSP_ALGORITHM/server.pem
+    else
+      server_cert_path=spec/support/certificates/server-second-level-bundle.pem
+      server_ca_path=spec/support/certificates/ca.crt
+      server_client_cert_path=spec/support/certificates/client.pem
+    fi
+
+    if test -n "$OCSP_ALGORITHM"; then
+      client_cert_path=spec/support/ocsp/$OCSP_ALGORITHM/server.pem
+    elif test "$AUTH" = x509; then
+      client_cert_path=spec/support/certificates/client-x509.pem
+
+      uri_options="$uri_options&authMechanism=MONGODB-X509"
+    elif echo $RVM_RUBY |grep -q jruby; then
+      # JRuby does not grok chained certificate bundles -
+      # https://github.com/jruby/jruby-openssl/issues/181
+      client_cert_path=spec/support/certificates/client.pem
+    else
+      client_cert_path=spec/support/certificates/client-second-level-bundle.pem
+    fi
+
+    uri_options="$uri_options&tls=true&"\
+"tlsCAFile=$server_ca_path&"\
+"tlsCertificateKeyFile=$client_cert_path"
+
+    args="$args --sslMode requireSSL"\
+" --sslPEMKeyFile $server_cert_path"\
+" --sslCAFile $server_ca_path"\
+" --sslClientCertificate $server_client_cert_path"
+  fi
+
+  # Docker forwards ports to the external interface, not to the loopback.
+  # Hence we must bind to all interfaces here.
+  if test -n "$BIND_ALL"; then
+    args="$args --bind_ip_all"
+  fi
+
+  # MongoDB servers pre-4.2 do not enable zlib compression by default
+  if test "$COMPRESSOR" = snappy; then
+    args="$args --networkMessageCompressors snappy"
+  elif test "$COMPRESSOR" = zlib; then
+    args="$args --networkMessageCompressors zlib"
+  fi
+
+  if test -n "$OCSP_ALGORITHM" || test -n "$OCSP_VERIFIER"; then
+    python3 -m pip install asn1crypto oscrypto flask
+  fi
+
+  local ocsp_args=
+  if test -n "$OCSP_ALGORITHM"; then
+    if test -z "$server_ca_path"; then
+      echo "server_ca_path must have been set" 1>&2
+      exit 1
+    fi
+    ocsp_args="--ca_file $server_ca_path"
+    if test "$OCSP_DELEGATE" = 1; then
+      ocsp_args="$ocsp_args \
+        --ocsp_responder_cert spec/support/ocsp/$OCSP_ALGORITHM/ocsp-responder.crt \
+        --ocsp_responder_key spec/support/ocsp/$OCSP_ALGORITHM/ocsp-responder.key \
+      "
+    else
+      ocsp_args="$ocsp_args \
+        --ocsp_responder_cert spec/support/ocsp/$OCSP_ALGORITHM/ca.crt \
+        --ocsp_responder_key spec/support/ocsp/$OCSP_ALGORITHM/ca.key \
+      "
+    fi
+    if test -n "$OCSP_STATUS"; then
+      ocsp_args="$ocsp_args --fault $OCSP_STATUS"
+    fi
+  fi
+
+  OCSP_ARGS="$ocsp_args"
+  SERVER_ARGS="$args"
+  URI_OPTIONS="$uri_options"
+}
+
+launch_ocsp_mock() {
+  if test -n "$OCSP_ARGS"; then
+    # Bind to 0.0.0.0 for Docker
+    python3 spec/support/ocsp/ocsp_mock.py $OCSP_ARGS -b 0.0.0.0 -p 8100 &
+    OCSP_MOCK_PID=$!
+  fi
+}
+
+launch_server() {
+  local dbdir="$1"
+  python3 -m mtools.mlaunch.mlaunch --dir "$dbdir" --binarypath "$BINDIR" $SERVER_ARGS
+
+  if test "$TOPOLOGY" = sharded-cluster && test $MONGODB_VERSION = 3.6; then
+    # On 3.6 server the sessions collection is not immediately available,
+    # so we run the refreshLogicalSessionCacheNow command on the config server
+    # and again on each mongos in order for the mongoses
+    # to correctly report logicalSessionTimeoutMinutes.
+    mongos_regex="\s*mongos\s+([0-9]+)\s+running\s+[0-9]+"
+    config_server_regex="\s*config\sserver\s+([0-9]+)\s+running\s+[0-9]+"
+    config_server=""
+    mongoses=()
+    if test "$AUTH" = auth
+    then
+      base_url="mongodb://bob:pwd123@localhost"
+    else
+      base_url="mongodb://localhost"
+    fi
+    if test "$SSL" = "ssl"
+    then
+      mongo_command="${BINDIR}/mongo --ssl --sslPEMKeyFile $server_cert_path --sslCAFile $server_ca_path"
+    else
+      mongo_command="${BINDIR}/mongo"
+    fi
+
+    while read -r line
+    do
+      if [[ $line =~ $config_server_regex ]]
+      then
+        port="${BASH_REMATCH[1]}"
+        config_server="${base_url}:${port}"
+      fi
+      if [[ $line =~ $mongos_regex ]]
+      then
+        port="${BASH_REMATCH[1]}"
+        mongoses+=("${base_url}:${port}")
+      fi
+    done < <(python2 -m mtools.mlaunch.mlaunch list --dir "$dbdir" --binarypath "$BINDIR")
+
+    if [ -n "$config_server" ]; then
+      ${mongo_command} "$config_server" --eval 'db.adminCommand("refreshLogicalSessionCacheNow")'
+      for mongos in ${mongoses[*]}
+      do
+        ${mongo_command} "$mongos" --eval 'db.adminCommand("refreshLogicalSessionCacheNow")'
+      done
+    fi
+  fi
+
+  if test "$TOPOLOGY" = load-balanced; then
+    if test -z "$haproxy_config"; then
+      echo haproxy_config should have been set 1>&2
+      exit 3
+    fi
+
+    haproxy -D -f $haproxy_config -p $mongodb_dir/haproxy.pid
+  fi
+}
data/spec/shared/shlib/set_env.sh
ADDED
@@ -0,0 +1,110 @@
+# When changing, also update the hash in share/Dockerfile.
+JDK_VERSION=jdk21
+
+set_env_java() {
+  ls -l /opt || true
+  ls -l /usr/lib/jvm || true
+
+  # Use toolchain java if it exists
+  if [ -f /opt/java/$JDK_VERSION/bin/java ]; then
+    export JAVACMD=/opt/java/$JDK_VERSION/bin/java
+  else
+    echo Could not find $JDK_VERSION in /opt/java
+  fi
+
+  if test -n "$JAVACMD"; then
+    eval $JAVACMD -version
+  elif which java 2>/dev/null; then
+    java -version
+  else
+    echo No java runtime found
+  fi
+}
+
+set_env_python() {
+  if test "$DOCKER_PRELOAD" != 1; then
+    if test -n "$DOCKER"; then
+      # If we are running in Docker and not preloading, we need to fetch the
+      # Python binary.
+      curl -fL --retry 3 https://github.com/p-mongodb/deps/raw/main/"$arch"-python37.tar.xz | \
+        tar xfJ - -C /opt
+    fi
+
+    if test -d /opt/python/3.7/bin; then
+      # Most Evergreen configurations.
+      export PATH=/opt/python/3.7/bin:$PATH
+    elif test -d /opt/python37/bin; then
+      # Configurations that use Docker in Evergreen - these don't preload.
+      export PATH=/opt/python37/bin:$PATH
+    fi
+
+    python3 -V
+  fi
+}
+
+set_env_node() {
+  if test "$DOCKER_PRELOAD" != 1; then
+    dir=`ls -d /opt/nodejs/node-v12* |head -1`
+    if test -z "$dir"; then
+      echo "Node 12 missing" 1>&2
+      exit 2
+    fi
+    export PATH="$dir/bin:$PATH"
+  elif test -d /opt/node/bin; then
+    # Node from toolchain in Evergreen
+    export PATH=/opt/node/bin:$PATH
+  fi
+
+  node -v
+}
+
+set_env_ruby() {
+  if test -z "$RVM_RUBY"; then
+    echo "Empty RVM_RUBY, aborting"
+    exit 2
+  fi
+
+  #ls -l /opt
+
+  # Necessary for jruby
+  set_env_java
+
+  if [ "$RVM_RUBY" == "ruby-head" ]; then
+    # When we use ruby-head, we do not install the Ruby toolchain.
+    # But we still need Python 3.6+ to run mlaunch.
+    # Since the ruby-head tests are run on ubuntu1604, we can use the
+    # globally installed Python toolchain.
+    #export PATH=/opt/python/3.7/bin:$PATH
+
+    # 12.04, 14.04 and 16.04 are good
+    curl --retry 3 -fL http://rubies.travis-ci.org/ubuntu/`lsb_release -rs`/x86_64/ruby-head.tar.bz2 |tar xfj -
+    # TODO adjust gem path?
+    export PATH=`pwd`/ruby-head/bin:`pwd`/ruby-head/lib/ruby/gems/2.6.0/bin:$PATH
+    ruby --version
+    ruby --version |grep dev
+  elif test "$SYSTEM_RUBY" = 1; then
+    # Nothing
+    :
+  else
+    if test "$USE_OPT_TOOLCHAIN" = 1; then
+      # Nothing, also PATH is already set
+      :
+    else
+      # For testing unpublished builds:
+      #build_url=https://s3.amazonaws.com/mciuploads/mongo-ruby-toolchain/library/`host_distro`/$RVM_RUBY.tar.xz

+      build_url=http://boxes.10gen.com/build/toolchain-drivers/mongo-ruby-toolchain/library/`host_distro`/$RVM_RUBY.tar.xz
+      curl --retry 3 -fL $build_url |tar Jxf -
+      export PATH=`pwd`/rubies/$RVM_RUBY/bin:$PATH
+    fi
+
+    ruby --version
+
+    # Ensure we're using the right ruby
+    ruby_name=`echo $RVM_RUBY |awk -F- '{print $1}'`
+    ruby_version=`echo $RVM_RUBY |awk -F- '{print $2}' |cut -c 1-3`
+
+    ruby -v |fgrep $ruby_name
+    ruby -v |fgrep $ruby_version
+  fi
+}
data/spec/spec_helper.rb
CHANGED
@@ -17,12 +17,14 @@ CURRENT_PATH = File.expand_path(File.dirname(__FILE__))
 DRIVER_COMMON_BSON_TESTS = Dir.glob("#{CURRENT_PATH}/spec_tests/data/decimal128/*.json").sort
 BSON_CORPUS_TESTS = Dir.glob("#{CURRENT_PATH}/spec_tests/data/corpus/*.json").sort
 BSON_CORPUS_LEGACY_TESTS = Dir.glob("#{CURRENT_PATH}/spec_tests/data/corpus_legacy/*.json").sort
+BINARY_VECTOR_TESTS = Dir.glob("#{CURRENT_PATH}/spec_tests/data/binary_vector/*.json").sort
 
 $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "shared", "lib"))
 $LOAD_PATH.unshift(File.dirname(__FILE__))
 $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), "..", "lib"))
 
 require "ostruct"
+require "stringio"
 require "bson"
 require "json"
 require "rspec"
data/spec/spec_tests/binary_vector_spec.rb
ADDED
@@ -0,0 +1,82 @@
+# rubocop:todo all
+require 'spec_helper'
+require 'runners/binary_vector'
+
+describe 'Binary vector tests' do
+  specs = BINARY_VECTOR_TESTS.map { |file| BSON::BinaryVector::Spec.new(file) }
+  skipped_tests = [
+    'Overflow Vector INT8',
+    'Underflow Vector INT8',
+    'INT8 with float inputs',
+    'Overflow Vector PACKED_BIT',
+    'Underflow Vector PACKED_BIT',
+    'Vector with float values PACKED_BIT'
+  ]
+  [true, false].each do |use_vector_type|
+    context "use vector type: #{use_vector_type}" do
+      specs.each do |spec|
+        context(spec.description) do
+          spec.valid_tests.each do |test|
+            context(test.description) do
+              it 'encodes a document' do
+                expect(test.canonical_bson_from_document(use_vector_type: use_vector_type)).to eq(test.bson)
+              end
+
+              it 'decodes BSON' do
+                binary = test.document_from_canonical_bson[spec.test_key]
+                expect(binary.type).to eq(:vector)
+                vector = binary.as_vector
+                expect(vector.dtype).to eq(test.dtype)
+                expect(vector.padding).to eq(test.padding)
+                if vector.dtype == :float32
+                  vector.each_with_index do |v, i|
+                    if v == Float::INFINITY || v == -Float::INFINITY
+                      expect(v).to eq(test.vector[i])
+                    else
+                      expect(v).to be_within(0.00001).of(test.vector[i])
+                    end
+                  end
+                else
+                  expect(vector).to eq(test.vector)
+                end
+              end
+            end
+          end
+
+          spec.invalid_tests.each do |test|
+            context(test.description) do
+              let(:subject) do
+                if test.vector
+                  ->(vvd) { test.canonical_bson_from_document(use_vector_type: use_vector_type, validate_vector_data: vvd) }
+                else
+                  ->() { test.document_from_canonical_bson }
+                end
+              end
+              context 'with data validation' do
+                it 'raises' do
+                  expect {
+                    subject.call(true)
+                  }.to raise_error do |err|
+                    expect([ArgumentError, BSON::Error]).to include(err.class)
+                  end
+                end
+              end
+
+              context 'without data validation' do
+                it 'raises' do
+                  skip 'Ruby Array.pack does not validate input' if skipped_tests.include?(test.description)
+
+                  expect {
+                    subject.call(false)
+                  }.to raise_error do |err|
+                    expect([ArgumentError, BSON::Error]).to include(err.class)
+                  end
+                end
+              end
+            end
+          end
+        end
+      end
+    end
+  end
+end
data/spec/spec_tests/data/binary_vector/README.md
ADDED
@@ -0,0 +1,61 @@
+# Testing Binary subtype 9: Vector
+
+The JSON files in this directory tree are platform-independent tests that drivers can use to prove their conformance to
+the specification.
+
+These tests focus on the roundtrip of the list of numbers as input/output, along with their data type and byte padding.
+
+Additional tests exist in `bson_corpus/tests/binary.json` but do not sufficiently test the end-to-end process of Vector
+to BSON. For this reason, drivers must create a bespoke test runner for the vector subtype.
+
+## Format
+
+The test data corpus consists of a JSON file for each data type (dtype). Each file contains a number of test cases,
+under the top-level key "tests". Each test case pertains to a single vector. The keys provide the specification of the
+vector. Valid cases also include the Canonical BSON format of a document {test_key: binary}. The "test_key" is common,
+and specified at the top level.
+
+#### Top level keys
+
+Each JSON file contains three top-level keys.
+
+- `description`: human-readable description of what is in the file
+- `test_key`: name used for key when encoding/decoding a BSON document containing the single BSON Binary for the test
+  case. Applies to *every* case.
+- `tests`: array of test case objects, each of which has the following keys. Valid cases will also contain additional
+  binary and json encoding values.
+
+#### Keys of individual test cases
+
+- `description`: string describing the test.
+- `valid`: boolean indicating if the vector, dtype, and padding should be considered a valid input.
+- `vector`: (required if valid is true) list of numbers
+- `dtype_hex`: string defining the data type in hex (e.g. "0x10", "0x27")
+- `dtype_alias`: (optional) string defining the data dtype, perhaps as Enum.
+- `padding`: (optional) integer for byte padding. Defaults to 0.
+- `canonical_bson`: (required if valid is true) an (uppercase) big-endian hex representation of a BSON byte string.
+
+## Required tests
+
+#### To prove correct in a valid case (`valid: true`), one MUST
+
+- encode a document from the numeric values, dtype, and padding, along with the "test_key", and assert this matches the
+  canonical_bson string.
+- decode the canonical_bson into its binary form, and then assert that the numeric values, dtype, and padding all match
+  those provided in the JSON.
+
+Note: For floating point number types, exact numerical matches may not be possible. Drivers that natively support the
+floating-point type being tested (e.g., when testing float32 vector values in a driver that natively supports float32),
+MUST assert that the input float array is the same after encoding and decoding.
+
+#### To prove correct in an invalid case (`valid: false`), one MUST
+
+- if the vector field is present, raise an exception when attempting to encode a document from the numeric values,
+  dtype, and padding.
+- if the canonical_bson field is present, raise an exception when attempting to deserialize it into the corresponding
+  numeric values, as the field contains corrupted data.
+
+## FAQ
+
+- What MongoDB Server version does this apply to?
+  - Files in the "specifications" repository have no version scheme. They are not tied to a MongoDB server version.
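For orientation, here is a hypothetical sketch of how a runner might load the per-file format described under "Format" in the README above. The gem's actual runner is the new data/spec/runners/binary_vector.rb (78 lines in this release); the struct and method names below are illustrative only.

# --- illustrative sketch, not part of the package diff ---
require 'json'

# Shapes mirror the keys documented in the README above; names are invented.
VectorTestCase = Struct.new(
  :description, :valid, :vector, :dtype_hex, :dtype_alias, :padding,
  :canonical_bson, keyword_init: true
)
VectorTestFile = Struct.new(:description, :test_key, :tests, keyword_init: true)

def load_vector_test_file(path)
  raw   = JSON.parse(File.read(path))
  cases = raw['tests'].map do |t|
    VectorTestCase.new(
      description:    t['description'],
      valid:          t['valid'],
      vector:         t['vector'],
      dtype_hex:      t['dtype_hex'],
      dtype_alias:    t['dtype_alias'],
      padding:        t.fetch('padding', 0), # padding defaults to 0
      canonical_bson: t['canonical_bson']
    )
  end
  VectorTestFile.new(description: raw['description'], test_key: raw['test_key'], tests: cases)
end

# Valid cases (valid: true) are then round-tripped against canonical_bson;
# invalid cases must raise on encode (if "vector" is present) or on decode
# (if "canonical_bson" is present), as the README requires.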