mongo 2.2.2 → 2.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 72df9675f13de7a6111464ef0089ba64b4ef11af
4
- data.tar.gz: 20832950dd436fb031938d2e164cca2c7fe5d84c
3
+ metadata.gz: 4aa2e3cfc044a5c4e8c69c0a951c485db0aa93b7
4
+ data.tar.gz: 86f18a46a73a5d7d0234eb8468833258c441247c
5
5
  SHA512:
6
- metadata.gz: eaaf003581ddec08e424a7a8168917b4a971514029527229ffe0e1a5746715be52caf0eb8e1f3ef4af29e35d744b23240282fecfa157280f4e31cb4f8c03bfbf
7
- data.tar.gz: 32dd69254433179769be7b7d030e230da1b499ce59da53ab1535b1d736cca6dd2a3bb8d1516323abdf3f8dbb7d2285356ef011c708f8a460cc2ffa1e4e88f015
6
+ metadata.gz: 26109683ee810a6af091755feb5a2cbbbf578c28177194abb3601efe2478b77b9cb4cd261e3dfe69d3d7dd33daa40e326c3e10ab2cc5c89b7b0d7c6401b90ca2
7
+ data.tar.gz: 4109ba38cfb7841490a7216e61b8e16e97edffbeb438afc418a24876f10c6f6b092128402e218786df74c82ee047c1f9917d2e4bb1a4e1d6022bb37631c06819
Binary file
data.tar.gz.sig CHANGED
Binary file
data/Rakefile CHANGED
@@ -41,3 +41,158 @@ namespace :docs do
41
41
  system "yardoc -o #{out} --title mongo-#{Mongo::VERSION}"
42
42
  end
43
43
  end
44
+
45
+ require_relative "profile/benchmarking"
46
+
47
+ # Some require data files, available from the drivers team. See the comments above each task for details.
48
+ namespace :benchmark do
49
+ desc "Run the driver benchmark tests."
50
+
51
+ namespace :micro do
52
+ desc "Run the common driver micro benchmarking tests"
53
+
54
+ namespace :flat do
55
+ desc "Benchmarking for flat bson documents."
56
+
57
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called flat_bson.json.
58
+ task :encode do
59
+ puts "MICRO BENCHMARK:: FLAT:: ENCODE"
60
+ Mongo::Benchmarking::Micro.run(:flat, :encode)
61
+ end
62
+
63
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called flat_bson.json.
64
+ task :decode do
65
+ puts "MICRO BENCHMARK:: FLAT:: DECODE"
66
+ Mongo::Benchmarking::Micro.run(:flat, :decode)
67
+ end
68
+ end
69
+
70
+ namespace :deep do
71
+ desc "Benchmarking for deep bson documents."
72
+
73
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called deep_bson.json.
74
+ task :encode do
75
+ puts "MICRO BENCHMARK:: DEEP:: ENCODE"
76
+ Mongo::Benchmarking::Micro.run(:deep, :encode)
77
+ end
78
+
79
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called deep_bson.json.
80
+ task :decode do
81
+ puts "MICRO BENCHMARK:: DEEP:: DECODE"
82
+ Mongo::Benchmarking::Micro.run(:deep, :decode)
83
+ end
84
+ end
85
+
86
+ namespace :full do
87
+ desc "Benchmarking for full bson documents."
88
+
89
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called full_bson.json.
90
+ task :encode do
91
+ puts "MICRO BENCHMARK:: FULL:: ENCODE"
92
+ Mongo::Benchmarking::Micro.run(:full, :encode)
93
+ end
94
+
95
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called full_bson.json.
96
+ task :decode do
97
+ puts "MICRO BENCHMARK:: FULL:: DECODE"
98
+ Mongo::Benchmarking::Micro.run(:full, :decode)
99
+ end
100
+ end
101
+ end
102
+
103
+ namespace :single_doc do
104
+ desc "Run the common driver single-document benchmarking tests"
105
+ task :command do
106
+ puts "SINGLE DOC BENCHMARK:: COMMAND"
107
+ Mongo::Benchmarking::SingleDoc.run(:command)
108
+ end
109
+
110
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called TWEET.json.
111
+ task :find_one do
112
+ puts "SINGLE DOC BENCHMARK:: FIND ONE BY ID"
113
+ Mongo::Benchmarking::SingleDoc.run(:find_one)
114
+ end
115
+
116
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called SMALL_DOC.json.
117
+ task :insert_one_small do
118
+ puts "SINGLE DOC BENCHMARK:: INSERT ONE SMALL DOCUMENT"
119
+ Mongo::Benchmarking::SingleDoc.run(:insert_one_small)
120
+ end
121
+
122
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called LARGE_DOC.json.
123
+ task :insert_one_large do
124
+ puts "SINGLE DOC BENCHMARK:: INSERT ONE LARGE DOCUMENT"
125
+ Mongo::Benchmarking::SingleDoc.run(:insert_one_large)
126
+ end
127
+ end
128
+
129
+ namespace :multi_doc do
130
+ desc "Run the common driver multi-document benchmarking tests"
131
+
132
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called TWEET.json.
133
+ task :find_many do
134
+ puts "MULTI DOCUMENT BENCHMARK:: FIND MANY"
135
+ Mongo::Benchmarking::MultiDoc.run(:find_many)
136
+ end
137
+
138
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called SMALL_DOC.json.
139
+ task :bulk_insert_small do
140
+ puts "MULTI DOCUMENT BENCHMARK:: BULK INSERT SMALL"
141
+ Mongo::Benchmarking::MultiDoc.run(:bulk_insert_small)
142
+ end
143
+
144
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called LARGE_DOC.json.
145
+ task :bulk_insert_large do
146
+ puts "MULTI DOCUMENT BENCHMARK:: BULK INSERT LARGE"
147
+ Mongo::Benchmarking::MultiDoc.run(:bulk_insert_large)
148
+ end
149
+
150
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called GRIDFS_LARGE.
151
+ task :gridfs_upload do
152
+ puts "MULTI DOCUMENT BENCHMARK:: GRIDFS UPLOAD"
153
+ Mongo::Benchmarking::MultiDoc.run(:gridfs_upload)
154
+ end
155
+
156
+ # Requirement: A file in Mongo::Benchmarking::DATA_PATH, called GRIDFS_LARGE.
157
+ task :gridfs_download do
158
+ puts "MULTI DOCUMENT BENCHMARK:: GRIDFS DOWNLOAD"
159
+ Mongo::Benchmarking::MultiDoc.run(:gridfs_download)
160
+ end
161
+ end
162
+
163
+ namespace :parallel do
164
+ desc "Run the common driver parallel ETL benchmarking tests"
165
+
166
+ # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called LDJSON_MULTI,
167
+ # with the files used in this task.
168
+ task :import do
169
+ puts "PARALLEL ETL BENCHMARK:: IMPORT"
170
+ Mongo::Benchmarking::Parallel.run(:import)
171
+ end
172
+
173
+ # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called LDJSON_MULTI,
174
+ # with the files used in this task.
175
+ # Requirement: Another directory in "#{Mongo::Benchmarking::DATA_PATH}/LDJSON_MULTI"
176
+ # called 'output'.
177
+ task :export do
178
+ puts "PARALLEL ETL BENCHMARK:: EXPORT"
179
+ Mongo::Benchmarking::Parallel.run(:export)
180
+ end
181
+
182
+ # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called GRIDFS_MULTI,
183
+ # with the files used in this task.
184
+ task :gridfs_upload do
185
+ puts "PARALLEL ETL BENCHMARK:: GRIDFS UPLOAD"
186
+ Mongo::Benchmarking::Parallel.run(:gridfs_upload)
187
+ end
188
+
189
+ # Requirement: A directory in Mongo::Benchmarking::DATA_PATH, called GRIDFS_MULTI,
190
+ # with the files used in this task.
191
+ # Requirement: Another directory in "#{Mongo::Benchmarking::DATA_PATH}/GRIDFS_MULTI"
192
+ # called 'output'.
193
+ task :gridfs_download do
194
+ puts "PARALLEL ETL BENCHMARK:: GRIDFS DOWNLOAD"
195
+ Mongo::Benchmarking::Parallel.run(:gridfs_download)
196
+ end
197
+ end
198
+ end
Binary file
@@ -166,7 +166,7 @@ module Mongo
166
166
  #
167
167
  # @since 2.1.0
168
168
  def upserted_ids
169
- @results[UPSERTED_IDS]
169
+ @results[UPSERTED_IDS] || []
170
170
  end
171
171
 
172
172
  # Validates the bulk write result.
@@ -77,18 +77,20 @@ module Mongo
77
77
 
78
78
  def combine_counts!(result)
79
79
  Result::FIELDS.each do |field|
80
- if result.respond_to?(field)
81
- results.merge!(field => (results[field] || 0) + result.send(field))
80
+ if result.respond_to?(field) && value = result.send(field)
81
+ results.merge!(field => (results[field] || 0) + value)
82
82
  end
83
83
  end
84
84
  end
85
85
 
86
86
  def combine_ids!(result)
87
87
  if result.respond_to?(Result::INSERTED_IDS)
88
- results.merge!(Result::INSERTED_IDS => result.inserted_ids)
88
+ results[Result::INSERTED_IDS] = (results[Result::INSERTED_IDS] || []) +
89
+ result.inserted_ids
89
90
  end
90
91
  if result.respond_to?(Result::UPSERTED)
91
- results.merge!(Result::UPSERTED_IDS => result.upserted.map{ |doc| doc['_id'] })
92
+ results[Result::UPSERTED_IDS] = (results[Result::UPSERTED_IDS] || []) +
93
+ result.upserted.map{ |doc| doc['_id'] }
92
94
  end
93
95
  end
94
96
 
@@ -109,7 +111,8 @@ module Mongo
109
111
 
110
112
  def combine_write_concern_errors!(result)
111
113
  if write_concern_errors = result.aggregate_write_concern_errors(count)
112
- results.merge!(Error::WRITE_CONCERN_ERRORS => write_concern_errors)
114
+ results[Error::WRITE_CONCERN_ERRORS] = (results[Error::WRITE_CONCERN_ERRORS] || []) +
115
+ write_concern_errors
113
116
  end
114
117
  end
115
118
  end
@@ -144,7 +144,8 @@ module Mongo
144
144
  #
145
145
  # @since 2.0.6
146
146
  def add_hosts?(description, servers)
147
- !!(member_of_this_set?(description) && !has_primary?(servers))
147
+ !!(member_of_this_set?(description) &&
148
+ (!has_primary?(servers) || description.primary?))
148
149
  end
149
150
 
150
151
  # Whether a description can be used to remove hosts from the cluster.
@@ -62,6 +62,11 @@ module Mongo
62
62
  #
63
63
  # @since 2.0.0
64
64
  BAD_VALUE = 2.freeze
65
+
66
+ # Constant for a Cursor not found error.
67
+ #
68
+ # @since 2.2.3
69
+ CURSOR_NOT_FOUND = 'Cursor not found.'
65
70
  end
66
71
  end
67
72
 
@@ -27,6 +27,9 @@ module Mongo
27
27
  # @return [ String ] message The error message parsed from the document.
28
28
  attr_reader :message
29
29
 
30
+ # @return [ Array<Protocol::Reply> ] replies The message replies.
31
+ attr_reader :replies
32
+
30
33
  # Create the new parser with the returned document.
31
34
  #
32
35
  # @example Create the new parser.
@@ -35,8 +38,9 @@ module Mongo
35
38
  # @param [ BSON::Document ] document The returned document.
36
39
  #
37
40
  # @since 2.0.0
38
- def initialize(document)
41
+ def initialize(document, replies = nil)
39
42
  @document = document || {}
43
+ @replies = replies
40
44
  parse!
41
45
  end
42
46
 
@@ -50,6 +54,7 @@ module Mongo
50
54
  parse_multiple(@message, WRITE_ERRORS)
51
55
  parse_single(@message, ERRMSG,
52
56
  document[WRITE_CONCERN_ERROR]) if document[WRITE_CONCERN_ERROR]
57
+ parse_flag(@message)
53
58
  end
54
59
 
55
60
  def parse_single(message, key, doc = document)
@@ -66,6 +71,12 @@ module Mongo
66
71
  end
67
72
  end
68
73
 
74
+ def parse_flag(message)
75
+ if replies && replies.first && replies.first.cursor_not_found?
76
+ append(message, CURSOR_NOT_FOUND)
77
+ end
78
+ end
79
+
69
80
  def append(message, error)
70
81
  if message.length > 1
71
82
  message.concat(", #{error}")
@@ -290,7 +290,7 @@ module Mongo
290
290
  end
291
291
 
292
292
  def parser
293
- @parser ||= Error::Parser.new(first_document)
293
+ @parser ||= Error::Parser.new(first_document, replies)
294
294
  end
295
295
 
296
296
  def first_document
@@ -298,7 +298,7 @@ module Mongo
298
298
  end
299
299
 
300
300
  def query_failure?
301
- replies.first && replies.first.query_failure?
301
+ replies.first && (replies.first.query_failure? || replies.first.cursor_not_found?)
302
302
  end
303
303
  end
304
304
  end
@@ -34,7 +34,11 @@ module Mongo
34
34
  def n_removed
35
35
  return 0 unless acknowledged?
36
36
  @replies.reduce(0) do |n, reply|
37
- n += reply.documents.first[Result::N]
37
+ if reply.documents.first[Result::N]
38
+ n += reply.documents.first[Result::N]
39
+ else
40
+ n
41
+ end
38
42
  end
39
43
  end
40
44
  end
@@ -46,9 +46,9 @@ module Mongo
46
46
  return 0 unless acknowledged?
47
47
  @replies.reduce(0) do |n, reply|
48
48
  if upsert?(reply)
49
- n += 1
49
+ n += reply.documents.first[UPSERTED].size
50
50
  else
51
- n += 0
51
+ n
52
52
  end
53
53
  end
54
54
  end
@@ -65,14 +65,22 @@ module Mongo
65
65
  return 0 unless acknowledged?
66
66
  @replies.reduce(0) do |n, reply|
67
67
  if upsert?(reply)
68
- n += 0
68
+ reply.documents.first[N] - n_upserted
69
69
  else
70
- n += reply.documents.first[N]
70
+ if reply.documents.first[N]
71
+ n += reply.documents.first[N]
72
+ else
73
+ n
74
+ end
71
75
  end
72
76
  end
73
77
  end
74
78
 
75
79
  # Gets the number of documents modified.
80
+ # Note that in a mixed sharded cluster a call to
81
+ # update could return nModified (>= 2.6) or not (<= 2.4).
82
+ # If any call does not return nModified we can't report
83
+ # a valid final count so set the field to nil.
76
84
  #
77
85
  # @example Get the modified count.
78
86
  # result.n_modified
@@ -83,7 +91,11 @@ module Mongo
83
91
  def n_modified
84
92
  return 0 unless acknowledged?
85
93
  @replies.reduce(0) do |n, reply|
86
- n += reply.documents.first[MODIFIED] || 0
94
+ if n && reply.documents.first[MODIFIED]
95
+ n += reply.documents.first[MODIFIED]
96
+ else
97
+ nil
98
+ end
87
99
  end
88
100
  end
89
101
 
@@ -155,12 +167,31 @@ module Mongo
155
167
  end
156
168
  end
157
169
  end
158
- alias :n_modified :n_matched
170
+
171
+ # Gets the number of documents modified.
172
+ #
173
+ # @example Get the modified count.
174
+ # result.n_modified
175
+ #
176
+ # @return [ Integer ] The number of documents modified.
177
+ #
178
+ # @since 2.2.3
179
+ def n_modified
180
+ return 0 unless acknowledged?
181
+ @replies.reduce(0) do |n, reply|
182
+ if upsert?(reply)
183
+ n
184
+ else
185
+ updated_existing?(reply) ? n += reply.documents.first[N] : n
186
+ end
187
+ end
188
+ end
159
189
 
160
190
  private
161
191
 
162
192
  def upsert?(reply)
163
- !updated_existing?(reply) && reply.documents.first[N] == 1
193
+ reply.documents.first[BulkWrite::Result::UPSERTED] ||
194
+ (!updated_existing?(reply) && reply.documents.first[N] == 1)
164
195
  end
165
196
 
166
197
  def updated_existing?(reply)
@@ -38,6 +38,18 @@ module Mongo
38
38
  flags.include?(:query_failure)
39
39
  end
40
40
 
41
+ # Determine if the reply had a cursor not found flag.
42
+ #
43
+ # @example Did the reply have a cursor not found flag.
44
+ # reply.cursor_not_found?
45
+ #
46
+ # @return [ true, false ] If the query cursor was not found.
47
+ #
48
+ # @since 2.2.3
49
+ def cursor_not_found?
50
+ flags.include?(:cursor_not_found)
51
+ end
52
+
41
53
  # Return the event payload for monitoring.
42
54
  #
43
55
  # @example Return the event payload.
@@ -147,7 +147,7 @@ module Mongo
147
147
  # Fields to exclude when comparing two descriptions.
148
148
  #
149
149
  # @since 2.0.6
150
- EXCLUDE_FOR_COMPARISON = [ LOCAL_TIME, ELECTION_ID, SET_VERSION ].freeze
150
+ EXCLUDE_FOR_COMPARISON = [ LOCAL_TIME ].freeze
151
151
 
152
152
  # @return [ Address ] address The server's address.
153
153
  attr_reader :address
@@ -570,7 +570,6 @@ module Mongo
570
570
  # @since 2.0.6
571
571
  def ==(other)
572
572
  return false if self.class != other.class
573
- return true if config == other.config
574
573
  compare_config(other)
575
574
  end
576
575
  alias_method :eql?, :==
@@ -168,9 +168,10 @@ module Mongo
168
168
  #
169
169
  # @since 2.0.0
170
170
  AUTH_MECH_MAP = {
171
- 'PLAIN' => :plain,
172
- 'MONGODB-CR' => :mongodb_cr,
173
- 'GSSAPI' => :gssapi
171
+ 'PLAIN' => :plain,
172
+ 'MONGODB-CR' => :mongodb_cr,
173
+ 'GSSAPI' => :gssapi,
174
+ 'SCRAM-SHA-1' => :scram
174
175
  }.freeze
175
176
 
176
177
  # Options that are allowed to appear more than once in the uri.