mongodb 3.1.13 → 3.2.2

This diff shows the publicly released contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
package/HISTORY.md CHANGED
@@ -2,6 +2,56 @@
 
  All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+ <a name="3.2.2"></a>
+ ## [3.2.2](https://github.com/mongodb/node-mongodb-native/compare/v3.2.1...v3.2.2) (2019-03-22)
+
+
+ ### Bug Fixes
+
+ * **asyncIterator:** stronger guard against importing async generator ([e0826fb](https://github.com/mongodb/node-mongodb-native/commit/e0826fb))
+
+
+ ### Features
+
+ * update to mongodb-core v3.2.2 ([868cfc3](https://github.com/mongodb/node-mongodb-native/commit/868cfc3))
+
+
+
+ <a name="3.2.1"></a>
+ ## [3.2.1](https://github.com/mongodb/node-mongodb-native/compare/v3.2.0...v3.2.1) (2019-03-21)
+
+
+ ### Features
+
+ * **core:** update to mongodb-core v3.2.1 ([30b0100](https://github.com/mongodb/node-mongodb-native/commit/30b0100))
+
+
+
+ <a name="3.2.0"></a>
+ # [3.2.0](https://github.com/mongodb/node-mongodb-native/compare/v3.1.13...v3.2.0) (2019-03-21)
+
+
+ ### Bug Fixes
+
+ * **aggregate:** do not send batchSize for aggregation with $out ([ddb8d90](https://github.com/mongodb/node-mongodb-native/commit/ddb8d90))
+ * **bulkWrite:** always count undefined values in bson size for bulk ([436d340](https://github.com/mongodb/node-mongodb-native/commit/436d340))
+ * **db_ops:** rename db to add user on ([79931af](https://github.com/mongodb/node-mongodb-native/commit/79931af))
+ * **mongo_client_ops:** only skip authentication if no authMechanism is specified ([3b6957d](https://github.com/mongodb/node-mongodb-native/commit/3b6957d))
+ * **mongo-client:** ensure close callback is called with client ([f39e881](https://github.com/mongodb/node-mongodb-native/commit/f39e881))
+
+
+ ### Features
+
+ * **core:** pin to mongodb-core v3.2.0 ([22af15a](https://github.com/mongodb/node-mongodb-native/commit/22af15a))
+ * **Cursor:** adds support for AsyncIterator in cursors ([b972c1e](https://github.com/mongodb/node-mongodb-native/commit/b972c1e))
+ * **db:** add database-level aggregation ([b629b21](https://github.com/mongodb/node-mongodb-native/commit/b629b21))
+ * **mongo-client:** remove deprecated `logout` and print warning ([542859d](https://github.com/mongodb/node-mongodb-native/commit/542859d))
+ * **topology-base:** support passing callbacks to `close` method ([7c111e0](https://github.com/mongodb/node-mongodb-native/commit/7c111e0))
+ * **transactions:** support pinning mongos for sharded txns ([3886127](https://github.com/mongodb/node-mongodb-native/commit/3886127))
+ * **unified-sdam:** backport unified SDAM to master for v3.2.0 ([79f33ca](https://github.com/mongodb/node-mongodb-native/commit/79f33ca))
+
+
+
  <a name="3.1.13"></a>
  ## [3.1.13](https://github.com/mongodb/node-mongodb-native/compare/v3.1.12...v3.1.13) (2019-01-23)
 
@@ -222,6 +272,19 @@ All notable changes to this project will be documented in this file. See [standa
 
 
 
+ <a name="3.1.1"></a>
+ ## [3.1.1](https://github.com/mongodb/node-mongodb-native/compare/v3.1.0...v3.1.1) (2018-07-05)
+
+
+ ### Bug Fixes
+
+ * **client-ops:** return transform map to map rather than function ([b8b4bfa](https://github.com/mongodb/node-mongodb-native/commit/b8b4bfa))
+ * **collection:** correctly shallow clone passed in options ([2e6c4fa](https://github.com/mongodb/node-mongodb-native/commit/2e6c4fa))
+ * **collection:** countDocuments throws error when query doesn't match docs ([4e83556](https://github.com/mongodb/node-mongodb-native/commit/4e83556))
+ * **server:** remove unnecessary print statement ([20e11b3](https://github.com/mongodb/node-mongodb-native/commit/20e11b3))
+
+
+
  <a name="3.1.0"></a>
  # [3.1.0](https://github.com/mongodb/node-mongodb-native/compare/v3.0.6...v3.1.0) (2018-06-27)
 
package/README.md CHANGED
@@ -40,6 +40,13 @@ Core Server (i.e. SERVER) project are **public**.
 
  Change history can be found in [`HISTORY.md`](HISTORY.md).
 
+ ### Compatibility
+
+ For version compatibility matrices, please refer to the following links:
+
+ * [MongoDB](https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#reference-compatibility-mongodb-node)
+ * [NodeJS](https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#reference-compatibility-language-node)
+
  # Installation
 
  The recommended way to get started using the Node.js 3.0 driver is by using the `npm` (Node Package Manager) to install the dependency in your project.
package/lib/aggregation_cursor.js CHANGED
@@ -4,6 +4,8 @@ const inherits = require('util').inherits;
  const MongoError = require('mongodb-core').MongoError;
  const Readable = require('stream').Readable;
  const CoreCursor = require('./cursor');
+ const deprecate = require('util').deprecate;
+ const SUPPORTS = require('./utils').SUPPORTS;
 
  /**
   * @fileOverview The **AggregationCursor** class is an internal class that embodies an aggregation cursor on MongoDB
@@ -129,6 +131,11 @@ inherits(AggregationCursor, Readable);
  for (var name in CoreCursor.prototype) {
    AggregationCursor.prototype[name] = CoreCursor.prototype[name];
  }
+ if (SUPPORTS.ASYNC_ITERATOR) {
+   AggregationCursor.prototype[
+     Symbol.asyncIterator
+   ] = require('./async/async_iterator').asyncIterator;
+ }
 
  /**
   * Set the batch size for the cursor.
@@ -153,10 +160,10 @@ AggregationCursor.prototype.batchSize = function(value) {
   * @param {object} document The geoNear stage document.
   * @return {AggregationCursor}
   */
- AggregationCursor.prototype.geoNear = function(document) {
+ AggregationCursor.prototype.geoNear = deprecate(function(document) {
    this.s.cmd.pipeline.push({ $geoNear: document });
    return this;
- };
+ }, 'The `$geoNear` stage is deprecated in MongoDB 4.0, and removed in version 4.2.');
 
  /**
   * Add a group stage to the aggregation pipeline
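With the cursor's `geoNear()` helper now wrapped in `deprecate`, the same stage can be supplied directly in the pipeline passed to `aggregate()`. A minimal sketch; the `coll` handle, the coordinates, and the required geospatial index are illustrative assumptions:

```js
// Instead of coll.aggregate().geoNear({ ... }), make $geoNear the first pipeline stage.
const cursor = coll.aggregate([
  {
    $geoNear: {
      near: { type: 'Point', coordinates: [-73.99, 40.73] },
      distanceField: 'dist.calculated',
      spherical: true
    }
  },
  { $limit: 5 }
]);
```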
@@ -0,0 +1,5 @@
+ {
+   "parserOptions": {
+     "ecmaVersion": 2018
+   }
+ }
package/lib/async/async_iterator.js ADDED
@@ -0,0 +1,15 @@
+ 'use strict';
+
+ async function* asyncIterator() {
+   while (true) {
+     const value = await this.next();
+     if (!value) {
+       await this.close();
+       return;
+     }
+
+     yield value;
+   }
+ }
+
+ exports.asyncIterator = asyncIterator;
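The generator above relies only on `this.next()` and `this.close()`, which is what lets the following hunks attach it to every cursor type via `Symbol.asyncIterator`. A rough consumption sketch, assuming Node.js 10+ and a `collection` handle obtained from a connected client:

```js
// for-await-of drives the generator: each step awaits cursor.next(), and the
// cursor is closed automatically once next() yields a falsy value.
async function printAll(collection) {
  for await (const doc of collection.find({})) {
    console.log(doc);
  }
}
```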
@@ -25,7 +25,11 @@ const isPromiseLike = require('../utils').isPromiseLike;
  function addToOperationsList(bulkOperation, docType, document) {
    // Get the bsonSize
    const bsonSize = bson.calculateObjectSize(document, {
-     checkKeys: false
+     checkKeys: false,
+
+     // Since we don't know what the user selected for BSON options here,
+     // err on the safe side, and check the size with ignoreUndefined: false.
+     ignoreUndefined: false
    });
 
    // Throw error if the doc is bigger than the max BSON size
@@ -25,7 +25,11 @@ const isPromiseLike = require('../utils').isPromiseLike;
  function addToOperationsList(bulkOperation, docType, document) {
    // Get the bsonSize
    const bsonSize = bson.calculateObjectSize(document, {
-     checkKeys: false
+     checkKeys: false,
+
+     // Since we don't know what the user selected for BSON options here,
+     // err on the safe side, and check the size with ignoreUndefined: false.
+     ignoreUndefined: false
    });
    // Throw error if the doc is bigger than the max BSON size
    if (bsonSize >= bulkOperation.s.maxBatchSizeBytes)
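Forcing `ignoreUndefined: false` makes the size estimate count `undefined` fields as if they will be serialized (as `null`), so a batch can never be under-counted relative to what is actually sent. A small sketch of the difference, assuming the standalone `bson` 1.x package (the driver builds its own BSON instance internally; names here are illustrative):

```js
const BSON = require('bson');
const bson = new BSON();

const doc = { _id: 1, maybe: undefined };

// Counting undefined fields yields a larger (safer) estimate than skipping them.
const counted = bson.calculateObjectSize(doc, { ignoreUndefined: false });
const skipped = bson.calculateObjectSize(doc, { ignoreUndefined: true });
console.log(counted >= skipped); // true
```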
package/lib/collection.js CHANGED
@@ -4,7 +4,6 @@ const deprecate = require('util').deprecate;
  const deprecateOptions = require('./utils').deprecateOptions;
  const checkCollectionName = require('./utils').checkCollectionName;
  const ObjectID = require('mongodb-core').BSON.ObjectID;
- const AggregationCursor = require('./aggregation_cursor');
  const MongoError = require('mongodb-core').MongoError;
  const toError = require('./utils').toError;
  const normalizeHintField = require('./utils').normalizeHintField;
@@ -19,10 +18,10 @@ const unordered = require('./bulk/unordered');
  const ordered = require('./bulk/ordered');
  const ChangeStream = require('./change_stream');
  const executeOperation = require('./utils').executeOperation;
- const applyWriteConcern = require('./utils').applyWriteConcern;
  const resolveReadPreference = require('./utils').resolveReadPreference;
 
  // Operations
+ const aggregate = require('./operations/aggregate').aggregate;
  const bulkWrite = require('./operations/collection_ops').bulkWrite;
  const checkForAtomicOperators = require('./operations/collection_ops').checkForAtomicOperators;
  const count = require('./operations/collection_ops').count;
@@ -46,12 +45,12 @@ const group = require('./operations/collection_ops').group;
  const indexes = require('./operations/collection_ops').indexes;
  const indexExists = require('./operations/collection_ops').indexExists;
  const indexInformation = require('./operations/collection_ops').indexInformation;
+ const insertMany = require('./operations/collection_ops').insertMany;
  const insertOne = require('./operations/collection_ops').insertOne;
  const isCapped = require('./operations/collection_ops').isCapped;
  const mapReduce = require('./operations/collection_ops').mapReduce;
  const optionsOp = require('./operations/collection_ops').optionsOp;
  const parallelCollectionScan = require('./operations/collection_ops').parallelCollectionScan;
- const prepareDocs = require('./operations/collection_ops').prepareDocs;
  const reIndex = require('./operations/collection_ops').reIndex;
  const removeDocuments = require('./operations/collection_ops').removeDocuments;
  const rename = require('./operations/collection_ops').rename;
@@ -464,21 +463,6 @@ Collection.prototype.insertOne = function(doc, options, callback) {
    return executeOperation(this.s.topology, insertOne, [this, doc, options, callback]);
  };
 
- function mapInsertManyResults(docs, r) {
-   const finalResult = {
-     result: { ok: 1, n: r.insertedCount },
-     ops: docs,
-     insertedCount: r.insertedCount,
-     insertedIds: r.insertedIds
-   };
-
-   if (r.getLastOp()) {
-     finalResult.result.opTime = r.getLastOp();
-   }
-
-   return finalResult;
- }
-
  /**
   * Inserts an array of documents into MongoDB. If documents passed in do not contain the **_id** field,
   * one will be added to each of the documents missing it by the driver, mutating the document. This behavior
@@ -502,33 +486,7 @@ Collection.prototype.insertMany = function(docs, options, callback) {
    if (typeof options === 'function') (callback = options), (options = {});
    options = options ? Object.assign({}, options) : { ordered: true };
 
-   if (!Array.isArray(docs) && typeof callback === 'function') {
-     return callback(
-       MongoError.create({ message: 'docs parameter must be an array of documents', driver: true })
-     );
-   } else if (!Array.isArray(docs)) {
-     return new this.s.promiseLibrary((resolve, reject) => {
-       reject(
-         MongoError.create({ message: 'docs parameter must be an array of documents', driver: true })
-       );
-     });
-   }
-
-   // If keep going set unordered
-   options['serializeFunctions'] = options['serializeFunctions'] || this.s.serializeFunctions;
-
-   docs = prepareDocs(this, docs, options);
-
-   // Generate the bulk write operations
-   const operations = [
-     {
-       insertMany: docs
-     }
-   ];
-
-   return executeOperation(this.s.topology, bulkWrite, [this, operations, options, callback], {
-     resultMutator: result => mapInsertManyResults(docs, result)
-   });
+   return executeOperation(this.s.topology, insertMany, [this, docs, options, callback]);
  };
 
  /**
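The argument validation, `_id` preparation, and result mapping that used to live here now sit behind `operations/collection_ops.insertMany`; the result shape shown in the removed `mapInsertManyResults` is still what callers receive. A hedged usage sketch (the `collection` handle and documents are illustrative):

```js
// Sketch: the resolved value still exposes insertedCount, insertedIds, ops and result.
async function seed(collection) {
  const r = await collection.insertMany([{ a: 1 }, { a: 2 }], { ordered: true });
  console.log(r.insertedCount); // 2
  console.log(r.insertedIds);   // { '0': ObjectID, '1': ObjectID }
}
```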
@@ -1434,7 +1392,7 @@ Collection.prototype.countDocuments = function(query, options, callback) {
  };
 
  /**
-  * The distinct command returns returns a list of distinct values for the given key across a collection.
+  * The distinct command returns a list of distinct values for the given key across a collection.
   * @method
   * @param {string} key Field of the document to find distinct values for.
   * @param {object} query The query for filtering the set of documents to which we apply the distinct filter.
@@ -1733,97 +1691,7 @@ Collection.prototype.aggregate = function(pipeline, options, callback) {
    pipeline = args;
  }
 
-   // Ignore readConcern option
-   let ignoreReadConcern = false;
-
-   // Build the command
-   const command = { aggregate: this.s.name, pipeline: pipeline };
-
-   // If out was specified
-   if (typeof options.out === 'string') {
-     pipeline.push({ $out: options.out });
-     // Ignore read concern
-     ignoreReadConcern = true;
-   } else if (pipeline.length > 0 && pipeline[pipeline.length - 1]['$out']) {
-     ignoreReadConcern = true;
-   }
-
-   // Decorate command with writeConcern if out has been specified
-   if (
-     pipeline.length > 0 &&
-     pipeline[pipeline.length - 1]['$out'] &&
-     this.s.topology.capabilities().commandsTakeWriteConcern
-   ) {
-     applyWriteConcern(command, { db: this.s.db, collection: this }, options);
-   }
-
-   // Have we specified collation
-   try {
-     decorateWithCollation(command, this, options);
-   } catch (err) {
-     if (typeof callback === 'function') return callback(err, null);
-     throw err;
-   }
-
-   // If we have bypassDocumentValidation set
-   if (options.bypassDocumentValidation === true) {
-     command.bypassDocumentValidation = options.bypassDocumentValidation;
-   }
-
-   // Do we have a readConcern specified
-   if (!ignoreReadConcern) {
-     decorateWithReadConcern(command, this, options);
-   }
-
-   // If we have allowDiskUse defined
-   if (options.allowDiskUse) command.allowDiskUse = options.allowDiskUse;
-   if (typeof options.maxTimeMS === 'number') command.maxTimeMS = options.maxTimeMS;
-
-   // If we are giving a hint
-   if (options.hint) command.hint = options.hint;
-
-   options = Object.assign({}, options);
-   // Ensure we have the right read preference inheritance
-   options.readPreference = resolveReadPreference(options, { db: this.s.db, collection: this });
-
-   // If explain has been specified add it
-   if (options.explain) {
-     if (command.readConcern || command.writeConcern) {
-       throw toError('"explain" cannot be used on an aggregate call with readConcern/writeConcern');
-     }
-     command.explain = options.explain;
-   }
-
-   if (typeof options.comment === 'string') command.comment = options.comment;
-
-   // Validate that cursor options is valid
-   if (options.cursor != null && typeof options.cursor !== 'object') {
-     throw toError('cursor options must be an object');
-   }
-
-   options.cursor = options.cursor || {};
-   if (options.batchSize) options.cursor.batchSize = options.batchSize;
-   command.cursor = options.cursor;
-
-   // promiseLibrary
-   options.promiseLibrary = this.s.promiseLibrary;
-
-   // Set the AggregationCursor constructor
-   options.cursorFactory = AggregationCursor;
-   if (typeof callback !== 'function') {
-     if (!this.s.topology.capabilities()) {
-       throw new MongoError('cannot connect to server');
-     }
-
-     // Allow disk usage command
-     if (typeof options.allowDiskUse === 'boolean') command.allowDiskUse = options.allowDiskUse;
-     if (typeof options.maxTimeMS === 'number') command.maxTimeMS = options.maxTimeMS;
-
-     // Execute the cursor
-     return this.s.topology.cursor(this.s.namespace, command, options);
-   }
-
-   return handleCallback(callback, null, this.s.topology.cursor(this.s.namespace, command, options));
+   return aggregate(this.s.db, this, pipeline, options, callback);
  };
 
  /**
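The inlined command construction above is replaced by a call to the shared `aggregate` operation; from the caller's side, invoking `aggregate()` without a callback still returns an `AggregationCursor`. A short sketch with an assumed `collection` handle and example pipeline:

```js
// Sketch: no callback passed, so aggregate() hands back an AggregationCursor.
function totalsByCustomer(collection, cb) {
  const cursor = collection.aggregate(
    [
      { $match: { status: 'A' } },
      { $group: { _id: '$cust_id', total: { $sum: '$amount' } } }
    ],
    { allowDiskUse: true }
  );

  cursor.toArray(cb);
}
```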
@@ -1874,7 +1742,7 @@ Collection.prototype.watch = function(pipeline, options) {
   * @param {Collection~parallelCollectionScanCallback} [callback] The command result callback
   * @return {Promise} returns Promise if no callback passed
   */
- Collection.prototype.parallelCollectionScan = function(options, callback) {
+ Collection.prototype.parallelCollectionScan = deprecate(function(options, callback) {
    if (typeof options === 'function') (callback = options), (options = { numCursors: 1 });
    // Set number of cursors to 1
    options.numCursors = options.numCursors || 1;
@@ -1894,7 +1762,7 @@ Collection.prototype.parallelCollectionScan = function(options, callback) {
    return executeOperation(this.s.topology, parallelCollectionScan, [this, options, callback], {
      skipSessions: true
    });
- };
+ }, 'parallelCollectionScan is deprecated in MongoDB v4.1');
 
  /**
   * Execute a geo search using a geo haystack index on a collection.
package/lib/command_cursor.js CHANGED
@@ -5,6 +5,7 @@ const ReadPreference = require('mongodb-core').ReadPreference;
  const MongoError = require('mongodb-core').MongoError;
  const Readable = require('stream').Readable;
  const CoreCursor = require('./cursor');
+ const SUPPORTS = require('./utils').SUPPORTS;
 
  /**
   * @fileOverview The **CommandCursor** class is an internal class that embodies a
@@ -156,6 +157,10 @@ for (var i = 0; i < methodsToInherit.length; i++) {
    CommandCursor.prototype[methodsToInherit[i]] = CoreCursor.prototype[methodsToInherit[i]];
  }
 
+ if (SUPPORTS.ASYNC_ITERATOR) {
+   CommandCursor.prototype[Symbol.asyncIterator] = require('./async/async_iterator').asyncIterator;
+ }
+
  /**
   * Set the ReadPreference for the cursor.
   * @method
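`CommandCursor` instances, such as the one returned by `db.listCollections()`, pick up the same `Symbol.asyncIterator`. A sketch assuming Node.js 10+ and a `db` handle from a connected client:

```js
// Enumerate collection names by iterating the CommandCursor with for-await-of.
async function listCollectionNames(db) {
  const names = [];
  for await (const info of db.listCollections()) {
    names.push(info.name);
  }
  return names;
}
```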
package/lib/cursor.js CHANGED
@@ -5,6 +5,7 @@ const PassThrough = require('stream').PassThrough;
  const inherits = require('util').inherits;
  const deprecate = require('util').deprecate;
  const handleCallback = require('./utils').handleCallback;
+ const SUPPORTS = require('./utils').SUPPORTS;
  const ReadPreference = require('mongodb-core').ReadPreference;
  const MongoError = require('mongodb-core').MongoError;
  const Readable = require('stream').Readable;
@@ -203,6 +204,10 @@ function Cursor(bson, ns, cmd, options, topology, topologyOptions) {
  // Inherit from Readable
  inherits(Cursor, Readable);
 
+ if (SUPPORTS.ASYNC_ITERATOR) {
+   Cursor.prototype[Symbol.asyncIterator] = require('./async/async_iterator').asyncIterator;
+ }
+
 
  // Map core cursor _next method so we can apply mapping
  Cursor.prototype._next = function() {
package/lib/db.js CHANGED
@@ -23,6 +23,7 @@ const CONSTANTS = require('./constants');
 
  // Operations
  const addUser = require('./operations/db_ops').addUser;
+ const aggregate = require('./operations/aggregate').aggregate;
  const collections = require('./operations/db_ops').collections;
  const createCollection = require('./operations/db_ops').createCollection;
  const createIndex = require('./operations/db_ops').createIndex;
@@ -263,6 +264,44 @@ Db.prototype.command = function(command, options, callback) {
    return executeOperation(this.s.topology, executeCommand, [this, command, options, callback]);
  };
 
+ /**
+  * Execute an aggregation framework pipeline against the database, needs MongoDB >= 3.6
+  * @method
+  * @param {object} [pipeline=[]] Array containing all the aggregation framework commands for the execution.
+  * @param {object} [options] Optional settings.
+  * @param {(ReadPreference|string)} [options.readPreference] The preferred read preference (ReadPreference.PRIMARY, ReadPreference.PRIMARY_PREFERRED, ReadPreference.SECONDARY, ReadPreference.SECONDARY_PREFERRED, ReadPreference.NEAREST).
+  * @param {object} [options.cursor] Return the query as cursor, on 2.6 > it returns as a real cursor on pre 2.6 it returns as an emulated cursor.
+  * @param {number} [options.cursor.batchSize] The batchSize for the cursor
+  * @param {boolean} [options.explain=false] Explain returns the aggregation execution plan (requires mongodb 2.6 >).
+  * @param {boolean} [options.allowDiskUse=false] allowDiskUse lets the server know if it can use disk to store temporary results for the aggregation (requires mongodb 2.6 >).
+  * @param {number} [options.maxTimeMS] maxTimeMS specifies a cumulative time limit in milliseconds for processing operations on the cursor. MongoDB interrupts the operation at the earliest following interrupt point.
+  * @param {boolean} [options.bypassDocumentValidation=false] Allow driver to bypass schema validation in MongoDB 3.2 or higher.
+  * @param {boolean} [options.raw=false] Return document results as raw BSON buffers.
+  * @param {boolean} [options.promoteLongs=true] Promotes Long values to number if they fit inside the 53 bits resolution.
+  * @param {boolean} [options.promoteValues=true] Promotes BSON values to native types where possible, set to false to only receive wrapper types.
+  * @param {boolean} [options.promoteBuffers=false] Promotes Binary BSON values to native Node Buffers.
+  * @param {object} [options.collation] Specify collation (MongoDB 3.4 or higher) settings for update operation (see 3.4 documentation for available fields).
+  * @param {string} [options.comment] Add a comment to an aggregation command
+  * @param {string|object} [options.hint] Add an index selection hint to an aggregation command
+  * @param {ClientSession} [options.session] optional session to use for this operation
+  * @param {Database~aggregationCallback} callback The command result callback
+  * @return {(null|AggregationCursor)}
+  */
+ Db.prototype.aggregate = function(pipeline, options, callback) {
+   if (typeof options === 'function') {
+     callback = options;
+     options = {};
+   }
+
+   // If we have no options or callback we are doing
+   // a cursor based aggregation
+   if (options == null && callback == null) {
+     options = {};
+   }
+
+   return aggregate(this, '1', pipeline, options, callback);
+ };
+
  /**
   * Return the Admin db instance
   * @method
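A hedged example of the new database-level aggregation: `$currentOp` is one of the stages that must run against the `admin` database rather than a collection (MongoDB 3.6+; the `client` variable and the required privileges are assumptions):

```js
// Sketch: without a callback, Db#aggregate returns an AggregationCursor.
function activeOperations(client, cb) {
  client
    .db('admin')
    .aggregate([{ $currentOp: { allUsers: true } }, { $match: { active: true } }])
    .toArray(cb);
}
```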
@@ -281,6 +320,13 @@ Db.prototype.admin = function() {
   * @param {Collection} collection The collection instance.
   */
 
+ /**
+  * The callback format for an aggregation call
+  * @callback Database~aggregationCallback
+  * @param {MongoError} error An error instance representing the error during the execution.
+  * @param {AggregationCursor} cursor The cursor if the aggregation command was executed successfully.
+  */
+
  const collectionKeys = [
    'pkFactory',
    'readPreference',
package/lib/mongo_client.js CHANGED
@@ -4,14 +4,14 @@ const ChangeStream = require('./change_stream');
  const Db = require('./db');
  const EventEmitter = require('events').EventEmitter;
  const executeOperation = require('./utils').executeOperation;
- const handleCallback = require('./utils').handleCallback;
  const inherits = require('util').inherits;
  const MongoError = require('mongodb-core').MongoError;
+ const deprecate = require('util').deprecate;
 
  // Operations
  const connectOp = require('./operations/mongo_client_ops').connectOp;
- const logout = require('./operations/mongo_client_ops').logout;
  const validOptions = require('./operations/mongo_client_ops').validOptions;
+ const closeOperation = require('./operations/mongo_client_ops').closeOperation;
 
  /**
   * @fileOverview The **MongoClient** class is a class that allows for making Connections to MongoDB.
@@ -110,6 +110,7 @@ const validOptions = require('./operations/mongo_client_ops').validOptions;
   * @param {boolean} [options.monitorCommands=false] Enable command monitoring for this client
   * @param {number} [options.minSize] If present, the connection pool will be initialized with minSize connections, and will never dip below minSize connections
   * @param {boolean} [options.useNewUrlParser=false] Determines whether or not to use the new url parser. Enables the new, spec-compliant, url parser shipped in the core driver. This url parser fixes a number of problems with the original parser, and aims to outright replace that parser in the near future.
+  * @param {boolean} [options.useUnifiedTopology] Enables the new unified topology layer
   * @param {MongoClient~connectCallback} [callback] The command result callback
   * @return {MongoClient} a MongoClient instance
   */
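The newly documented `useUnifiedTopology` flag opts in to the unified SDAM layer backported for v3.2.0. A minimal connection sketch; the URL and the pairing with `useNewUrlParser` are illustrative assumptions:

```js
const MongoClient = require('mongodb').MongoClient;

const client = new MongoClient('mongodb://localhost:27017', {
  useNewUrlParser: true,
  useUnifiedTopology: true // opt in to the backported unified topology layer
});

client.connect(err => {
  if (err) throw err;
  console.log('connected');
});
```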
@@ -170,25 +171,10 @@ MongoClient.prototype.connect = function(callback) {
    });
  };
 
- /**
-  * Logout user from server, fire off on all connections and remove all auth info
-  * @method
-  * @param {object} [options] Optional settings.
-  * @param {string} [options.dbName] Logout against different database than current.
-  * @param {Db~resultCallback} [callback] The command result callback
-  * @return {Promise} returns Promise if no callback passed
-  */
- MongoClient.prototype.logout = function(options, callback) {
+ MongoClient.prototype.logout = deprecate(function(options, callback) {
    if (typeof options === 'function') (callback = options), (options = {});
-   options = options || {};
-
-   // Establish the correct database name
-   const dbName = this.s.options.authSource ? this.s.options.authSource : this.s.options.dbName;
-
-   return executeOperation(this, logout, [this, dbName, callback], {
-     skipSessions: true
-   });
- };
+   if (typeof callback === 'function') callback(null, true);
+ }, 'Multiple authentication is prohibited on a connected client, please only authenticate once per MongoClient');
 
 
  /**
@@ -199,31 +185,8 @@ MongoClient.prototype.logout = function(options, callback) {
   */
  MongoClient.prototype.close = function(force, callback) {
    if (typeof force === 'function') (callback = force), (force = false);
-
-   // Close the topology connection
-   if (this.topology) {
-     this.topology.close(force);
-   }
-   // Emit close event
-   this.emit('close', this);
-
-   // Fire close event on any cached db instances
-   for (const name in this.s.dbCache) {
-     this.s.dbCache[name].emit('close');
-   }
-
-   // Remove listeners after emit
-   this.removeAllListeners('close');
-
-   // Callback after next event loop tick
-   if (typeof callback === 'function')
-     return process.nextTick(() => {
-       handleCallback(callback, null);
-     });
-
-   // Return dummy promise
-   return new this.s.promiseLibrary(resolve => {
-     resolve();
+   return executeOperation(this, closeOperation, [this, force, callback], {
+     skipSessions: true
    });
  };
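`close` is now routed through `executeOperation` and the new `closeOperation`, so both the callback and promise forms are preserved. A short sketch assuming an already connected `client`:

```js
// Sketch: force-close the topology, then clean up in the callback.
client.close(true, err => {
  if (err) throw err;
  console.log('topology closed');
});

// With no callback, close() still returns a promise:
// client.close().then(() => console.log('topology closed'));
```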