mongodb 6.18.0-dev.20250730.sha.2ef6c10c → 6.18.0-dev.20250801.sha.aac76296
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/bulk/common.js +19 -32
- package/lib/bulk/common.js.map +1 -1
- package/lib/collection.js +49 -11
- package/lib/collection.js.map +1 -1
- package/lib/db.js +11 -4
- package/lib/db.js.map +1 -1
- package/lib/operations/create_collection.js +52 -42
- package/lib/operations/create_collection.js.map +1 -1
- package/lib/operations/drop.js +46 -36
- package/lib/operations/drop.js.map +1 -1
- package/lib/operations/execute_operation.js +1 -0
- package/lib/operations/execute_operation.js.map +1 -1
- package/lib/operations/insert.js +2 -43
- package/lib/operations/insert.js.map +1 -1
- package/lib/utils.js +8 -13
- package/lib/utils.js.map +1 -1
- package/package.json +2 -2
- package/src/bulk/common.ts +21 -41
- package/src/collection.ts +69 -36
- package/src/db.ts +15 -18
- package/src/operations/create_collection.ts +81 -64
- package/src/operations/drop.ts +61 -47
- package/src/operations/execute_operation.ts +1 -1
- package/src/operations/insert.ts +3 -62
- package/src/utils.ts +10 -25
- package/lib/operations/bulk_write.js +0 -39
- package/lib/operations/bulk_write.js.map +0 -1
- package/lib/operations/collections.js +0 -33
- package/lib/operations/collections.js.map +0 -1
- package/lib/operations/is_capped.js +0 -28
- package/lib/operations/is_capped.js.map +0 -1
- package/lib/operations/options_operation.js +0 -28
- package/lib/operations/options_operation.js.map +0 -1
- package/src/operations/bulk_write.ts +0 -64
- package/src/operations/collections.ts +0 -47
- package/src/operations/is_capped.ts +0 -35
- package/src/operations/options_operation.ts +0 -35
package/src/collection.ts
CHANGED

@@ -1,5 +1,10 @@
 import { type BSONSerializeOptions, type Document, resolveBSONOptions } from './bson';
-import type {
+import type {
+  AnyBulkWriteOperation,
+  BulkOperationBase,
+  BulkWriteOptions,
+  BulkWriteResult
+} from './bulk/common';
 import { OrderedBulkOperation } from './bulk/ordered';
 import { UnorderedBulkOperation } from './bulk/unordered';
 import { ChangeStream, type ChangeStreamDocument, type ChangeStreamOptions } from './change_stream';
@@ -11,7 +16,7 @@ import {
   type ListSearchIndexesOptions
 } from './cursor/list_search_indexes_cursor';
 import type { Db } from './db';
-import { MongoInvalidArgumentError, MongoOperationTimeoutError } from './error';
+import { MongoAPIError, MongoInvalidArgumentError, MongoOperationTimeoutError } from './error';
 import type { MongoClient, PkFactory } from './mongo_client';
 import type {
   Abortable,
@@ -24,7 +29,6 @@ import type {
   WithoutId
 } from './mongo_types';
 import type { AggregateOptions } from './operations/aggregate';
-import { BulkWriteOperation } from './operations/bulk_write';
 import { CountOperation, type CountOptions } from './operations/count';
 import {
   DeleteManyOperation,
@@ -33,12 +37,12 @@ import {
   type DeleteResult
 } from './operations/delete';
 import { DistinctOperation, type DistinctOptions } from './operations/distinct';
-import {
+import { type DropCollectionOptions } from './operations/drop';
 import {
   EstimatedDocumentCountOperation,
   type EstimatedDocumentCountOptions
 } from './operations/estimated_document_count';
-import { executeOperation } from './operations/execute_operation';
+import { autoConnect, executeOperation } from './operations/execute_operation';
 import type { FindOptions } from './operations/find';
 import {
   FindOneAndDeleteOperation,
@@ -61,15 +65,12 @@ import {
   type ListIndexesOptions
 } from './operations/indexes';
 import {
-  InsertManyOperation,
   type InsertManyResult,
   InsertOneOperation,
   type InsertOneOptions,
   type InsertOneResult
 } from './operations/insert';
-import { IsCappedOperation } from './operations/is_capped';
 import type { Hint, OperationOptions } from './operations/operation';
-import { OptionsOperation } from './operations/options_operation';
 import { RenameOperation, type RenameOptions } from './operations/rename';
 import {
   CreateSearchIndexesOperation,
@@ -305,14 +306,31 @@ export class Collection<TSchema extends Document = Document> {
     docs: ReadonlyArray<OptionalUnlessRequiredId<TSchema>>,
     options?: BulkWriteOptions
   ): Promise<InsertManyResult<TSchema>> {
-
-
-
-
-
-
-
-
+    if (!Array.isArray(docs)) {
+      throw new MongoInvalidArgumentError('Argument "docs" must be an array of documents');
+    }
+    options = resolveOptions(this, options ?? {});
+
+    const acknowledged = WriteConcern.fromOptions(options)?.w !== 0;
+
+    try {
+      const res = await this.bulkWrite(
+        docs.map(doc => ({ insertOne: { document: doc } })),
+        options
+      );
+      return {
+        acknowledged,
+        insertedCount: res.insertedCount,
+        insertedIds: res.insertedIds
+      };
+    } catch (err) {
+      if (err && err.message === 'Operation must be an object with an operation key') {
+        throw new MongoInvalidArgumentError(
+          'Collection.insertMany() cannot be called with an array that has null/undefined values'
+        );
+      }
+      throw err;
+    }
   }

   /**
@@ -342,14 +360,28 @@ export class Collection<TSchema extends Document = Document> {
       throw new MongoInvalidArgumentError('Argument "operations" must be an array of documents');
     }

-
-
-
-
-
-
-    )
-
+    options = resolveOptions(this, options ?? {});
+
+    // TODO(NODE-7071): remove once the client doesn't need to be connected to construct
+    // bulk operations
+    const isConnected = this.client.topology != null;
+    if (!isConnected) {
+      await autoConnect(this.client);
+    }
+
+    // Create the bulk operation
+    const bulk: BulkOperationBase =
+      options.ordered === false
+        ? this.initializeUnorderedBulkOp(options)
+        : this.initializeOrderedBulkOp(options);
+
+    // for each op go through and add to the bulk
+    for (const operation of operations) {
+      bulk.raw(operation);
+    }
+
+    // Execute the bulk
+    return await bulk.execute({ ...options });
   }

   /**
@@ -491,10 +523,7 @@ export class Collection<TSchema extends Document = Document> {
    * @param options - Optional settings for the command
    */
   async drop(options?: DropCollectionOptions): Promise<boolean> {
-    return await
-      this.client,
-      new DropCollectionOperation(this.s.db, this.collectionName, options)
-    );
+    return await this.s.db.dropCollection(this.collectionName, options);
   }

   /**
@@ -557,10 +586,16 @@ export class Collection<TSchema extends Document = Document> {
    * @param options - Optional settings for the command
    */
   async options(options?: OperationOptions): Promise<Document> {
-
-
-
-
+    options = resolveOptions(this, options);
+    const [collection] = await this.s.db
+      .listCollections({ name: this.collectionName }, { ...options, nameOnly: false })
+      .toArray();
+
+    if (collection == null || collection.options == null) {
+      throw new MongoAPIError(`collection ${this.namespace} not found`);
+    }
+
+    return collection.options;
   }

   /**
@@ -569,10 +604,8 @@ export class Collection<TSchema extends Document = Document> {
    * @param options - Optional settings for the command
    */
   async isCapped(options?: OperationOptions): Promise<boolean> {
-
-
-      new IsCappedOperation(this as TODO_NODE_3286, resolveOptions(this, options))
-    );
+    const { capped } = await this.options(options);
+    return Boolean(capped);
   }

   /**
package/src/db.ts
CHANGED

@@ -10,14 +10,10 @@ import { MongoInvalidArgumentError } from './error';
 import type { MongoClient, PkFactory } from './mongo_client';
 import type { Abortable, TODO_NODE_3286 } from './mongo_types';
 import type { AggregateOptions } from './operations/aggregate';
-import {
+import { type CreateCollectionOptions, createCollections } from './operations/create_collection';
 import {
-  CreateCollectionOperation,
-  type CreateCollectionOptions
-} from './operations/create_collection';
-import {
-  DropCollectionOperation,
   type DropCollectionOptions,
+  dropCollections,
   DropDatabaseOperation,
   type DropDatabaseOptions
 } from './operations/drop';
@@ -242,10 +238,8 @@ export class Db {
     name: string,
     options?: CreateCollectionOptions
   ): Promise<Collection<TSchema>> {
-
-
-      new CreateCollectionOperation(this, name, resolveOptions(this, options)) as TODO_NODE_3286
-    );
+    options = resolveOptions(this, options);
+    return await createCollections<TSchema>(this, name, options);
   }

   /**
@@ -411,10 +405,8 @@ export class Db {
    * @param options - Optional settings for the command
    */
   async dropCollection(name: string, options?: DropCollectionOptions): Promise<boolean> {
-
-
-      new DropCollectionOperation(this, name, resolveOptions(this, options))
-    );
+    options = resolveOptions(this, options);
+    return await dropCollections(this, name, options);
   }

   /**
@@ -435,10 +427,15 @@ export class Db {
    * @param options - Optional settings for the command
    */
   async collections(options?: ListCollectionsOptions): Promise<Collection[]> {
-
-
-
-
+    options = resolveOptions(this, options);
+    const collections = await this.listCollections({}, { ...options, nameOnly: true }).toArray();
+
+    return collections
+      .filter(
+        // Filter collections removing any illegal ones
+        ({ name }) => !name.includes('$')
+      )
+      .map(({ name }) => new Collection(this, name, this.s.options));
   }

   /**
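A corresponding sketch at the Db level (again with placeholder names): createCollection() and dropCollection() keep their signatures while routing through the new createCollections()/dropCollections() helpers, and collections() is now a nameOnly listCollections() pass that still filters out names containing '$'.

```ts
import { MongoClient } from 'mongodb';

// Placeholder URI and database name.
const client = new MongoClient('mongodb://localhost:27017');
const db = client.db('app');

async function main() {
  // Same signature and return type as before; internally resolves options and
  // calls the new createCollections() helper.
  const events = await db.createCollection('events', { capped: true, size: 1024 * 1024 });

  // collections() lists names via listCollections({}, { nameOnly: true })
  // and drops any name containing '$'.
  for (const coll of await db.collections()) {
    console.log(coll.collectionName);
  }

  // dropCollection() routes through the dropCollections() helper.
  await db.dropCollection(events.collectionName);
}

main().finally(() => client.close());
```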
package/src/operations/create_collection.ts
CHANGED

@@ -9,8 +9,9 @@ import { MongoCompatibilityError } from '../error';
 import type { PkFactory } from '../mongo_client';
 import type { Server } from '../sdam/server';
 import type { ClientSession } from '../sessions';
-import {
+import { TimeoutContext } from '../timeout';
 import { CommandOperation, type CommandOperationOptions } from './command';
+import { executeOperation } from './execute_operation';
 import { CreateIndexesOperation } from './indexes';
 import { Aspect, defineAspects } from './operation';

@@ -135,79 +136,95 @@ export class CreateCollectionOperation extends CommandOperation<Collection> {
     const name = this.name;
     const options = this.options;

-    const
-
-
-
-    if (encryptedFields) {
-      // Creating a QE collection required min server of 7.0.0
-      // TODO(NODE-5353): Get wire version information from connection.
-      if (
-        !server.loadBalanced &&
-        server.description.maxWireVersion < MIN_SUPPORTED_QE_WIRE_VERSION
-      ) {
-        throw new MongoCompatibilityError(
-          `${INVALID_QE_VERSION} The minimum server version required is ${MIN_SUPPORTED_QE_SERVER_VERSION}`
-        );
-      }
-      // Create auxilliary collections for queryable encryption support.
-      const escCollection = encryptedFields.escCollection ?? `enxcol_.${name}.esc`;
-      const ecocCollection = encryptedFields.ecocCollection ?? `enxcol_.${name}.ecoc`;
-
-      for (const collectionName of [escCollection, ecocCollection]) {
-        const createOp = new CreateCollectionOperation(db, collectionName, {
-          clusteredIndex: {
-            key: { _id: 1 },
-            unique: true
-          }
-        });
-        await createOp.executeWithoutEncryptedFieldsCheck(server, session, timeoutContext);
+    const cmd: Document = { create: name };
+    for (const [option, value] of Object.entries(options)) {
+      if (value != null && typeof value !== 'function' && !ILLEGAL_COMMAND_FIELDS.has(option)) {
+        cmd[option] = value;
       }
+    }

-
-
+    // otherwise just execute the command
+    await super.executeCommand(server, session, cmd, timeoutContext);
+    return new Collection(db, name, options);
+  }
+}
+
+export async function createCollections<TSchema extends Document>(
+  db: Db,
+  name: string,
+  options: CreateCollectionOptions
+): Promise<Collection<TSchema>> {
+  const timeoutContext = TimeoutContext.create({
+    session: options.session,
+    serverSelectionTimeoutMS: db.client.s.options.serverSelectionTimeoutMS,
+    waitQueueTimeoutMS: db.client.s.options.waitQueueTimeoutMS,
+    timeoutMS: options.timeoutMS
+  });
+
+  const encryptedFields: Document | undefined =
+    options.encryptedFields ??
+    db.client.s.options.autoEncryption?.encryptedFieldsMap?.[`${db.databaseName}.${name}`];
+
+  if (encryptedFields) {
+    class CreateSupportingFLEv2CollectionOperation extends CreateCollectionOperation {
+      override execute(
+        server: Server,
+        session: ClientSession | undefined,
+        timeoutContext: TimeoutContext
+      ): Promise<Collection> {
+        // Creating a QE collection required min server of 7.0.0
+        // TODO(NODE-5353): Get wire version information from connection.
+        if (
+          !server.loadBalanced &&
+          server.description.maxWireVersion < MIN_SUPPORTED_QE_WIRE_VERSION
+        ) {
+          throw new MongoCompatibilityError(
+            `${INVALID_QE_VERSION} The minimum server version required is ${MIN_SUPPORTED_QE_SERVER_VERSION}`
+          );
+        }
+
+        return super.execute(server, session, timeoutContext);
       }
     }

-
-
-
-
-
-
-
-
-
-
-
+    // Create auxilliary collections for queryable encryption support.
+    const escCollection = encryptedFields.escCollection ?? `enxcol_.${name}.esc`;
+    const ecocCollection = encryptedFields.ecocCollection ?? `enxcol_.${name}.ecoc`;
+
+    for (const collectionName of [escCollection, ecocCollection]) {
+      const createOp = new CreateSupportingFLEv2CollectionOperation(db, collectionName, {
+        clusteredIndex: {
+          key: { _id: 1 },
+          unique: true
+        },
+        session: options.session
+      });
+      await executeOperation(db.client, createOp, timeoutContext);
    }

-
+    if (!options.encryptedFields) {
+      options = { ...options, encryptedFields };
+    }
  }

-
-
-
-      timeoutContext
-    )
-
-
-
-
-
-
-
-
-
-    ) {
-      cmd[n] = (options as any)[n];
-    }
-  }
-  // otherwise just execute the command
-  await super.executeCommand(server, session, cmd, timeoutContext);
-  return new Collection(db, name, options);
+  const coll = await executeOperation(
+    db.client,
+    new CreateCollectionOperation(db, name, options),
+    timeoutContext
+  );
+
+  if (encryptedFields) {
+    // Create the required index for queryable encryption support.
+    const createIndexOp = CreateIndexesOperation.fromIndexSpecification(
+      db,
+      name,
+      { __safeContent__: 1 },
+      { session: options.session }
+    );
+    await executeOperation(db.client, createIndexOp, timeoutContext);
  }
+
+  return coll as unknown as Collection<TSchema>;
 }

 defineAspects(CreateCollectionOperation, [Aspect.WRITE_OPERATION]);
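The rewritten CreateCollectionOperation.execute() is now just command construction plus executeCommand(); the encrypted-fields orchestration moved into the exported createCollections() helper. A self-contained sketch of the option-copying step shown in the hunk (the ILLEGAL_COMMAND_FIELDS set below is a hypothetical subset invented for the sketch; the real constant lives in create_collection.ts):

```ts
type Document = Record<string, unknown>;

// Hypothetical subset of the driver's ILLEGAL_COMMAND_FIELDS, included only to
// make this sketch self-contained.
const ILLEGAL_COMMAND_FIELDS = new Set(['session', 'readConcern', 'writeConcern', 'raw', 'pkFactory']);

// Mirrors the loop in the diff: copy every defined, non-function option that is
// not a driver-level field onto the `create` command document.
function buildCreateCommand(name: string, options: Document): Document {
  const cmd: Document = { create: name };
  for (const [option, value] of Object.entries(options)) {
    if (value != null && typeof value !== 'function' && !ILLEGAL_COMMAND_FIELDS.has(option)) {
      cmd[option] = value;
    }
  }
  return cmd;
}

// buildCreateCommand('events', { capped: true, size: 1024, raw: true })
// -> { create: 'events', capped: true, size: 1024 }
```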
package/src/operations/drop.ts
CHANGED

@@ -1,10 +1,12 @@
 import type { Document } from '../bson';
+import { CursorTimeoutContext } from '../cursor/abstract_cursor';
 import type { Db } from '../db';
 import { MONGODB_ERROR_CODES, MongoServerError } from '../error';
 import type { Server } from '../sdam/server';
 import type { ClientSession } from '../sessions';
-import {
+import { TimeoutContext } from '../timeout';
 import { CommandOperation, type CommandOperationOptions } from './command';
+import { executeOperation } from './execute_operation';
 import { Aspect, defineAspects } from './operation';

 /** @public */
@@ -16,12 +18,10 @@ export interface DropCollectionOptions extends CommandOperationOptions {
 /** @internal */
 export class DropCollectionOperation extends CommandOperation<boolean> {
   override options: DropCollectionOptions;
-  db: Db;
   name: string;

   constructor(db: Db, name: string, options: DropCollectionOptions = {}) {
     super(db, options);
-    this.db = db;
     this.options = options;
     this.name = name;
   }
@@ -35,56 +35,70 @@ export class DropCollectionOperation extends CommandOperation<boolean> {
     session: ClientSession | undefined,
     timeoutContext: TimeoutContext
   ): Promise<boolean> {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    await super.executeCommand(server, session, { drop: this.name }, timeoutContext);
+    return true;
+  }
+}
+
+export async function dropCollections(
+  db: Db,
+  name: string,
+  options: DropCollectionOptions
+): Promise<boolean> {
+  const timeoutContext = TimeoutContext.create({
+    session: options.session,
+    serverSelectionTimeoutMS: db.client.s.options.serverSelectionTimeoutMS,
+    waitQueueTimeoutMS: db.client.s.options.waitQueueTimeoutMS,
+    timeoutMS: options.timeoutMS
+  });
+
+  const encryptedFieldsMap = db.client.s.options.autoEncryption?.encryptedFieldsMap;
+  let encryptedFields: Document | undefined =
+    options.encryptedFields ?? encryptedFieldsMap?.[`${db.databaseName}.${name}`];
+
+  if (!encryptedFields && encryptedFieldsMap) {
+    // If the MongoClient was configured with an encryptedFieldsMap,
+    // and no encryptedFields config was available in it or explicitly
+    // passed as an argument, the spec tells us to look one up using
+    // listCollections().
+    const listCollectionsResult = await db
+      .listCollections(
+        { name },
+        {
+          nameOnly: false,
+          session: options.session,
+          timeoutContext: new CursorTimeoutContext(timeoutContext, Symbol())
+        }
+      )
+      .toArray();
+    encryptedFields = listCollectionsResult?.[0]?.options?.encryptedFields;
+  }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
+  if (encryptedFields) {
+    const escCollection = encryptedFields.escCollection || `enxcol_.${name}.esc`;
+    const ecocCollection = encryptedFields.ecocCollection || `enxcol_.${name}.ecoc`;
+
+    for (const collectionName of [escCollection, ecocCollection]) {
+      // Drop auxilliary collections, ignoring potential NamespaceNotFound errors.
+      const dropOp = new DropCollectionOperation(db, collectionName, options);
+      try {
+        await executeOperation(db.client, dropOp, timeoutContext);
+      } catch (err) {
+        if (
+          !(err instanceof MongoServerError) ||
+          err.code !== MONGODB_ERROR_CODES.NamespaceNotFound
+        ) {
+          throw err;
         }
       }
     }
-
-    return await this.executeWithoutEncryptedFieldsCheck(server, session, timeoutContext);
   }

-
-
-
-      timeoutContext
-    )
-    await super.executeCommand(server, session, { drop: this.name }, timeoutContext);
-    return true;
-  }
+  return await executeOperation(
+    db.client,
+    new DropCollectionOperation(db, name, options),
+    timeoutContext
+  );
 }

 /** @public */
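The new dropCollections() helper drops the enxcol_.<name>.esc / enxcol_.<name>.ecoc auxiliary collections before the main one, swallowing only NamespaceNotFound so a missing auxiliary collection is not treated as an error. A standalone sketch of that tolerance pattern (not a driver export; 26 is the server's NamespaceNotFound error code):

```ts
import { type Db, MongoServerError } from 'mongodb';

const NAMESPACE_NOT_FOUND = 26; // server error code for NamespaceNotFound

// Attempt the drop, treat "namespace not found" as a no-op, rethrow anything else —
// the same handling the diff applies to the auxiliary enxcol_ drops.
export async function dropIgnoringNamespaceNotFound(db: Db, name: string): Promise<boolean> {
  try {
    return await db.dropCollection(name);
  } catch (err) {
    if (err instanceof MongoServerError && err.code === NAMESPACE_NOT_FOUND) {
      return false;
    }
    throw err;
  }
}
```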
package/src/operations/execute_operation.ts
CHANGED

@@ -129,7 +129,7 @@ export async function executeOperation<
  * Connects a client if it has not yet been connected
  * @internal
  */
-async function autoConnect(client: MongoClient): Promise<Topology> {
+export async function autoConnect(client: MongoClient): Promise<Topology> {
   if (client.topology == null) {
     if (client.s.hasBeenClosed) {
       throw new MongoNotConnectedError('Client must be connected before running operations');
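autoConnect() is only promoted from module-private to exported here, so that Collection.bulkWrite() (above) can connect the client before constructing a bulk operation. From user code this is the familiar implicit-connect behaviour; a sketch with a placeholder URI:

```ts
import { MongoClient } from 'mongodb';

// Placeholder URI; illustrates implicit connection only.
const client = new MongoClient('mongodb://localhost:27017');

async function main() {
  // No explicit client.connect(): the first operation connects the topology,
  // which is the same autoConnect() path bulkWrite() now calls directly.
  const collection = client.db('test').collection('items');
  await collection.bulkWrite([{ insertOne: { document: { x: 1 } } }]);
}

main().finally(() => client.close());
```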
package/src/operations/insert.ts
CHANGED

@@ -1,16 +1,14 @@
 import type { Document } from '../bson';
 import type { BulkWriteOptions } from '../bulk/common';
 import type { Collection } from '../collection';
-import {
+import { MongoServerError } from '../error';
 import type { InferIdType } from '../mongo_types';
 import type { Server } from '../sdam/server';
 import type { ClientSession } from '../sessions';
 import { type TimeoutContext } from '../timeout';
 import { maybeAddIdToDocuments, type MongoDBNamespace } from '../utils';
-import { WriteConcern } from '../write_concern';
-import { BulkWriteOperation } from './bulk_write';
 import { CommandOperation, type CommandOperationOptions } from './command';
-import {
+import { Aspect, defineAspects } from './operation';

 /** @internal */
 export class InsertOperation extends CommandOperation<Document> {
@@ -73,7 +71,7 @@ export interface InsertOneResult<TSchema = Document> {

 export class InsertOneOperation extends InsertOperation {
   constructor(collection: Collection, doc: Document, options: InsertOneOptions) {
-    super(collection.s.namespace, maybeAddIdToDocuments(collection,
+    super(collection.s.namespace, [maybeAddIdToDocuments(collection, doc, options)], options);
   }

   override async execute(
@@ -105,62 +103,5 @@ export interface InsertManyResult<TSchema = Document> {
   insertedIds: { [key: number]: InferIdType<TSchema> };
 }

-/** @internal */
-export class InsertManyOperation extends AbstractOperation<InsertManyResult> {
-  override options: BulkWriteOptions;
-  collection: Collection;
-  docs: ReadonlyArray<Document>;
-
-  constructor(collection: Collection, docs: ReadonlyArray<Document>, options: BulkWriteOptions) {
-    super(options);
-
-    if (!Array.isArray(docs)) {
-      throw new MongoInvalidArgumentError('Argument "docs" must be an array of documents');
-    }
-
-    this.options = options;
-    this.collection = collection;
-    this.docs = docs;
-  }
-
-  override get commandName() {
-    return 'insert' as const;
-  }
-
-  override async execute(
-    server: Server,
-    session: ClientSession | undefined,
-    timeoutContext: TimeoutContext
-  ): Promise<InsertManyResult> {
-    const coll = this.collection;
-    const options = { ...this.options, ...this.bsonOptions, readPreference: this.readPreference };
-    const writeConcern = WriteConcern.fromOptions(options);
-    const bulkWriteOperation = new BulkWriteOperation(
-      coll,
-      this.docs.map(document => ({
-        insertOne: { document }
-      })),
-      options
-    );
-
-    try {
-      const res = await bulkWriteOperation.execute(server, session, timeoutContext);
-      return {
-        acknowledged: writeConcern?.w !== 0,
-        insertedCount: res.insertedCount,
-        insertedIds: res.insertedIds
-      };
-    } catch (err) {
-      if (err && err.message === 'Operation must be an object with an operation key') {
-        throw new MongoInvalidArgumentError(
-          'Collection.insertMany() cannot be called with an array that has null/undefined values'
-        );
-      }
-      throw err;
-    }
-  }
-}
-
 defineAspects(InsertOperation, [Aspect.RETRYABLE, Aspect.WRITE_OPERATION]);
 defineAspects(InsertOneOperation, [Aspect.RETRYABLE, Aspect.WRITE_OPERATION]);
-defineAspects(InsertManyOperation, [Aspect.WRITE_OPERATION]);