@toa.io/storages.mongodb 1.0.0-alpha.5 → 1.0.0-alpha.50
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- package/package.json +8 -8
- package/src/client.js +157 -0
- package/src/factory.js +4 -4
- package/src/record.js +6 -19
- package/src/storage.js +174 -52
- package/src/translate.js +13 -5
- package/test/record.test.js +0 -15
- package/src/connection.js +0 -103
- package/test/connection.test.js +0 -58

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@toa.io/storages.mongodb",
-  "version": "1.0.0-alpha.5",
+  "version": "1.0.0-alpha.50",
   "description": "Toa MongoDB Storage Connector",
   "author": "temich <tema.gurtovoy@gmail.com>",
   "homepage": "https://github.com/toa-io/toa#readme",
@@ -19,13 +19,13 @@
     "test": "echo \"Error: run tests from root\" && exit 1"
   },
   "dependencies": {
-    "@toa.io/console": "1.0.0-alpha.
-    "@toa.io/conveyor": "1.0.0-alpha.
-    "@toa.io/core": "1.0.0-alpha.
-    "@toa.io/generic": "1.0.0-alpha.
-    "@toa.io/pointer": "1.0.0-alpha.
-    "mongodb": "6.
+    "@toa.io/console": "1.0.0-alpha.50",
+    "@toa.io/conveyor": "1.0.0-alpha.50",
+    "@toa.io/core": "1.0.0-alpha.50",
+    "@toa.io/generic": "1.0.0-alpha.50",
+    "@toa.io/pointer": "1.0.0-alpha.50",
+    "mongodb": "6.7.0",
     "saslprep": "1.0.3"
   },
-  "gitHead": "
+  "gitHead": "514269d4b481150c8cd0db2e5971da6e9fe80ad9"
 }

package/src/client.js
ADDED

@@ -0,0 +1,157 @@
+'use strict'
+
+/**
+ * @typedef {import('mongodb').MongoClient} MongoClient
+ * @typedef {{ count: number, client: MongoClient }} Instance
+ * @typedef {import('@toa.io/core').Locator} Locator
+ */
+
+const { Connector } = require('@toa.io/core')
+const { resolve } = require('@toa.io/pointer')
+const { ID } = require('./deployment')
+const { MongoClient } = require('mongodb')
+
+/**
+ * @type {Record<string, Promise<Instance>>}
+ */
+const INSTANCES = {}
+
+class Client extends Connector {
+  /**
+   * @public
+   * @type {import('mongodb').Collection}
+   */
+  collection
+
+  /**
+   * @private
+   * @type {Locator}
+   */
+  locator
+
+  /**
+   * @private
+   * @type {Instance}
+   */
+  instance
+
+  /**
+   * @private
+   * @type {string}
+   */
+  key
+
+  /**
+   * @param {Locator} locator
+   */
+  constructor (locator) {
+    super()
+
+    this.locator = locator
+  }
+
+  /**
+   * @protected
+   * @override
+   * @return {Promise<void>}
+   */
+  async open () {
+    const urls = await this.resolveURLs()
+    const dbname = this.resolveDB()
+    const collname = this.locator.lowercase
+
+    this.key = getKey(dbname, urls)
+
+    INSTANCES[this.key] ??= this.createInstance(urls)
+
+    this.instance = await INSTANCES[this.key]
+    this.instance.count++
+
+    const db = this.instance.client.db(dbname)
+
+    try {
+      this.collection = await db.createCollection(collname)
+    } catch (e) {
+      if (e.code !== ALREADY_EXISTS) {
+        throw e
+      }
+
+      this.collection = db.collection(collname)
+    }
+  }
+
+  /**
+   * @protected
+   * @override
+   * @return {Promise<void>}
+   */
+  async close () {
+    const instance = await INSTANCES[this.key]
+
+    instance.count--
+
+    if (instance.count === 0) {
+      await instance.client.close()
+      delete INSTANCES[this.key]
+    }
+  }
+
+  /**
+   * @private
+   * @param {string[]} urls
+   * @return {Promise<Instance>}
+   */
+  async createInstance (urls) {
+    const client = new MongoClient(urls.join(','), OPTIONS)
+    const hosts = urls.map((str) => new URL(str).host)
+
+    console.info('Connecting to MongoDB:', hosts.join(', '))
+
+    await client.connect()
+
+    return {
+      count: 0,
+      client
+    }
+  }
+
+  /**
+   * @private
+   * @return {Promise<string[]>}
+   */
+  async resolveURLs () {
+    if (process.env.TOA_DEV === '1') {
+      return ['mongodb://developer:secret@localhost']
+    } else {
+      return await resolve(ID, this.locator.id)
+    }
+  }
+
+  /**
+   * @private
+   * @return {string}
+   */
+  resolveDB () {
+    if (process.env.TOA_CONTEXT !== undefined) {
+      return process.env.TOA_CONTEXT
+    }
+
+    if (process.env.TOA_DEV === '1') {
+      return 'toa-dev'
+    }
+
+    throw new Error('Environment variable TOA_CONTEXT is not defined')
+  }
+}
+
+function getKey (db, urls) {
+  return db + ':' + urls.sort().join(' ')
+}
+
+const OPTIONS = {
+  ignoreUndefined: true
+}
+
+const ALREADY_EXISTS = 48
+
+exports.Client = Client

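Note (not part of the package): the new Client shares one MongoClient per database-and-URL set via the reference-counted INSTANCES map. The sketch below illustrates that behavior under stated assumptions: the locator shape (`id`, `lowercase`) is hypothetical, `connect()`/`disconnect()` are assumed to be the base Connector's wrappers around `open()`/`close()`, and dev mode requires a MongoDB reachable at mongodb://developer:secret@localhost.

'use strict'

// Usage sketch only; names and locator shape are assumptions, see note above.
const { Client } = require('./client')

async function sketch () {
  process.env.TOA_DEV = '1' // resolveURLs() and resolveDB() take their dev shortcuts

  // Same database ('toa-dev') and same URL set => same key in INSTANCES,
  // so both clients share one underlying MongoClient.
  const a = new Client({ id: 'dummies.one', lowercase: 'dummies_one' })
  const b = new Client({ id: 'dummies.two', lowercase: 'dummies_two' })

  await a.connect() // creates the shared MongoClient, count becomes 1
  await b.connect() // reuses it, count becomes 2

  console.log(a.collection.collectionName, b.collection.collectionName)

  await a.disconnect() // count drops to 1, connection stays open
  await b.disconnect() // count drops to 0, MongoClient is closed
}

sketch().catch(console.error)
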
package/src/factory.js
CHANGED

@@ -1,13 +1,13 @@
 'use strict'
 
-const {
+const { Client } = require('./client')
 const { Storage } = require('./storage')
 
 class Factory {
-  storage (locator) {
-    const
+  storage (locator, entity) {
+    const client = new Client(locator)
 
-    return new Storage(
+    return new Storage(client, entity)
   }
 }
 

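Note (not part of the package): the Factory now passes both the locator and the entity declaration to the Storage. A minimal sketch of the contract, assuming the module still exports Factory and using made-up locator and entity values (in Toa these are supplied by the runtime when a component is composed):

'use strict'

// Contract sketch only; argument shapes are illustrative assumptions.
const { Factory } = require('./factory')

const factory = new Factory()

const storage = factory.storage(
  { id: 'dummies.dummy', lowercase: 'dummies_dummy' }, // locator
  { schema: { properties: {}, required: [] } }         // entity declaration
)

// `storage` is a Storage connector that depends on a Client built from the
// locator; connecting the storage opens (or reuses) the shared MongoClient.
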
package/src/record.js
CHANGED

@@ -1,29 +1,16 @@
 'use strict'
 
-
-
- * @returns {toa.mongodb.Record}
- */
-const to = (entity) => {
-  const {
-    id,
-    ...rest
-  } = entity
+function to (entity) {
+  const { id, ...rest } = entity
 
   return /** @type {toa.mongodb.Record} */ { _id: id, ...rest }
 }
 
-
-
-
- */
-const from = (record) => {
-  if (record === undefined || record === null) return null
+function from (record) {
+  if (record === undefined || record === null)
+    return null
 
-  const {
-    _id,
-    ...rest
-  } = record
+  const { _id, ...rest } = record
 
   return { id: _id, ...rest }
 }

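Note (not part of the package): the rewritten helpers keep the same id ↔ _id mapping as before; a short behavior sketch:

'use strict'

// Behavior sketch for to()/from() as defined in the diff above.
const { to, from } = require('./record')

const record = to({ id: '1', _version: 1, title: 'hello' })
// { _id: '1', _version: 1, title: 'hello' }

const entity = from(record)
// { id: '1', _version: 1, title: 'hello' }

const empty = from(null)
// null (both undefined and null map to null)

console.log(record, entity, empty)
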
package/src/storage.js
CHANGED

@@ -1,56 +1,58 @@
 'use strict'
 
-const { Connector } = require('@toa.io/core')
+const { Connector, exceptions } = require('@toa.io/core')
 
 const { translate } = require('./translate')
-const {
-
-
-} = require('./record')
-
-/**
- * @implements {toa.core.Storage}
- */
+const { to, from } = require('./record')
+const { ReturnDocument } = require('mongodb')
+
 class Storage extends Connector {
-
-
+  #client
+
+  /** @type {import('mongodb').Collection} */
+  #collection
+  #entity
 
-
- * @param {toa.mongodb.Connection} connection
- */
-  constructor (connection) {
+  constructor (client, entity) {
     super()
 
-    this.#
+    this.#client = client
+    this.#entity = entity
 
-    this.depends(
+    this.depends(client)
   }
 
-  async
-
-    criteria,
-    options
-  } = translate(query)
+  async open () {
+    this.#collection = this.#client.collection
 
-
+    await this.index()
+  }
+
+  async get (query) {
+    const { criteria, options } = translate(query)
+    const record = await this.#collection.findOne(criteria, options)
 
     return from(record)
   }
 
   async find (query) {
-    const {
-
-      options
-    } = translate(query)
-    const recordset = await this.#connection.find(criteria, options)
+    const { criteria, options } = translate(query)
+    const recordset = await this.#collection.find(criteria, options).toArray()
 
     return recordset.map((item) => from(item))
   }
 
+  async stream (query = undefined) {
+    const { criteria, options } = translate(query)
+
+    return await this.#collection.find(criteria, options).stream({ transform: from })
+  }
+
   async add (entity) {
     const record = to(entity)
+    const result = await this.#collection.insertOne(record)
 
-    return
+    return result.acknowledged
   }
 
   async set (entity) {
@@ -58,50 +60,170 @@ class Storage extends Connector {
       _id: entity.id,
       _version: entity._version - 1
     }
-
+
+    const result = await this.#collection.findOneAndReplace(criteria, to(entity))
 
     return result !== null
   }
 
-  async store (entity) {
-
-
-
-
+  async store (entity, attempt = 0) {
+    try {
+      if (entity._version === 1)
+        return await this.add(entity)
+      else
+        return await this.set(entity)
+    } catch (error) {
+      if (error.code === ERR_DUPLICATE_KEY) {
+        const id = error.keyPattern === undefined
+          ? error.message.includes(' index: _id_ ') // AWS DocumentDB
+          : error.keyPattern._id === 1
+
+        if (id)
+          return false
+        else
+          throw new exceptions.DuplicateException()
+      } else if (error.cause?.code === 'ECONNREFUSED') {
+        // This is temporary and should be replaced with a class decorator.
+        if (attempt > 10)
+          throw error
+
+        await new Promise((resolve) => setTimeout(resolve, 1000))
+
+        return this.store(entity)
+      } else
+        throw error
     }
   }
 
-  async upsert (query, changeset
-    const {
-
-
-
+  async upsert (query, changeset) {
+    const { criteria, options } = translate(query)
+
+    if (!('_deleted' in changeset) || changeset._deleted === null) {
+      delete criteria._deleted
+      changeset._deleted = null
+    }
 
     const update = {
       $set: { ...changeset },
       $inc: { _version: 1 }
     }
 
-
-
+    options.returnDocument = ReturnDocument.AFTER
+
+    const result = await this.#collection.findOneAndUpdate(criteria, update, options)
 
-
+    return from(result)
+  }
+
+  async ensure (query, properties, state) {
+    let { criteria, options } = translate(query)
+
+    if (query === undefined)
+      criteria = properties
 
-
-      insert._id = criteria._id
-    } else {
-      return null
-    } // this shouldn't ever happen
+    const update = { $setOnInsert: to(state) }
 
-
+    options.upsert = true
+    options.returnDocument = ReturnDocument.AFTER
+
+    const result = await this.#collection.findOneAndUpdate(criteria, update, options)
+
+    if (result._deleted !== undefined && result._deleted !== null)
+      return null
+    else
+      return from(result)
+  }
+
+  async index () {
+    const indexes = []
+
+    if (this.#entity.unique !== undefined) {
+      for (const [name, fields] of Object.entries(this.#entity.unique)) {
+        const sparse = this.checkFields(fields)
+        const unique = await this.uniqueIndex(name, fields, sparse)
+
+        indexes.push(unique)
+      }
     }
 
-
+    if (this.#entity.index !== undefined) {
+      for (const [suffix, declaration] of Object.entries(this.#entity.index)) {
+        const name = 'index_' + suffix
+        const fields = Object.fromEntries(Object.entries(declaration)
+          .map(([name, type]) => [name, INDEX_TYPES[type]]))
 
-
+        const sparse = this.checkFields(Object.keys(fields))
 
-
+        await this.#collection.createIndex(fields, { name, sparse })
+
+        indexes.push(name)
+      }
+    }
+
+    await this.removeObsoleteIndexes(indexes)
+  }
+
+  async uniqueIndex (name, properties, sparse = false) {
+    const fields = properties.reduce((acc, property) => {
+      acc[property] = 1
+      return acc
+    }, {})
+
+    name = 'unique_' + name
+
+    await this.#collection.createIndex(fields, { name, unique: true, sparse })
+
+    return name
+  }
+
+  async removeObsoleteIndexes (desired) {
+    const current = await this.getCurrentIndexes()
+    const obsolete = current.filter((name) => !desired.includes(name))
+
+    if (obsolete.length > 0) {
+      console.info(`Remove obsolete indexes: [${obsolete.join(', ')}]`)
+
+      await Promise.all(obsolete.map((name) => this.#collection.dropIndex(name)))
+    }
+  }
+
+  async getCurrentIndexes () {
+    try {
+      const array = await this.#collection.listIndexes().toArray()
+
+      return array.map(({ name }) => name).filter((name) => name !== '_id_')
+    } catch {
+      return []
+    }
   }
+
+  checkFields (fields) {
+    const optional = []
+
+    for (const field of fields) {
+      if (!(field in this.#entity.schema.properties))
+        throw new Error(`Index field '${field}' is not defined.`)
+
+      if (!this.#entity.schema.required?.includes(field))
+        optional.push(field)
+    }
+
+    if (optional.length > 0) {
+      console.info(`Index fields [${optional.join(', ')}] are optional, creating sparse index.`)
+
+      return true
+    } else
+      return false
+  }
+
 }
 
+const INDEX_TYPES = {
+  'asc': 1,
+  'desc': -1,
+  'hash': 'hashed'
+}
+
+const ERR_DUPLICATE_KEY = 11000
+
 exports.Storage = Storage

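Note (not part of the package): the new index() derives MongoDB indexes from the entity declaration passed to the Storage. The sketch below is a hypothetical entity inferred from how index(), uniqueIndex() and checkFields() read it; the actual declaration format is defined elsewhere in Toa.

'use strict'

// Inferred shape only: `unique` maps an index name to an array of properties,
// `index` maps a suffix to { field: 'asc' | 'desc' | 'hash' }, and `schema`
// decides whether an index must be sparse (any optional field => sparse).
const entity = {
  schema: {
    properties: {
      email: { type: 'string' },
      rating: { type: 'number' },
      group: { type: 'string' }
    },
    required: ['email']
  },
  unique: {
    email: ['email']            // createIndex({ email: 1 }, { name: 'unique_email', unique: true, sparse: false })
  },
  index: {
    rating: { rating: 'desc' }, // createIndex({ rating: -1 }, { name: 'index_rating', sparse: true })
    group: { group: 'hash' }    // createIndex({ group: 'hashed' }, { name: 'index_group', sparse: true })
  }
}

module.exports = entity // illustrative only
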
package/src/translate.js
CHANGED

@@ -7,12 +7,20 @@ const parse = { ...require('./translate/criteria'), ...require('./translate/options') }
  * @returns {{criteria: Object, options: Object}}
  */
 const translate = (query) => {
-  const result = {
+  const result = {
+    criteria: query?.criteria === undefined ? {} : parse.criteria(query.criteria),
+    options: query?.options === undefined ? {} : parse.options(query.options)
+  }
 
-  if (query
-
-
-
+  if (query?.id !== undefined) {
+    result.criteria._id = query.id
+  }
+
+  if (query?.version !== undefined) {
+    result.criteria._version = query.version
+  }
+
+  result.criteria._deleted = null
 
   return result
 }

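Note (not part of the package): a behavior sketch of the updated translate(). Parsing of query.criteria and query.options is delegated to ./translate/criteria and ./translate/options, which are unchanged and not shown in this diff.

'use strict'

// Behavior sketch for translate() as defined in the diff above.
const { translate } = require('./translate')

console.log(translate({ id: '1', version: 3 }))
// { criteria: { _id: '1', _version: 3, _deleted: null }, options: {} }

console.log(translate(undefined))
// { criteria: { _deleted: null }, options: {} }
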
package/test/record.test.js
CHANGED

@@ -47,19 +47,4 @@ describe('from', () => {
       _version: 0
     })
   })
-
-  it('should not modify argument', () => {
-    /** @type {toa.mongodb.Record} */
-    const record = {
-      _id: '1',
-      _version: 0
-    }
-
-    from(record)
-
-    expect(record).toStrictEqual({
-      _id: '1',
-      _version: 0
-    })
-  })
 })

package/src/connection.js
DELETED

@@ -1,103 +0,0 @@
-// noinspection JSCheckFunctionSignatures
-
-'use strict'
-
-const { MongoClient } = require('mongodb')
-const { Connector } = require('@toa.io/core')
-const { resolve } = require('@toa.io/pointer')
-const { Conveyor } = require('@toa.io/conveyor')
-const { ID } = require('./deployment')
-
-class Connection extends Connector {
-  #locator
-  /** @type {import('mongodb').MongoClient} */
-  #client
-  /** @type {import('mongodb').Collection<toa.mongodb.Record>} */
-  #collection
-  /** @type {toa.conveyor.Conveyor<toa.core.storages.Record, boolean>} */
-  #conveyor
-
-  constructor (locator) {
-    super()
-
-    this.#locator = locator
-  }
-
-  async open () {
-    const urls = await this.#resolveURLs()
-    const db = this.#locator.namespace
-    const collection = this.#locator.name
-
-    this.#client = new MongoClient(urls[0], OPTIONS)
-
-    await this.#client.connect()
-
-    this.#collection = this.#client.db(db).collection(collection)
-    this.#conveyor = new Conveyor((objects) => this.addMany(objects))
-
-    console.info(`Storage Mongo '${this.#locator.id}' connected`)
-  }
-
-  async close () {
-    await this.#client?.close()
-
-    console.info(`Storage Mongo '${this.#locator.id}' disconnected`)
-  }
-
-  /** @hot */
-  async get (query, options) {
-    return /** @type {toa.mongodb.Record} */ this.#collection.findOne(query, options)
-  }
-
-  /** @hot */
-  async find (query, options) {
-    const cursor = this.#collection.find(query, options)
-
-    return cursor.toArray()
-  }
-
-  /** @hot */
-  async add (record) {
-    return this.#conveyor.process(record)
-  }
-
-  async addMany (records) {
-    let result
-
-    try {
-      const response = await this.#collection.insertMany(records, { ordered: false })
-
-      result = response.acknowledged
-    } catch (e) {
-      if (e.code === ERR_DUPLICATE_KEY) result = false
-      else throw e
-    }
-
-    return result
-  }
-
-  /** @hot */
-  async replace (query, record, options) {
-    return await this.#collection.findOneAndReplace(query, record, options)
-  }
-
-  /** @hot */
-  async update (query, update, options) {
-    return this.#collection.findOneAndUpdate(query, update, options)
-  }
-
-  async #resolveURLs () {
-    if (process.env.TOA_DEV === '1') return ['mongodb://developer:secret@localhost']
-    else return await resolve(ID, this.#locator.id)
-  }
-}
-
-const OPTIONS = {
-  ignoreUndefined: true,
-  connectTimeoutMS: 0,
-  serverSelectionTimeoutMS: 0
-}
-
-const ERR_DUPLICATE_KEY = 11000
-
-exports.Connection = Connection

package/test/connection.test.js
DELETED

@@ -1,58 +0,0 @@
-'use strict'
-
-const insertManyMock = jest.fn(() => ({ acknowledged: true }))
-jest.mock('mongodb', () => ({
-  __esModule: true,
-  MongoClient: function () {
-    this.connect = () => {},
-    this.db = () => ({
-      collection: () => ({
-        insertMany: insertManyMock
-      })
-    })
-    return this
-  },
-}))
-jest.mock('@toa.io/pointer', () => ({
-  __esModule: true,
-  resolve: () => ['url'],
-}))
-const { generate } = require('randomstring')
-const { Connection } = require('../src/connection')
-
-
-let connection
-
-beforeEach(async () => {
-  jest.clearAllMocks()
-  connection = new Connection({ id: 1 })
-  await connection.open()
-})
-
-it('should be', () => {
-  expect(Connection).toBeDefined()
-})
-
-it('should insert', async () => {
-  const object = generate()
-
-  await connection.add(object)
-
-  expect(insertManyMock).toHaveBeenCalledWith([object], { ordered: false })
-})
-
-it('should batch insert', async () => {
-  const a = generate()
-  const b = generate()
-  const c = generate()
-
-  await Promise.all([
-    connection.add(a),
-    connection.add(b),
-    connection.add(c)
-  ])
-
-  expect(insertManyMock).toHaveBeenCalledTimes(2)
-  expect(insertManyMock).toHaveBeenNthCalledWith(1, [a], { ordered: false })
-  expect(insertManyMock).toHaveBeenNthCalledWith(2, [b, c], { ordered: false })
-})