@joystick.js/db-canary 0.0.0-canary.2251 → 0.0.0-canary.2253
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/client/database.js +1 -1
- package/dist/client/index.js +1 -1
- package/dist/server/cluster/master.js +4 -4
- package/dist/server/cluster/worker.js +1 -1
- package/dist/server/index.js +1 -1
- package/dist/server/lib/auto_index_manager.js +1 -1
- package/dist/server/lib/backup_manager.js +1 -1
- package/dist/server/lib/index_manager.js +1 -1
- package/dist/server/lib/operation_dispatcher.js +1 -1
- package/dist/server/lib/operations/admin.js +1 -1
- package/dist/server/lib/operations/bulk_write.js +1 -1
- package/dist/server/lib/operations/create_index.js +1 -1
- package/dist/server/lib/operations/delete_many.js +1 -1
- package/dist/server/lib/operations/delete_one.js +1 -1
- package/dist/server/lib/operations/find.js +1 -1
- package/dist/server/lib/operations/find_one.js +1 -1
- package/dist/server/lib/operations/insert_one.js +1 -1
- package/dist/server/lib/operations/update_one.js +1 -1
- package/dist/server/lib/send_response.js +1 -1
- package/dist/server/lib/tcp_protocol.js +1 -1
- package/package.json +2 -2
- package/src/client/database.js +159 -133
- package/src/client/index.js +285 -346
- package/src/server/cluster/master.js +265 -156
- package/src/server/cluster/worker.js +26 -18
- package/src/server/index.js +553 -330
- package/src/server/lib/auto_index_manager.js +85 -23
- package/src/server/lib/backup_manager.js +117 -70
- package/src/server/lib/index_manager.js +63 -25
- package/src/server/lib/operation_dispatcher.js +339 -168
- package/src/server/lib/operations/admin.js +343 -205
- package/src/server/lib/operations/bulk_write.js +458 -194
- package/src/server/lib/operations/create_index.js +127 -34
- package/src/server/lib/operations/delete_many.js +204 -67
- package/src/server/lib/operations/delete_one.js +164 -52
- package/src/server/lib/operations/find.js +552 -319
- package/src/server/lib/operations/find_one.js +530 -304
- package/src/server/lib/operations/insert_one.js +147 -52
- package/src/server/lib/operations/update_one.js +334 -93
- package/src/server/lib/send_response.js +37 -17
- package/src/server/lib/tcp_protocol.js +158 -53
- package/test_data_api_key_1758233848259_cglfjzhou/data.mdb +0 -0
- package/test_data_api_key_1758233848259_cglfjzhou/lock.mdb +0 -0
- package/test_data_api_key_1758233848502_urlje2utd/data.mdb +0 -0
- package/test_data_api_key_1758233848502_urlje2utd/lock.mdb +0 -0
- package/test_data_api_key_1758233848738_mtcpfe5ns/data.mdb +0 -0
- package/test_data_api_key_1758233848738_mtcpfe5ns/lock.mdb +0 -0
- package/test_data_api_key_1758233848856_9g97p6gag/data.mdb +0 -0
- package/test_data_api_key_1758233848856_9g97p6gag/lock.mdb +0 -0
- package/test_data_api_key_1758233857008_0tl9zzhj8/data.mdb +0 -0
- package/test_data_api_key_1758233857008_0tl9zzhj8/lock.mdb +0 -0
- package/test_data_api_key_1758233857120_60c2f2uhu/data.mdb +0 -0
- package/test_data_api_key_1758233857120_60c2f2uhu/lock.mdb +0 -0
- package/test_data_api_key_1758233857232_aw7fkqgd9/data.mdb +0 -0
- package/test_data_api_key_1758233857232_aw7fkqgd9/lock.mdb +0 -0
- package/test_data_api_key_1758234881285_4aeflubjb/data.mdb +0 -0
- package/test_data_api_key_1758234881285_4aeflubjb/lock.mdb +0 -0
- package/test_data_api_key_1758234881520_kb0amvtqb/data.mdb +0 -0
- package/test_data_api_key_1758234881520_kb0amvtqb/lock.mdb +0 -0
- package/test_data_api_key_1758234881756_k04gfv2va/data.mdb +0 -0
- package/test_data_api_key_1758234881756_k04gfv2va/lock.mdb +0 -0
- package/test_data_api_key_1758234881876_wn90dpo1z/data.mdb +0 -0
- package/test_data_api_key_1758234881876_wn90dpo1z/lock.mdb +0 -0
- package/test_data_api_key_1758234889461_26xz3dmbr/data.mdb +0 -0
- package/test_data_api_key_1758234889461_26xz3dmbr/lock.mdb +0 -0
- package/test_data_api_key_1758234889572_uziz7e0p5/data.mdb +0 -0
- package/test_data_api_key_1758234889572_uziz7e0p5/lock.mdb +0 -0
- package/test_data_api_key_1758234889684_5f9wmposh/data.mdb +0 -0
- package/test_data_api_key_1758234889684_5f9wmposh/lock.mdb +0 -0
- package/test_data_api_key_1758235657729_prwgm6mxr/data.mdb +0 -0
- package/test_data_api_key_1758235657729_prwgm6mxr/lock.mdb +0 -0
- package/test_data_api_key_1758235657961_rc2da0dc2/data.mdb +0 -0
- package/test_data_api_key_1758235657961_rc2da0dc2/lock.mdb +0 -0
- package/test_data_api_key_1758235658193_oqqxm0sny/data.mdb +0 -0
- package/test_data_api_key_1758235658193_oqqxm0sny/lock.mdb +0 -0
- package/test_data_api_key_1758235658309_vggac1pj6/data.mdb +0 -0
- package/test_data_api_key_1758235658309_vggac1pj6/lock.mdb +0 -0
- package/test_data_api_key_1758235665968_61ko07dd1/data.mdb +0 -0
- package/test_data_api_key_1758235665968_61ko07dd1/lock.mdb +0 -0
- package/test_data_api_key_1758235666082_50lrt6sq8/data.mdb +0 -0
- package/test_data_api_key_1758235666082_50lrt6sq8/lock.mdb +0 -0
- package/test_data_api_key_1758235666194_ykvauwlzh/data.mdb +0 -0
- package/test_data_api_key_1758235666194_ykvauwlzh/lock.mdb +0 -0
- package/test_data_api_key_1758236187207_9c4paeh09/data.mdb +0 -0
- package/test_data_api_key_1758236187207_9c4paeh09/lock.mdb +0 -0
- package/test_data_api_key_1758236187441_4n3o3gkkl/data.mdb +0 -0
- package/test_data_api_key_1758236187441_4n3o3gkkl/lock.mdb +0 -0
- package/test_data_api_key_1758236187672_jt6b21ye0/data.mdb +0 -0
- package/test_data_api_key_1758236187672_jt6b21ye0/lock.mdb +0 -0
- package/test_data_api_key_1758236187788_oo84fz9u6/data.mdb +0 -0
- package/test_data_api_key_1758236187788_oo84fz9u6/lock.mdb +0 -0
- package/test_data_api_key_1758236195507_o9zeznwlm/data.mdb +0 -0
- package/test_data_api_key_1758236195507_o9zeznwlm/lock.mdb +0 -0
- package/test_data_api_key_1758236195619_qsqd60y41/data.mdb +0 -0
- package/test_data_api_key_1758236195619_qsqd60y41/lock.mdb +0 -0
- package/test_data_api_key_1758236195731_im13iq284/data.mdb +0 -0
- package/test_data_api_key_1758236195731_im13iq284/lock.mdb +0 -0
|
@@ -11,51 +11,152 @@ import create_logger from '../logger.js';
|
|
|
11
11
|
|
|
12
12
|
const { create_context_logger } = create_logger('bulk_write');
|
|
13
13
|
|
|
14
|
+
/**
|
|
15
|
+
* Validates database name parameter.
|
|
16
|
+
* @param {string} database_name - Database name to validate
|
|
17
|
+
* @throws {Error} When database name is missing
|
|
18
|
+
*/
|
|
19
|
+
const validate_database_name = (database_name) => {
|
|
20
|
+
if (!database_name) {
|
|
21
|
+
throw new Error('Database name is required');
|
|
22
|
+
}
|
|
23
|
+
};
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Validates collection name parameter.
|
|
27
|
+
* @param {string} collection_name - Collection name to validate
|
|
28
|
+
* @throws {Error} When collection name is missing
|
|
29
|
+
*/
|
|
30
|
+
const validate_collection_name = (collection_name) => {
|
|
31
|
+
if (!collection_name) {
|
|
32
|
+
throw new Error('Collection name is required');
|
|
33
|
+
}
|
|
34
|
+
};
|
|
35
|
+
|
|
36
|
+
/**
|
|
37
|
+
* Validates operations array parameter.
|
|
38
|
+
* @param {Array} operations - Operations array to validate
|
|
39
|
+
* @throws {Error} When operations array is invalid
|
|
40
|
+
*/
|
|
41
|
+
const validate_operations_array = (operations) => {
|
|
42
|
+
if (!Array.isArray(operations) || operations.length === 0) {
|
|
43
|
+
throw new Error('Operations must be a non-empty array');
|
|
44
|
+
}
|
|
45
|
+
};
|
|
46
|
+
|
|
47
|
+
/**
|
|
48
|
+
* Validates all bulk write parameters.
|
|
49
|
+
* @param {string} database_name - Database name
|
|
50
|
+
* @param {string} collection_name - Collection name
|
|
51
|
+
* @param {Array} operations - Operations array
|
|
52
|
+
*/
|
|
53
|
+
const validate_bulk_write_parameters = (database_name, collection_name, operations) => {
|
|
54
|
+
validate_database_name(database_name);
|
|
55
|
+
validate_collection_name(collection_name);
|
|
56
|
+
validate_operations_array(operations);
|
|
57
|
+
};
|
|
58
|
+
|
|
59
|
+
/**
|
|
60
|
+
* Applies $set operator to document.
|
|
61
|
+
* @param {Object} document - Document to update
|
|
62
|
+
* @param {Object} operations - Set operations
|
|
63
|
+
* @returns {Object} Updated document
|
|
64
|
+
*/
|
|
65
|
+
const apply_set_operator = (document, operations) => {
|
|
66
|
+
return { ...document, ...operations };
|
|
67
|
+
};
|
|
68
|
+
|
|
69
|
+
/**
|
|
70
|
+
* Applies $unset operator to document.
|
|
71
|
+
* @param {Object} document - Document to update
|
|
72
|
+
* @param {Object} operations - Unset operations
|
|
73
|
+
* @returns {Object} Updated document
|
|
74
|
+
*/
|
|
75
|
+
const apply_unset_operator = (document, operations) => {
|
|
76
|
+
const updated_document = { ...document };
|
|
77
|
+
for (const field of Object.keys(operations)) {
|
|
78
|
+
delete updated_document[field];
|
|
79
|
+
}
|
|
80
|
+
return updated_document;
|
|
81
|
+
};
|
|
82
|
+
|
|
83
|
+
/**
|
|
84
|
+
* Applies $inc operator to document.
|
|
85
|
+
* @param {Object} document - Document to update
|
|
86
|
+
* @param {Object} operations - Increment operations
|
|
87
|
+
* @returns {Object} Updated document
|
|
88
|
+
*/
|
|
89
|
+
const apply_inc_operator = (document, operations) => {
|
|
90
|
+
const updated_document = { ...document };
|
|
91
|
+
for (const [field, value] of Object.entries(operations)) {
|
|
92
|
+
updated_document[field] = (updated_document[field] || 0) + value;
|
|
93
|
+
}
|
|
94
|
+
return updated_document;
|
|
95
|
+
};
|
|
96
|
+
|
|
97
|
+
/**
|
|
98
|
+
* Applies $push operator to document.
|
|
99
|
+
* @param {Object} document - Document to update
|
|
100
|
+
* @param {Object} operations - Push operations
|
|
101
|
+
* @returns {Object} Updated document
|
|
102
|
+
*/
|
|
103
|
+
const apply_push_operator = (document, operations) => {
|
|
104
|
+
const updated_document = { ...document };
|
|
105
|
+
for (const [field, value] of Object.entries(operations)) {
|
|
106
|
+
if (!Array.isArray(updated_document[field])) {
|
|
107
|
+
updated_document[field] = [];
|
|
108
|
+
}
|
|
109
|
+
updated_document[field] = [...updated_document[field], value];
|
|
110
|
+
}
|
|
111
|
+
return updated_document;
|
|
112
|
+
};
|
|
113
|
+
|
|
114
|
+
/**
|
|
115
|
+
* Applies $pull operator to document.
|
|
116
|
+
* @param {Object} document - Document to update
|
|
117
|
+
* @param {Object} operations - Pull operations
|
|
118
|
+
* @returns {Object} Updated document
|
|
119
|
+
*/
|
|
120
|
+
const apply_pull_operator = (document, operations) => {
|
|
121
|
+
const updated_document = { ...document };
|
|
122
|
+
for (const [field, value] of Object.entries(operations)) {
|
|
123
|
+
if (Array.isArray(updated_document[field])) {
|
|
124
|
+
updated_document[field] = updated_document[field].filter(item => item !== value);
|
|
125
|
+
}
|
|
126
|
+
}
|
|
127
|
+
return updated_document;
|
|
128
|
+
};
|
|
129
|
+
|
|
14
130
|
/**
|
|
15
131
|
* Applies MongoDB-style update operators to a document.
|
|
16
|
-
* Supports $set, $unset, $inc, $push, and $pull operators for document modification.
|
|
17
|
-
* Creates a new document object without mutating the original.
|
|
18
132
|
* @param {Object} document - Original document to update
|
|
19
133
|
* @param {Object} update_operations - Update operations to apply
|
|
20
134
|
* @returns {Object} Updated document with applied operations
|
|
21
135
|
* @throws {Error} When unsupported update operator is used
|
|
22
136
|
*/
|
|
23
137
|
const apply_update_operators = (document, update_operations) => {
|
|
24
|
-
|
|
138
|
+
let updated_document = { ...document };
|
|
25
139
|
|
|
26
140
|
for (const [operator, operations] of Object.entries(update_operations)) {
|
|
27
141
|
switch (operator) {
|
|
28
142
|
case '$set':
|
|
29
|
-
|
|
143
|
+
updated_document = apply_set_operator(updated_document, operations);
|
|
30
144
|
break;
|
|
31
145
|
|
|
32
146
|
case '$unset':
|
|
33
|
-
|
|
34
|
-
delete updated_document[field];
|
|
35
|
-
}
|
|
147
|
+
updated_document = apply_unset_operator(updated_document, operations);
|
|
36
148
|
break;
|
|
37
149
|
|
|
38
150
|
case '$inc':
|
|
39
|
-
|
|
40
|
-
updated_document[field] = (updated_document[field] || 0) + value;
|
|
41
|
-
}
|
|
151
|
+
updated_document = apply_inc_operator(updated_document, operations);
|
|
42
152
|
break;
|
|
43
153
|
|
|
44
154
|
case '$push':
|
|
45
|
-
|
|
46
|
-
if (!Array.isArray(updated_document[field])) {
|
|
47
|
-
updated_document[field] = [];
|
|
48
|
-
}
|
|
49
|
-
updated_document[field].push(value);
|
|
50
|
-
}
|
|
155
|
+
updated_document = apply_push_operator(updated_document, operations);
|
|
51
156
|
break;
|
|
52
157
|
|
|
53
158
|
case '$pull':
|
|
54
|
-
|
|
55
|
-
if (Array.isArray(updated_document[field])) {
|
|
56
|
-
updated_document[field] = updated_document[field].filter(item => item !== value);
|
|
57
|
-
}
|
|
58
|
-
}
|
|
159
|
+
updated_document = apply_pull_operator(updated_document, operations);
|
|
59
160
|
break;
|
|
60
161
|
|
|
61
162
|
default:
|
|
@@ -66,10 +167,19 @@ const apply_update_operators = (document, update_operations) => {
|
|
|
66
167
|
return updated_document;
|
|
67
168
|
};
|
|
68
169
|
|
|
170
|
+
/**
|
|
171
|
+
* Checks if document field matches filter value.
|
|
172
|
+
* @param {Object} document - Document to check
|
|
173
|
+
* @param {string} field - Field name
|
|
174
|
+
* @param {any} value - Expected value
|
|
175
|
+
* @returns {boolean} True if field matches value
|
|
176
|
+
*/
|
|
177
|
+
const field_matches_value = (document, field, value) => {
|
|
178
|
+
return document[field] === value;
|
|
179
|
+
};
|
|
180
|
+
|
|
69
181
|
/**
|
|
70
182
|
* Checks if a document matches the provided filter criteria.
|
|
71
|
-
* Performs exact field matching for all filter properties.
|
|
72
|
-
* Empty or null filters match all documents.
|
|
73
183
|
* @param {Object} document - Document to check against filter
|
|
74
184
|
* @param {Object} filter - Filter criteria for matching
|
|
75
185
|
* @returns {boolean} True if document matches all filter criteria
|
|
@@ -80,7 +190,7 @@ const matches_filter = (document, filter) => {
|
|
|
80
190
|
}
|
|
81
191
|
|
|
82
192
|
for (const [field, value] of Object.entries(filter)) {
|
|
83
|
-
if (document
|
|
193
|
+
if (!field_matches_value(document, field, value)) {
|
|
84
194
|
return false;
|
|
85
195
|
}
|
|
86
196
|
}
|
|
@@ -88,64 +198,88 @@ const matches_filter = (document, filter) => {
|
|
|
88
198
|
return true;
|
|
89
199
|
};
|
|
90
200
|
|
|
201
|
+
/**
|
|
202
|
+
* Validates insert operation document.
|
|
203
|
+
* @param {Object} document_data - Document to validate
|
|
204
|
+
* @throws {Error} When document is invalid
|
|
205
|
+
*/
|
|
206
|
+
const validate_insert_document = (document_data) => {
|
|
207
|
+
if (!document_data || typeof document_data !== 'object') {
|
|
208
|
+
throw new Error('insertOne operation requires a valid document');
|
|
209
|
+
}
|
|
210
|
+
};
|
|
211
|
+
|
|
212
|
+
/**
|
|
213
|
+
* Creates current timestamp string.
|
|
214
|
+
* @returns {string} ISO timestamp string
|
|
215
|
+
*/
|
|
216
|
+
const create_current_timestamp = () => {
|
|
217
|
+
return new Date().toISOString();
|
|
218
|
+
};
|
|
219
|
+
|
|
220
|
+
/**
|
|
221
|
+
* Prepares document for insertion with ID and timestamps.
|
|
222
|
+
* @param {Object} document_data - Original document
|
|
223
|
+
* @returns {Object} Document prepared for insertion
|
|
224
|
+
*/
|
|
225
|
+
const prepare_document_for_insertion = (document_data) => {
|
|
226
|
+
const document_id = document_data._id || generate_document_id();
|
|
227
|
+
const current_timestamp = create_current_timestamp();
|
|
228
|
+
|
|
229
|
+
return {
|
|
230
|
+
...document_data,
|
|
231
|
+
_id: document_id,
|
|
232
|
+
_created_at: current_timestamp,
|
|
233
|
+
_updated_at: current_timestamp
|
|
234
|
+
};
|
|
235
|
+
};
|
|
236
|
+
|
|
237
|
+
/**
|
|
238
|
+
* Checks if document already exists in database.
|
|
239
|
+
* @param {Object} db - Database instance
|
|
240
|
+
* @param {string} collection_key - Collection key for document
|
|
241
|
+
* @param {string} document_id - Document ID
|
|
242
|
+
* @throws {Error} When document already exists
|
|
243
|
+
*/
|
|
244
|
+
const check_document_does_not_exist = (db, collection_key, document_id) => {
|
|
245
|
+
const existing_document_data = db.get(collection_key);
|
|
246
|
+
if (existing_document_data) {
|
|
247
|
+
throw new Error(`Document with _id ${document_id} already exists`);
|
|
248
|
+
}
|
|
249
|
+
};
|
|
250
|
+
|
|
91
251
|
/**
|
|
92
252
|
* Processes a single insert operation within a bulk write transaction.
|
|
93
|
-
* Validates document, generates ID if needed, checks for duplicates, and inserts document.
|
|
94
|
-
* Adds creation and update timestamps automatically.
|
|
95
253
|
* @param {Object} db - LMDB database instance
|
|
96
254
|
* @param {string} database_name - Name of the database
|
|
97
255
|
* @param {string} collection_name - Name of the collection
|
|
98
256
|
* @param {Object} operation - Insert operation data
|
|
99
|
-
* @param {Object} operation.document - Document to insert
|
|
100
257
|
* @returns {Object} Insert result with inserted_id
|
|
101
|
-
* @throws {Error} When document is invalid or already exists
|
|
102
258
|
*/
|
|
103
259
|
const process_insert_one = (db, database_name, collection_name, operation) => {
|
|
104
260
|
const document_data = operation.document;
|
|
105
261
|
|
|
106
|
-
|
|
107
|
-
throw new Error('insertOne operation requires a valid document');
|
|
108
|
-
}
|
|
262
|
+
validate_insert_document(document_data);
|
|
109
263
|
|
|
110
|
-
const
|
|
111
|
-
const collection_key = build_collection_key(database_name, collection_name,
|
|
264
|
+
const document_to_insert = prepare_document_for_insertion(document_data);
|
|
265
|
+
const collection_key = build_collection_key(database_name, collection_name, document_to_insert._id);
|
|
112
266
|
|
|
113
|
-
|
|
114
|
-
if (existing_document_data) {
|
|
115
|
-
throw new Error(`Document with _id ${document_id} already exists`);
|
|
116
|
-
}
|
|
117
|
-
|
|
118
|
-
const document_to_insert = {
|
|
119
|
-
...document_data,
|
|
120
|
-
_id: document_id,
|
|
121
|
-
_created_at: new Date().toISOString(),
|
|
122
|
-
_updated_at: new Date().toISOString()
|
|
123
|
-
};
|
|
267
|
+
check_document_does_not_exist(db, collection_key, document_to_insert._id);
|
|
124
268
|
|
|
125
269
|
db.put(collection_key, JSON.stringify(document_to_insert));
|
|
126
270
|
|
|
127
271
|
return {
|
|
128
|
-
inserted_id:
|
|
272
|
+
inserted_id: document_to_insert._id
|
|
129
273
|
};
|
|
130
274
|
};
|
|
131
275
|
|
|
132
276
|
/**
|
|
133
|
-
*
|
|
134
|
-
*
|
|
135
|
-
*
|
|
136
|
-
* @
|
|
137
|
-
* @param {string} database_name - Name of the database
|
|
138
|
-
* @param {string} collection_name - Name of the collection
|
|
139
|
-
* @param {Object} operation - Update operation data
|
|
140
|
-
* @param {Object} operation.filter - Filter to find document to update
|
|
141
|
-
* @param {Object} operation.update - Update operations to apply
|
|
142
|
-
* @param {boolean} [operation.upsert=false] - Whether to insert if no match found
|
|
143
|
-
* @returns {Object} Update result with counts and upserted_id if applicable
|
|
144
|
-
* @throws {Error} When filter or update is invalid
|
|
277
|
+
* Validates update operation parameters.
|
|
278
|
+
* @param {Object} filter - Filter criteria
|
|
279
|
+
* @param {Object} update - Update operations
|
|
280
|
+
* @throws {Error} When parameters are invalid
|
|
145
281
|
*/
|
|
146
|
-
const
|
|
147
|
-
const { filter, update, upsert = false } = operation;
|
|
148
|
-
|
|
282
|
+
const validate_update_parameters = (filter, update) => {
|
|
149
283
|
if (!filter || typeof filter !== 'object') {
|
|
150
284
|
throw new Error('updateOne operation requires a valid filter');
|
|
151
285
|
}
|
|
@@ -153,214 +287,344 @@ const process_update_one = (db, database_name, collection_name, operation) => {
|
|
|
153
287
|
if (!update || typeof update !== 'object') {
|
|
154
288
|
throw new Error('updateOne operation requires a valid update');
|
|
155
289
|
}
|
|
290
|
+
};
|
|
291
|
+
|
|
292
|
+
/**
|
|
293
|
+
* Adds updated timestamp to document.
|
|
294
|
+
* @param {Object} document - Document to update
|
|
295
|
+
* @returns {Object} Document with updated timestamp
|
|
296
|
+
*/
|
|
297
|
+
const add_updated_timestamp = (document) => {
|
|
298
|
+
return {
|
|
299
|
+
...document,
|
|
300
|
+
_updated_at: create_current_timestamp()
|
|
301
|
+
};
|
|
302
|
+
};
|
|
303
|
+
|
|
304
|
+
/**
|
|
305
|
+
* Checks if document has been modified.
|
|
306
|
+
* @param {Object} original_document - Original document
|
|
307
|
+
* @param {Object} updated_document - Updated document
|
|
308
|
+
* @returns {boolean} True if document was modified
|
|
309
|
+
*/
|
|
310
|
+
const document_was_modified = (original_document, updated_document) => {
|
|
311
|
+
return JSON.stringify(original_document) !== JSON.stringify(updated_document);
|
|
312
|
+
};
|
|
313
|
+
|
|
314
|
+
/**
|
|
315
|
+
* Creates new document for upsert operation.
|
|
316
|
+
* @param {Object} filter - Filter criteria
|
|
317
|
+
* @param {Object} update - Update operations
|
|
318
|
+
* @returns {Object} New document for upsert
|
|
319
|
+
*/
|
|
320
|
+
const create_upsert_document = (filter, update) => {
|
|
321
|
+
const document_id = generate_document_id();
|
|
322
|
+
const current_timestamp = create_current_timestamp();
|
|
156
323
|
|
|
157
|
-
const
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
324
|
+
const base_document = {
|
|
325
|
+
...filter,
|
|
326
|
+
_id: document_id,
|
|
327
|
+
_created_at: current_timestamp,
|
|
328
|
+
_updated_at: current_timestamp
|
|
329
|
+
};
|
|
161
330
|
|
|
331
|
+
return apply_update_operators(base_document, update);
|
|
332
|
+
};
|
|
333
|
+
|
|
334
|
+
/**
|
|
335
|
+
* Searches for and updates matching document.
|
|
336
|
+
* @param {Object} db - Database instance
|
|
337
|
+
* @param {string} database_name - Database name
|
|
338
|
+
* @param {string} collection_name - Collection name
|
|
339
|
+
* @param {Object} filter - Filter criteria
|
|
340
|
+
* @param {Object} update - Update operations
|
|
341
|
+
* @returns {Object|null} Update result or null if no match
|
|
342
|
+
*/
|
|
343
|
+
const find_and_update_document = (db, database_name, collection_name, filter, update) => {
|
|
344
|
+
const collection_prefix = `${database_name}:${collection_name}:`;
|
|
162
345
|
const range = db.getRange({ start: collection_prefix, end: collection_prefix + '\xFF' });
|
|
346
|
+
|
|
163
347
|
for (const { key, value: document_data } of range) {
|
|
164
348
|
const document = JSON.parse(document_data);
|
|
165
349
|
if (matches_filter(document, filter)) {
|
|
166
|
-
matched_count = 1;
|
|
167
|
-
|
|
168
350
|
const updated_document = apply_update_operators(document, update);
|
|
169
|
-
|
|
351
|
+
const timestamped_document = add_updated_timestamp(updated_document);
|
|
170
352
|
|
|
171
|
-
|
|
172
|
-
|
|
353
|
+
let modified_count = 0;
|
|
354
|
+
if (document_was_modified(document, timestamped_document)) {
|
|
355
|
+
db.put(key, JSON.stringify(timestamped_document));
|
|
173
356
|
modified_count = 1;
|
|
174
357
|
}
|
|
175
358
|
|
|
176
|
-
return { matched_count, modified_count };
|
|
359
|
+
return { matched_count: 1, modified_count };
|
|
177
360
|
}
|
|
178
361
|
}
|
|
179
362
|
|
|
363
|
+
return null;
|
|
364
|
+
};
|
|
365
|
+
|
|
366
|
+
/**
|
|
367
|
+
* Processes a single update operation within a bulk write transaction.
|
|
368
|
+
* @param {Object} db - LMDB database instance
|
|
369
|
+
* @param {string} database_name - Name of the database
|
|
370
|
+
* @param {string} collection_name - Name of the collection
|
|
371
|
+
* @param {Object} operation - Update operation data
|
|
372
|
+
* @returns {Object} Update result with counts and upserted_id if applicable
|
|
373
|
+
*/
|
|
374
|
+
const process_update_one = (db, database_name, collection_name, operation) => {
|
|
375
|
+
const { filter, update, upsert = false } = operation;
|
|
376
|
+
|
|
377
|
+
validate_update_parameters(filter, update);
|
|
378
|
+
|
|
379
|
+
const update_result = find_and_update_document(db, database_name, collection_name, filter, update);
|
|
380
|
+
|
|
381
|
+
if (update_result) {
|
|
382
|
+
return update_result;
|
|
383
|
+
}
|
|
384
|
+
|
|
180
385
|
if (upsert) {
|
|
181
|
-
const
|
|
182
|
-
const collection_key = build_collection_key(database_name, collection_name,
|
|
183
|
-
|
|
184
|
-
const new_document = {
|
|
185
|
-
...filter,
|
|
186
|
-
_id: document_id,
|
|
187
|
-
_created_at: new Date().toISOString(),
|
|
188
|
-
_updated_at: new Date().toISOString()
|
|
189
|
-
};
|
|
386
|
+
const upserted_document = create_upsert_document(filter, update);
|
|
387
|
+
const collection_key = build_collection_key(database_name, collection_name, upserted_document._id);
|
|
190
388
|
|
|
191
|
-
const upserted_document = apply_update_operators(new_document, update);
|
|
192
389
|
db.put(collection_key, JSON.stringify(upserted_document));
|
|
193
390
|
|
|
194
|
-
upserted_id
|
|
391
|
+
return { matched_count: 0, modified_count: 0, upserted_id: upserted_document._id };
|
|
195
392
|
}
|
|
196
393
|
|
|
197
|
-
return { matched_count, modified_count
|
|
394
|
+
return { matched_count: 0, modified_count: 0 };
|
|
198
395
|
};
|
|
199
396
|
|
|
200
397
|
/**
|
|
201
|
-
*
|
|
202
|
-
*
|
|
203
|
-
* Returns count of deleted documents (0 or 1).
|
|
204
|
-
* @param {Object} db - LMDB database instance
|
|
205
|
-
* @param {string} database_name - Name of the database
|
|
206
|
-
* @param {string} collection_name - Name of the collection
|
|
207
|
-
* @param {Object} operation - Delete operation data
|
|
208
|
-
* @param {Object} operation.filter - Filter to find document to delete
|
|
209
|
-
* @returns {Object} Delete result with deleted_count
|
|
398
|
+
* Validates delete operation filter.
|
|
399
|
+
* @param {Object} filter - Filter criteria
|
|
210
400
|
* @throws {Error} When filter is invalid
|
|
211
401
|
*/
|
|
212
|
-
const
|
|
213
|
-
const { filter } = operation;
|
|
214
|
-
|
|
402
|
+
const validate_delete_filter = (filter) => {
|
|
215
403
|
if (!filter || typeof filter !== 'object') {
|
|
216
404
|
throw new Error('deleteOne operation requires a valid filter');
|
|
217
405
|
}
|
|
218
|
-
|
|
406
|
+
};
|
|
407
|
+
|
|
408
|
+
/**
|
|
409
|
+
* Searches for and deletes matching document.
|
|
410
|
+
* @param {Object} db - Database instance
|
|
411
|
+
* @param {string} database_name - Database name
|
|
412
|
+
* @param {string} collection_name - Collection name
|
|
413
|
+
* @param {Object} filter - Filter criteria
|
|
414
|
+
* @returns {number} Number of deleted documents (0 or 1)
|
|
415
|
+
*/
|
|
416
|
+
const find_and_delete_document = (db, database_name, collection_name, filter) => {
|
|
219
417
|
const collection_prefix = `${database_name}:${collection_name}:`;
|
|
220
|
-
|
|
221
418
|
const range = db.getRange({ start: collection_prefix, end: collection_prefix + '\xFF' });
|
|
419
|
+
|
|
222
420
|
for (const { key, value: document_data } of range) {
|
|
223
421
|
const document = JSON.parse(document_data);
|
|
224
422
|
if (matches_filter(document, filter)) {
|
|
225
423
|
db.remove(key);
|
|
226
|
-
return
|
|
424
|
+
return 1;
|
|
227
425
|
}
|
|
228
426
|
}
|
|
229
427
|
|
|
230
|
-
return
|
|
428
|
+
return 0;
|
|
429
|
+
};
|
|
430
|
+
|
|
431
|
+
/**
|
|
432
|
+
* Processes a single delete operation within a bulk write transaction.
|
|
433
|
+
* @param {Object} db - LMDB database instance
|
|
434
|
+
* @param {string} database_name - Name of the database
|
|
435
|
+
* @param {string} collection_name - Name of the collection
|
|
436
|
+
* @param {Object} operation - Delete operation data
|
|
437
|
+
* @returns {Object} Delete result with deleted_count
|
|
438
|
+
*/
|
|
439
|
+
const process_delete_one = (db, database_name, collection_name, operation) => {
|
|
440
|
+
const { filter } = operation;
|
|
441
|
+
|
|
442
|
+
validate_delete_filter(filter);
|
|
443
|
+
|
|
444
|
+
const deleted_count = find_and_delete_document(db, database_name, collection_name, filter);
|
|
445
|
+
|
|
446
|
+
return { deleted_count };
|
|
447
|
+
};
|
|
448
|
+
|
|
449
|
+
/**
|
|
450
|
+
* Creates initial bulk write results object.
|
|
451
|
+
* @returns {Object} Initial results object
|
|
452
|
+
*/
|
|
453
|
+
const create_initial_bulk_results = () => ({
|
|
454
|
+
acknowledged: true,
|
|
455
|
+
inserted_count: 0,
|
|
456
|
+
matched_count: 0,
|
|
457
|
+
modified_count: 0,
|
|
458
|
+
deleted_count: 0,
|
|
459
|
+
upserted_count: 0,
|
|
460
|
+
inserted_ids: {},
|
|
461
|
+
upserted_ids: {}
|
|
462
|
+
});
|
|
463
|
+
|
|
464
|
+
/**
|
|
465
|
+
* Normalizes operation type to handle both naming conventions.
|
|
466
|
+
* @param {string} operation_type - Original operation type
|
|
467
|
+
* @returns {string} Normalized operation type
|
|
468
|
+
*/
|
|
469
|
+
const normalize_operation_type = (operation_type) => {
|
|
470
|
+
const type_mappings = {
|
|
471
|
+
'insertOne': 'insert_one',
|
|
472
|
+
'updateOne': 'update_one',
|
|
473
|
+
'deleteOne': 'delete_one'
|
|
474
|
+
};
|
|
475
|
+
|
|
476
|
+
return type_mappings[operation_type] || operation_type;
|
|
477
|
+
};
|
|
478
|
+
|
|
479
|
+
/**
|
|
480
|
+
* Processes insert operation result.
|
|
481
|
+
* @param {Object} results - Bulk results object
|
|
482
|
+
* @param {Object} result - Insert operation result
|
|
483
|
+
* @param {number} index - Operation index
|
|
484
|
+
*/
|
|
485
|
+
const process_insert_result = (results, result, index) => {
|
|
486
|
+
results.inserted_count++;
|
|
487
|
+
results.inserted_ids[index] = result.inserted_id;
|
|
488
|
+
};
|
|
489
|
+
|
|
490
|
+
/**
|
|
491
|
+
* Processes update operation result.
|
|
492
|
+
* @param {Object} results - Bulk results object
|
|
493
|
+
* @param {Object} result - Update operation result
|
|
494
|
+
* @param {number} index - Operation index
|
|
495
|
+
*/
|
|
496
|
+
const process_update_result = (results, result, index) => {
|
|
497
|
+
results.matched_count += result.matched_count;
|
|
498
|
+
results.modified_count += result.modified_count;
|
|
499
|
+
|
|
500
|
+
if (result.upserted_id) {
|
|
501
|
+
results.upserted_count++;
|
|
502
|
+
results.upserted_ids[index] = result.upserted_id;
|
|
503
|
+
}
|
|
504
|
+
};
|
|
505
|
+
|
|
506
|
+
/**
|
|
507
|
+
* Processes delete operation result.
|
|
508
|
+
* @param {Object} results - Bulk results object
|
|
509
|
+
* @param {Object} result - Delete operation result
|
|
510
|
+
*/
|
|
511
|
+
const process_delete_result = (results, result) => {
|
|
512
|
+
results.deleted_count += result.deleted_count;
|
|
513
|
+
};
|
|
514
|
+
|
|
515
|
+
/**
 * Executes one bulk operation and folds its outcome into the accumulator.
 * The operation descriptor's first key names the type; its value carries
 * the operation payload. Type names are normalized before dispatch.
 * @param {Object} db - Database instance
 * @param {string} database_name - Database name
 * @param {string} collection_name - Collection name
 * @param {Object} operation - Operation descriptor ({ <type>: <data> })
 * @param {number} index - Operation index within the batch
 * @param {Object} results - Bulk results object
 * @throws {Error} When the operation type is not supported
 */
const process_single_operation = (db, database_name, collection_name, operation, index, results) => {
  const [operation_type] = Object.keys(operation);
  const operation_data = operation[operation_type];
  const normalized_type = normalize_operation_type(operation_type);

  if (normalized_type === 'insert_one') {
    const result = process_insert_one(db, database_name, collection_name, operation_data);
    process_insert_result(results, result, index);
    return;
  }

  if (normalized_type === 'update_one') {
    const result = process_update_one(db, database_name, collection_name, operation_data);
    process_update_result(results, result, index);
    return;
  }

  if (normalized_type === 'delete_one') {
    const result = process_delete_one(db, database_name, collection_name, operation_data);
    process_delete_result(results, result);
    return;
  }

  throw new Error(`Unsupported bulk operation: ${operation_type}`);
};
|
|
232
552
|
|
|
553
|
+
/**
 * Emits an info-level log entry describing a finished bulk write.
 * @param {Function} log - Logger function
 * @param {string} database_name - Database name
 * @param {string} collection_name - Collection name
 * @param {number} operations_count - Number of operations
 * @param {Object} results - Operation results
 */
const log_bulk_write_completion = (log, database_name, collection_name, operations_count, results) => {
  const details = {
    database: database_name,
    collection: collection_name,
    operations_count,
    results
  };

  log.info('Bulk write operation completed', details);
};
|
|
569
|
+
|
|
570
|
+
/**
 * Builds the metadata object attached to a queued bulk write.
 * @param {string} database_name - Database name
 * @param {string} collection_name - Collection name
 * @param {number} operations_count - Number of operations
 * @returns {Object} Operation metadata
 */
const create_write_queue_metadata = (database_name, collection_name, operations_count) => {
  return {
    operation: 'bulk_write',
    database: database_name,
    collection: collection_name,
    operations_count
  };
};
|
|
583
|
+
|
|
233
584
|
/**
 * Internal bulk write implementation with transaction support.
 * Validates inputs, then runs every operation inside a single database
 * transaction so the batch is applied atomically, logging on completion.
 * @param {string} database_name - Name of the database to operate on
 * @param {string} collection_name - Name of the collection to operate on
 * @param {Array<Object>} operations - Array of bulk operations to execute
 * @param {Object} options - Additional options for bulk write
 * @returns {Promise<Object>} Bulk write results with operation counts and IDs
 */
const bulk_write_internal = async (database_name, collection_name, operations, options = {}) => {
  const log = create_context_logger();

  validate_bulk_write_parameters(database_name, collection_name, operations);

  const db = get_database();
  const results = create_initial_bulk_results();

  // NOTE(review): `options` is accepted for interface parity but not read
  // here — presumably reserved for future use; confirm against callers.
  await db.transaction(() => {
    for (const [index, operation] of operations.entries()) {
      process_single_operation(db, database_name, collection_name, operation, index, results);
    }
  });

  log_bulk_write_completion(log, database_name, collection_name, operations.length, results);

  return results;
};
|
|
321
610
|
|
|
322
611
|
/**
 * Executes bulk write operations with write queue serialization.
 * Validates inputs up front, then enqueues the transactional internal
 * implementation so writes are serialized with other queued operations.
 * @param {string} database_name - Name of the database to operate on
 * @param {string} collection_name - Name of the collection to operate on
 * @param {Array<Object>} operations - Array of bulk operations to execute
 * @param {Object} options - Additional options for bulk write
 * @returns {Promise<Object>} Bulk write results
 */
const bulk_write = async (database_name, collection_name, operations, options = {}) => {
  validate_bulk_write_parameters(database_name, collection_name, operations);

  const write_queue = get_write_queue();
  const metadata = create_write_queue_metadata(database_name, collection_name, operations.length);
  const run_bulk_write = () => bulk_write_internal(database_name, collection_name, operations, options);

  return await write_queue.enqueue_write_operation(run_bulk_write, metadata);
};
|
|
366
630
|
|