@rpcbase/server 0.380.0 → 0.382.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +16 -72
- package/src/getDerivedKey.ts +20 -0
- package/src/hashPassword.ts +24 -0
- package/src/index.ts +3 -0
- package/src/initServer.ts +68 -0
- package/src/types/index.ts +7 -0
- package/src/types/session.d.ts +10 -0
- package/boot/server.js +0 -36
- package/boot/shared.js +0 -17
- package/boot/worker.js +0 -37
- package/constants/keys.ts +0 -1
- package/database.js +0 -96
- package/express/custom_cors.js +0 -80
- package/express/dev_save_coverage.js +0 -18
- package/express/index.js +0 -93
- package/express/setup_handlers.js +0 -49
- package/files.ts +0 -1
- package/firebase.js +0 -33
- package/get_object_id.ts +0 -39
- package/index.js +0 -17
- package/mailer/index.js +0 -31
- package/mongoose/index.ts +0 -16
- package/mongoose/plugins/disable_default_timestamps_plugin.ts +0 -5
- package/mongoose/plugins/disable_default_version_key_plugin.ts +0 -5
- package/mongoose/plugins/object_id_plugin.ts +0 -31
- package/openai.js +0 -10
- package/publish-output.txt +0 -0
- package/queue/dispatch_indexer_queue.js +0 -22
- package/queue/dispatch_worker_queue.js +0 -38
- package/queue/index.js +0 -110
- package/queue/register_queue_listener.js +0 -180
- package/redis.js +0 -2
- package/rts/index.js +0 -444
- package/search/constants.ts +0 -1
- package/search/ensure_index.ts +0 -53
- package/search/get_client.ts +0 -15
- package/search/index.ts +0 -3
- package/src/access-control/apply_policies.js +0 -104
- package/src/access-control/get_added_fields.js +0 -23
- package/src/access-control/get_policies.js +0 -29
- package/src/access-control/hooks/doc_pre_create.js +0 -26
- package/src/access-control/hooks/query_pre_delete.js +0 -30
- package/src/access-control/index.js +0 -6
- package/src/access-control/mongoose_plugin.js +0 -136
- package/src/api/index.js +0 -6
- package/src/api/stored-values/get_stored_values.js +0 -41
- package/src/api/stored-values/index.js +0 -8
- package/src/api/stored-values/set_stored_values.js +0 -31
- package/src/auth/check_session.js +0 -43
- package/src/auth/forgot_password_email.html +0 -515
- package/src/auth/get_account.js +0 -35
- package/src/auth/get_accounts.js +0 -42
- package/src/auth/index.js +0 -24
- package/src/auth/reset_password.js +0 -70
- package/src/auth/set_new_password.js +0 -63
- package/src/auth/set_new_password_email.html +0 -3
- package/src/auth/sign_in.js +0 -61
- package/src/auth/sign_out.js +0 -11
- package/src/auth/sign_up.js +0 -56
- package/src/client/client_router.js +0 -105
- package/src/files/constants.ts +0 -9
- package/src/files/finalize_file_upload.ts +0 -25
- package/src/files/helpers/get_grid_fs_bucket.ts +0 -20
- package/src/files/index.js +0 -5
- package/src/files/tasks/finalize_file_upload/apply_img_preview.ts +0 -49
- package/src/files/tasks/finalize_file_upload/constants.ts +0 -23
- package/src/files/tasks/finalize_file_upload/download_file.ts +0 -98
- package/src/files/tasks/finalize_file_upload/get_text_vectors.ts +0 -13
- package/src/files/tasks/finalize_file_upload/helpers/convert_pdf_to_png.ts +0 -34
- package/src/files/tasks/finalize_file_upload/helpers/exec.ts +0 -5
- package/src/files/tasks/finalize_file_upload/helpers/get_metadata.ts +0 -18
- package/src/files/tasks/finalize_file_upload/index.ts +0 -53
- package/src/files/tasks/finalize_file_upload/run_ocr.ts +0 -42
- package/src/files/tasks/index.ts +0 -6
- package/src/files/upload_chunk.ts +0 -83
- package/src/helpers/sim_test_inject.ts +0 -21
- package/src/models/Invite.js +0 -23
- package/src/models/Notification.js +0 -44
- package/src/models/Policy.ts +0 -13
- package/src/models/ResetPasswordToken.js +0 -14
- package/src/models/SearchHistory.ts +0 -22
- package/src/models/User.js +0 -42
- package/src/models/UserStoredValues.js +0 -18
- package/src/models/index.js +0 -7
- package/src/notitications/ack_notification.js +0 -26
- package/src/notitications/get_notifications.js +0 -39
- package/src/notitications/llt/README.md +0 -8
- package/src/notitications/llt/get_llts.js +0 -42
- package/src/notitications/set_seen.js +0 -26
- package/src/sessions/index.js +0 -27
- package/src/sessions/session_proxy_middleware.js +0 -18
- package/src/sessions/session_store_middleware.js +0 -106
- package/src/sessions/warning_proxy_middleware.js +0 -17
- package/src/tasks/index.js +0 -8
- package/src/tasks/index_item.js +0 -8
- package/store/index.js +0 -31
package/firebase.js
DELETED
|
@@ -1,33 +0,0 @@
|
|
|
1
|
-
/* @flow */
const fs = require("fs")
const path = require("path")
const {promisify} = require("util")

const admin = require("firebase-admin")

const exists = promisify(fs.exists)
const readFile = promisify(fs.readFile)

// Service-account credentials are read from the process working directory.
const NAME = "./firebase-service-account.json"

let firebase_app

// Initializes the firebase admin SDK when a service-account file is present
// next to the process cwd. When the file is missing, initialization is
// skipped and a notice is logged.
const setup = async() => {

  const fb_service_account_path = path.join(process.cwd(), NAME)

  if (await exists(fb_service_account_path)) {
    const service_account = JSON.parse(await readFile(fb_service_account_path))

    firebase_app = admin.initializeApp({
      credential: admin.credential.cert(service_account),
    })

    // FIX: re-export the initialized app. The original only assigned
    // `module.exports.firebase_app` once at module load time — before
    // setup() ever ran — so consumers always observed `undefined`.
    module.exports.firebase_app = firebase_app
  } else {
    console.log(NAME, "not found, skipping firebase initialization")
  }

}

module.exports = {setup}

// Kept for backward compatibility; stays undefined until setup() succeeds.
module.exports.firebase_app = firebase_app
|
package/get_object_id.ts
DELETED
|
@@ -1,39 +0,0 @@
|
|
|
1
|
-
import assert from "assert";
|
|
2
|
-
import crypto from "crypto";
|
|
3
|
-
import mongoose from "./mongoose"
|
|
4
|
-
|
|
5
|
-
import isHexadecimal from "validator/lib/isHexadecimal"
|
|
6
|
-
|
|
7
|
-
const {RB_TENANT_ID} = process.env
|
|
8
|
-
|
|
9
|
-
// TODO: WARNING: DANGER: this code is duplicated in the client
|
|
10
|
-
// TODO: move it to iso
|
|
11
|
-
|
|
12
|
-
let _is_valid_tenant_id = false
|
|
13
|
-
|
|
14
|
-
const validate_tenant_id = () => {
|
|
15
|
-
// Validation: is defined
|
|
16
|
-
assert(RB_TENANT_ID, "expected RB_TENANT_ID to be defined")
|
|
17
|
-
// is hexadecimal
|
|
18
|
-
assert(isHexadecimal(RB_TENANT_ID), "expected RB_TENANT_ID to be a hexadecimal")
|
|
19
|
-
// is 4 bytes
|
|
20
|
-
assert(RB_TENANT_ID.length === 8, "RB_TENANT_ID must be exactly 4 bytes (8 hex chars)")
|
|
21
|
-
|
|
22
|
-
_is_valid_tenant_id = true
|
|
23
|
-
}
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
// generates a 12 bytes mongodb object id using the org id prefix or custom customer id
|
|
27
|
-
export const get_object_id = (): mongoose.Types.ObjectId => {
|
|
28
|
-
if (!_is_valid_tenant_id) {
|
|
29
|
-
validate_tenant_id()
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
const ts_bytes = Math.floor(Date.now() / 1000).toString(16)
|
|
33
|
-
expect(ts_bytes.length).toBe(8)
|
|
34
|
-
|
|
35
|
-
const random_bytes = crypto.randomBytes(4).toString("hex")
|
|
36
|
-
|
|
37
|
-
const obj_id = `${ts_bytes}${RB_TENANT_ID}${random_bytes}`
|
|
38
|
-
return new mongoose.Types.ObjectId(obj_id)
|
|
39
|
-
}
|
package/index.js
DELETED
|
@@ -1,17 +0,0 @@
|
|
|
1
|
-
/* @flow */

// Aggregated public surface of the server package. Require order is kept
// because some of these modules run side effects on load.
const database = require("./database")
const queue = require("./queue")
const express = require("./express")
const firebase = require("./firebase")
const client_router = require("./src/client/client_router")
const sign_up = require("./src/auth/sign_up")

const api = {
  queue,
  database,
  express,
  firebase,
  client_router,
  sign_up,
}

module.exports = api
|
package/mailer/index.js
DELETED
|
@@ -1,31 +0,0 @@
|
|
|
1
|
-
/* @flow */
const debug = require("debug")
const postmark = require("postmark")

const log = debug("rb:mailer")

const {POSTMARK_API_KEY, NODE_ENV, RB_FORCE_SEND_EMAILS} = process.env

const is_production = NODE_ENV === "production"
const force_send = RB_FORCE_SEND_EMAILS === "yes"


// TMP use proper mock smtp server in development

log("mailer, is_production:", {is_production})

const has_api_key = typeof POSTMARK_API_KEY === "string" && POSTMARK_API_KEY.trim() !== ""

let client

// FIX: the original condition parsed as `force_send || (is_production &&
// has_api_key)`, so RB_FORCE_SEND_EMAILS=yes without a POSTMARK_API_KEY
// constructed a real postmark client with an undefined token (which throws).
// A valid key is now required on both paths; otherwise the logging stub
// below is used, which resolves with {Message: "OK"} without sending.
if ((force_send || is_production) && has_api_key) {
  client = new postmark.ServerClient(POSTMARK_API_KEY)
} else {
  client = {
    sendEmail: async(payload) => {
      log("sendEmail disabled when not in production")
      log("From:", payload.From, "To:", payload.To, "Subject:", payload.Subject)
      return {Message: "OK"}
    }
  }
}

module.exports = client
|
package/mongoose/index.ts
DELETED
|
@@ -1,16 +0,0 @@
|
|
|
1
|
-
// eslint-disable-next-line @rpcbase/lint/no-rb-restricted-imports
|
|
2
|
-
import mongoose from "mongoose"
|
|
3
|
-
import {object_id_plugin} from "./plugins/object_id_plugin"
|
|
4
|
-
import {disable_default_version_key_plugin} from "./plugins/disable_default_version_key_plugin"
|
|
5
|
-
import {disable_default_timestamps_plugin} from "./plugins/disable_default_timestamps_plugin"
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
mongoose.set("strictQuery", false)
|
|
9
|
-
|
|
10
|
-
mongoose.plugin(object_id_plugin)
|
|
11
|
-
mongoose.plugin(disable_default_version_key_plugin)
|
|
12
|
-
mongoose.plugin(disable_default_timestamps_plugin)
|
|
13
|
-
|
|
14
|
-
export default mongoose
|
|
15
|
-
|
|
16
|
-
export * from "mongoose"
|
|
@@ -1,31 +0,0 @@
|
|
|
1
|
-
import mongoose, {Schema} from "../"
|
|
2
|
-
|
|
3
|
-
import {get_object_id} from "../../get_object_id"
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
export const object_id_plugin = (schema: Schema) => {
|
|
7
|
-
if (schema.options._id === false) {
|
|
8
|
-
return
|
|
9
|
-
}
|
|
10
|
-
// Disable Mongoose's automatic _id field creation
|
|
11
|
-
if (!schema.options._id) {
|
|
12
|
-
schema.options._id = false
|
|
13
|
-
}
|
|
14
|
-
|
|
15
|
-
// Add your custom _id field
|
|
16
|
-
schema.add({
|
|
17
|
-
_id: {
|
|
18
|
-
type: mongoose.Schema.Types.ObjectId,
|
|
19
|
-
default: () => get_object_id(),
|
|
20
|
-
immutable: true,
|
|
21
|
-
},
|
|
22
|
-
})
|
|
23
|
-
|
|
24
|
-
// Optional: Ensure the _id field is always set
|
|
25
|
-
schema.pre("save", function(next) {
|
|
26
|
-
if (!this._id) {
|
|
27
|
-
this._id = get_object_id()
|
|
28
|
-
}
|
|
29
|
-
next()
|
|
30
|
-
})
|
|
31
|
-
}
|
package/openai.js
DELETED
package/publish-output.txt
DELETED
|
File without changes
|
|
@@ -1,22 +0,0 @@
|
|
|
1
|
-
/* @flow */

/**
 * Enqueues an "index_item" job describing a document change so the search
 * indexer can process it.
 *
 * @param queue queue module exposing add(name, payload, options)
 * @param model_name mongoose model name of the changed document
 * @param coll_name raw mongodb collection name
 * @param op change-stream operation type ("insert" | "update" | "delete")
 * @param doc the full changed document (must have `_id`)
 * @param update_description mongodb updateDescription, when op is an update
 * @returns the promise from queue.add, so enqueueing can be awaited
 */
const dispatch_indexer_queue = (
  queue,
  model_name,
  coll_name,
  op,
  doc,
  update_description,
) => {
  // FIX: the original discarded the promise returned by queue.add, leaving
  // enqueue failures unobservable (a floating promise). Return it so
  // callers may await or attach a .catch; existing callers that ignore the
  // return value are unaffected.
  return queue.add(
    "index_item",
    {op, doc, model_name, coll_name, update_description},
    {
      // Date.now() keeps the id unique so repeated changes to the same
      // document are not collapsed by the queue's jobId dedupe.
      jobId: `rb-index-item-${op}-${doc._id}-${Date.now()}`,
      removeOnComplete: true,
      removeOnFail: true,
    },
  )
}

module.exports = dispatch_indexer_queue
|
|
@@ -1,38 +0,0 @@
|
|
|
1
|
-
/* @flow */

// Maps mongodb change-stream operation types to the verb used in task
// handler names (`on_<verb>_<model>`).
// TODO: should we deprecate create, to keep consistency with mongodb?
// FIX: the original literal listed the `insert` key twice — first as
// "create", then as the misspelled "instert" — so inserts resolved to
// non-existent `on_instert_*` handler names and the create-deprecation
// warning below was unreachable. `insert` now maps to "insert", with an
// explicit fallback to the deprecated `on_create_*` naming.
const op_names = {
  insert: "insert",
  delete: "delete",
  update: "update",
}

/**
 * Dispatches a document change to the matching registered task handler
 * (`on_<op>_<model>`), if any. No-op when no handler matches.
 */
const dispatch_worker_queue = (queue, model_name, op, doc, update_description) => {
  const tasks = queue.get_tasks()

  let handler_name = `on_${op_names[op]}_${model_name.toLowerCase()}`

  // Backward compatibility: fall back to the deprecated on_create_* naming
  // for insert operations, warning so the handler gets renamed.
  if (op === "insert" && !Object.keys(tasks).includes(handler_name)) {
    const legacy_name = `on_create_${model_name.toLowerCase()}`
    if (Object.keys(tasks).includes(legacy_name)) {
      console.warn("create tasks are deprecated, rename to insert instead")
      handler_name = legacy_name
    }
  }

  // skip if there's no matching handler
  if (!Object.keys(tasks).includes(handler_name)) return

  // Normalize mongodb's camelCase updatedFields into a snake_case list of
  // field names. NOTE(review): this mutates the caller's object, as the
  // original did — kept for compatibility with downstream consumers.
  if (update_description?.updatedFields) {
    update_description.updated_fields = Object.keys(update_description.updatedFields)
    delete update_description.updatedFields
  }

  queue.add(handler_name, {doc, update_description}, {
    jobId: `${op}-${doc._id}`,
    removeOnComplete: true,
    removeOnFail: true,
  })
}


module.exports = dispatch_worker_queue
|
package/queue/index.js
DELETED
|
@@ -1,110 +0,0 @@
|
|
|
1
|
-
/* @flow */
|
|
2
|
-
const Queue = require("bull")
|
|
3
|
-
|
|
4
|
-
// const {is_docker} = require("@rpcbase/std")
|
|
5
|
-
const is_docker = () => true
|
|
6
|
-
|
|
7
|
-
const {WORKER_QUEUE_PORT} = process.env
|
|
8
|
-
|
|
9
|
-
const hostname = is_docker() ? "worker-queue" : "127.0.0.1"
|
|
10
|
-
|
|
11
|
-
const worker_queue_url = `redis://${hostname}:${WORKER_QUEUE_PORT}`
|
|
12
|
-
|
|
13
|
-
// TODO: implement latency queues
|
|
14
|
-
const QUEUE_DEFAULT_NAME = "rb-queue-default"
|
|
15
|
-
// const QUEUE_10S_NAME
|
|
16
|
-
|
|
17
|
-
const tasks_list = Object.create(null)
|
|
18
|
-
|
|
19
|
-
let worker_queue
|
|
20
|
-
|
|
21
|
-
const get_instance = () => {
|
|
22
|
-
if (!worker_queue) worker_queue = new Queue(QUEUE_DEFAULT_NAME, worker_queue_url)
|
|
23
|
-
return worker_queue
|
|
24
|
-
}
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
const worker_add = async(task_name, payload, options) => {
|
|
28
|
-
if (!worker_queue) {
|
|
29
|
-
worker_queue = new Queue(QUEUE_DEFAULT_NAME, worker_queue_url)
|
|
30
|
-
}
|
|
31
|
-
const res = await worker_queue.add({task_name, payload}, options)
|
|
32
|
-
// console.log("worker_add:created task:", task_name, res.id)
|
|
33
|
-
return res
|
|
34
|
-
}
|
|
35
|
-
|
|
36
|
-
const get_job = async(job_id) => {
|
|
37
|
-
const queue = get_instance()
|
|
38
|
-
return queue.getJob(job_id)
|
|
39
|
-
}
|
|
40
|
-
|
|
41
|
-
const get_jobs = async(...args) => {
|
|
42
|
-
const queue = get_instance()
|
|
43
|
-
return queue.getJobs(...args)
|
|
44
|
-
}
|
|
45
|
-
|
|
46
|
-
const register_task = (name, fn) => {
|
|
47
|
-
// append module to tasks list
|
|
48
|
-
tasks_list[name] = fn
|
|
49
|
-
}
|
|
50
|
-
|
|
51
|
-
const get_tasks = () => tasks_list
|
|
52
|
-
|
|
53
|
-
const start_worker = async() => {
|
|
54
|
-
console.log("start worker queue", worker_queue_url)
|
|
55
|
-
|
|
56
|
-
worker_queue = new Queue(QUEUE_DEFAULT_NAME, worker_queue_url)
|
|
57
|
-
|
|
58
|
-
// error handler
|
|
59
|
-
worker_queue.on("error", (err) => {
|
|
60
|
-
console.log(`A queue error happened: ${err.message}`)
|
|
61
|
-
})
|
|
62
|
-
|
|
63
|
-
// on stall
|
|
64
|
-
worker_queue.on("stalled", (job) => {
|
|
65
|
-
console.log(`Job ${job.id} stalled and will be reprocessed`)
|
|
66
|
-
})
|
|
67
|
-
|
|
68
|
-
worker_queue.on("job failed", (id, err) => {
|
|
69
|
-
console.log(`Job ${id} failed with error ${err.message}`)
|
|
70
|
-
})
|
|
71
|
-
|
|
72
|
-
// processor function
|
|
73
|
-
// default concurrency is 2, in practice we will implement this with multiple workers
|
|
74
|
-
// to take advantage of multi cpu machines
|
|
75
|
-
worker_queue.process(2, async(job) => {
|
|
76
|
-
try {
|
|
77
|
-
const {task_name, payload} = job.data
|
|
78
|
-
|
|
79
|
-
if (!job.id.startsWith("rb-index-item-")) {
|
|
80
|
-
console.log(`starting job ${job.id} ${task_name}`)
|
|
81
|
-
}
|
|
82
|
-
|
|
83
|
-
const task_fn = tasks_list[task_name]
|
|
84
|
-
|
|
85
|
-
const res = await task_fn(payload, job)
|
|
86
|
-
return res
|
|
87
|
-
} catch (err) {
|
|
88
|
-
console.log("ERRRR", err)
|
|
89
|
-
}
|
|
90
|
-
})
|
|
91
|
-
|
|
92
|
-
return worker_queue
|
|
93
|
-
}
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
// TMP used to create additional queues, should be implemented with a better API in the future
|
|
97
|
-
const get_url = () => {
|
|
98
|
-
return worker_queue_url
|
|
99
|
-
}
|
|
100
|
-
|
|
101
|
-
module.exports = {
|
|
102
|
-
start: start_worker,
|
|
103
|
-
register_task,
|
|
104
|
-
get_tasks,
|
|
105
|
-
get_job,
|
|
106
|
-
get_jobs,
|
|
107
|
-
add: worker_add,
|
|
108
|
-
instance: get_instance,
|
|
109
|
-
get_url,
|
|
110
|
-
}
|
|
@@ -1,180 +0,0 @@
|
|
|
1
|
-
/* @flow */
const debug = require("debug")
const _get = require("lodash/get")
const _set = require("lodash/set")

const mongoose = require("../mongoose")
const queue = require("./index")

const dispatch_worker_queue = require("./dispatch_worker_queue")
const dispatch_indexer_queue = require("./dispatch_indexer_queue")


const log = debug("rb:queue:listener")

const {RB_APP_NAME, RB_TENANT_ID} = process.env

const RETRY_MAXIMUM_DELAY = 3000
const RETRY_MINIMUM_DELAY = 50
const MAX_RETRIES = 20

// TODO: reset retry counters when connection becomes healthy again?
let retry_counter = 0
let emitter = null


// Resolves a registered mongoose model name from a raw collection name.
// Returns undefined for dynamic/unregistered collections.
const resolve_model_name = (coll_name) => {
  return Object.keys(mongoose.models).find((k) => {
    return mongoose.models[k].collection.collectionName === coll_name
  })
}

// TODO: use proper mongodb delete event here now
// mongoose middleware used for delete events
const mongoose_delete_plugin = (schema) => {
  schema.pre("deleteOne", function(next) {
    throw new Error("rts:deleteOne is deprecated, use findOneAndDelete")
  })

  // TODO: add other delete operations
  // https://mongoosejs.com/docs/queries.html
  schema.post("findOneAndDelete", function(doc) {
    // FIX: the original declared `const change = null` and then read
    // `change.ns.coll`, throwing a TypeError on every delete. In this
    // query middleware `this` is the Query, so the collection name is
    // taken from its model instead.
    const coll_name = this.model.collection.collectionName

    const model_name = resolve_model_name(coll_name)

    // there are dynamic models so we can't assert model_name is there
    if (!model_name) {
      return
    }

    dispatch_worker_queue(queue, model_name, "delete", doc)
    // FIX: dispatch_indexer_queue's signature is (queue, model_name,
    // coll_name, op, doc, update_description); the original omitted
    // coll_name, shifting every subsequent argument by one.
    dispatch_indexer_queue(queue, model_name, coll_name, "delete", doc)
  })
}


// Warns when a changed document's _id was not generated by get_object_id()
// — its tenant segment (chars 8..16) must equal RB_TENANT_ID. Logs only;
// never throws.
const assert_doc_id = (change) => {
  const doc_id = change.documentKey?._id?.toString()

  // some changes aren't on documents
  if (!doc_id) return

  const sub = doc_id.substring(8, 16)
  // FIX: the original used jest's `expect(...).toBe(...)` inside a
  // try/catch; `expect` is undefined at runtime, so the catch logged a
  // ReferenceError message instead of the intended mismatch. Compare
  // directly and log the real diagnostic.
  if (sub !== RB_TENANT_ID) {
    console.log("in document:", change.ns, change.documentKey)
    console.log("_id must be instanciated with get_object_id()")
    console.log(`expected tenant segment "${sub}" to be "${RB_TENANT_ID}"`)
  }
}

// Routes a change-stream event to the worker and indexer queues.
const dispatch_change_handler = (change) => {
  // skip if this is a file upload
  if (change.ns?.coll?.endsWith(".files") || change.ns?.coll?.endsWith(".chunks")) {
    return
  }

  // verify we have correct object ids
  assert_doc_id(change)

  const coll_name = change.ns.coll

  const model_name = resolve_model_name(coll_name)

  if (!model_name) {
    return
  }

  const op = change.operationType

  dispatch_worker_queue(queue, model_name, op, change.fullDocument, change.updateDescription)
  dispatch_indexer_queue(queue, model_name, coll_name, op, change.fullDocument, change.updateDescription)
}

// Opens (or re-opens) the mongodb change stream and wires its handlers.
// Re-registers itself with exponential backoff when the stream closes;
// exits the process after MAX_RETRIES consecutive failures.
const register_db_emitter = () => {
  // TODO: unclear why it's necessary to have this step here too even though we already removeListeners in the on close event
  if (emitter) {
    emitter.removeAllListeners()
    emitter.close()
  }

  log("registering db emitter")

  // TODO: implement delete operation fullDocument retrieve,
  // when this is released https://jira.mongodb.org/browse/SERVER-36941
  // this is done via a plugin right now
  // https://mongodb.github.io/node-mongodb-native/4.9/interfaces/ChangeStreamOptions.html

  // Set up the change stream with a filter to only listen to the specific database
  const pipeline = [
    { $match: { "ns.db": RB_APP_NAME } },
    { $match: {
      "ns.coll": { $nin: ["file-uploads.files", "file-uploads.chunks"] },
      "operationType": { $in: ["insert", "update" /* "delete"*/] } },
    },
  ]

  // https://www.mongodb.com/docs/manual/reference/method/Mongo.watch/
  emitter = mongoose.connection.watch(pipeline, {fullDocument: "updateLookup"})

  emitter.on("change", dispatch_change_handler)

  emitter.on("error", (err) => {
    console.log("server:queue:register_queue_listener: change listener emitter got error", err)
  })

  emitter.on("close", (arg, arg2) => {
    emitter.removeAllListeners()
    emitter.close()

    retry_counter++

    if (retry_counter > MAX_RETRIES) {
      console.log("queue listener reached max retries, exiting with failure")
      process.exit(1)
      return
    }

    // Exponential backoff, clamped to [RETRY_MINIMUM_DELAY+, RETRY_MAXIMUM_DELAY].
    const timeout_amount = Math.min(RETRY_MAXIMUM_DELAY, RETRY_MINIMUM_DELAY + 10 * Math.pow(2, retry_counter))
    console.log("queue_listener: emitter closed, retrying in", timeout_amount)

    setTimeout(() => {
      register_db_emitter()
    }, timeout_amount)
  })

  emitter.on("end", (arg) => {
    console.log("EMITTER ENDED", arg)
  })

  return emitter
}


// Registers the delete plugin and, once the mongoose connection opens,
// the change-stream emitter. Resolves with the emitter.
// NOTE(review): the promise never rejects (the error handler below only
// logs) — preserved from the original; callers should not rely on rejection.
const register_queue_listener = () => new Promise((resolve, reject) => {
  // register the mongoose delete plugin
  log("registering mongoose_delete_plugin")
  mongoose.plugin(mongoose_delete_plugin)

  mongoose.connection.once("open", () => {
    const db_emitter = register_db_emitter()
    resolve(db_emitter)
  })

  mongoose.connection.on("error", (err) => {
    console.log("register_queue_listener:", "mongoose connection error", err)
    // reject(err)
  })

  mongoose.connection.on("disconnected", (err) => {
    console.log("mongodb:", "mongodb disconnected", err)
  })
})


module.exports = register_queue_listener
|
package/redis.js
DELETED