@rpcbase/server 0.380.0 → 0.382.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (96)
  1. package/package.json +16 -72
  2. package/src/getDerivedKey.ts +20 -0
  3. package/src/hashPassword.ts +24 -0
  4. package/src/index.ts +3 -0
  5. package/src/initServer.ts +68 -0
  6. package/src/types/index.ts +7 -0
  7. package/src/types/session.d.ts +10 -0
  8. package/boot/server.js +0 -36
  9. package/boot/shared.js +0 -17
  10. package/boot/worker.js +0 -37
  11. package/constants/keys.ts +0 -1
  12. package/database.js +0 -96
  13. package/express/custom_cors.js +0 -80
  14. package/express/dev_save_coverage.js +0 -18
  15. package/express/index.js +0 -93
  16. package/express/setup_handlers.js +0 -49
  17. package/files.ts +0 -1
  18. package/firebase.js +0 -33
  19. package/get_object_id.ts +0 -39
  20. package/index.js +0 -17
  21. package/mailer/index.js +0 -31
  22. package/mongoose/index.ts +0 -16
  23. package/mongoose/plugins/disable_default_timestamps_plugin.ts +0 -5
  24. package/mongoose/plugins/disable_default_version_key_plugin.ts +0 -5
  25. package/mongoose/plugins/object_id_plugin.ts +0 -31
  26. package/openai.js +0 -10
  27. package/publish-output.txt +0 -0
  28. package/queue/dispatch_indexer_queue.js +0 -22
  29. package/queue/dispatch_worker_queue.js +0 -38
  30. package/queue/index.js +0 -110
  31. package/queue/register_queue_listener.js +0 -180
  32. package/redis.js +0 -2
  33. package/rts/index.js +0 -444
  34. package/search/constants.ts +0 -1
  35. package/search/ensure_index.ts +0 -53
  36. package/search/get_client.ts +0 -15
  37. package/search/index.ts +0 -3
  38. package/src/access-control/apply_policies.js +0 -104
  39. package/src/access-control/get_added_fields.js +0 -23
  40. package/src/access-control/get_policies.js +0 -29
  41. package/src/access-control/hooks/doc_pre_create.js +0 -26
  42. package/src/access-control/hooks/query_pre_delete.js +0 -30
  43. package/src/access-control/index.js +0 -6
  44. package/src/access-control/mongoose_plugin.js +0 -136
  45. package/src/api/index.js +0 -6
  46. package/src/api/stored-values/get_stored_values.js +0 -41
  47. package/src/api/stored-values/index.js +0 -8
  48. package/src/api/stored-values/set_stored_values.js +0 -31
  49. package/src/auth/check_session.js +0 -43
  50. package/src/auth/forgot_password_email.html +0 -515
  51. package/src/auth/get_account.js +0 -35
  52. package/src/auth/get_accounts.js +0 -42
  53. package/src/auth/index.js +0 -24
  54. package/src/auth/reset_password.js +0 -70
  55. package/src/auth/set_new_password.js +0 -63
  56. package/src/auth/set_new_password_email.html +0 -3
  57. package/src/auth/sign_in.js +0 -61
  58. package/src/auth/sign_out.js +0 -11
  59. package/src/auth/sign_up.js +0 -56
  60. package/src/client/client_router.js +0 -105
  61. package/src/files/constants.ts +0 -9
  62. package/src/files/finalize_file_upload.ts +0 -25
  63. package/src/files/helpers/get_grid_fs_bucket.ts +0 -20
  64. package/src/files/index.js +0 -5
  65. package/src/files/tasks/finalize_file_upload/apply_img_preview.ts +0 -49
  66. package/src/files/tasks/finalize_file_upload/constants.ts +0 -23
  67. package/src/files/tasks/finalize_file_upload/download_file.ts +0 -98
  68. package/src/files/tasks/finalize_file_upload/get_text_vectors.ts +0 -13
  69. package/src/files/tasks/finalize_file_upload/helpers/convert_pdf_to_png.ts +0 -34
  70. package/src/files/tasks/finalize_file_upload/helpers/exec.ts +0 -5
  71. package/src/files/tasks/finalize_file_upload/helpers/get_metadata.ts +0 -18
  72. package/src/files/tasks/finalize_file_upload/index.ts +0 -53
  73. package/src/files/tasks/finalize_file_upload/run_ocr.ts +0 -42
  74. package/src/files/tasks/index.ts +0 -6
  75. package/src/files/upload_chunk.ts +0 -83
  76. package/src/helpers/sim_test_inject.ts +0 -21
  77. package/src/models/Invite.js +0 -23
  78. package/src/models/Notification.js +0 -44
  79. package/src/models/Policy.ts +0 -13
  80. package/src/models/ResetPasswordToken.js +0 -14
  81. package/src/models/SearchHistory.ts +0 -22
  82. package/src/models/User.js +0 -42
  83. package/src/models/UserStoredValues.js +0 -18
  84. package/src/models/index.js +0 -7
  85. package/src/notitications/ack_notification.js +0 -26
  86. package/src/notitications/get_notifications.js +0 -39
  87. package/src/notitications/llt/README.md +0 -8
  88. package/src/notitications/llt/get_llts.js +0 -42
  89. package/src/notitications/set_seen.js +0 -26
  90. package/src/sessions/index.js +0 -27
  91. package/src/sessions/session_proxy_middleware.js +0 -18
  92. package/src/sessions/session_store_middleware.js +0 -106
  93. package/src/sessions/warning_proxy_middleware.js +0 -17
  94. package/src/tasks/index.js +0 -8
  95. package/src/tasks/index_item.js +0 -8
  96. package/store/index.js +0 -31
@@ -1,42 +0,0 @@
1
- import path from "path"
2
- import Promise from "bluebird"
3
- import {glob} from "glob11"
4
-
5
- import {exec} from "./helpers/exec"
6
-
7
- import {PDF_IMG_DIR} from "./constants"
8
-
9
-
10
- const get_input_files = async(tmp_wd: string): Promise<Array<string>> => {
11
- const input_glob = path.join(tmp_wd, `./${PDF_IMG_DIR}/*.png`)
12
-
13
- const input_files = await glob(input_glob)
14
-
15
- const sorted = input_files.sort((a, b) => {
16
- // Extract the page numbers from the file names
17
- const page_a = parseInt(a.match(/page\.(\d+)\.png/)?.[1] || "0", 10)
18
- const page_b = parseInt(b.match(/page\.(\d+)\.png/)?.[1] || "0", 10)
19
-
20
- return page_a - page_b
21
- })
22
-
23
- return sorted
24
- }
25
-
26
-
27
- export const run_ocr = async({tmp_wd, metadata}: { tmp_wd: string, metadata: any }) => {
28
-
29
- const input_files = await get_input_files(tmp_wd)
30
-
31
- const run_ocr_file = async(file_path: string) => {
32
- const wd = path.dirname(file_path)
33
- const basename = path.basename(file_path, ".png")
34
-
35
- const cmd = `tesseract ${basename}.png ${basename} -l eng --oem 1 --psm 11`
36
- const out = await exec(cmd, {cwd: wd})
37
- console.log("OUTTT", out)
38
- }
39
-
40
- await Promise.map(input_files, run_ocr_file, {concurrency: 4})
41
-
42
- }
@@ -1,6 +0,0 @@
1
- import queue from "../../../queue"
2
-
3
- import {finalize_file_upload} from "./finalize_file_upload"
4
-
5
-
6
- queue.register_task("finalize_file_upload", finalize_file_upload)
@@ -1,83 +0,0 @@
1
- import assert from "assert"
2
- import fs from "fs"
3
- import {formidable, File} from "formidable"
4
-
5
- import {get_grid_fs_bucket} from "./helpers/get_grid_fs_bucket"
6
- import {sim_test_inject} from "../helpers/sim_test_inject"
7
- import {UPLOAD_BUCKET_NAME} from "./constants"
8
-
9
-
10
- const upload_file_to_bucket = async(file: File, metadata): Promise<void> => {
11
- const bucket = get_grid_fs_bucket(UPLOAD_BUCKET_NAME, metadata.chunk_size)
12
-
13
- const chunk_filename = `${metadata.hash}.${metadata.chunk_index}`
14
-
15
- const upload_stream = bucket.openUploadStream(chunk_filename, {
16
- metadata,
17
- })
18
-
19
- const read_stream = fs.createReadStream(file.filepath)
20
-
21
- read_stream.pipe(upload_stream)
22
-
23
- return new Promise((resolve, reject) => {
24
- upload_stream.on("finish", () => {
25
- // console.log("finished uploading:", upload_stream.id)
26
- resolve()
27
- })
28
- upload_stream.on("error", (error) => {
29
- reject(error)
30
- })
31
- })
32
- }
33
-
34
-
35
- export const upload_chunk = async(req, res, next) => {
36
- const {user_id} = req.session
37
- assert(user_id, "upload_chunk: unable to resolve user_id")
38
-
39
- // https://github.com/node-formidable/formidable#options
40
- const form = formidable({})
41
-
42
- let fields, files
43
- try {
44
- [fields, files] = await form.parse(req)
45
- } catch (err) {
46
- return res.status(500).json({error: "Failed to parse uploaded file"})
47
- }
48
-
49
- await sim_test_inject()
50
- // await new Promise(resolve => setTimeout(resolve, 3000))
51
-
52
- const file_chunk = files.file_chunk[0] as File
53
-
54
- const metadata = {
55
- user_id,
56
- original_filename: fields.original_filename[0],
57
- is_compressed: fields.is_compressed[0] === "yes",
58
- chunk_index: parseInt(fields.chunk_index[0]),
59
- total_chunks: parseInt(fields.total_chunks[0]),
60
- chunk_size: parseInt(fields.chunk_size[0]),
61
- mime_type: fields.mime_type[0],
62
- hash: fields.hash[0],
63
- }
64
-
65
- await upload_file_to_bucket(file_chunk, metadata)
66
-
67
- const result: {
68
- status: string;
69
- finalize_token?: string;
70
- } = {
71
- status: "ok",
72
- }
73
-
74
- const is_last_chunk = metadata.chunk_index === metadata.total_chunks - 1
75
-
76
- if (is_last_chunk) {
77
- result.finalize_token = metadata.hash
78
- }
79
-
80
- res.json({
81
- status: "ok",
82
- })
83
- }
@@ -1,21 +0,0 @@
1
- const ERROR_PROBABILITY = 0.001
2
- const THROTTLE_PROBABILITY = 0.2
3
- const MAX_THROTTLE_MS = 2000
4
-
5
- // randomly inject errors or delays
6
- export const sim_test_inject = async() => {
7
- if (!__DEV__) {
8
- return
9
- }
10
-
11
- const should_error = Math.random() > 1 - ERROR_PROBABILITY
12
- if (should_error) {
13
- throw new Error("random sim test error")
14
- }
15
-
16
- const should_throttle = Math.random() > 1 - THROTTLE_PROBABILITY
17
- if (should_throttle) {
18
- const throttle_delay = Math.floor(Math.random() * MAX_THROTTLE_MS)
19
- await new Promise(resolve => setTimeout(resolve, throttle_delay))
20
- }
21
- }
@@ -1,23 +0,0 @@
1
- /* @flow */
2
- const mongoose = require("../../mongoose")
3
-
4
- const Invite = mongoose.model("Invite", {
5
- email: String,
6
- email_hash: String, // TODO: unused field?
7
- token: String,
8
- expires: Date,
9
- is_ready: {
10
- type: Boolean,
11
- default: false,
12
- },
13
- is_sent: {
14
- type: Boolean,
15
- default: false,
16
- },
17
- is_accepted: {
18
- type: Boolean,
19
- default: false,
20
- },
21
- })
22
-
23
- module.exports = Invite
@@ -1,44 +0,0 @@
1
- /* @flow */
2
- const mongoose = require("../../mongoose")
3
-
4
- const NOTIFICATIONS_TYPES = {
5
- new_message: "new_message",
6
- }
7
-
8
- const NotificationSchema = new mongoose.Schema(
9
- {
10
- type: {
11
- type: String,
12
- enum: Object.keys(NOTIFICATIONS_TYPES),
13
- },
14
- notification: {
15
- title: String,
16
- body: String,
17
- icon: String,
18
- },
19
- action_url: String,
20
- ack_at_ms: {
21
- type: Number,
22
- default: null,
23
- },
24
- server_timestamp_ms: {
25
- type: Number,
26
- required: true,
27
- },
28
- context: mongoose.Schema.Types.Mixed,
29
- push_sent: {
30
- type: Array,
31
- default: [],
32
- },
33
- },
34
- {
35
- strict: false,
36
- versionKey: false,
37
- },
38
- )
39
-
40
- const model = mongoose.model("Notification", NotificationSchema)
41
-
42
- model.NOTIFICATIONS_TYPES = NOTIFICATIONS_TYPES
43
-
44
- module.exports = model
@@ -1,13 +0,0 @@
1
- const mongoose = require("../../mongoose")
2
-
3
- const Policy = mongoose.model("Policy", {
4
- user_id: String,
5
- token_hash: String,
6
- created_at: {
7
- type: Date,
8
- expires: 360, // 6min
9
- default: Date.now,
10
- }
11
- })
12
-
13
- module.exports = Policy
@@ -1,14 +0,0 @@
1
- /* @flow */
2
- const mongoose = require("../../mongoose")
3
-
4
- const ResetPasswordToken = mongoose.model("ResetPasswordToken", {
5
- user_id: String,
6
- token_hash: String,
7
- created_at: {
8
- type: Date,
9
- expires: 360, // 6min
10
- default: Date.now,
11
- }
12
- })
13
-
14
- module.exports = ResetPasswordToken
@@ -1,22 +0,0 @@
1
- import mongoose, {Schema, Model, Document} from "@rpcbase/server/mongoose"
2
-
3
-
4
- interface ISearchHistory extends Document {
5
- server_timestamp_ms: number;
6
- context: any;
7
- data: any;
8
- }
9
-
10
- const SearchHistorySchema: Schema<ISearchHistory> = new Schema(
11
- {
12
- server_timestamp_ms: Number,
13
- context: mongoose.Schema.Types.Mixed,
14
- data: mongoose.Schema.Types.Mixed,
15
- },
16
- {timestamps: true},
17
- )
18
-
19
- export const SearchHistory: Model<ISearchHistory> = mongoose.model<ISearchHistory>(
20
- "SearchHistory",
21
- SearchHistorySchema,
22
- )
@@ -1,42 +0,0 @@
1
- /* @flow */
2
- const mongoose = require("../../mongoose")
3
-
4
- const UserSchema = new mongoose.Schema({
5
- email: {
6
- type: String,
7
- index: true,
8
- },
9
- is_email_verified: {
10
- type: Boolean,
11
- default: false,
12
- },
13
- password_hash: String,
14
- first_name: {
15
- type: String,
16
- default: "",
17
- },
18
- last_name: {
19
- type: String,
20
- default: "",
21
- },
22
- initials: {
23
- type: String,
24
- default: "",
25
- },
26
- avatar_color: {
27
- type: String,
28
- },
29
- profile_picture: Buffer,
30
- profile_picture_updated_at_ms: Number,
31
- devices: {
32
- type: Array,
33
- default: [],
34
- }
35
- }, {
36
- strict: false,
37
- versionKey: false,
38
- })
39
-
40
- const model = mongoose.model("User", UserSchema)
41
-
42
- module.exports = model
@@ -1,18 +0,0 @@
1
- /* @flow */
2
- const mongoose = require("../../mongoose")
3
-
4
- const UserStoredValuesSchema = new mongoose.Schema({
5
- _owners: {
6
- type: Array,
7
- required: true,
8
- index: true,
9
- default: undefined,
10
- },
11
- }, {
12
- strict: false,
13
- versionKey: false,
14
- })
15
-
16
- const model = mongoose.model("UserStoredValues", UserStoredValuesSchema, "userstoredvalues")
17
-
18
- module.exports = model
@@ -1,7 +0,0 @@
1
- require("./Invite")
2
- require("./Notification")
3
- require("./Policy")
4
- require("./ResetPasswordToken")
5
- require("./User")
6
- require("./UserStoredValues")
7
- require("./SearchHistory")
@@ -1,26 +0,0 @@
1
- /* @flow */
2
- const assert = require("assert")
3
- const debug = require("debug")
4
-
5
- const Notification = require("../models/Notification")
6
-
7
-
8
- const log = debug("notifications:ack")
9
-
10
- const ack_notification = async(payload, ctx) => {
11
- const {notification_id} = payload
12
- assert(notification_id, "missing notification id")
13
-
14
- const notif = await Notification.findOne({_id: notification_id}, {ack_at_ms: 1}, {ctx})
15
- notif.ack_at_ms = payload.ack_at_ms
16
-
17
- await notif.save({ctx})
18
-
19
- log("acknowledged notification:", notification_id)
20
-
21
- return {
22
- status: "ok",
23
- }
24
- }
25
-
26
- module.exports = ack_notification
@@ -1,39 +0,0 @@
1
- /* @flow */
2
- // const Group = require("../models/Group")
3
-
4
- const get_notifications = async(payload, ctx) => {
5
- // const groups = await Group.find({
6
- // // _owners: {$in: []}
7
- // }, null, {ctx})
8
-
9
- return {
10
- status: "ok",
11
- notifications: [],
12
- }
13
- }
14
-
15
- module.exports = get_notifications
16
-
17
- // const get_notifications = async () => {
18
- // const uid = getUid()
19
- // const db = firebase.firestore()
20
- //
21
- // const query = await db.collection("notifications").where("owner", "==", uid).get()
22
- // const notifications = query.docs.map((doc) => {
23
- // return doc.data()
24
- // })
25
- //
26
- // // const forms = _.uniqBy(
27
- // // _.flatten(
28
- // // result.map((query) => {
29
- // // return query.docs
30
- // // }),
31
- // // ).map((table) => {
32
- // // const data = table.data()
33
- // // data.id = table.id
34
- // // return data
35
- // // }),
36
- // // "id",
37
- // // )
38
- // return notifications
39
- // }
@@ -1,8 +0,0 @@
1
- ## Long Lived Tasks (LLTs)
2
-
3
- These are large jobs that typically take a while to run (minutes / hours).
4
-
5
- The user and sometimes the system needs to be updated on progress of these tasks.
6
-
7
- Any task that takes more than 10 seconds should be a long lived task.
8
- https://www.nngroup.com/articles/response-times-3-important-limits/
@@ -1,42 +0,0 @@
1
- /* @flow */
2
- const assert = require("assert")
3
- const Promise = require("bluebird")
4
-
5
- const queue = require("@rpcbase/server/queue")
6
-
7
-
8
- const get_llts = async(payload, ctx) => {
9
- const {env_id} = payload
10
- assert(env_id, "unable to find env_id")
11
-
12
- const jobs = await queue.get_jobs(["active"])
13
- // console.log("WOW GOT JOBS", jobs)
14
-
15
- const target_jobs = jobs.filter((j) => j.id.toString().startsWith(`llt-${env_id}`))
16
-
17
- const tasks = await Promise.map(
18
- target_jobs,
19
- async(j) => {
20
- const logs = await j.queue.getJobLogs(j.id)
21
- console.log("Llogs", logs)
22
-
23
- return {
24
- id: j.id,
25
- }
26
- },
27
- {concurrency: 16},
28
- )
29
-
30
- tasks.length > 0 && console.log("tasks", tasks)
31
-
32
- target_jobs.forEach((j) => {
33
- console.log("LLT TARGET PROGRESS", j._progress)
34
- })
35
-
36
- return {
37
- status: "ok",
38
- llts: [],
39
- }
40
- }
41
-
42
- module.exports = get_llts
@@ -1,26 +0,0 @@
1
- /* @flow */
2
-
3
- const WorkflowExecution = require("../models/workflows/WorkflowExecution")
4
-
5
-
6
- const set_seen = async(payload, ctx) => {
7
- const {notif_type} = payload
8
-
9
- if (notif_type === "workflow") {
10
- const workflow_exec = await WorkflowExecution.findOne({_id: payload.exec_id}, {_id: 1}, {ctx})
11
- workflow_exec.seen_at = payload.seen_at
12
-
13
- await workflow_exec.save({ctx})
14
- } else {
15
- throw new Error("set_seen unknown notif_type")
16
- }
17
-
18
- // console.log("Set notif seen", payload)
19
-
20
- return {
21
- status: "ok",
22
- }
23
-
24
- }
25
-
26
- module.exports = set_seen
@@ -1,27 +0,0 @@
1
- /* @flow */
2
- const pc = require("picocolors")
3
-
4
-
5
- const is_development = process.env.NODE_ENV === "development"
6
-
7
- module.exports = (app) => {
8
- const has_session_store = process.env.RB_SESSION_STORE === "yes"
9
-
10
- if (has_session_store) {
11
- console.log("using", pc.bold(pc.green("SESSION STORE")))
12
-
13
- const session_store_middleware = require("./session_store_middleware")
14
- app.use(session_store_middleware)
15
- } else {
16
- console.log("using", pc.bold(pc.red("REVERSE-PROXY SESSIONS!")), "if you want to use the session store, start with the --sessions argument")
17
-
18
- // warn the user
19
- if (is_development) {
20
- app.use(require("./warning_proxy_middleware"))
21
- }
22
-
23
- const session_proxy_middleware = require("./session_proxy_middleware")
24
- app.use(session_proxy_middleware)
25
- }
26
-
27
- }
@@ -1,18 +0,0 @@
1
- /* @flow */
2
-
3
- const USER_ID_HEADER = "rb-user-id"
4
-
5
- // Production middleware used to retrieve session from the proxy
6
- const session_proxy_middleware = (req, res, next) => {
7
- req.session = {}
8
-
9
- const user_id = req.headers[USER_ID_HEADER]
10
- if (user_id) {
11
- req.session.user_id = user_id
12
- }
13
-
14
- next()
15
- }
16
-
17
-
18
- module.exports = session_proxy_middleware
@@ -1,106 +0,0 @@
1
- /* @flow */
2
- const assert = require("assert")
3
- const debug = require("debug")
4
- const session = require("express-session")
5
- const validator = require("validator")
6
- const {createClient} = require("redis")
7
- const RedisStore = require("connect-redis").default
8
-
9
- // const {is_docker} = require("@rpcbase/std")
10
- const is_docker = () => true
11
-
12
-
13
- const {SESSION_STORE_PORT, APP_DOMAIN} = process.env
14
- assert(SESSION_STORE_PORT, "SESSION_STORE_PORT is undefined")
15
-
16
-
17
- const log = debug("rb:session")
18
-
19
-
20
- // WARNING:
21
- // https://stackoverflow.com/questions/70867229/error-connection-timeout-when-connecting-to-redis-docker-instance
22
- // https://github.com/redis/node-redis/issues/1656/
23
-
24
- if (typeof SESSION_STORE_PORT === "string" && !validator.isPort(SESSION_STORE_PORT)) {
25
- throw new Error("expected SESSION_STORE_PORT to be a valid port number")
26
- }
27
-
28
- const hostname = is_docker() ? "session-store" : "127.0.0.1"
29
-
30
- console.log("session-store hostname", hostname, "port", SESSION_STORE_PORT)
31
-
32
- let session_middleware = null
33
-
34
- // EXTREME WARNING: docker issue
35
- // when there is synchronous io redis client fails to connect
36
- setTimeout(async() => {
37
- const reconnectStrategy = (retries) => {
38
- log("redis_client::reconnectStrategy::retrying with arg", retries)
39
- if (retries < 5) {
40
- log("retry count:", retries, "retrying in 1s")
41
- return 4000
42
- } else {
43
- return new Error("max retries expiered")
44
- }
45
- }
46
-
47
- const redis_client = createClient({
48
- socket: {
49
- host: hostname,
50
- port: SESSION_STORE_PORT,
51
- reconnectStrategy,
52
- connectTimeout: 10000,
53
- keepAlive: 0,
54
- },
55
- })
56
-
57
- redis_client.on("ready", () => {
58
- log("session-storage::redis_client connected")
59
- })
60
-
61
- const res = await redis_client.connect()
62
-
63
- const session_config = {
64
- name: "rb_sid",
65
- store: new RedisStore({client: redis_client}),
66
- proxy: true,
67
- saveUninitialized: true,
68
- // WARNING DO NOT USE IN PROD
69
- secret: "NOT_A_SECRET_BECAUSE_THIS_MIDDLEWARE_IS_NOT_MEANT_TO_BE_USED_IN_PROD",
70
- resave: true,
71
- cookie: {
72
- // TODO: set to secure in production
73
- // secure: true
74
- // TODO: test this
75
- maxAge: 1000 * 3600 * 24 * 90 // 90 days
76
- }
77
- }
78
-
79
- // WARNING: apparently doesn't work on localhost or .local domains
80
- // https://stackoverflow.com/questions/1134290/cookies-on-localhost-with-explicit-domain
81
- // TODO: WTF IS THIS
82
- // if (is_docker() && typeof APP_DOMAIN === "string" && APP_DOMAIN.trim() !== "") {
83
- // log("\n\n")
84
- // log("\n\n")
85
- // session_config.cookie.domain = APP_DOMAIN
86
- // // session_config.cookie.secure = true
87
- // session_config.cookie.sameSite = false
88
- // }
89
-
90
- session_middleware = session(session_config)
91
- }, 200)
92
-
93
-
94
- module.exports = (req, res, next) => {
95
- if (typeof session_middleware === "function") {
96
- session_middleware(req, res, next)
97
- } else {
98
- // recheck until it's a function and call it
99
- const interval = setInterval(() => {
100
- if (typeof session_middleware === "function") {
101
- clearInterval(interval)
102
- session_middleware(req, res, next)
103
- }
104
- }, 100)
105
- }
106
- }
@@ -1,17 +0,0 @@
1
- /* @flow */
2
-
3
- const {SUPPRESS_PROXY_SESSION_WARNING} = process.env
4
-
5
- const suppress_warning = SUPPRESS_PROXY_SESSION_WARNING === "yes"
6
-
7
- const warning_proxy_middleware = (req, res, next) => {
8
- const {host} = req.headers
9
-
10
- if (!suppress_warning && host.startsWith("localhost:")) {
11
- console.warn("are you running with the proxy address ? you shouldn't be using localhost when you are running the app with the reverse proxyy")
12
- }
13
-
14
- next()
15
- }
16
-
17
- module.exports = warning_proxy_middleware
@@ -1,8 +0,0 @@
1
- const queue = require("../../queue")
2
-
3
- require("../files/tasks")
4
-
5
- const index_item = require("./index_item")
6
-
7
-
8
- queue.register_task("rb_index_item", index_item)
@@ -1,8 +0,0 @@
1
- /* @flow */
2
-
3
- const index_item = async(payload) => {
4
- console.log("TASK:INDEX_ITEM")
5
- console.log("payload", payload)
6
- }
7
-
8
- module.exports = index_item