@budibase/backend-core 2.21.4 → 2.21.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/dist/index.js +249 -32
  2. package/dist/index.js.map +4 -4
  3. package/dist/index.js.meta.json +1 -1
  4. package/dist/package.json +4 -4
  5. package/dist/plugins.js.meta.json +1 -1
  6. package/dist/src/cache/base/index.d.ts +32 -2
  7. package/dist/src/cache/base/index.js +60 -1
  8. package/dist/src/cache/base/index.js.map +1 -1
  9. package/dist/src/cache/docWritethrough.d.ts +21 -0
  10. package/dist/src/cache/docWritethrough.js +107 -0
  11. package/dist/src/cache/docWritethrough.js.map +1 -0
  12. package/dist/src/cache/generic.d.ts +2 -2
  13. package/dist/src/cache/generic.js.map +1 -1
  14. package/dist/src/cache/index.d.ts +1 -0
  15. package/dist/src/cache/index.js +2 -1
  16. package/dist/src/cache/index.js.map +1 -1
  17. package/dist/src/configs/configs.d.ts +1 -1
  18. package/dist/src/constants/db.d.ts +3 -0
  19. package/dist/src/constants/db.js +3 -0
  20. package/dist/src/constants/db.js.map +1 -1
  21. package/dist/src/context/mainContext.d.ts +1 -0
  22. package/dist/src/context/mainContext.js +13 -1
  23. package/dist/src/context/mainContext.js.map +1 -1
  24. package/dist/src/db/couch/DatabaseImpl.d.ts +3 -1
  25. package/dist/src/db/couch/DatabaseImpl.js +18 -1
  26. package/dist/src/db/couch/DatabaseImpl.js.map +1 -1
  27. package/dist/src/db/instrumentation.d.ts +1 -1
  28. package/dist/src/db/instrumentation.js +5 -2
  29. package/dist/src/db/instrumentation.js.map +1 -1
  30. package/dist/src/environment.d.ts +1 -0
  31. package/dist/src/environment.js +1 -1
  32. package/dist/src/environment.js.map +1 -1
  33. package/dist/src/events/analytics.d.ts +1 -1
  34. package/dist/src/index.d.ts +1 -0
  35. package/dist/src/queue/constants.d.ts +2 -1
  36. package/dist/src/queue/constants.js +1 -0
  37. package/dist/src/queue/constants.js.map +1 -1
  38. package/dist/src/queue/inMemoryQueue.d.ts +23 -13
  39. package/dist/src/queue/inMemoryQueue.js +83 -30
  40. package/dist/src/queue/inMemoryQueue.js.map +1 -1
  41. package/dist/src/queue/listeners.js +2 -0
  42. package/dist/src/queue/listeners.js.map +1 -1
  43. package/dist/src/queue/queue.d.ts +1 -0
  44. package/dist/src/queue/queue.js.map +1 -1
  45. package/dist/src/redis/init.d.ts +1 -0
  46. package/dist/src/redis/init.js +12 -2
  47. package/dist/src/redis/init.js.map +1 -1
  48. package/dist/src/redis/redis.d.ts +1 -0
  49. package/dist/src/redis/redis.js +6 -0
  50. package/dist/src/redis/redis.js.map +1 -1
  51. package/dist/src/redis/utils.d.ts +2 -1
  52. package/dist/src/redis/utils.js +1 -0
  53. package/dist/src/redis/utils.js.map +1 -1
  54. package/package.json +4 -4
  55. package/src/cache/base/index.ts +62 -4
  56. package/src/cache/docWritethrough.ts +97 -0
  57. package/src/cache/generic.ts +3 -2
  58. package/src/cache/index.ts +1 -0
  59. package/src/cache/tests/docWritethrough.spec.ts +293 -0
  60. package/src/constants/db.ts +3 -0
  61. package/src/context/mainContext.ts +11 -0
  62. package/src/db/couch/DatabaseImpl.ts +18 -1
  63. package/src/db/instrumentation.ts +5 -2
  64. package/src/db/tests/DatabaseImpl.spec.ts +55 -0
  65. package/src/environment.ts +1 -0
  66. package/src/queue/constants.ts +1 -0
  67. package/src/queue/inMemoryQueue.ts +79 -24
  68. package/src/queue/listeners.ts +2 -0
  69. package/src/queue/queue.ts +2 -0
  70. package/src/redis/init.ts +12 -1
  71. package/src/redis/redis.ts +5 -0
  72. package/src/redis/utils.ts +1 -0
@@ -0,0 +1,97 @@
1
import { AnyDocument, Database } from "@budibase/types"

import { JobQueue, createQueue } from "../queue"
import * as dbUtils from "../db"

// Payload pushed onto the writethrough queue: the target database/document
// plus the partial data to merge into that document.
interface ProcessDocMessage {
  dbName: string
  docId: string
  data: Record<string, any>
}

// Retry budget per queued message — conflict (409) failures are retried by
// the queue until the patch lands or this many attempts have been made.
const PERSIST_MAX_ATTEMPTS = 100
// Module-level singleton processor, created lazily via init()/getProcessor().
let processor: DocWritethroughProcessor | undefined

// Queue shared by all DocWritethrough instances; jobs retry automatically up
// to PERSIST_MAX_ATTEMPTS times.
export const docWritethroughProcessorQueue = createQueue<ProcessDocMessage>(
  JobQueue.DOC_WRITETHROUGH_QUEUE,
  {
    jobOptions: {
      attempts: PERSIST_MAX_ATTEMPTS,
    },
  }
)
23
+
24
+ class DocWritethroughProcessor {
25
+ init() {
26
+ docWritethroughProcessorQueue.process(async message => {
27
+ try {
28
+ await this.persistToDb(message.data)
29
+ } catch (err: any) {
30
+ if (err.status === 409) {
31
+ // If we get a 409, it means that another job updated it meanwhile. We want to retry it to persist it again.
32
+ throw new Error(
33
+ `Conflict persisting message ${message.id}. Attempt ${message.attemptsMade}`
34
+ )
35
+ }
36
+
37
+ throw err
38
+ }
39
+ })
40
+ return this
41
+ }
42
+
43
+ private async persistToDb({
44
+ dbName,
45
+ docId,
46
+ data,
47
+ }: {
48
+ dbName: string
49
+ docId: string
50
+ data: Record<string, any>
51
+ }) {
52
+ const db = dbUtils.getDB(dbName)
53
+ let doc: AnyDocument | undefined
54
+ try {
55
+ doc = await db.get(docId)
56
+ } catch {
57
+ doc = { _id: docId }
58
+ }
59
+
60
+ doc = { ...doc, ...data }
61
+ await db.put(doc)
62
+ }
63
+ }
64
+
65
+ export class DocWritethrough {
66
+ private db: Database
67
+ private _docId: string
68
+
69
+ constructor(db: Database, docId: string) {
70
+ this.db = db
71
+ this._docId = docId
72
+ }
73
+
74
+ get docId() {
75
+ return this._docId
76
+ }
77
+
78
+ async patch(data: Record<string, any>) {
79
+ await docWritethroughProcessorQueue.add({
80
+ dbName: this.db.name,
81
+ docId: this.docId,
82
+ data,
83
+ })
84
+ }
85
+ }
86
+
87
+ export function init(): DocWritethroughProcessor {
88
+ processor = new DocWritethroughProcessor().init()
89
+ return processor
90
+ }
91
+
92
+ export function getProcessor(): DocWritethroughProcessor {
93
+ if (!processor) {
94
+ return init()
95
+ }
96
+ return processor
97
+ }
@@ -26,7 +26,8 @@ export const store = (...args: Parameters<typeof GENERIC.store>) =>
26
26
  GENERIC.store(...args)
27
27
  export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
28
28
  GENERIC.delete(...args)
29
- export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
30
- GENERIC.withCache(...args)
29
// Type-parameterised passthrough to the shared cache helper so callers keep
// the resolved type T of the cached value instead of a widened one.
export const withCache = <T>(
  ...args: Parameters<typeof GENERIC.withCache<T>>
) => GENERIC.withCache(...args)
31
32
  export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
32
33
  GENERIC.bustCache(...args)
@@ -5,3 +5,4 @@ export * as writethrough from "./writethrough"
5
5
  export * as invite from "./invite"
6
6
  export * as passwordReset from "./passwordReset"
7
7
  export * from "./generic"
8
+ export * as docWritethrough from "./docWritethrough"
@@ -0,0 +1,293 @@
1
import tk from "timekeeper"

import _ from "lodash"
import { DBTestConfiguration, generator, structures } from "../../../tests"
import { getDB } from "../../db"

import {
  DocWritethrough,
  docWritethroughProcessorQueue,
  init,
} from "../docWritethrough"

import InMemoryQueue from "../../queue/inMemoryQueue"

// Captured once so createdAt/updatedAt audit-field assertions have a stable
// reference point (time is controlled with timekeeper below).
const initialTime = Date.now()

// Tests run against the in-memory queue implementation; this drains it so
// assertions only run after every queued patch has been processed.
// NOTE(review): assumes docWritethroughProcessorQueue is an InMemoryQueue in
// the test environment — the `as never` cast bypasses the type check.
async function waitForQueueCompletion() {
  const queue: InMemoryQueue = docWritethroughProcessorQueue as never
  await queue.waitForCompletion()
}

describe("docWritethrough", () => {
  beforeAll(() => {
    // Start the queue processor once for the whole suite.
    init()
  })

  const config = new DBTestConfiguration()

  const db = getDB(structures.db.id())
  let documentId: string
  let docWritethrough: DocWritethrough

  describe("patch", () => {
    // Builds a patch payload of `fieldCount` random word keys/values.
    function generatePatchObject(fieldCount: number) {
      const keys = generator.unique(() => generator.word(), fieldCount)
      return keys.reduce((acc, c) => {
        acc[c] = generator.word()
        return acc
      }, {} as Record<string, any>)
    }

    beforeEach(async () => {
      // Fresh document and writethrough handle per test to isolate state.
      jest.clearAllMocks()
      documentId = structures.uuid()
      docWritethrough = new DocWritethrough(db, documentId)
    })

    it("patching will not persist until the messages are persisted", async () => {
      await config.doInTenant(async () => {
        // Patches are only enqueued here — nothing is written until the
        // queue is drained, so the doc must not exist yet.
        await docWritethrough.patch(generatePatchObject(2))
        await docWritethrough.patch(generatePatchObject(2))

        expect(await db.exists(documentId)).toBe(false)
      })
    })

    it("patching will persist when the messages are persisted", async () => {
      await config.doInTenant(async () => {
        const patch1 = generatePatchObject(2)
        const patch2 = generatePatchObject(2)
        await docWritethrough.patch(patch1)
        await docWritethrough.patch(patch2)

        await waitForQueueCompletion()

        // This will not be persisted
        const patch3 = generatePatchObject(3)
        await docWritethrough.patch(patch3)

        // Two drained patches → two writes, hence a "2-" revision, and only
        // patch1/patch2 fields present.
        expect(await db.get(documentId)).toEqual({
          _id: documentId,
          ...patch1,
          ...patch2,
          _rev: expect.stringMatching(/2-.+/),
          createdAt: new Date(initialTime).toISOString(),
          updatedAt: new Date(initialTime).toISOString(),
        })
      })
    })

    it("patching will persist keeping the previous data", async () => {
      await config.doInTenant(async () => {
        const patch1 = generatePatchObject(2)
        const patch2 = generatePatchObject(2)
        await docWritethrough.patch(patch1)
        await docWritethrough.patch(patch2)

        await waitForQueueCompletion()

        const patch3 = generatePatchObject(3)
        await docWritethrough.patch(patch3)

        await waitForQueueCompletion()

        // Later patches merge over, not replace, previously persisted data.
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining({
            _id: documentId,
            ...patch1,
            ...patch2,
            ...patch3,
          })
        )
      })
    })

    it("date audit fields are set correctly when persisting", async () => {
      await config.doInTenant(async () => {
        const patch1 = generatePatchObject(2)
        const patch2 = generatePatchObject(2)
        await docWritethrough.patch(patch1)
        const date1 = new Date()
        await waitForQueueCompletion()
        await docWritethrough.patch(patch2)

        // Advance the clock so the second write gets a distinct timestamp.
        tk.travel(Date.now() + 100)
        const date2 = new Date()
        await waitForQueueCompletion()

        // createdAt sticks to the first write, updatedAt follows the last.
        expect(date1).not.toEqual(date2)
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining({
            createdAt: date1.toISOString(),
            updatedAt: date2.toISOString(),
          })
        )
      })
    })

    it("concurrent patches will override keys", async () => {
      await config.doInTenant(async () => {
        const patch1 = generatePatchObject(2)
        await docWritethrough.patch(patch1)
        await waitForQueueCompletion()
        const patch2 = generatePatchObject(1)
        await docWritethrough.patch(patch2)

        // patch2 is still queued at this point, so the key below must still
        // hold patch1's value.
        const keyToOverride = _.sample(Object.keys(patch1))!
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining({
            [keyToOverride]: patch1[keyToOverride],
          })
        )

        await waitForQueueCompletion()

        // patch3 rewrites one of patch1's keys with a new value.
        const patch3 = {
          ...generatePatchObject(3),
          [keyToOverride]: generator.word(),
        }
        await docWritethrough.patch(patch3)
        await waitForQueueCompletion()

        // Spread order mirrors persistence order: patch3 wins the key.
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining({
            ...patch1,
            ...patch2,
            ...patch3,
          })
        )
      })
    })

    it("concurrent patches to different docWritethrough will not pollute each other", async () => {
      await config.doInTenant(async () => {
        // NOTE(review): structures.db.id() is used here as a *document* id
        // for the second handle — works, but confirm that is intentional.
        const secondDocWritethrough = new DocWritethrough(
          db,
          structures.db.id()
        )

        const doc1Patch = generatePatchObject(2)
        await docWritethrough.patch(doc1Patch)
        const doc2Patch = generatePatchObject(1)
        await secondDocWritethrough.patch(doc2Patch)

        await waitForQueueCompletion()

        const doc1Patch2 = generatePatchObject(3)
        await docWritethrough.patch(doc1Patch2)
        const doc2Patch2 = generatePatchObject(3)
        await secondDocWritethrough.patch(doc2Patch2)
        await waitForQueueCompletion()

        // Each document only contains the patches addressed to it.
        expect(await db.get(docWritethrough.docId)).toEqual(
          expect.objectContaining({
            ...doc1Patch,
            ...doc1Patch2,
          })
        )

        expect(await db.get(secondDocWritethrough.docId)).toEqual(
          expect.objectContaining({
            ...doc2Patch,
            ...doc2Patch2,
          })
        )
      })
    })

    it("cached values are persisted only once", async () => {
      await config.doInTenant(async () => {
        const initialPatch = generatePatchObject(5)

        await docWritethrough.patch(initialPatch)
        await waitForQueueCompletion()

        expect(await db.get(documentId)).toEqual(
          expect.objectContaining(initialPatch)
        )

        // Delete the doc; already-processed patches must not be re-applied
        // when a new patch recreates it.
        await db.remove(await db.get(documentId))

        await waitForQueueCompletion()
        const extraPatch = generatePatchObject(5)
        await docWritethrough.patch(extraPatch)
        await waitForQueueCompletion()

        expect(await db.get(documentId)).toEqual(
          expect.objectContaining(extraPatch)
        )
        expect(await db.get(documentId)).not.toEqual(
          expect.objectContaining(initialPatch)
        )
      })
    })

    it("concurrent calls will not cause conflicts", async () => {
      // Fires `count` single-field patches in parallel and returns their
      // union, i.e. the state the document should eventually reach.
      async function parallelPatch(count: number) {
        const patches = Array.from({ length: count }).map(() =>
          generatePatchObject(1)
        )
        await Promise.all(patches.map(p => docWritethrough.patch(p)))

        return patches.reduce((acc, c) => {
          acc = { ...acc, ...c }
          return acc
        }, {})
      }
      const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add")

      await config.doInTenant(async () => {
        // Escalating batches: 5, then 40, then 10 more messages; the spy
        // counts are cumulative across the whole test.
        let patches = await parallelPatch(5)
        expect(queueMessageSpy).toBeCalledTimes(5)

        await waitForQueueCompletion()
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining(patches)
        )

        patches = { ...patches, ...(await parallelPatch(40)) }
        expect(queueMessageSpy).toBeCalledTimes(45)

        await waitForQueueCompletion()
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining(patches)
        )

        patches = { ...patches, ...(await parallelPatch(10)) }
        expect(queueMessageSpy).toBeCalledTimes(55)

        await waitForQueueCompletion()
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining(patches)
        )
      })
    })

    // This is not yet supported
    // NOTE(review): with post-increment, the last value written by
    // incrementalPatches(5) is 4 (and 44 after 45 calls) — the 5/45
    // expectations below may need revisiting when this is un-skipped.
    it.skip("patches will execute in order", async () => {
      let incrementalValue = 0
      const keyToOverride = generator.word()
      async function incrementalPatches(count: number) {
        for (let i = 0; i < count; i++) {
          await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
        }
      }

      await config.doInTenant(async () => {
        await incrementalPatches(5)

        await waitForQueueCompletion()
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining({ [keyToOverride]: 5 })
        )

        await incrementalPatches(40)
        await waitForQueueCompletion()
        expect(await db.get(documentId)).toEqual(
          expect.objectContaining({ [keyToOverride]: 45 })
        )
      })
    })
  })
})
@@ -57,6 +57,9 @@ export const StaticDatabases = {
57
57
  AUDIT_LOGS: {
58
58
  name: "audit-logs",
59
59
  },
60
+ SCIM_LOGS: {
61
+ name: "scim-logs",
62
+ },
60
63
  }
61
64
 
62
65
  export const APP_PREFIX = prefixed(DocumentType.APP)
@@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) {
35
35
  }
36
36
  }
37
37
 
38
+ export function getScimDBName(tenantId?: string) {
39
+ if (!tenantId) {
40
+ tenantId = getTenantId()
41
+ }
42
+ if (tenantId === DEFAULT_TENANT_ID) {
43
+ return StaticDatabases.SCIM_LOGS.name
44
+ } else {
45
+ return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}`
46
+ }
47
+ }
48
+
38
49
  export function baseGlobalDBName(tenantId: string | undefined | null) {
39
50
  if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
40
51
  return StaticDatabases.GLOBAL.name
@@ -70,7 +70,15 @@ export class DatabaseImpl implements Database {
70
70
  DatabaseImpl.nano = buildNano(couchInfo)
71
71
  }
72
72
 
73
- async exists() {
73
+ exists(docId?: string) {
74
+ if (docId === undefined) {
75
+ return this.dbExists()
76
+ }
77
+
78
+ return this.docExists(docId)
79
+ }
80
+
81
+ private async dbExists() {
74
82
  const response = await directCouchUrlCall({
75
83
  url: `${this.couchInfo.url}/${this.name}`,
76
84
  method: "HEAD",
@@ -79,6 +87,15 @@ export class DatabaseImpl implements Database {
79
87
  return response.status === 200
80
88
  }
81
89
 
90
  // A successful HEAD on the doc id means it exists; any failure (including
  // errors other than 404) is reported as "does not exist".
  private async docExists(id: string): Promise<boolean> {
    try {
      await this.performCall(db => () => db.head(id))
      return true
    } catch {
      return false
    }
  }
98
+
82
99
  private nano() {
83
100
  return this.instanceNano || DatabaseImpl.nano
84
101
  }
@@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database {
24
24
  return this.db.name
25
25
  }
26
26
 
27
- exists(): Promise<boolean> {
27
+ exists(docId?: string): Promise<boolean> {
28
28
  return tracer.trace("db.exists", span => {
29
- span?.addTags({ db_name: this.name })
29
+ span?.addTags({ db_name: this.name, doc_id: docId })
30
+ if (docId) {
31
+ return this.db.exists(docId)
32
+ }
30
33
  return this.db.exists()
31
34
  })
32
35
  }
@@ -0,0 +1,55 @@
1
import _ from "lodash"
import { AnyDocument } from "@budibase/types"
import { generator } from "../../../tests"
import { DatabaseImpl } from "../couch"
import { newid } from "../../utils"

describe("DatabaseImpl", () => {
  const database = new DatabaseImpl(generator.word())
  const documents: AnyDocument[] = []

  beforeAll(async () => {
    // Seed ten empty docs and keep their id/rev pairs for the checks below.
    const docsToCreate = Array.from({ length: 10 }).map(() => ({
      _id: newid(),
    }))
    const createdDocs = await database.bulkDocs(docsToCreate)

    documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev })))
  })

  describe("document exists", () => {
    it("can check existing docs by id", async () => {
      const existingDoc = _.sample(documents)
      const result = await database.exists(existingDoc!._id!)

      expect(result).toBe(true)
    })

    it("can check non existing docs by id", async () => {
      // A freshly generated id was never created, so it must not exist.
      const result = await database.exists(newid())

      expect(result).toBe(false)
    })

    it("can check an existing doc by id multiple times", async () => {
      // Repeated checks must be stable (no caching/one-shot behavior).
      const existingDoc = _.sample(documents)
      const id = existingDoc!._id!

      const results = []
      results.push(await database.exists(id))
      results.push(await database.exists(id))
      results.push(await database.exists(id))

      expect(results).toEqual([true, true, true])
    })

    it("returns false after the doc is deleted", async () => {
      const existingDoc = _.sample(documents)
      const id = existingDoc!._id!
      expect(await database.exists(id)).toBe(true)

      await database.remove(existingDoc!)
      expect(await database.exists(id)).toBe(false)
    })
  })
})
@@ -186,6 +186,7 @@ const environment = {
186
186
  environment[key] = value
187
187
  },
188
188
  ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
189
+ DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
189
190
  }
190
191
 
191
192
  // clean up any environment variable edge cases
@@ -4,4 +4,5 @@ export enum JobQueue {
4
4
  AUDIT_LOG = "auditLogQueue",
5
5
  SYSTEM_EVENT_QUEUE = "systemEventQueue",
6
6
  APP_MIGRATION = "appMigration",
7
+ DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue",
7
8
  }