odac 1.4.7 → 1.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +45 -0
- package/client/odac.js +1 -1
- package/docs/ai/README.md +3 -2
- package/docs/ai/skills/SKILL.md +3 -2
- package/docs/ai/skills/backend/authentication.md +12 -6
- package/docs/ai/skills/backend/database.md +183 -12
- package/docs/ai/skills/backend/ipc.md +71 -12
- package/docs/ai/skills/backend/migrations.md +23 -0
- package/docs/ai/skills/backend/odac-var.md +155 -0
- package/docs/ai/skills/backend/utilities.md +1 -1
- package/docs/ai/skills/frontend/forms.md +23 -1
- package/docs/backend/04-routing/09-websocket-quick-reference.md +21 -1
- package/docs/backend/04-routing/09-websocket.md +22 -1
- package/docs/backend/08-database/05-write-behind-cache.md +230 -0
- package/docs/backend/08-database/06-read-through-cache.md +206 -0
- package/docs/backend/10-authentication/01-authentication-basics.md +53 -0
- package/docs/backend/10-authentication/05-session-management.md +12 -3
- package/docs/backend/13-utilities/01-odac-var.md +13 -19
- package/docs/backend/13-utilities/02-ipc.md +117 -0
- package/docs/frontend/03-forms/01-form-handling.md +15 -2
- package/docs/index.json +1 -1
- package/package.json +1 -1
- package/src/Auth.js +17 -0
- package/src/Database/Migration.js +219 -3
- package/src/Database/ReadCache.js +174 -0
- package/src/Database/WriteBuffer.js +605 -0
- package/src/Database.js +95 -1
- package/src/Ipc.js +343 -81
- package/src/Odac.js +2 -1
- package/src/Storage.js +4 -2
- package/src/Validator.js +1 -1
- package/src/Var.js +1 -0
- package/src/WebSocket.js +80 -23
- package/test/Database/Migration/migrate_column.test.js +168 -0
- package/test/Database/ReadCache/crossTable.test.js +179 -0
- package/test/Database/ReadCache/get.test.js +128 -0
- package/test/Database/ReadCache/invalidate.test.js +103 -0
- package/test/Database/ReadCache/proxy.test.js +184 -0
- package/test/Database/WriteBuffer/_recoverFromCheckpoint.test.js +207 -0
- package/test/Database/WriteBuffer/buffer.test.js +143 -0
- package/test/Database/WriteBuffer/flush.test.js +192 -0
- package/test/Database/WriteBuffer/get.test.js +72 -0
- package/test/Database/WriteBuffer/increment.test.js +118 -0
- package/test/Database/WriteBuffer/update.test.js +178 -0
- package/test/Database/insert.test.js +98 -0
- package/test/Ipc/hset.test.js +59 -0
- package/test/Ipc/incrBy.test.js +65 -0
- package/test/Ipc/lock.test.js +62 -0
- package/test/Ipc/rpush.test.js +68 -0
- package/test/Ipc/sadd.test.js +68 -0
- package/test/WebSocket/Client/fragmentation.test.js +130 -0
- package/test/WebSocket/Client/limits.test.js +10 -4
- package/test/WebSocket/Client/readyState.test.js +154 -0
- package/docs/backend/10-authentication/01-user-logins-with-authjs.md +0 -55
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
'use strict'

const cluster = require('node:cluster')

/**
 * Tests ReadCache.invalidate() — table-level cache purge.
 * Why: Validates that all cached queries for a table are removed on invalidation,
 * and that unrelated tables remain unaffected.
 *
 * Setup notes:
 * - Uses an in-memory sqlite3 database via knex, rebuilt for every test.
 * - Cached keys are tracked per-table in the IPC set 'rc:idx:<connection>:<table>'.
 */

let knexLib, db

beforeEach(async () => {
    // Reset the module registry so Ipc/ReadCache singletons start fresh per test.
    jest.resetModules()

    knexLib = require('knex')
    db = knexLib({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})

    await db.schema.createTable('posts', table => {
        table.integer('id').primary()
        table.string('title', 255)
    })

    await db.schema.createTable('users', table => {
        table.integer('id').primary()
        table.string('name', 255)
    })

    await db('posts').insert([
        {id: 1, title: 'Post A'},
        {id: 2, title: 'Post B'}
    ])

    await db('users').insert([{id: 1, name: 'Alice'}])

    // Force cluster.isPrimary = true so Ipc takes its primary-process code path.
    Object.defineProperty(cluster, 'isPrimary', {value: true, configurable: true})

    const Ipc = require('../../../src/Ipc')
    global.Odac = {
        Config: {cache: {ttl: 60, maxKeys: 10000}},
        Ipc
    }
    await Ipc.init()
})

afterEach(async () => {
    await Odac.Ipc.close()
    await db.destroy()
    delete global.Odac
})

describe('ReadCache.invalidate()', () => {
    let readCache

    beforeEach(() => {
        readCache = require('../../../src/Database/ReadCache')
        readCache.init()
    })

    it('should purge all cached queries for the specified table', async () => {
        // Cache two different queries on posts
        const qb1 = db('posts').where({id: 1}).first()
        const qb2 = db('posts').where({id: 2}).first()
        await readCache.get('default', 'posts', qb1, () => qb1.then(r => r), 60)
        await readCache.get('default', 'posts', qb2, () => qb2.then(r => r), 60)

        const cachedKeys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(cachedKeys).toHaveLength(2)

        // Invalidate
        await readCache.invalidate('default', 'posts')

        // Index set must be emptied...
        const keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)

        // ...and the cache entries themselves actually deleted, not just unindexed.
        for (const key of cachedKeys) {
            const val = await Odac.Ipc.get(key)
            expect(val).toBeNull()
        }
    })

    it('should not affect cache of other tables', async () => {
        const qbPosts = db('posts').where({id: 1}).first()
        const qbUsers = db('users').where({id: 1}).first()
        await readCache.get('default', 'posts', qbPosts, () => qbPosts.then(r => r), 60)
        await readCache.get('default', 'users', qbUsers, () => qbUsers.then(r => r), 60)

        // Invalidate only posts
        await readCache.invalidate('default', 'posts')

        const postKeys = await Odac.Ipc.smembers('rc:idx:default:posts')
        const userKeys = await Odac.Ipc.smembers('rc:idx:default:users')

        expect(postKeys).toHaveLength(0)
        expect(userKeys).toHaveLength(1)
    })

    it('should be a no-op when no cache exists for the table', async () => {
        // Should not throw
        await expect(readCache.invalidate('default', 'nonexistent')).resolves.toBeUndefined()
    })
})
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
'use strict'

const cluster = require('node:cluster')

/**
 * Tests the cache chain API exposed via Database.js proxy.
 * Why: Validates that Odac.DB.posts.cache(60).where(...).select(...) pattern
 * correctly delegates to ReadCache, and that write operations auto-invalidate.
 *
 * Setup notes:
 * - WriteBuffer is initialized with very large flush/checkpoint intervals so
 *   no background timer fires during a test.
 * - Storage mock reports isReady() === false, so no LMDB checkpointing occurs.
 */

let knexLib, db

beforeEach(async () => {
    // Fresh module registry each test so Ipc/ReadCache/WriteBuffer singletons reset.
    jest.resetModules()

    knexLib = require('knex')
    db = knexLib({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})

    await db.schema.createTable('posts', table => {
        table.integer('id').primary()
        table.string('title', 255)
        table.integer('views').defaultTo(0)
        table.boolean('active').defaultTo(true)
    })

    await db('posts').insert([
        {id: 1, title: 'First Post', views: 100, active: true},
        {id: 2, title: 'Second Post', views: 200, active: true},
        {id: 3, title: 'Draft', views: 0, active: false}
    ])

    // Force cluster.isPrimary = true so Ipc takes its primary-process code path.
    Object.defineProperty(cluster, 'isPrimary', {value: true, configurable: true})

    const Ipc = require('../../../src/Ipc')
    global.Odac = {
        Config: {
            cache: {ttl: 60, maxKeys: 10000},
            buffer: {flushInterval: 999999, checkpointInterval: 999999}
        },
        Storage: {
            isReady: () => false,
            put: jest.fn(),
            remove: jest.fn(),
            getRange: () => []
        },
        Ipc
    }
    await Ipc.init()

    const writeBuffer = require('../../../src/Database/WriteBuffer')
    await writeBuffer.init({default: db})

    const readCache = require('../../../src/Database/ReadCache')
    readCache.init()

    // Wire the proxy to the test connection.
    const DB = require('../../../src/Database')
    DB.connections = {default: db}
})

afterEach(async () => {
    const writeBuffer = require('../../../src/Database/WriteBuffer')
    await writeBuffer.close()
    await Odac.Ipc.close()
    await db.destroy()
    delete global.Odac
})

describe('Database.js Proxy - cache(ttl).where().select()', () => {
    it('should cache SELECT results with specified TTL', async () => {
        const DB = require('../../../src/Database')

        const result1 = await DB.posts.cache(60).where({active: true}).select('id', 'title')
        expect(result1).toHaveLength(2)

        // Modify DB directly (bypassing the proxy, so no auto-invalidation)
        await db('posts').where({id: 1}).update({title: 'Modified'})

        // Should return cached (stale) data
        const result2 = await DB.posts.cache(60).where({active: true}).select('id', 'title')
        expect(result2[0].title).toBe('First Post')
    })

    it('should cache with default TTL when called without argument', async () => {
        const DB = require('../../../src/Database')

        const result = await DB.posts.cache().where({id: 1}).first()
        expect(result.title).toBe('First Post')

        // Verify cached via the per-table key index
        const keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)
    })
})

describe('Database.js Proxy - cache.clear()', () => {
    it('should manually clear table cache via Odac.DB.posts.cache.clear()', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.cache(60).where({active: true}).select('id', 'title')

        let keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)

        await DB.posts.cache.clear()

        keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)
    })
})

describe('Database.js Proxy - automatic invalidation on write', () => {
    it('should invalidate cache after update()', async () => {
        const DB = require('../../../src/Database')

        // Cache a query
        await DB.posts.cache(60).where({active: true}).select('id', 'title')
        let keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)

        // Update via proxy — should auto-invalidate
        await DB.posts.where({id: 1}).update({title: 'Updated'})

        keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)

        // Next cache() call should fetch fresh data
        const result = await DB.posts.cache(60).where({id: 1}).first()
        expect(result.title).toBe('Updated')
    })

    it('should invalidate cache after insert()', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.cache(60).where({active: true}).select('id', 'title')
        let keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)

        await DB.posts.insert({id: 4, title: 'New Post', views: 0, active: true})

        keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)
    })

    it('should invalidate cache after delete()', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.cache(60).where({active: true}).select('id', 'title')
        let keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)

        await DB.posts.where({id: 3}).delete()

        keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)
    })

    it('should invalidate cache after del() (alias)', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.cache(60).where({active: true}).select('id', 'title')
        let keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)

        await DB.posts.where({id: 3}).del()

        keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)
    })
})

describe('Database.js Proxy - global cache.clear()', () => {
    it('should clear cache via Odac.DB.cache.clear(connection, table)', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.cache(60).where({active: true}).select('id', 'title')
        let keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(1)

        await DB.cache.clear('default', 'posts')

        keys = await Odac.Ipc.smembers('rc:idx:default:posts')
        expect(keys).toHaveLength(0)
    })
})
|
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
'use strict'

const cluster = require('node:cluster')

/**
 * Tests WriteBuffer LMDB checkpoint and crash recovery.
 * Why: Validates zero data loss guarantee — buffered data survives process crashes
 * via periodic LMDB checkpoints and is recovered on next startup.
 *
 * Checkpoint key formats exercised here:
 * - counters:  'wb:c:<connection>:<table>:<id>:<column>' → {delta, base}
 * - queues:    'wb:q:<connection>:<table>'               → [row, ...]
 */

let knexLib, db, storageData

// Builds a Map-backed stand-in for Odac.Storage (the LMDB layer) and resets
// the shared `storageData` Map so each call starts from an empty store.
function createMockStorage() {
    storageData = new Map()
    return {
        isReady: () => true,
        put: (key, value) => storageData.set(key, value),
        remove: key => storageData.delete(key),
        get: key => storageData.get(key) ?? null,
        // Half-open range scan [start, end) over string keys, mirroring LMDB's getRange.
        getRange: ({start, end}) => {
            const results = []
            for (const [key, value] of storageData) {
                if (key >= start && key < end) {
                    results.push({key, value})
                }
            }
            return results
        }
    }
}

beforeEach(async () => {
    jest.resetModules()

    knexLib = require('knex')
    db = knexLib({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})

    await db.schema.createTable('posts', table => {
        table.integer('id').primary()
        table.integer('views').defaultTo(0)
    })
    await db('posts').insert({id: 1, views: 100})

    // Force cluster.isPrimary = true so Ipc takes its primary-process code path.
    Object.defineProperty(cluster, 'isPrimary', {value: true, configurable: true})
})

afterEach(async () => {
    await db.destroy()
    delete global.Odac
})

describe('WriteBuffer - Checkpoint', () => {
    it('should write counter deltas to LMDB on checkpoint', async () => {
        const Ipc = require('../../../src/Ipc')
        global.Odac = {
            // Huge intervals: no background flush/checkpoint fires during the test.
            Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
            Storage: createMockStorage(),
            Ipc
        }
        await Ipc.init()

        const WriteBuffer = require('../../../src/Database/WriteBuffer')
        await WriteBuffer.init({default: db})

        await WriteBuffer.increment('default', 'posts', 1, 'views', 5)
        await WriteBuffer._writeCheckpoint()

        const checkpoint = storageData.get('wb:c:default:posts:1:views')
        expect(checkpoint).toBeDefined()
        expect(checkpoint.delta).toBe(5)
        expect(checkpoint.base).toBe(100)

        await WriteBuffer.close()
        await Ipc.close()
    })

    it('should write queue rows to LMDB on checkpoint', async () => {
        const Ipc = require('../../../src/Ipc')
        global.Odac = {
            Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
            Storage: createMockStorage(),
            Ipc
        }
        await Ipc.init()

        const WriteBuffer = require('../../../src/Database/WriteBuffer')
        await WriteBuffer.init({default: db})

        await WriteBuffer.insert('default', 'activity_log', {user_id: 1, action: 'view'})
        await WriteBuffer.insert('default', 'activity_log', {user_id: 2, action: 'click'})
        await WriteBuffer._writeCheckpoint()

        const checkpoint = storageData.get('wb:q:default:activity_log')
        expect(checkpoint).toBeDefined()
        expect(checkpoint).toHaveLength(2)
        expect(checkpoint[0].action).toBe('view')

        await WriteBuffer.close()
        await Ipc.close()
    })
})

describe('WriteBuffer - Recovery', () => {
    it('should recover counter deltas from LMDB on startup', async () => {
        // Simulate crash: write checkpoint data before init
        const mockStorage = createMockStorage()
        storageData.set('wb:c:default:posts:1:views', {delta: 7, base: 100})

        const Ipc = require('../../../src/Ipc')
        global.Odac = {
            Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
            Storage: mockStorage,
            Ipc
        }
        await Ipc.init()

        const WriteBuffer = require('../../../src/Database/WriteBuffer')
        await WriteBuffer.init({default: db})

        // Should recover the delta from checkpoint
        const result = await WriteBuffer.get('default', 'posts', 1, 'views')
        expect(result).toBe(107) // base 100 + recovered delta 7

        await WriteBuffer.close()
        await Ipc.close()
    })

    it('should recover queue rows from LMDB on startup', async () => {
        await db.schema.createTable('activity_log', table => {
            table.increments('id')
            table.integer('user_id')
            table.string('action', 50)
        })

        const mockStorage = createMockStorage()
        storageData.set('wb:q:default:activity_log', [{user_id: 1, action: 'recovered_view'}])

        const Ipc = require('../../../src/Ipc')
        global.Odac = {
            Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
            Storage: mockStorage,
            Ipc
        }
        await Ipc.init()

        const WriteBuffer = require('../../../src/Database/WriteBuffer')
        await WriteBuffer.init({default: db})

        // Flush recovered data
        await WriteBuffer.flush()

        const rows = await db('activity_log').select()
        expect(rows).toHaveLength(1)
        expect(rows[0].action).toBe('recovered_view')

        await WriteBuffer.close()
        await Ipc.close()
    })

    it('should merge recovered data with new increments', async () => {
        const mockStorage = createMockStorage()
        storageData.set('wb:c:default:posts:1:views', {delta: 5, base: 100})

        const Ipc = require('../../../src/Ipc')
        global.Odac = {
            Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
            Storage: mockStorage,
            Ipc
        }
        await Ipc.init()

        const WriteBuffer = require('../../../src/Database/WriteBuffer')
        await WriteBuffer.init({default: db})

        // Add more increments on top of recovered data
        await WriteBuffer.increment('default', 'posts', 1, 'views', 3)

        const result = await WriteBuffer.get('default', 'posts', 1, 'views')
        expect(result).toBe(108) // base 100 + recovered 5 + new 3

        await WriteBuffer.close()
        await Ipc.close()
    })

    it('should clear LMDB checkpoint after successful flush', async () => {
        const mockStorage = createMockStorage()
        storageData.set('wb:c:default:posts:1:views', {delta: 5, base: 100})

        const Ipc = require('../../../src/Ipc')
        global.Odac = {
            Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
            Storage: mockStorage,
            Ipc
        }
        await Ipc.init()

        const WriteBuffer = require('../../../src/Database/WriteBuffer')
        await WriteBuffer.init({default: db})
        await WriteBuffer.flush()

        // Checkpoint data should be cleared
        expect(storageData.has('wb:c:default:posts:1:views')).toBe(false)

        await WriteBuffer.close()
        await Ipc.close()
    })
})
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
'use strict'

const cluster = require('node:cluster')

/**
 * Tests the chainable buffer API exposed via Database.js proxy.
 * Why: Validates that the Odac.DB.table.buffer.where(id).update(data) pattern
 * correctly delegates to WriteBuffer's internal methods.
 *
 * Setup notes:
 * - flushInterval/checkpointInterval are huge so flushes only happen when the
 *   test calls buffer.flush() explicitly.
 * - Storage mock reports isReady() === false, so no LMDB checkpointing occurs.
 */

let knexLib, db

beforeEach(async () => {
    // Fresh module registry so Ipc/WriteBuffer singletons reset between tests.
    jest.resetModules()

    knexLib = require('knex')
    db = knexLib({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})

    await db.schema.createTable('posts', table => {
        table.integer('id').primary()
        table.integer('views').defaultTo(0)
        table.string('title', 255)
    })

    await db.schema.createTable('activity_log', table => {
        table.increments('id')
        table.integer('user_id')
        table.string('action', 50)
    })

    await db('posts').insert([
        {id: 1, views: 100, title: 'First Post'},
        {id: 2, views: 200, title: 'Second Post'}
    ])

    // Force cluster.isPrimary = true so Ipc takes its primary-process code path.
    Object.defineProperty(cluster, 'isPrimary', {value: true, configurable: true})

    const Ipc = require('../../../src/Ipc')
    global.Odac = {
        Config: {buffer: {flushInterval: 999999, checkpointInterval: 999999}},
        Storage: {
            isReady: () => false,
            put: jest.fn(),
            remove: jest.fn(),
            getRange: () => []
        },
        Ipc
    }
    await Ipc.init()

    // Initialize WriteBuffer with our test DB
    const writeBuffer = require('../../../src/Database/WriteBuffer')
    await writeBuffer.init({default: db})

    // Wire up Database.js proxy
    const DB = require('../../../src/Database')
    DB.connections = {default: db}
})

afterEach(async () => {
    const writeBuffer = require('../../../src/Database/WriteBuffer')
    await writeBuffer.close()
    await Odac.Ipc.close()
    await db.destroy()
    delete global.Odac
})

describe('Database.js Proxy - buffer.where().update()', () => {
    it('should buffer and flush via Odac.DB.posts.buffer.where(id).update()', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.buffer.where(1).update({title: 'Updated Title'})
        await DB.posts.buffer.flush()

        const row = await db('posts').where({id: 1}).first()
        expect(row.title).toBe('Updated Title')
    })

    it('should merge multiple updates via chainable API', async () => {
        const DB = require('../../../src/Database')

        // Last write wins for the same row/column before flush.
        await DB.posts.buffer.where(1).update({title: 'New Title'})
        await DB.posts.buffer.where(1).update({title: 'Final Title'})
        await DB.posts.buffer.flush()

        const row = await db('posts').where({id: 1}).first()
        expect(row.title).toBe('Final Title')
    })
})

describe('Database.js Proxy - buffer.where().increment()', () => {
    it('should increment via Odac.DB.posts.buffer.where(id).increment(col)', async () => {
        const DB = require('../../../src/Database')

        // Default delta is 1; seeded row has views = 100.
        const result = await DB.posts.buffer.where(1).increment('views')
        expect(result).toBe(101)
    })

    it('should support custom delta', async () => {
        const DB = require('../../../src/Database')

        const result = await DB.posts.buffer.where(1).increment('views', 5)
        expect(result).toBe(105)
    })
})

describe('Database.js Proxy - buffer.where().get()', () => {
    it('should get buffered value via Odac.DB.posts.buffer.where(id).get(col)', async () => {
        const DB = require('../../../src/Database')

        // get() reflects the buffered delta before any flush to the DB.
        await DB.posts.buffer.where(1).increment('views', 10)
        const result = await DB.posts.buffer.where(1).get('views')
        expect(result).toBe(110)
    })
})

describe('Database.js Proxy - buffer.insert()', () => {
    it('should buffer insert via Odac.DB.activity_log.buffer.insert(row)', async () => {
        const DB = require('../../../src/Database')

        await DB.activity_log.buffer.insert({user_id: 1, action: 'view'})
        await DB.activity_log.buffer.insert({user_id: 2, action: 'click'})
        await DB.activity_log.buffer.flush()

        const rows = await db('activity_log').select()
        expect(rows).toHaveLength(2)
    })
})

describe('Database.js Proxy - buffer.flush()', () => {
    it('should flush all buffered data for the table', async () => {
        const DB = require('../../../src/Database')

        await DB.posts.buffer.where(1).increment('views', 5)
        await DB.posts.buffer.where(2).update({title: 'Changed'})
        await DB.posts.buffer.flush()

        const row1 = await db('posts').where({id: 1}).first()
        const row2 = await db('posts').where({id: 2}).first()
        expect(row1.views).toBe(105)
        expect(row2.title).toBe('Changed')
    })
})
|