odac 1.4.8 → 1.4.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +28 -0
- package/docs/ai/README.md +2 -1
- package/docs/ai/skills/SKILL.md +2 -1
- package/docs/ai/skills/backend/authentication.md +12 -6
- package/docs/ai/skills/backend/database.md +85 -5
- package/docs/ai/skills/backend/migrations.md +23 -0
- package/docs/ai/skills/backend/odac-var.md +155 -0
- package/docs/ai/skills/backend/utilities.md +1 -1
- package/docs/ai/skills/frontend/forms.md +23 -1
- package/docs/backend/04-routing/09-websocket-quick-reference.md +21 -1
- package/docs/backend/04-routing/09-websocket.md +22 -1
- package/docs/backend/08-database/06-read-through-cache.md +206 -0
- package/docs/backend/10-authentication/01-authentication-basics.md +53 -0
- package/docs/backend/10-authentication/05-session-management.md +12 -3
- package/docs/backend/13-utilities/01-odac-var.md +13 -19
- package/docs/frontend/03-forms/01-form-handling.md +15 -2
- package/docs/index.json +1 -1
- package/package.json +1 -1
- package/src/Auth.js +17 -0
- package/src/Database/Migration.js +219 -3
- package/src/Database/ReadCache.js +174 -0
- package/src/Database.js +63 -0
- package/src/Validator.js +1 -1
- package/src/Var.js +1 -0
- package/src/WebSocket.js +80 -23
- package/test/Database/Migration/migrate_column.test.js +168 -0
- package/test/Database/ReadCache/crossTable.test.js +179 -0
- package/test/Database/ReadCache/get.test.js +128 -0
- package/test/Database/ReadCache/invalidate.test.js +103 -0
- package/test/Database/ReadCache/proxy.test.js +184 -0
- package/test/Database/insert.test.js +98 -0
- package/test/WebSocket/Client/fragmentation.test.js +130 -0
- package/test/WebSocket/Client/limits.test.js +10 -4
- package/test/WebSocket/Client/readyState.test.js +154 -0
- package/docs/backend/10-authentication/01-user-logins-with-authjs.md +0 -55
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
'use strict'

const cluster = require('node:cluster')

/**
 * Tests ReadCache.invalidate() — table-level cache purge.
 *
 * Verifies that invalidating a table removes every cached query for that
 * table (both the index set and the individual cache entries) while leaving
 * caches belonging to other tables untouched.
 */

let knexLib, db

beforeEach(async () => {
  jest.resetModules()

  knexLib = require('knex')

  // Fresh in-memory SQLite database per test for full isolation.
  const connectionOptions = {
    client: 'sqlite3',
    connection: {filename: ':memory:'},
    useNullAsDefault: true
  }
  db = knexLib(connectionOptions)

  await db.schema.createTable('posts', table => {
    table.integer('id').primary()
    table.string('title', 255)
  })
  await db.schema.createTable('users', table => {
    table.integer('id').primary()
    table.string('name', 255)
  })

  await db('posts').insert([
    {id: 1, title: 'Post A'},
    {id: 2, title: 'Post B'}
  ])
  await db('users').insert([{id: 1, name: 'Alice'}])

  // ReadCache routes through Ipc; force primary-process behaviour so the
  // tests are deterministic regardless of how jest spawned this worker.
  Object.defineProperty(cluster, 'isPrimary', {value: true, configurable: true})

  const Ipc = require('../../../src/Ipc')
  global.Odac = {
    Config: {cache: {ttl: 60, maxKeys: 10000}},
    Ipc
  }
  await Ipc.init()
})

afterEach(async () => {
  await Odac.Ipc.close()
  await db.destroy()
  delete global.Odac
})

describe('ReadCache.invalidate()', () => {
  let readCache

  beforeEach(() => {
    readCache = require('../../../src/Database/ReadCache')
    readCache.init()
  })

  it('should purge all cached queries for the specified table', async () => {
    // Populate the cache with two distinct queries on the same table.
    const firstQuery = db('posts').where({id: 1}).first()
    const secondQuery = db('posts').where({id: 2}).first()
    await readCache.get('default', 'posts', firstQuery, () => firstQuery.then(r => r), 60)
    await readCache.get('default', 'posts', secondQuery, () => secondQuery.then(r => r), 60)

    const cachedKeys = await Odac.Ipc.smembers('rc:idx:default:posts')
    expect(cachedKeys).toHaveLength(2)

    await readCache.invalidate('default', 'posts')

    const remaining = await Odac.Ipc.smembers('rc:idx:default:posts')
    expect(remaining).toHaveLength(0)

    // The index set is empty — confirm the entries themselves were deleted too.
    for (const cacheKey of cachedKeys) {
      const entry = await Odac.Ipc.get(cacheKey)
      expect(entry).toBeNull()
    }
  })

  it('should not affect cache of other tables', async () => {
    const postQuery = db('posts').where({id: 1}).first()
    const userQuery = db('users').where({id: 1}).first()
    await readCache.get('default', 'posts', postQuery, () => postQuery.then(r => r), 60)
    await readCache.get('default', 'users', userQuery, () => userQuery.then(r => r), 60)

    // Invalidate only posts — users must keep its cached entry.
    await readCache.invalidate('default', 'posts')

    const postKeys = await Odac.Ipc.smembers('rc:idx:default:posts')
    const userKeys = await Odac.Ipc.smembers('rc:idx:default:users')

    expect(postKeys).toHaveLength(0)
    expect(userKeys).toHaveLength(1)
  })

  it('should be a no-op when no cache exists for the table', async () => {
    // Must resolve without throwing even when nothing was ever cached.
    await expect(readCache.invalidate('default', 'nonexistent')).resolves.toBeUndefined()
  })
})
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
'use strict'

const cluster = require('node:cluster')

/**
 * Tests the cache chain API exposed via Database.js proxy.
 *
 * Covers the Odac.DB.posts.cache(60).where(...).select(...) read path,
 * manual clearing via cache.clear(), and automatic invalidation when a
 * write (insert/update/delete/del) goes through the proxy.
 */

let knexLib, db

// Lazy accessor: Odac only exists once beforeEach has run.
const postsIndex = () => Odac.Ipc.smembers('rc:idx:default:posts')

beforeEach(async () => {
  jest.resetModules()

  knexLib = require('knex')
  db = knexLib({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})

  await db.schema.createTable('posts', table => {
    table.integer('id').primary()
    table.string('title', 255)
    table.integer('views').defaultTo(0)
    table.boolean('active').defaultTo(true)
  })

  await db('posts').insert([
    {id: 1, title: 'First Post', views: 100, active: true},
    {id: 2, title: 'Second Post', views: 200, active: true},
    {id: 3, title: 'Draft', views: 0, active: false}
  ])

  Object.defineProperty(cluster, 'isPrimary', {value: true, configurable: true})

  const Ipc = require('../../../src/Ipc')
  global.Odac = {
    Config: {
      cache: {ttl: 60, maxKeys: 10000},
      // Huge intervals so the write buffer never flushes mid-test.
      buffer: {flushInterval: 999999, checkpointInterval: 999999}
    },
    Storage: {
      isReady: () => false,
      put: jest.fn(),
      remove: jest.fn(),
      getRange: () => []
    },
    Ipc
  }
  await Ipc.init()

  const writeBuffer = require('../../../src/Database/WriteBuffer')
  await writeBuffer.init({default: db})

  const readCache = require('../../../src/Database/ReadCache')
  readCache.init()

  const DB = require('../../../src/Database')
  DB.connections = {default: db}
})

afterEach(async () => {
  const writeBuffer = require('../../../src/Database/WriteBuffer')
  await writeBuffer.close()
  await Odac.Ipc.close()
  await db.destroy()
  delete global.Odac
})

describe('Database.js Proxy - cache(ttl).where().select()', () => {
  it('should cache SELECT results with specified TTL', async () => {
    const DB = require('../../../src/Database')

    const firstRead = await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(firstRead).toHaveLength(2)

    // Mutate the table behind the cache's back.
    await db('posts').where({id: 1}).update({title: 'Modified'})

    // Within the TTL the cached (now stale) result must be served.
    const secondRead = await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(secondRead[0].title).toBe('First Post')
  })

  it('should cache with default TTL when called without argument', async () => {
    const DB = require('../../../src/Database')

    const row = await DB.posts.cache().where({id: 1}).first()
    expect(row.title).toBe('First Post')

    // The query must have landed in the table's cache index.
    expect(await postsIndex()).toHaveLength(1)
  })
})

describe('Database.js Proxy - cache.clear()', () => {
  it('should manually clear table cache via Odac.DB.posts.cache.clear()', async () => {
    const DB = require('../../../src/Database')

    await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(await postsIndex()).toHaveLength(1)

    await DB.posts.cache.clear()

    expect(await postsIndex()).toHaveLength(0)
  })
})

describe('Database.js Proxy - automatic invalidation on write', () => {
  it('should invalidate cache after update()', async () => {
    const DB = require('../../../src/Database')

    // Prime the cache.
    await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(await postsIndex()).toHaveLength(1)

    // A write through the proxy must purge the table's cache.
    await DB.posts.where({id: 1}).update({title: 'Updated'})

    expect(await postsIndex()).toHaveLength(0)

    // And the next cached read must see the fresh data.
    const row = await DB.posts.cache(60).where({id: 1}).first()
    expect(row.title).toBe('Updated')
  })

  it('should invalidate cache after insert()', async () => {
    const DB = require('../../../src/Database')

    await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(await postsIndex()).toHaveLength(1)

    await DB.posts.insert({id: 4, title: 'New Post', views: 0, active: true})

    expect(await postsIndex()).toHaveLength(0)
  })

  it('should invalidate cache after delete()', async () => {
    const DB = require('../../../src/Database')

    await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(await postsIndex()).toHaveLength(1)

    await DB.posts.where({id: 3}).delete()

    expect(await postsIndex()).toHaveLength(0)
  })

  it('should invalidate cache after del() (alias)', async () => {
    const DB = require('../../../src/Database')

    await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(await postsIndex()).toHaveLength(1)

    await DB.posts.where({id: 3}).del()

    expect(await postsIndex()).toHaveLength(0)
  })
})

describe('Database.js Proxy - global cache.clear()', () => {
  it('should clear cache via Odac.DB.cache.clear(connection, table)', async () => {
    const DB = require('../../../src/Database')

    await DB.posts.cache(60).where({active: true}).select('id', 'title')
    expect(await postsIndex()).toHaveLength(1)

    await DB.cache.clear('default', 'posts')

    expect(await postsIndex()).toHaveLength(0)
  })
})
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
'use strict'

const knexLib = require('knex')

/**
 * Tests the wrapWithInvalidation thenable returned by write operations
 * (insert/update/delete/truncate).
 *
 * The thenable must be fully Promise-compatible — usable both with await
 * and with .catch() chaining — so consumers never hit a TypeError on
 * .insert(...).catch() or .update(...).catch().
 */

let db

beforeEach(async () => {
  db = knexLib({client: 'sqlite3', connection: {filename: ':memory:'}, useNullAsDefault: true})
  await db.schema.createTable('tokens', table => {
    table.string('id', 21).primary()
    table.string('user', 100)
    table.string('token_x', 64)
  })
})

afterEach(async () => {
  await db.destroy()
  jest.resetModules()
})

describe('Database.js - wrapWithInvalidation thenable', () => {
  /** Loads Database.js and points it at the in-memory test connection. */
  function loadDB() {
    const DB = require('../../src/Database')
    DB.connections = {default: db}
    db._odacConnectionKey = 'default'
    DB._nanoidColumns = {}
    return DB
  }

  it('insert().catch() should be a function', () => {
    const DB = loadDB()

    const pending = DB.tokens.insert({id: 'test1', user: 'u1', token_x: 'tx1'})
    expect(typeof pending.catch).toBe('function')
  })

  it('insert().catch() should resolve on success', async () => {
    const DB = loadDB()

    const outcome = await DB.tokens.insert({id: 'test2', user: 'u2', token_x: 'tx2'}).catch(() => false)
    expect(outcome).not.toBe(false)

    const rows = await db('tokens').where('id', 'test2')
    expect(rows).toHaveLength(1)
    expect(rows[0].user).toBe('u2')
  })

  it('insert().catch() should catch errors gracefully', async () => {
    const DB = loadDB()

    await DB.tokens.insert({id: 'dup1', user: 'u1', token_x: 'tx1'})

    // Duplicate primary key must reject — and the rejection must be
    // routed into the chained .catch().
    const outcome = await DB.tokens.insert({id: 'dup1', user: 'u2', token_x: 'tx2'}).catch(() => 'caught')
    expect(outcome).toBe('caught')
  })

  it('update().catch() should be a function', () => {
    const DB = loadDB()

    const pending = DB.tokens.where('id', 'x').update({user: 'new'})
    expect(typeof pending.catch).toBe('function')
  })

  it('delete().catch() should be a function', () => {
    const DB = loadDB()

    const pending = DB.tokens.where('id', 'x').delete()
    expect(typeof pending.catch).toBe('function')
  })

  it('insert() should work with await (no .catch)', async () => {
    const DB = loadDB()

    await DB.tokens.insert({id: 'await1', user: 'u_await', token_x: 'tx_await'})

    const rows = await db('tokens').where('id', 'await1')
    expect(rows).toHaveLength(1)
    expect(rows[0].user).toBe('u_await')
  })
})
|
|
@@ -0,0 +1,130 @@
|
|
|
1
|
+
const {WebSocketServer, WebSocketClient} = require('../../../src/WebSocket.js')
|
|
2
|
+
|
|
3
|
+
/**
 * Builds a masked WebSocket frame from raw payload bytes.
 * Uses a zero mask key for deterministic test output (XOR with 0 is identity).
 *
 * Handles all three RFC 6455 length encodings: 7-bit (≤ 125 bytes),
 * 16-bit extended (126 … 65535), and 64-bit extended (larger). The original
 * single-byte form silently corrupted the length field for payloads > 125.
 *
 * @param {number} opcode - WebSocket opcode (0x0 continuation, 0x1 text, 0x2 binary, …)
 * @param {Buffer|string} payload - raw payload; strings are UTF-8 encoded
 * @param {boolean} [fin=true] - whether to set the FIN bit
 * @returns {Buffer} complete masked frame (header + mask key + payload)
 */
function buildFrame(opcode, payload, fin = true) {
  const buf = Buffer.isBuffer(payload) ? payload : Buffer.from(payload)
  const maskKey = Buffer.alloc(4) // zero mask — XOR is identity, payload stays readable

  const finBit = fin ? 0x80 : 0x00

  // Pick the RFC 6455 length encoding for this payload size.
  let lenField
  let extendedLength
  if (buf.length <= 125) {
    lenField = buf.length
    extendedLength = Buffer.alloc(0)
  } else if (buf.length <= 0xffff) {
    lenField = 126
    extendedLength = Buffer.alloc(2)
    extendedLength.writeUInt16BE(buf.length, 0)
  } else {
    lenField = 127
    extendedLength = Buffer.alloc(8)
    extendedLength.writeBigUInt64BE(BigInt(buf.length), 0)
  }

  const frame = Buffer.alloc(2 + extendedLength.length + 4 + buf.length)
  frame[0] = finBit | opcode
  frame[1] = 0x80 | lenField // masked bit + 7-bit length field
  extendedLength.copy(frame, 2)
  maskKey.copy(frame, 2 + extendedLength.length)
  buf.copy(frame, 2 + extendedLength.length + 4) // zero-masked payload == payload

  return frame
}
|
|
21
|
+
|
|
22
|
+
/**
 * Creates a minimal mock of a net.Socket: every method the WebSocketClient
 * touches is a jest spy, and the socket reports itself as writable.
 */
function createMockSocket() {
  const socket = {writable: true}
  for (const method of ['pause', 'resume', 'on', 'write', 'end', 'removeAllListeners']) {
    socket[method] = jest.fn()
  }
  return socket
}
|
|
33
|
+
|
|
34
|
+
describe('WebSocketClient Fragmentation', () => {
  let server

  beforeEach(() => {
    server = new WebSocketServer()
  })

  /**
   * Wires a WebSocketClient onto a mock socket and returns test handles:
   * the socket, the client, the collected messages, and a `feed` function
   * that injects raw frame bytes as if they arrived on the wire.
   */
  function connect(id) {
    const socket = createMockSocket()
    const client = new WebSocketClient(socket, server, id)
    client.resume()

    const received = []
    client.on('message', msg => received.push(msg))

    const feed = socket.on.mock.calls.find(call => call[0] === 'data')[1]
    return {socket, client, received, feed}
  }

  it('should reassemble fragmented text messages', () => {
    const {received, feed} = connect('frag-1')

    feed(buildFrame(0x1, 'hel', false))  // TEXT, fin=false
    feed(buildFrame(0x0, 'lo ', false))  // CONTINUATION, fin=false
    feed(buildFrame(0x0, 'world', true)) // CONTINUATION, fin=true

    expect(received).toEqual(['hello world'])
  })

  it('should reassemble fragmented binary messages', () => {
    const {received, feed} = connect('frag-2')

    feed(buildFrame(0x2, Buffer.from([0x01, 0x02]), false)) // BINARY, fin=false
    feed(buildFrame(0x0, Buffer.from([0x03, 0x04]), true))  // CONTINUATION, fin=true

    expect(received.length).toBe(1)
    expect(Buffer.isBuffer(received[0])).toBe(true)
    expect(received[0]).toEqual(Buffer.from([0x01, 0x02, 0x03, 0x04]))
  })

  it('should close with 1002 on unexpected continuation frame', () => {
    const {socket, feed} = connect('frag-3')

    // CONTINUATION with no preceding TEXT/BINARY is a protocol error.
    feed(buildFrame(0x0, 'orphan', true))

    expect(socket.end).toHaveBeenCalled()
  })

  it('should handle single unfragmented message normally', () => {
    const {received, feed} = connect('frag-4')

    // Single complete frame: TEXT, fin=true.
    feed(buildFrame(0x1, 'complete', true))

    expect(received).toEqual(['complete'])
  })

  it('should discard fragment buffer on close', () => {
    const {client, received, feed} = connect('frag-5')

    // Begin a fragmented message, then close before the final frame arrives.
    feed(buildFrame(0x1, 'partial', false))
    client.close()

    expect(received).toEqual([])
  })
})
|
|
@@ -11,12 +11,15 @@ describe('WebSocketClient Limits', () => {
|
|
|
11
11
|
it('should close connection if payload exceeds limit', () => {
|
|
12
12
|
const socket = {
|
|
13
13
|
pause: jest.fn(),
|
|
14
|
+
resume: jest.fn(),
|
|
14
15
|
on: jest.fn(),
|
|
15
16
|
write: jest.fn(),
|
|
16
17
|
end: jest.fn(),
|
|
17
|
-
removeAllListeners: jest.fn()
|
|
18
|
+
removeAllListeners: jest.fn(),
|
|
19
|
+
writable: true
|
|
18
20
|
}
|
|
19
|
-
new WebSocketClient(socket, server, 'test-id', {maxPayload: 10})
|
|
21
|
+
const client = new WebSocketClient(socket, server, 'test-id', {maxPayload: 10})
|
|
22
|
+
client.resume()
|
|
20
23
|
|
|
21
24
|
const buffer = Buffer.alloc(100)
|
|
22
25
|
buffer[0] = 0x81
|
|
@@ -32,12 +35,15 @@ describe('WebSocketClient Limits', () => {
|
|
|
32
35
|
it('should close connection if rate limit exceeded', () => {
|
|
33
36
|
const socket = {
|
|
34
37
|
pause: jest.fn(),
|
|
38
|
+
resume: jest.fn(),
|
|
35
39
|
on: jest.fn(),
|
|
36
40
|
write: jest.fn(),
|
|
37
41
|
end: jest.fn(),
|
|
38
|
-
removeAllListeners: jest.fn()
|
|
42
|
+
removeAllListeners: jest.fn(),
|
|
43
|
+
writable: true
|
|
39
44
|
}
|
|
40
|
-
new WebSocketClient(socket, server, 'test-id', {rateLimit: {max: 2, window: 1000}})
|
|
45
|
+
const client = new WebSocketClient(socket, server, 'test-id', {rateLimit: {max: 2, window: 1000}})
|
|
46
|
+
client.resume()
|
|
41
47
|
|
|
42
48
|
const buffer = Buffer.alloc(7)
|
|
43
49
|
buffer[0] = 0x81
|