@fluxstack/live-client 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +9 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +9 -2
- package/dist/index.js.map +1 -1
- package/dist/live-client.browser.global.js +9 -2
- package/dist/live-client.browser.global.js.map +1 -1
- package/package.json +4 -3
- package/src/__tests__/rooms.binary.test.ts +359 -0
- package/src/component.ts +364 -0
- package/src/connection.ts +508 -0
- package/src/index.ts +219 -0
- package/src/persistence.ts +48 -0
- package/src/rooms.ts +539 -0
- package/src/state-validator.ts +121 -0
- package/src/upload.ts +366 -0
package/src/upload.ts
ADDED
|
@@ -0,0 +1,366 @@
|
|
|
1
|
+
// @fluxstack/live-client - Chunked Upload Manager
|
|
2
|
+
//
|
|
3
|
+
// Framework-agnostic chunked file upload with adaptive sizing and binary protocol.
|
|
4
|
+
|
|
5
|
+
import type {
|
|
6
|
+
FileUploadStartMessage,
|
|
7
|
+
FileUploadChunkMessage,
|
|
8
|
+
FileUploadCompleteMessage,
|
|
9
|
+
FileUploadProgressResponse,
|
|
10
|
+
FileUploadCompleteResponse,
|
|
11
|
+
BinaryChunkHeader,
|
|
12
|
+
} from '@fluxstack/live'
|
|
13
|
+
|
|
14
|
+
// ===== Adaptive Chunk Sizer =====
|
|
15
|
+
|
|
16
|
+
/**
 * Tuning parameters for {@link AdaptiveChunkSizer}.
 */
export interface AdaptiveChunkConfig {
  /** Lower bound for the chunk size, in bytes. Default: 16 KiB. */
  minChunkSize: number
  /** Upper bound for the chunk size, in bytes. Default: 1 MiB. */
  maxChunkSize: number
  /** Chunk size used before any measurements exist, in bytes. Default: 64 KiB. */
  initialChunkSize: number
  /** Desired per-chunk round-trip latency in milliseconds; the size only grows while latency stays at or below this. Default: 200. */
  targetLatency: number
  /** Maximum multiplicative growth per step, and the divisor used for a single failure. Default: 1.5. */
  adjustmentFactor: number
  /** Number of most recent successful chunks averaged by getAverageThroughput(). Default: 3. */
  measurementWindow: number
}
|
|
24
|
+
|
|
25
|
+
/**
 * A single chunk-transfer measurement recorded by {@link AdaptiveChunkSizer}.
 */
export interface ChunkMetrics {
  /** Zero-based index of the chunk within the upload. */
  chunkIndex: number
  /** Size of the chunk that was sent, in bytes. */
  chunkSize: number
  /** Timestamp (ms since epoch) when the chunk send started. */
  startTime: number
  /** Timestamp (ms since epoch) when the outcome was recorded. */
  endTime: number
  /** endTime - startTime, in milliseconds. */
  latency: number
  /** Bytes per second for a successful chunk; 0 for a failed one. */
  throughput: number
  /** Whether the chunk was acknowledged successfully. */
  success: boolean
}
|
|
34
|
+
|
|
35
|
+
export class AdaptiveChunkSizer {
|
|
36
|
+
private config: Required<AdaptiveChunkConfig>
|
|
37
|
+
private currentChunkSize: number
|
|
38
|
+
private metrics: ChunkMetrics[] = []
|
|
39
|
+
private consecutiveErrors = 0
|
|
40
|
+
private consecutiveSuccesses = 0
|
|
41
|
+
|
|
42
|
+
constructor(config: Partial<AdaptiveChunkConfig> = {}) {
|
|
43
|
+
this.config = {
|
|
44
|
+
minChunkSize: config.minChunkSize ?? 16 * 1024,
|
|
45
|
+
maxChunkSize: config.maxChunkSize ?? 1024 * 1024,
|
|
46
|
+
initialChunkSize: config.initialChunkSize ?? 64 * 1024,
|
|
47
|
+
targetLatency: config.targetLatency ?? 200,
|
|
48
|
+
adjustmentFactor: config.adjustmentFactor ?? 1.5,
|
|
49
|
+
measurementWindow: config.measurementWindow ?? 3,
|
|
50
|
+
}
|
|
51
|
+
this.currentChunkSize = this.config.initialChunkSize
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
getChunkSize(): number {
|
|
55
|
+
return this.currentChunkSize
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
recordChunkStart(_chunkIndex: number): number {
|
|
59
|
+
return Date.now()
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
recordChunkComplete(chunkIndex: number, chunkSize: number, startTime: number, success: boolean): void {
|
|
63
|
+
const endTime = Date.now()
|
|
64
|
+
const latency = endTime - startTime
|
|
65
|
+
const throughput = success ? (chunkSize / latency) * 1000 : 0
|
|
66
|
+
|
|
67
|
+
this.metrics.push({ chunkIndex, chunkSize, startTime, endTime, latency, throughput, success })
|
|
68
|
+
|
|
69
|
+
if (this.metrics.length > this.config.measurementWindow * 2) {
|
|
70
|
+
this.metrics = this.metrics.slice(-this.config.measurementWindow * 2)
|
|
71
|
+
}
|
|
72
|
+
|
|
73
|
+
if (success) {
|
|
74
|
+
this.consecutiveSuccesses++
|
|
75
|
+
this.consecutiveErrors = 0
|
|
76
|
+
this.adjustUp(latency)
|
|
77
|
+
} else {
|
|
78
|
+
this.consecutiveErrors++
|
|
79
|
+
this.consecutiveSuccesses = 0
|
|
80
|
+
this.adjustDown()
|
|
81
|
+
}
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
private adjustUp(latency: number): void {
|
|
85
|
+
if (this.consecutiveSuccesses < 2) return
|
|
86
|
+
if (latency > this.config.targetLatency) return
|
|
87
|
+
|
|
88
|
+
const latencyRatio = this.config.targetLatency / latency
|
|
89
|
+
let newSize = Math.floor(this.currentChunkSize * Math.min(latencyRatio, this.config.adjustmentFactor))
|
|
90
|
+
newSize = Math.min(newSize, this.config.maxChunkSize)
|
|
91
|
+
if (newSize > this.currentChunkSize) this.currentChunkSize = newSize
|
|
92
|
+
}
|
|
93
|
+
|
|
94
|
+
private adjustDown(): void {
|
|
95
|
+
const decreaseFactor = this.consecutiveErrors > 1 ? 2 : this.config.adjustmentFactor
|
|
96
|
+
let newSize = Math.floor(this.currentChunkSize / decreaseFactor)
|
|
97
|
+
newSize = Math.max(newSize, this.config.minChunkSize)
|
|
98
|
+
if (newSize < this.currentChunkSize) this.currentChunkSize = newSize
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
getAverageThroughput(): number {
|
|
102
|
+
const recent = this.metrics.slice(-this.config.measurementWindow).filter(m => m.success)
|
|
103
|
+
if (recent.length === 0) return 0
|
|
104
|
+
return recent.reduce((sum, m) => sum + m.throughput, 0) / recent.length
|
|
105
|
+
}
|
|
106
|
+
|
|
107
|
+
getStats() {
|
|
108
|
+
return {
|
|
109
|
+
currentChunkSize: this.currentChunkSize,
|
|
110
|
+
averageThroughput: this.getAverageThroughput(),
|
|
111
|
+
consecutiveSuccesses: this.consecutiveSuccesses,
|
|
112
|
+
consecutiveErrors: this.consecutiveErrors,
|
|
113
|
+
totalMeasurements: this.metrics.length,
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
reset(): void {
|
|
118
|
+
this.currentChunkSize = this.config.initialChunkSize
|
|
119
|
+
this.metrics = []
|
|
120
|
+
this.consecutiveErrors = 0
|
|
121
|
+
this.consecutiveSuccesses = 0
|
|
122
|
+
}
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
// ===== Binary Protocol =====
|
|
126
|
+
|
|
127
|
+
/**
|
|
128
|
+
* Creates a binary message with header + data
|
|
129
|
+
* Format: [4 bytes header length LE][JSON header][binary data]
|
|
130
|
+
*/
|
|
131
|
+
export function createBinaryChunkMessage(header: BinaryChunkHeader, chunkData: Uint8Array): ArrayBuffer {
|
|
132
|
+
const headerJson = JSON.stringify(header)
|
|
133
|
+
const headerBytes = new TextEncoder().encode(headerJson)
|
|
134
|
+
|
|
135
|
+
const totalSize = 4 + headerBytes.length + chunkData.length
|
|
136
|
+
const buffer = new ArrayBuffer(totalSize)
|
|
137
|
+
const view = new DataView(buffer)
|
|
138
|
+
const uint8View = new Uint8Array(buffer)
|
|
139
|
+
|
|
140
|
+
view.setUint32(0, headerBytes.length, true)
|
|
141
|
+
uint8View.set(headerBytes, 4)
|
|
142
|
+
uint8View.set(chunkData, 4 + headerBytes.length)
|
|
143
|
+
|
|
144
|
+
return buffer
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
// ===== Chunked Uploader =====
|
|
148
|
+
|
|
149
|
+
/**
 * Configuration and transport callbacks for {@link ChunkedUploader}.
 */
export interface ChunkedUploadOptions {
  /** Fixed chunk size in bytes (also the starting size under adaptive chunking). Default: 64 KiB. */
  chunkSize?: number
  /** Maximum accepted file size in bytes. Default: 50 MiB. */
  maxFileSize?: number
  /** Accepted MIME types; an empty array accepts any type. Default: []. */
  allowedTypes?: string[]
  /** Sends a JSON protocol message and resolves with the server's response. */
  sendMessageAndWait: (message: any, timeout?: number) => Promise<any>
  /** Sends a binary frame and resolves with the server's response; required for the binary chunk protocol. */
  sendBinaryAndWait?: (data: ArrayBuffer, requestId: string, timeout?: number) => Promise<any>
  /** Invoked after each acknowledged chunk with the server-reported progress and byte counts. */
  onProgress?: (progress: number, bytesUploaded: number, totalBytes: number) => void
  /** Invoked once the server confirms upload completion. */
  onComplete?: (response: FileUploadCompleteResponse) => void
  /** Invoked with a human-readable message on validation or transfer failure. */
  onError?: (error: string) => void
  /** Enables latency-driven chunk size adaptation. Default: false. */
  adaptiveChunking?: boolean
  /** Overrides for the adaptive sizer; used only when adaptiveChunking is true. */
  adaptiveConfig?: Partial<AdaptiveChunkConfig>
  /** Prefer binary frames over base64 JSON chunks (requires sendBinaryAndWait). Default: true. */
  useBinaryProtocol?: boolean
}
|
|
162
|
+
|
|
163
|
+
/**
 * Observable snapshot of an upload's lifecycle, delivered to
 * onStateChange listeners and exposed via the `state` getter.
 */
export interface ChunkedUploadState {
  /** True while a transfer is in flight. */
  uploading: boolean
  /** Progress value reported by the server (set to 100 on completion). */
  progress: number
  /** Last error message, or null when no error has occurred. */
  error: string | null
  /** Identifier of the current/last upload, or null before the first upload. */
  uploadId: string | null
  /** Bytes acknowledged by the server so far. */
  bytesUploaded: number
  /** Total size of the file being uploaded, in bytes. */
  totalBytes: number
}
|
|
171
|
+
|
|
172
|
+
/**
|
|
173
|
+
* Framework-agnostic chunked file uploader.
|
|
174
|
+
* Manages the upload lifecycle without any UI framework dependency.
|
|
175
|
+
*/
|
|
176
|
+
export class ChunkedUploader {
|
|
177
|
+
private options: Required<Pick<ChunkedUploadOptions, 'chunkSize' | 'maxFileSize' | 'allowedTypes' | 'useBinaryProtocol' | 'adaptiveChunking'>> & ChunkedUploadOptions
|
|
178
|
+
private abortController: AbortController | null = null
|
|
179
|
+
private adaptiveSizer: AdaptiveChunkSizer | null = null
|
|
180
|
+
private _state: ChunkedUploadState = {
|
|
181
|
+
uploading: false,
|
|
182
|
+
progress: 0,
|
|
183
|
+
error: null,
|
|
184
|
+
uploadId: null,
|
|
185
|
+
bytesUploaded: 0,
|
|
186
|
+
totalBytes: 0,
|
|
187
|
+
}
|
|
188
|
+
private stateListeners = new Set<(state: ChunkedUploadState) => void>()
|
|
189
|
+
|
|
190
|
+
constructor(
|
|
191
|
+
private componentId: string,
|
|
192
|
+
options: ChunkedUploadOptions,
|
|
193
|
+
) {
|
|
194
|
+
this.options = {
|
|
195
|
+
chunkSize: options.chunkSize ?? 64 * 1024,
|
|
196
|
+
maxFileSize: options.maxFileSize ?? 50 * 1024 * 1024,
|
|
197
|
+
allowedTypes: options.allowedTypes ?? [],
|
|
198
|
+
useBinaryProtocol: options.useBinaryProtocol ?? true,
|
|
199
|
+
adaptiveChunking: options.adaptiveChunking ?? false,
|
|
200
|
+
...options,
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
if (this.options.adaptiveChunking) {
|
|
204
|
+
this.adaptiveSizer = new AdaptiveChunkSizer({
|
|
205
|
+
initialChunkSize: this.options.chunkSize,
|
|
206
|
+
minChunkSize: this.options.chunkSize,
|
|
207
|
+
maxChunkSize: 1024 * 1024,
|
|
208
|
+
...options.adaptiveConfig,
|
|
209
|
+
})
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
|
|
213
|
+
get state(): ChunkedUploadState {
|
|
214
|
+
return { ...this._state }
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
onStateChange(callback: (state: ChunkedUploadState) => void): () => void {
|
|
218
|
+
this.stateListeners.add(callback)
|
|
219
|
+
return () => { this.stateListeners.delete(callback) }
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
private setState(patch: Partial<ChunkedUploadState>) {
|
|
223
|
+
this._state = { ...this._state, ...patch }
|
|
224
|
+
for (const cb of this.stateListeners) cb(this._state)
|
|
225
|
+
}
|
|
226
|
+
|
|
227
|
+
async uploadFile(file: File): Promise<void> {
|
|
228
|
+
const { allowedTypes, maxFileSize, chunkSize, sendMessageAndWait, sendBinaryAndWait, useBinaryProtocol } = this.options
|
|
229
|
+
const canUseBinary = useBinaryProtocol && sendBinaryAndWait
|
|
230
|
+
|
|
231
|
+
// Validate
|
|
232
|
+
if (allowedTypes.length > 0 && !allowedTypes.includes(file.type)) {
|
|
233
|
+
const error = `Invalid file type: ${file.type}. Allowed: ${allowedTypes.join(', ')}`
|
|
234
|
+
this.setState({ error })
|
|
235
|
+
this.options.onError?.(error)
|
|
236
|
+
return
|
|
237
|
+
}
|
|
238
|
+
|
|
239
|
+
if (file.size > maxFileSize) {
|
|
240
|
+
const error = `File too large: ${file.size} bytes. Max: ${maxFileSize} bytes`
|
|
241
|
+
this.setState({ error })
|
|
242
|
+
this.options.onError?.(error)
|
|
243
|
+
return
|
|
244
|
+
}
|
|
245
|
+
|
|
246
|
+
try {
|
|
247
|
+
const uploadId = `upload-${Date.now()}-${Math.random().toString(36).substring(2, 8)}`
|
|
248
|
+
this.abortController = new AbortController()
|
|
249
|
+
this.adaptiveSizer?.reset()
|
|
250
|
+
|
|
251
|
+
this.setState({ uploading: true, progress: 0, error: null, uploadId, bytesUploaded: 0, totalBytes: file.size })
|
|
252
|
+
|
|
253
|
+
const initialChunkSize = this.adaptiveSizer?.getChunkSize() ?? chunkSize
|
|
254
|
+
|
|
255
|
+
// Start upload
|
|
256
|
+
const startMessage: FileUploadStartMessage = {
|
|
257
|
+
type: 'FILE_UPLOAD_START',
|
|
258
|
+
componentId: this.componentId,
|
|
259
|
+
uploadId,
|
|
260
|
+
filename: file.name,
|
|
261
|
+
fileType: file.type,
|
|
262
|
+
fileSize: file.size,
|
|
263
|
+
chunkSize,
|
|
264
|
+
requestId: `start-${uploadId}`,
|
|
265
|
+
}
|
|
266
|
+
|
|
267
|
+
const startResponse = await sendMessageAndWait(startMessage, 10000)
|
|
268
|
+
if (!startResponse?.success) throw new Error(startResponse?.error || 'Failed to start upload')
|
|
269
|
+
|
|
270
|
+
let offset = 0
|
|
271
|
+
let chunkIndex = 0
|
|
272
|
+
const estimatedTotalChunks = Math.ceil(file.size / initialChunkSize)
|
|
273
|
+
|
|
274
|
+
while (offset < file.size) {
|
|
275
|
+
if (this.abortController?.signal.aborted) throw new Error('Upload cancelled')
|
|
276
|
+
|
|
277
|
+
const currentChunkSize = this.adaptiveSizer?.getChunkSize() ?? chunkSize
|
|
278
|
+
const chunkEnd = Math.min(offset + currentChunkSize, file.size)
|
|
279
|
+
const sliceBuffer = await file.slice(offset, chunkEnd).arrayBuffer()
|
|
280
|
+
const chunkBytes = new Uint8Array(sliceBuffer)
|
|
281
|
+
const chunkStartTime = this.adaptiveSizer?.recordChunkStart(chunkIndex) ?? 0
|
|
282
|
+
const requestId = `chunk-${uploadId}-${chunkIndex}`
|
|
283
|
+
|
|
284
|
+
try {
|
|
285
|
+
let progressResponse: FileUploadProgressResponse | undefined
|
|
286
|
+
|
|
287
|
+
if (canUseBinary) {
|
|
288
|
+
const header: BinaryChunkHeader = {
|
|
289
|
+
type: 'FILE_UPLOAD_CHUNK',
|
|
290
|
+
componentId: this.componentId,
|
|
291
|
+
uploadId,
|
|
292
|
+
chunkIndex,
|
|
293
|
+
totalChunks: estimatedTotalChunks,
|
|
294
|
+
requestId,
|
|
295
|
+
}
|
|
296
|
+
const binaryMessage = createBinaryChunkMessage(header, chunkBytes)
|
|
297
|
+
progressResponse = await sendBinaryAndWait!(binaryMessage, requestId, 10000) as FileUploadProgressResponse
|
|
298
|
+
} else {
|
|
299
|
+
let binary = ''
|
|
300
|
+
for (let j = 0; j < chunkBytes.length; j++) binary += String.fromCharCode(chunkBytes[j])
|
|
301
|
+
|
|
302
|
+
const chunkMessage: FileUploadChunkMessage = {
|
|
303
|
+
type: 'FILE_UPLOAD_CHUNK',
|
|
304
|
+
componentId: this.componentId,
|
|
305
|
+
uploadId,
|
|
306
|
+
chunkIndex,
|
|
307
|
+
totalChunks: estimatedTotalChunks,
|
|
308
|
+
data: btoa(binary),
|
|
309
|
+
requestId,
|
|
310
|
+
}
|
|
311
|
+
progressResponse = await sendMessageAndWait!(chunkMessage, 10000) as FileUploadProgressResponse
|
|
312
|
+
}
|
|
313
|
+
|
|
314
|
+
if (progressResponse) {
|
|
315
|
+
this.setState({ progress: progressResponse.progress, bytesUploaded: progressResponse.bytesUploaded })
|
|
316
|
+
this.options.onProgress?.(progressResponse.progress, progressResponse.bytesUploaded, file.size)
|
|
317
|
+
}
|
|
318
|
+
|
|
319
|
+
this.adaptiveSizer?.recordChunkComplete(chunkIndex, chunkBytes.length, chunkStartTime, true)
|
|
320
|
+
} catch (error) {
|
|
321
|
+
this.adaptiveSizer?.recordChunkComplete(chunkIndex, chunkBytes.length, chunkStartTime, false)
|
|
322
|
+
throw error
|
|
323
|
+
}
|
|
324
|
+
|
|
325
|
+
offset += chunkBytes.length
|
|
326
|
+
chunkIndex++
|
|
327
|
+
|
|
328
|
+
if (!this.options.adaptiveChunking) {
|
|
329
|
+
await new Promise(resolve => setTimeout(resolve, 10))
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
// Complete
|
|
334
|
+
const completeMessage: FileUploadCompleteMessage = {
|
|
335
|
+
type: 'FILE_UPLOAD_COMPLETE',
|
|
336
|
+
componentId: this.componentId,
|
|
337
|
+
uploadId,
|
|
338
|
+
requestId: `complete-${uploadId}`,
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
const completeResponse = await sendMessageAndWait(completeMessage, 10000) as FileUploadCompleteResponse
|
|
342
|
+
|
|
343
|
+
if (completeResponse?.success) {
|
|
344
|
+
this.setState({ uploading: false, progress: 100, bytesUploaded: file.size })
|
|
345
|
+
this.options.onComplete?.(completeResponse)
|
|
346
|
+
} else {
|
|
347
|
+
throw new Error(completeResponse?.error || 'Upload completion failed')
|
|
348
|
+
}
|
|
349
|
+
} catch (error: any) {
|
|
350
|
+
this.setState({ uploading: false, error: error.message })
|
|
351
|
+
this.options.onError?.(error.message)
|
|
352
|
+
}
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
cancelUpload(): void {
|
|
356
|
+
if (this.abortController) {
|
|
357
|
+
this.abortController.abort()
|
|
358
|
+
this.setState({ uploading: false, error: 'Upload cancelled' })
|
|
359
|
+
}
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
reset(): void {
|
|
363
|
+
this._state = { uploading: false, progress: 0, error: null, uploadId: null, bytesUploaded: 0, totalBytes: 0 }
|
|
364
|
+
for (const cb of this.stateListeners) cb(this._state)
|
|
365
|
+
}
|
|
366
|
+
}
|