api-ape 1.0.1 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +314 -83
- package/client/README.md +32 -0
- package/client/connectSocket.js +288 -5
- package/dist/ape.js +289 -44
- package/example/NextJs/pages/Info.tsx +153 -0
- package/example/NextJs/pages/index.tsx +121 -39
- package/example/NextJs/styles/Chat.module.css +255 -1
- package/package.json +2 -2
- package/server/README.md +44 -0
- package/server/lib/fileTransfer.js +247 -0
- package/server/lib/main.js +70 -2
- package/server/lib/wiring.js +4 -2
- package/server/socket/receive.js +118 -3
- package/server/socket/send.js +97 -2
|
@@ -0,0 +1,247 @@
|
|
|
1
|
+
/**
 * FileTransferManager - Handles temporary binary data endpoints
 *
 * For downloads (server → client):
 * - Registers binary data with a hash
 * - Creates temporary endpoint at GET /api/ape/data/:hash
 * - Verifies session before allowing download
 * - Auto-cleanup after timeout
 *
 * For uploads (client → server):
 * - Registers upload expectation with queryId + pathHash
 * - Receives data via PUT /api/ape/data/:queryId/:pathHash
 * - Waits for matching WS message before processing
 */

// Default timeouts (configurable)
const DEFAULT_START_TIMEOUT = 60 * 1000 // 1 minute to start download
const DEFAULT_COMPLETE_TIMEOUT = 60 * 1000 // 1 minute after download starts

class FileTransferManager {
  /**
   * @param {object} [options]
   * @param {number} [options.startTimeout] - ms allowed before a transfer starts
   * @param {number} [options.completeTimeout] - ms allowed after a download starts
   */
  constructor(options = {}) {
    this.startTimeout = options.startTimeout || DEFAULT_START_TIMEOUT
    this.completeTimeout = options.completeTimeout || DEFAULT_COMPLETE_TIMEOUT

    // Map<hash, { data, contentType, sessionHostId, createdAt, downloadStarted, timer }>
    this.pendingDownloads = new Map()

    // Map<`${queryId}/${pathHash}`, { sessionHostId, createdAt, resolver, rejector, timer, data }>
    this.pendingUploads = new Map()

    // Periodic sweep for stale entries. unref() (where available) so this
    // interval alone does not keep the Node process alive.
    this._cleanupInterval = setInterval(() => this._cleanup(), 30000)
    this._cleanupInterval.unref?.()
  }

  /**
   * Register a binary download
   * @param {string} hash - Unique hash for this download
   * @param {Buffer|ArrayBuffer} data - Binary data to serve
   * @param {string} contentType - MIME type (e.g., 'application/octet-stream')
   * @param {string} sessionHostId - Host ID of the client session
   * @returns {string} The hash (for confirmation)
   */
  registerDownload(hash, data, contentType, sessionHostId) {
    // Clear any existing entry with same hash so its timer can't fire later
    // and delete the replacement entry.
    if (this.pendingDownloads.has(hash)) {
      const existing = this.pendingDownloads.get(hash)
      if (existing.timer) clearTimeout(existing.timer)
    }

    const entry = {
      data,
      contentType: contentType || 'application/octet-stream',
      sessionHostId,
      createdAt: Date.now(),
      downloadStarted: false,
      timer: setTimeout(() => {
        // Auto-remove if download never started
        if (!this.pendingDownloads.get(hash)?.downloadStarted) {
          this.pendingDownloads.delete(hash)
          console.log(`📦 Download expired (never started): ${hash}`)
        }
      }, this.startTimeout)
    }

    this.pendingDownloads.set(hash, entry)
    console.log(`📦 Registered download: ${hash} for session ${sessionHostId}`)
    return hash
  }

  /**
   * Get download data (called by HTTP handler)
   * @param {string} hash - Download hash
   * @param {string} requestingHostId - Host ID of requester (from session/cookie)
   * @returns {{ data: Buffer, contentType: string } | null} null when unknown
   *   hash or session mismatch (caller turns this into a 404)
   */
  getDownload(hash, requestingHostId) {
    const entry = this.pendingDownloads.get(hash)

    if (!entry) {
      console.warn(`📦 Download not found: ${hash}`)
      return null
    }

    // Session verification: only the session the data was registered for may
    // fetch it.
    if (entry.sessionHostId !== requestingHostId) {
      console.warn(`📦 Session mismatch for ${hash}: expected ${entry.sessionHostId}, got ${requestingHostId}`)
      return null
    }

    // First fetch: switch from the "never started" timer to the
    // "cleanup after completion" timer.
    if (!entry.downloadStarted) {
      entry.downloadStarted = true
      clearTimeout(entry.timer)

      entry.timer = setTimeout(() => {
        this.pendingDownloads.delete(hash)
        console.log(`📦 Download cleaned up: ${hash}`)
      }, this.completeTimeout)
    }

    return {
      data: entry.data,
      contentType: entry.contentType
    }
  }

  /**
   * Register an expected upload
   * @param {string} queryId - Query ID from WS message
   * @param {string} pathHash - Hash of property path
   * @param {string} sessionHostId - Host ID of the client session
   * @returns {Promise<Buffer>} Resolves when upload is received; rejects on
   *   timeout, expiry, or when superseded by a newer registration
   */
  registerUpload(queryId, pathHash, sessionHostId) {
    const key = `${queryId}/${pathHash}`

    // Fix: replace any stale expectation for the same key. Without this, the
    // old entry's timer keeps running, later deletes the NEW entry and the
    // old promise never settles.
    const stale = this.pendingUploads.get(key)
    if (stale) {
      clearTimeout(stale.timer)
      stale.rejector(new Error(`Upload superseded: ${key}`))
      this.pendingUploads.delete(key)
    }

    return new Promise((resolve, reject) => {
      const entry = {
        sessionHostId,
        createdAt: Date.now(),
        resolver: resolve,
        rejector: reject,
        data: null,
        timer: setTimeout(() => {
          this.pendingUploads.delete(key)
          reject(new Error(`Upload timeout: ${key}`))
        }, this.startTimeout)
      }

      this.pendingUploads.set(key, entry)
      console.log(`📤 Registered upload expectation: ${key} for session ${sessionHostId}`)
    })
  }

  /**
   * Receive upload data (called by HTTP handler)
   * @param {string} queryId - Query ID from URL
   * @param {string} pathHash - Path hash from URL
   * @param {Buffer} data - Uploaded binary data
   * @param {string} requestingHostId - Host ID of uploader
   * @returns {boolean} True if accepted
   */
  receiveUpload(queryId, pathHash, data, requestingHostId) {
    const key = `${queryId}/${pathHash}`
    const entry = this.pendingUploads.get(key)

    if (!entry) {
      console.warn(`📤 Upload not expected: ${key}`)
      return false
    }

    // Session verification
    if (entry.sessionHostId !== requestingHostId) {
      console.warn(`📤 Session mismatch for upload ${key}: expected ${entry.sessionHostId}, got ${requestingHostId}`)
      return false
    }

    // Clear timeout and resolve the waiting registerUpload() promise
    clearTimeout(entry.timer)
    entry.resolver(data)
    this.pendingUploads.delete(key)
    console.log(`📤 Upload received: ${key}`)

    return true
  }

  /**
   * Generate hash for download from queryId and property path
   * @param {string} queryId - The query ID
   * @param {string} propertyPath - The property path (e.g., 'files.0.data')
   * @returns {string} Combined hash
   */
  static generateHash(queryId, propertyPath) {
    // Simple 32-bit string hash (djb2-style) combining queryId and path.
    // NOTE(review): not collision-resistant; in production, consider
    // crypto.createHash for unguessable download URLs.
    const combined = `${queryId}:${propertyPath}`
    let hash = 0
    for (let i = 0; i < combined.length; i++) {
      const char = combined.charCodeAt(i)
      hash = ((hash << 5) - hash) + char
      hash = hash & hash // Convert to 32bit integer
    }
    return Math.abs(hash).toString(36)
  }

  /**
   * Cleanup expired entries (safety net for timers that were reset or missed)
   * @private
   */
  _cleanup() {
    const now = Date.now()
    const maxAge = this.startTimeout + this.completeTimeout

    // Cleanup downloads
    for (const [hash, entry] of this.pendingDownloads) {
      if (now - entry.createdAt > maxAge) {
        clearTimeout(entry.timer)
        this.pendingDownloads.delete(hash)
        console.log(`📦 Cleanup stale download: ${hash}`)
      }
    }

    // Cleanup uploads - reject so awaiting callers don't hang forever
    for (const [key, entry] of this.pendingUploads) {
      if (now - entry.createdAt > maxAge) {
        clearTimeout(entry.timer)
        entry.rejector(new Error(`Upload expired: ${key}`))
        this.pendingUploads.delete(key)
        console.log(`📤 Cleanup stale upload: ${key}`)
      }
    }
  }

  /**
   * Shutdown cleanup - stops the sweep interval and all pending timers
   */
  destroy() {
    clearInterval(this._cleanupInterval)

    // Clear all timers
    for (const entry of this.pendingDownloads.values()) {
      clearTimeout(entry.timer)
    }
    for (const entry of this.pendingUploads.values()) {
      clearTimeout(entry.timer)
    }

    this.pendingDownloads.clear()
    this.pendingUploads.clear()
  }
}
|
|
233
|
+
|
|
234
|
+
// Singleton instance
|
|
235
|
+
let instance = null
|
|
236
|
+
|
|
237
|
+
function getFileTransferManager(options) {
|
|
238
|
+
if (!instance) {
|
|
239
|
+
instance = new FileTransferManager(options)
|
|
240
|
+
}
|
|
241
|
+
return instance
|
|
242
|
+
}
|
|
243
|
+
|
|
244
|
+
module.exports = {
|
|
245
|
+
FileTransferManager,
|
|
246
|
+
getFileTransferManager
|
|
247
|
+
}
|
package/server/lib/main.js
CHANGED
|
@@ -2,10 +2,11 @@ const loader = require('./loader')
|
|
|
2
2
|
const wiring = require('./wiring')
|
|
3
3
|
const expressWs = require('express-ws');
|
|
4
4
|
const path = require('path');
|
|
5
|
+
const { getFileTransferManager } = require('./fileTransfer');
|
|
5
6
|
|
|
6
7
|
let created = false
|
|
7
8
|
|
|
8
|
-
module.exports = function (app, { where, onConnent }) {
|
|
9
|
+
module.exports = function (app, { where, onConnent, fileTransferOptions }) {
|
|
9
10
|
|
|
10
11
|
if (created) {
|
|
11
12
|
throw new Error("Api-Ape already started")
|
|
@@ -14,10 +15,77 @@ module.exports = function (app, { where, onConnent }) {
|
|
|
14
15
|
expressWs(app)
|
|
15
16
|
const controllers = loader(where)
|
|
16
17
|
|
|
18
|
+
// Initialize file transfer manager
|
|
19
|
+
const fileTransfer = getFileTransferManager(fileTransferOptions)
|
|
20
|
+
|
|
17
21
|
// Serve bundled client at /ape.js
|
|
18
22
|
app.get('/api/ape.js', (req, res) => {
|
|
19
23
|
res.sendFile(path.join(__dirname, '../../dist/ape.js'))
|
|
20
24
|
})
|
|
21
25
|
|
|
22
|
-
|
|
26
|
+
// File download endpoint - GET /api/ape/data/:hash
|
|
27
|
+
app.get('/api/ape/data/:hash', (req, res) => {
|
|
28
|
+
const { hash } = req.params
|
|
29
|
+
|
|
30
|
+
// Get hostId from session/cookie (set during WS connection)
|
|
31
|
+
const hostId = req.cookies?.apeHostId || req.headers['x-ape-host-id']
|
|
32
|
+
|
|
33
|
+
if (!hostId) {
|
|
34
|
+
return res.status(401).json({ error: 'Missing session identifier' })
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
// Enforce HTTPS in production
|
|
38
|
+
const isLocal = ['localhost', '127.0.0.1', '[::1]'].includes(req.hostname)
|
|
39
|
+
if (!isLocal && !req.secure && req.get('x-forwarded-proto') !== 'https') {
|
|
40
|
+
return res.status(403).json({ error: 'HTTPS required for file transfers' })
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
const result = fileTransfer.getDownload(hash, hostId)
|
|
44
|
+
|
|
45
|
+
if (!result) {
|
|
46
|
+
return res.status(404).json({ error: 'Download not found or unauthorized' })
|
|
47
|
+
}
|
|
48
|
+
|
|
49
|
+
res.set('Content-Type', result.contentType)
|
|
50
|
+
res.set('Content-Length', result.data.length || result.data.byteLength)
|
|
51
|
+
res.send(result.data)
|
|
52
|
+
})
|
|
53
|
+
|
|
54
|
+
// File upload endpoint - PUT /api/ape/data/:queryId/:pathHash
|
|
55
|
+
app.put('/api/ape/data/:queryId/:pathHash', (req, res) => {
|
|
56
|
+
const { queryId, pathHash } = req.params
|
|
57
|
+
|
|
58
|
+
// Get hostId from session/cookie
|
|
59
|
+
const hostId = req.cookies?.apeHostId || req.headers['x-ape-host-id']
|
|
60
|
+
|
|
61
|
+
if (!hostId) {
|
|
62
|
+
return res.status(401).json({ error: 'Missing session identifier' })
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
// Enforce HTTPS in production
|
|
66
|
+
const isLocal = ['localhost', '127.0.0.1', '[::1]'].includes(req.hostname)
|
|
67
|
+
if (!isLocal && !req.secure && req.get('x-forwarded-proto') !== 'https') {
|
|
68
|
+
return res.status(403).json({ error: 'HTTPS required for file transfers' })
|
|
69
|
+
}
|
|
70
|
+
|
|
71
|
+
// Collect body as buffer
|
|
72
|
+
const chunks = []
|
|
73
|
+
req.on('data', chunk => chunks.push(chunk))
|
|
74
|
+
req.on('end', () => {
|
|
75
|
+
const data = Buffer.concat(chunks)
|
|
76
|
+
const success = fileTransfer.receiveUpload(queryId, pathHash, data, hostId)
|
|
77
|
+
|
|
78
|
+
if (success) {
|
|
79
|
+
res.json({ success: true })
|
|
80
|
+
} else {
|
|
81
|
+
res.status(404).json({ error: 'Upload not expected or unauthorized' })
|
|
82
|
+
}
|
|
83
|
+
})
|
|
84
|
+
req.on('error', (err) => {
|
|
85
|
+
res.status(500).json({ error: err.message })
|
|
86
|
+
})
|
|
87
|
+
})
|
|
88
|
+
|
|
89
|
+
// Pass fileTransfer to wiring so send.js can register downloads
|
|
90
|
+
app.ws('/api/ape', wiring(controllers, onConnent, fileTransfer))
|
|
23
91
|
}
|
package/server/lib/wiring.js
CHANGED
|
@@ -26,7 +26,7 @@ function defaultEvents(events = {}) {
|
|
|
26
26
|
//============================================== wiring
|
|
27
27
|
//=====================================================
|
|
28
28
|
|
|
29
|
-
module.exports = function wiring(controllers, onConnent) {
|
|
29
|
+
module.exports = function wiring(controllers, onConnent, fileTransfer) {
|
|
30
30
|
onConnent = onConnent || (() => { });
|
|
31
31
|
return function webSocketHandler(socket, req) {
|
|
32
32
|
|
|
@@ -70,7 +70,8 @@ module.exports = function wiring(controllers, onConnent) {
|
|
|
70
70
|
events: { onReceive, onSend, onError, onDisconnent },
|
|
71
71
|
controllers,
|
|
72
72
|
sharedValues,
|
|
73
|
-
embedValues: embed
|
|
73
|
+
embedValues: embed,
|
|
74
|
+
fileTransfer // Pass file transfer manager
|
|
74
75
|
}// END ape
|
|
75
76
|
send = socketSend(ape)
|
|
76
77
|
ape.send = send
|
|
@@ -92,3 +93,4 @@ module.exports = function wiring(controllers, onConnent) {
|
|
|
92
93
|
|
|
93
94
|
} // END webSocketHandler
|
|
94
95
|
} // END wiring
|
|
96
|
+
|
package/server/socket/receive.js
CHANGED
|
@@ -2,8 +2,95 @@ const messageHash = require('../../utils/messageHash')
|
|
|
2
2
|
const { broadcast, online, getClients } = require('../lib/broadcast')
|
|
3
3
|
const jss = require('../../utils/jss')
|
|
4
4
|
|
|
5
|
+
/**
 * Find B/A tagged properties in data (indicating pending uploads)
 * Walks the structure depth-first; a key shaped like "name<!B>" or
 * "name<!A>" marks a pending binary upload whose value is its hash.
 * Returns array of { path, hash, tag, originalKey }
 */
function findUploadTags(obj, path = '') {
  // Primitives (and null/undefined) cannot carry tagged keys.
  if (obj === null || obj === undefined || typeof obj !== 'object') {
    return []
  }

  if (Array.isArray(obj)) {
    const found = []
    obj.forEach((item, i) => {
      found.push(...findUploadTags(item, path ? `${path}.${i}` : String(i)))
    })
    return found
  }

  const found = []
  for (const key of Object.keys(obj)) {
    // Single regex covers both the B and A binary upload markers.
    const tagged = key.match(/^(.+)<!([BA])>$/)
    if (tagged) {
      found.push({
        path: path ? `${path}.${tagged[1]}` : tagged[1],
        hash: obj[key],
        tag: tagged[2],
        originalKey: key
      })
    } else {
      found.push(...findUploadTags(obj[key], path ? `${path}.${key}` : key))
    }
  }

  return found
}
|
|
49
|
+
|
|
50
|
+
/**
 * Clean upload tags from data (rename key<!B> to key)
 * Returns a new structure; tagged values (upload hashes) are kept as-is
 * so they can later be replaced with the uploaded binary data.
 */
function cleanUploadTags(obj) {
  if (obj === null || obj === undefined || typeof obj !== 'object') {
    return obj
  }

  if (Array.isArray(obj)) {
    return obj.map(cleanUploadTags)
  }

  const out = {}
  for (const [key, value] of Object.entries(obj)) {
    const tagged = key.match(/^(.+)<!(?:B|A)>$/)
    if (tagged) {
      // Strip the marker; value will be replaced with actual data later
      out[tagged[1]] = value
    } else {
      out[key] = cleanUploadTags(value)
    }
  }
  return out
}
|
|
77
|
+
|
|
78
|
+
/**
 * Set value at nested path (e.g. 'files.0.data').
 * Fix: the original threw a TypeError when an intermediate segment was
 * missing; we now create missing containers (array when the next segment
 * is numeric, plain object otherwise) so a slightly-off path can't crash
 * the upload pipeline.
 * @param {object|Array} obj - Root container to mutate
 * @param {string} path - Dot-separated property path
 * @param {*} value - Value to assign at the path
 */
function setValueAtPath(obj, path, value) {
  const parts = path.split('.')
  let current = obj

  for (let i = 0; i < parts.length - 1; i++) {
    const part = parts[i]
    if (current[part] === null || current[part] === undefined) {
      // Create an array when the NEXT segment looks like an index.
      current[part] = /^\d+$/.test(parts[i + 1]) ? [] : {}
    }
    current = current[part]
  }

  current[parts[parts.length - 1]] = value
}
|
|
91
|
+
|
|
5
92
|
module.exports = function receiveHandler(ape) {
|
|
6
|
-
const { send, checkReply, events, controllers, sharedValues, hostId, embedValues } = ape
|
|
93
|
+
const { send, checkReply, events, controllers, sharedValues, hostId, embedValues, fileTransfer } = ape
|
|
7
94
|
|
|
8
95
|
// Build `this` context for controllers
|
|
9
96
|
// Includes: client metadata + api-ape utilities
|
|
@@ -18,7 +105,7 @@ module.exports = function receiveHandler(ape) {
|
|
|
18
105
|
hostId
|
|
19
106
|
}
|
|
20
107
|
|
|
21
|
-
return function onReceive(msg) {
|
|
108
|
+
return async function onReceive(msg) {
|
|
22
109
|
// Convert Buffer to string - WebSocket messages may arrive as binary
|
|
23
110
|
const msgString = typeof msg === 'string' ? msg : msg.toString('utf8');
|
|
24
111
|
const queryId = messageHash(msgString);
|
|
@@ -31,6 +118,34 @@ module.exports = function receiveHandler(ape) {
|
|
|
31
118
|
// Call onReceive hook - it should return a finish callback
|
|
32
119
|
const onFinish = events.onReceive(queryId, data, type) || (() => { })
|
|
33
120
|
|
|
121
|
+
// Check for pending uploads (B/A tags)
|
|
122
|
+
let processedData = data
|
|
123
|
+
if (fileTransfer && data) {
|
|
124
|
+
const uploadTags = findUploadTags(data)
|
|
125
|
+
|
|
126
|
+
if (uploadTags.length > 0) {
|
|
127
|
+
console.log(`📤 Waiting for ${uploadTags.length} upload(s) for ${type}`)
|
|
128
|
+
|
|
129
|
+
// Clean the data object
|
|
130
|
+
processedData = cleanUploadTags(data)
|
|
131
|
+
|
|
132
|
+
// Wait for all uploads
|
|
133
|
+
try {
|
|
134
|
+
await Promise.all(uploadTags.map(async ({ path, hash }) => {
|
|
135
|
+
const uploadData = await fileTransfer.registerUpload(queryId, hash, hostId)
|
|
136
|
+
setValueAtPath(processedData, path, uploadData)
|
|
137
|
+
}))
|
|
138
|
+
} catch (uploadErr) {
|
|
139
|
+
console.error(`📤 Upload wait failed:`, uploadErr)
|
|
140
|
+
send(queryId, false, false, uploadErr)
|
|
141
|
+
if (typeof onFinish === 'function') {
|
|
142
|
+
onFinish(uploadErr, true)
|
|
143
|
+
}
|
|
144
|
+
return
|
|
145
|
+
}
|
|
146
|
+
}
|
|
147
|
+
}
|
|
148
|
+
|
|
34
149
|
const result = new Promise((resolve, reject) => {
|
|
35
150
|
try {
|
|
36
151
|
const controller = controllers[type]
|
|
@@ -38,7 +153,7 @@ module.exports = function receiveHandler(ape) {
|
|
|
38
153
|
throw `TypeError: "${type}" was not found`
|
|
39
154
|
}
|
|
40
155
|
checkReply(queryId, createdAt)
|
|
41
|
-
resolve(controller.call(that,
|
|
156
|
+
resolve(controller.call(that, processedData))
|
|
42
157
|
} catch (err) {
|
|
43
158
|
reject(err)
|
|
44
159
|
}
|
package/server/socket/send.js
CHANGED
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
const jss = require('../../utils/jss')
|
|
2
|
+
const { FileTransferManager } = require('../lib/fileTransfer')
|
|
2
3
|
|
|
3
4
|
function checkSocketState(socket) {
|
|
4
5
|
if (socket.readyState !== socket.OPEN) {
|
|
@@ -17,7 +18,88 @@ function checkSocketState(socket) {
|
|
|
17
18
|
} // END if
|
|
18
19
|
} // END checkSocketState
|
|
19
20
|
|
|
20
|
-
|
|
21
|
+
/**
 * Check if value is binary data (Buffer, ArrayBuffer, or typed array)
 * @param {*} value
 * @returns {boolean}
 */
function isBinaryData(value) {
  if (value == null) return false // covers both null and undefined
  if (Buffer.isBuffer(value)) return true
  if (value instanceof ArrayBuffer) return true
  return ArrayBuffer.isView(value) // typed arrays and DataView
}
|
|
30
|
+
|
|
31
|
+
/**
 * Detect content type from binary data.
 * Currently a stub that always reports the generic binary MIME type;
 * magic-number sniffing could be plugged in here without touching callers.
 * @param {Buffer|ArrayBuffer} data - unused for now
 * @returns {string} MIME type
 */
function detectContentType(data) {
  return 'application/octet-stream'
}
|
|
38
|
+
|
|
39
|
+
/**
 * Process data object, replacing binary values with L-tagged hashes
 * Returns { processedData, binaryEntries }
 *
 * Each binary leaf is registered as a download with the file-transfer
 * manager and replaced by `{ __ape_link__: hash }`; when the leaf sits
 * directly under an object key, that key is renamed to `key<!L>` and
 * mapped to the hash string itself.
 */
function processBinaryData(data, queryId, fileTransfer, hostId, path = '') {
  if (data === null || data === undefined) {
    return { processedData: data, binaryEntries: [] }
  }

  if (isBinaryData(data)) {
    // Binary leaf: register a temporary download endpoint, hand back hash.
    const hash = FileTransferManager.generateHash(queryId, path || 'root')
    fileTransfer.registerDownload(hash, data, detectContentType(data), hostId)
    return {
      processedData: { __ape_link__: hash },
      binaryEntries: [{ path, hash }]
    }
  }

  if (Array.isArray(data)) {
    const binaryEntries = []
    const processedData = data.map((item, i) => {
      const child = processBinaryData(
        item, queryId, fileTransfer, hostId, path ? `${path}.${i}` : String(i)
      )
      binaryEntries.push(...child.binaryEntries)
      return child.processedData
    })
    return { processedData, binaryEntries }
  }

  if (typeof data === 'object') {
    const binaryEntries = []
    const processedData = {}

    for (const key of Object.keys(data)) {
      const child = processBinaryData(
        data[key], queryId, fileTransfer, hostId, path ? `${path}.${key}` : key
      )

      // Direct binary child: rename the key with the <!L> link tag.
      if (child.binaryEntries.length > 0 && child.processedData?.__ape_link__) {
        processedData[`${key}<!L>`] = child.processedData.__ape_link__
      } else {
        processedData[key] = child.processedData
      }
      binaryEntries.push(...child.binaryEntries)
    }

    return { processedData, binaryEntries }
  }

  // Primitive value - return as-is
  return { processedData: data, binaryEntries: [] }
}
|
|
101
|
+
|
|
102
|
+
module.exports = function sendHandler({ socket, events, hostId, fileTransfer }) {
|
|
21
103
|
|
|
22
104
|
return function send(queryId, type, data, err) {
|
|
23
105
|
if (!type && !queryId) {
|
|
@@ -43,11 +125,24 @@ module.exports = function sendHandler({ socket, events, hostId }) {
|
|
|
43
125
|
}
|
|
44
126
|
return;
|
|
45
127
|
}
|
|
128
|
+
|
|
129
|
+
// Process binary data if fileTransfer is available
|
|
130
|
+
let processedData = data
|
|
131
|
+
if (fileTransfer && data && !err) {
|
|
132
|
+
const { processedData: processed, binaryEntries } = processBinaryData(
|
|
133
|
+
data, queryId || type, fileTransfer, hostId
|
|
134
|
+
)
|
|
135
|
+
processedData = processed
|
|
136
|
+
if (binaryEntries.length > 0) {
|
|
137
|
+
console.log(`📦 Registered ${binaryEntries.length} binary download(s) for ${queryId || type}`)
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
46
141
|
if (err) {
|
|
47
142
|
socket.send(jss.stringify({ err: err.message || err, type, queryId }))
|
|
48
143
|
if (typeof onFinish === 'function') onFinish(err, true)
|
|
49
144
|
} else {
|
|
50
|
-
socket.send(jss.stringify({ data, type, queryId }))
|
|
145
|
+
socket.send(jss.stringify({ data: processedData, type, queryId }))
|
|
51
146
|
if (typeof onFinish === 'function') onFinish(false, data)
|
|
52
147
|
}
|
|
53
148
|
|