@roboflow/inference-sdk 0.1.1 → 0.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -1
- package/dist/index.es.js +212 -136
- package/dist/index.js +1 -1
- package/dist/inference-api.d.ts +76 -1
- package/dist/inference-api.d.ts.map +1 -1
- package/dist/webrtc-data-parsing.test.d.ts +2 -0
- package/dist/webrtc-data-parsing.test.d.ts.map +1 -0
- package/dist/webrtc.d.ts +24 -0
- package/dist/webrtc.d.ts.map +1 -1
- package/package.json +7 -5
package/README.md
CHANGED
@@ -1,6 +1,6 @@
  # @roboflow/inference-sdk

- Lightweight client for Roboflow's hosted inference API with WebRTC streaming support for real-time computer vision in the browser.
+ Lightweight JS client for Roboflow's hosted inference API with WebRTC streaming support for real-time computer vision in the browser.

  ## Installation

package/dist/index.es.js
CHANGED
@@ -1,20 +1,20 @@
- var
- var
- var
- class
+ var T = Object.defineProperty;
+ var O = (a, e, t) => e in a ? T(a, e, { enumerable: !0, configurable: !0, writable: !0, value: t }) : a[e] = t;
+ var u = (a, e, t) => O(a, typeof e != "symbol" ? e + "" : e, t);
+ class _ {
  /**
  * @private
  * Use InferenceHTTPClient.init() instead
  */
  constructor(e, t = "https://serverless.roboflow.com") {
-
-
+ u(this, "apiKey");
+ u(this, "serverUrl");
  this.apiKey = e, this.serverUrl = t;
  }
  static init({ apiKey: e, serverUrl: t }) {
  if (!e)
  throw new Error("apiKey is required");
- return new
+ return new _(e, t);
  }
  /**
  * Initialize a WebRTC worker pipeline
@@ -44,51 +44,59 @@ class y {
  async initializeWebrtcWorker({
  offer: e,
  workflowSpec: t,
- workspaceName:
- workflowId:
- config:
+ workspaceName: n,
+ workflowId: r,
+ config: o = {}
  }) {
  if (!e || !e.sdp || !e.type)
  throw new Error("offer with sdp and type is required");
- const
- if (!
+ const i = !!t, s = !!(n && r);
+ if (!i && !s)
  throw new Error("Either workflowSpec OR (workspaceName + workflowId) is required");
- if (
+ if (i && s)
  throw new Error("Provide either workflowSpec OR (workspaceName + workflowId), not both");
  const {
  imageInputName: d = "image",
- streamOutputNames:
- dataOutputNames:
- threadPoolWorkers:
-
+ streamOutputNames: c = [],
+ dataOutputNames: l = ["string"],
+ threadPoolWorkers: p = 4,
+ workflowsParameters: y = {},
+ iceServers: w,
+ processingTimeout: k,
+ requestedPlan: S,
+ requestedRegion: f
+ } = o, g = {
  type: "WorkflowConfiguration",
  image_input_name: d,
-
+ workflows_parameters: y,
+ workflows_thread_pool_workers: p,
  cancel_thread_pool_tasks_on_exit: !0,
  video_metadata_input_name: "video_metadata"
  };
-
- const
- workflow_configuration:
+ i ? g.workflow_specification = t : (g.workspace_name = n, g.workflow_id = r);
+ const m = {
+ workflow_configuration: g,
  api_key: this.apiKey,
  webrtc_realtime_processing: !0,
  webrtc_offer: {
  sdp: e.sdp,
  type: e.type
  },
-
- stream_output:
- data_output:
- }
+ webrtc_config: w ? { iceServers: w } : null,
+ stream_output: c,
+ data_output: l
+ };
+ k !== void 0 && (m.processing_timeout = k), S !== void 0 && (m.requested_plan = S), f !== void 0 && (m.requested_region = f), console.trace("payload", m);
+ const h = await fetch(`${this.serverUrl}/initialise_webrtc_worker`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
- body: JSON.stringify(
+ body: JSON.stringify(m)
  });
- if (!
- const
- throw new Error(`initialise_webrtc_worker failed (${
+ if (!h.ok) {
+ const E = await h.text().catch(() => "");
+ throw new Error(`initialise_webrtc_worker failed (${h.status}): ${E}`);
  }
- return await
+ return await h.json();
  }
  async terminatePipeline({ pipelineId: e }) {
  if (!e)
@@ -102,7 +110,7 @@ class y {
  );
  }
  }
- const
+ const K = {
  /**
  * Create a connector that uses API key directly
  *
@@ -121,25 +129,33 @@ const N = {
  * const answer = await connector.connectWrtc(offer, wrtcParams);
  * ```
  */
- withApiKey(
+ withApiKey(a, e = {}) {
  const { serverUrl: t } = e;
  return typeof window < "u" && console.warn(
  "[Security Warning] Using API key directly in browser will expose it. Use connectors.withProxyUrl() for production. See: https://docs.roboflow.com/api-reference/authentication#securing-your-api-key"
  ), {
- connectWrtc: async (
-
-
-
-
-
-
-
-
-
-
-
+ connectWrtc: async (n, r) => {
+ const o = _.init({ apiKey: a, serverUrl: t });
+ return console.log("wrtcParams", r), await o.initializeWebrtcWorker({
+ offer: n,
+ workflowSpec: r.workflowSpec,
+ workspaceName: r.workspaceName,
+ workflowId: r.workflowId,
+ config: {
+ imageInputName: r.imageInputName,
+ streamOutputNames: r.streamOutputNames,
+ dataOutputNames: r.dataOutputNames,
+ threadPoolWorkers: r.threadPoolWorkers,
+ workflowsParameters: r.workflowsParameters,
+ iceServers: r.iceServers,
+ processingTimeout: r.processingTimeout,
+ requestedPlan: r.requestedPlan,
+ requestedRegion: r.requestedRegion
+ }
+ });
+ },
  // Store apiKey for cleanup
- _apiKey:
+ _apiKey: a,
  _serverUrl: t
  };
  },
@@ -176,141 +192,196 @@ const N = {
  * imageInputName: wrtcParams.imageInputName,
  * streamOutputNames: wrtcParams.streamOutputNames,
  * dataOutputNames: wrtcParams.dataOutputNames,
- * threadPoolWorkers: wrtcParams.threadPoolWorkers
+ * threadPoolWorkers: wrtcParams.threadPoolWorkers,
+ * workflowsParameters: wrtcParams.workflowsParameters,
+ * iceServers: wrtcParams.iceServers,
+ * processingTimeout: wrtcParams.processingTimeout,
+ * requestedPlan: wrtcParams.requestedPlan,
+ * requestedRegion: wrtcParams.requestedRegion
  * }
  * });
  * res.json(answer);
  * });
  * ```
  */
- withProxyUrl(
+ withProxyUrl(a, e = {}) {
  return {
- connectWrtc: async (t,
- const
+ connectWrtc: async (t, n) => {
+ const r = await fetch(a, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
  offer: t,
- wrtcParams:
+ wrtcParams: n
  })
  });
- if (!
- const
- throw new Error(`Proxy request failed (${
+ if (!r.ok) {
+ const o = await r.text().catch(() => "");
+ throw new Error(`Proxy request failed (${r.status}): ${o}`);
  }
- return await
+ return await r.json();
  }
  };
  }
  };
- async function
+ async function W(a = { video: !0 }) {
  try {
- console.log("[RFStreams] requesting with",
- const e = await navigator.mediaDevices.getUserMedia(
+ console.log("[RFStreams] requesting with", a);
+ const e = await navigator.mediaDevices.getUserMedia(a);
  return console.log("[RFStreams] got stream", e.getVideoTracks().map((t) => ({ id: t.id, label: t.label }))), e;
  } catch (e) {
  console.warn("[RFStreams] failed, falling back", e);
  const t = await navigator.mediaDevices.getUserMedia({ video: !0, audio: !1 });
- return console.log("[RFStreams] fallback stream", t.getVideoTracks().map((
+ return console.log("[RFStreams] fallback stream", t.getVideoTracks().map((n) => ({ id: n.id, label: n.label }))), t;
  }
  }
- function
-
+ function v(a) {
+ a && (a.getTracks().forEach((e) => e.stop()), console.log("[RFStreams] Stream stopped"));
  }
- const
+ const j = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
- stopStream:
- useCamera:
- }, Symbol.toStringTag, { value: "Module" }));
-
-
+ stopStream: v,
+ useCamera: W
+ }, Symbol.toStringTag, { value: "Module" })), F = 12;
+ class b {
+ constructor() {
+ u(this, "pendingFrames", /* @__PURE__ */ new Map());
+ }
+ /**
+ * Process an incoming chunk and return the complete message if all chunks received
+ */
+ processChunk(e, t, n, r) {
+ if (n === 1)
+ return r;
+ this.pendingFrames.has(e) || this.pendingFrames.set(e, {
+ chunks: /* @__PURE__ */ new Map(),
+ totalChunks: n
+ });
+ const o = this.pendingFrames.get(e);
+ if (o.chunks.set(t, r), o.chunks.size === n) {
+ const i = Array.from(o.chunks.values()).reduce((c, l) => c + l.length, 0), s = new Uint8Array(i);
+ let d = 0;
+ for (let c = 0; c < n; c++) {
+ const l = o.chunks.get(c);
+ s.set(l, d), d += l.length;
+ }
+ return this.pendingFrames.delete(e), s;
+ }
+ return null;
+ }
+ /**
+ * Clear all pending frames (for cleanup)
+ */
+ clear() {
+ this.pendingFrames.clear();
+ }
+ }
+ function C(a) {
+ const e = new DataView(a), t = e.getUint32(0, !0), n = e.getUint32(4, !0), r = e.getUint32(8, !0), o = new Uint8Array(a, F);
+ return { frameId: t, chunkIndex: n, totalChunks: r, payload: o };
+ }
+ async function P(a, e = 6e3) {
+ if (a.iceGatheringState === "complete") return;
  let t = !1;
- const
-
+ const n = (r) => {
+ r.candidate && r.candidate.type === "srflx" && (t = !0);
  };
-
+ a.addEventListener("icecandidate", n);
  try {
  await Promise.race([
- new Promise((
- const
-
+ new Promise((r) => {
+ const o = () => {
+ a.iceGatheringState === "complete" && (a.removeEventListener("icegatheringstatechange", o), r());
  };
-
+ a.addEventListener("icegatheringstatechange", o);
  }),
- new Promise((
+ new Promise((r, o) => {
  setTimeout(() => {
- t ?
+ t ? r() : (console.error("[ICE] timeout with NO srflx candidate! Connection may fail."), o(new Error("ICE gathering timeout without srflx candidate")));
  }, e);
  })
  ]);
  } finally {
-
+ a.removeEventListener("icecandidate", n);
  }
  }
- function
+ function I(a) {
  return new Promise((e) => {
-
+ a.addEventListener("track", (t) => {
  t.streams && t.streams[0] && e(t.streams[0]);
  });
  });
  }
-
-
-
+ const D = [
+ { urls: ["stun:stun.l.google.com:19302"] }
+ ];
+ async function x(a, e) {
+ const t = e ?? D, n = new RTCPeerConnection({
+ iceServers: t
  });
  try {
-
- } catch (
- console.warn("[RFWebRTC] Could not add transceiver:",
+ n.addTransceiver("video", { direction: "recvonly" });
+ } catch (s) {
+ console.warn("[RFWebRTC] Could not add transceiver:", s);
  }
-
+ a.getVideoTracks().forEach((s) => {
  try {
-
+ s.contentHint = "detail";
  } catch {
  }
-
+ n.addTrack(s, a);
  });
- const
+ const r = I(n), o = n.createDataChannel("roboflow-control", {
  ordered: !0
- }), i = await
- return await
- pc:
- offer:
- remoteStreamPromise:
- dataChannel:
+ }), i = await n.createOffer();
+ return await n.setLocalDescription(i), await P(n), {
+ pc: n,
+ offer: n.localDescription,
+ remoteStreamPromise: r,
+ dataChannel: o
  };
  }
- async function
- const e =
+ async function N(a) {
+ const e = a.getSenders().find((n) => n.track && n.track.kind === "video");
  if (!e) return;
  const t = e.getParameters();
  t.encodings = t.encodings || [{}], t.encodings[0].scaleResolutionDownBy = 1;
  try {
  await e.setParameters(t);
- } catch (
- console.warn("[RFWebRTC] Failed to set encoding parameters:",
+ } catch (n) {
+ console.warn("[RFWebRTC] Failed to set encoding parameters:", n);
  }
  }
- class
+ class R {
  /** @private */
- constructor(e, t,
-
-
-
-
-
-
- this
+ constructor(e, t, n, r, o, i, s) {
+ u(this, "pc");
+ u(this, "_localStream");
+ u(this, "remoteStreamPromise");
+ u(this, "pipelineId");
+ u(this, "apiKey");
+ u(this, "dataChannel");
+ u(this, "reassembler");
+ this.pc = e, this._localStream = t, this.remoteStreamPromise = n, this.pipelineId = r, this.apiKey = o, this.dataChannel = i, this.reassembler = new b(), this.dataChannel.binaryType = "arraybuffer", s && (this.dataChannel.addEventListener("open", () => {
  }), this.dataChannel.addEventListener("message", (d) => {
  try {
-
-
-
-
+ if (d.data instanceof ArrayBuffer) {
+ const { frameId: c, chunkIndex: l, totalChunks: p, payload: y } = C(d.data), w = this.reassembler.processChunk(c, l, p, y);
+ if (w) {
+ const S = new TextDecoder("utf-8").decode(w), f = JSON.parse(S);
+ s(f);
+ }
+ } else {
+ const c = JSON.parse(d.data);
+ s(c);
+ }
+ } catch (c) {
+ console.error("[RFWebRTC] Failed to parse data channel message:", c);
  }
  }), this.dataChannel.addEventListener("error", (d) => {
  console.error("[RFWebRTC] Data channel error:", d);
  }), this.dataChannel.addEventListener("close", () => {
+ this.reassembler.clear();
  }));
  }
  /**
@@ -359,7 +430,7 @@ class k {
  * ```
  */
  async cleanup() {
- this.pipelineId && this.apiKey && await
+ this.reassembler.clear(), this.pipelineId && this.apiKey && await _.init({ apiKey: this.apiKey }).terminatePipeline({ pipelineId: this.pipelineId }), this.pc && this.pc.connectionState !== "closed" && this.pc.close(), v(this._localStream);
  }
  /**
  * Reconfigure pipeline outputs at runtime
@@ -414,42 +485,47 @@ class k {
  }
  }
  }
- async function
- source:
+ async function U({
+ source: a,
  connector: e,
  wrtcParams: t,
- onData:
- options:
+ onData: n,
+ options: r = {}
  }) {
  var f;
  if (!e || typeof e.connectWrtc != "function")
  throw new Error("connector must have a connectWrtc method");
- const
+ const o = a, { pc: i, offer: s, remoteStreamPromise: d, dataChannel: c } = await x(
+ o,
+ t.iceServers
+ ), l = await e.connectWrtc(
  { sdp: s.sdp, type: s.type },
  t
- ),
- if (!(
- throw console.error("[RFWebRTC] Invalid answer from server:",
- const
- await
- const
-
+ ), p = { sdp: l.sdp, type: l.type };
+ if (!(p != null && p.sdp) || !(p != null && p.type))
+ throw console.error("[RFWebRTC] Invalid answer from server:", l), new Error("connector.connectWrtc must return answer with sdp and type");
+ const y = ((f = l == null ? void 0 : l.context) == null ? void 0 : f.pipeline_id) || null;
+ await i.setRemoteDescription(p), await new Promise((g, m) => {
+ const h = () => {
+ i.connectionState === "connected" ? (i.removeEventListener("connectionstatechange", h), g()) : i.connectionState === "failed" && (i.removeEventListener("connectionstatechange", h), m(new Error("WebRTC connection failed")));
  };
-
-
+ i.addEventListener("connectionstatechange", h), h(), setTimeout(() => {
+ i.removeEventListener("connectionstatechange", h), m(new Error("WebRTC connection timeout after 30s"));
  }, 3e4);
- }),
- const
- return new
+ }), r.disableInputStreamDownscaling !== !1 && await N(i);
+ const k = e._apiKey || null;
+ return new R(i, o, d, y, k, c, n);
  }
- const
+ const $ = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
-
-
+ ChunkReassembler: b,
+ RFWebRTCConnection: R,
+ parseBinaryHeader: C,
+ useStream: U
  }, Symbol.toStringTag, { value: "Module" }));
  export {
-
-
-
-
+ _ as InferenceHTTPClient,
+ K as connectors,
+ j as streams,
+ $ as webrtc
  };
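The bundle changes above extend `webrtc.useStream` and the connectors with the new optional parameters (workflowsParameters, iceServers, processingTimeout, requestedPlan, requestedRegion) and route data-channel messages through the new chunk reassembler before `onData` is called. A minimal consumer-side sketch using the entry points shown in this diff; the proxy endpoint, workspace and workflow names, and parameter values are placeholders:

```ts
import { connectors, streams, webrtc } from "@roboflow/inference-sdk";

async function startRealtimeInference() {
  // Grab a local camera stream (useCamera falls back to { video: true } on failure).
  const source = await streams.useCamera({ video: true });

  // The proxy connector keeps the Roboflow API key server-side; the URL is a placeholder.
  const connector = connectors.withProxyUrl("/api/roboflow-webrtc");

  const connection = await webrtc.useStream({
    source,
    connector,
    wrtcParams: {
      workspaceName: "my-workspace", // placeholder
      workflowId: "my-workflow",     // placeholder
      // Options below are new in 0.1.3 and all optional; values are illustrative.
      workflowsParameters: { confidence: 0.5 },
      iceServers: [{ urls: ["stun:stun.l.google.com:19302"] }],
      processingTimeout: 600,
      requestedPlan: "webrtc-gpu-small",
      requestedRegion: "us",
    },
    // Receives parsed JSON; chunked binary frames are reassembled by the SDK first.
    onData: (data) => console.log("workflow output", data),
  });

  // Render the processed stream coming back from the worker.
  const video = document.querySelector("video");
  if (video) video.srcObject = await connection.remoteStream();

  return connection; // call connection.cleanup() later to terminate the pipeline
}
```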
package/dist/index.js
CHANGED
@@ -1 +1 @@
- (function(
+
(function(l,s){typeof exports=="object"&&typeof module<"u"?s(exports):typeof define=="function"&&define.amd?define(["exports"],s):(l=typeof globalThis<"u"?globalThis:l||self,s(l.RoboflowClient={}))})(this,function(l){"use strict";var K=Object.defineProperty;var j=(l,s,y)=>s in l?K(l,s,{enumerable:!0,configurable:!0,writable:!0,value:y}):l[s]=y;var f=(l,s,y)=>j(l,typeof s!="symbol"?s+"":s,y);class s{constructor(e,t="https://serverless.roboflow.com"){f(this,"apiKey");f(this,"serverUrl");this.apiKey=e,this.serverUrl=t}static init({apiKey:e,serverUrl:t}){if(!e)throw new Error("apiKey is required");return new s(e,t)}async initializeWebrtcWorker({offer:e,workflowSpec:t,workspaceName:r,workflowId:n,config:a={}}){if(!e||!e.sdp||!e.type)throw new Error("offer with sdp and type is required");const i=!!t,c=!!(r&&n);if(!i&&!c)throw new Error("Either workflowSpec OR (workspaceName + workflowId) is required");if(i&&c)throw new Error("Provide either workflowSpec OR (workspaceName + workflowId), not both");const{imageInputName:p="image",streamOutputNames:d=[],dataOutputNames:u=["string"],threadPoolWorkers:h=4,workflowsParameters:_={},iceServers:S,processingTimeout:b,requestedPlan:v,requestedRegion:g}=a,k={type:"WorkflowConfiguration",image_input_name:p,workflows_parameters:_,workflows_thread_pool_workers:h,cancel_thread_pool_tasks_on_exit:!0,video_metadata_input_name:"video_metadata"};i?k.workflow_specification=t:(k.workspace_name=r,k.workflow_id=n);const w={workflow_configuration:k,api_key:this.apiKey,webrtc_realtime_processing:!0,webrtc_offer:{sdp:e.sdp,type:e.type},webrtc_config:S?{iceServers:S}:null,stream_output:d,data_output:u};b!==void 0&&(w.processing_timeout=b),v!==void 0&&(w.requested_plan=v),g!==void 0&&(w.requested_region=g),console.trace("payload",w);const m=await fetch(`${this.serverUrl}/initialise_webrtc_worker`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(w)});if(!m.ok){const q=await m.text().catch(()=>"");throw new Error(`initialise_webrtc_worker failed (${m.status}): ${q}`)}return await m.json()}async terminatePipeline({pipelineId:e}){if(!e)throw new Error("pipelineId is required");await fetch(`${this.serverUrl}/inference_pipelines/${e}/terminate?api_key=${this.apiKey}`,{method:"POST",headers:{"Content-Type":"application/json"}})}}const y={withApiKey(o,e={}){const{serverUrl:t}=e;return typeof window<"u"&&console.warn("[Security Warning] Using API key directly in browser will expose it. Use connectors.withProxyUrl() for production. 
See: https://docs.roboflow.com/api-reference/authentication#securing-your-api-key"),{connectWrtc:async(r,n)=>{const a=s.init({apiKey:o,serverUrl:t});return console.log("wrtcParams",n),await a.initializeWebrtcWorker({offer:r,workflowSpec:n.workflowSpec,workspaceName:n.workspaceName,workflowId:n.workflowId,config:{imageInputName:n.imageInputName,streamOutputNames:n.streamOutputNames,dataOutputNames:n.dataOutputNames,threadPoolWorkers:n.threadPoolWorkers,workflowsParameters:n.workflowsParameters,iceServers:n.iceServers,processingTimeout:n.processingTimeout,requestedPlan:n.requestedPlan,requestedRegion:n.requestedRegion}})},_apiKey:o,_serverUrl:t}},withProxyUrl(o,e={}){return{connectWrtc:async(t,r)=>{const n=await fetch(o,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({offer:t,wrtcParams:r})});if(!n.ok){const a=await n.text().catch(()=>"");throw new Error(`Proxy request failed (${n.status}): ${a}`)}return await n.json()}}}};async function O(o={video:!0}){try{console.log("[RFStreams] requesting with",o);const e=await navigator.mediaDevices.getUserMedia(o);return console.log("[RFStreams] got stream",e.getVideoTracks().map(t=>({id:t.id,label:t.label}))),e}catch(e){console.warn("[RFStreams] failed, falling back",e);const t=await navigator.mediaDevices.getUserMedia({video:!0,audio:!1});return console.log("[RFStreams] fallback stream",t.getVideoTracks().map(r=>({id:r.id,label:r.label}))),t}}function C(o){o&&(o.getTracks().forEach(e=>e.stop()),console.log("[RFStreams] Stream stopped"))}const W=Object.freeze(Object.defineProperty({__proto__:null,stopStream:C,useCamera:O},Symbol.toStringTag,{value:"Module"})),P=12;class T{constructor(){f(this,"pendingFrames",new Map)}processChunk(e,t,r,n){if(r===1)return n;this.pendingFrames.has(e)||this.pendingFrames.set(e,{chunks:new Map,totalChunks:r});const a=this.pendingFrames.get(e);if(a.chunks.set(t,n),a.chunks.size===r){const i=Array.from(a.chunks.values()).reduce((d,u)=>d+u.length,0),c=new Uint8Array(i);let p=0;for(let d=0;d<r;d++){const u=a.chunks.get(d);c.set(u,p),p+=u.length}return this.pendingFrames.delete(e),c}return null}clear(){this.pendingFrames.clear()}}function R(o){const e=new DataView(o),t=e.getUint32(0,!0),r=e.getUint32(4,!0),n=e.getUint32(8,!0),a=new Uint8Array(o,P);return{frameId:t,chunkIndex:r,totalChunks:n,payload:a}}async function F(o,e=6e3){if(o.iceGatheringState==="complete")return;let t=!1;const r=n=>{n.candidate&&n.candidate.type==="srflx"&&(t=!0)};o.addEventListener("icecandidate",r);try{await Promise.race([new Promise(n=>{const a=()=>{o.iceGatheringState==="complete"&&(o.removeEventListener("icegatheringstatechange",a),n())};o.addEventListener("icegatheringstatechange",a)}),new Promise((n,a)=>{setTimeout(()=>{t?n():(console.error("[ICE] timeout with NO srflx candidate! 
Connection may fail."),a(new Error("ICE gathering timeout without srflx candidate")))},e)})])}finally{o.removeEventListener("icecandidate",r)}}function I(o){return new Promise(e=>{o.addEventListener("track",t=>{t.streams&&t.streams[0]&&e(t.streams[0])})})}const D=[{urls:["stun:stun.l.google.com:19302"]}];async function N(o,e){const t=e??D,r=new RTCPeerConnection({iceServers:t});try{r.addTransceiver("video",{direction:"recvonly"})}catch(c){console.warn("[RFWebRTC] Could not add transceiver:",c)}o.getVideoTracks().forEach(c=>{try{c.contentHint="detail"}catch{}r.addTrack(c,o)});const n=I(r),a=r.createDataChannel("roboflow-control",{ordered:!0}),i=await r.createOffer();return await r.setLocalDescription(i),await F(r),{pc:r,offer:r.localDescription,remoteStreamPromise:n,dataChannel:a}}async function U(o){const e=o.getSenders().find(r=>r.track&&r.track.kind==="video");if(!e)return;const t=e.getParameters();t.encodings=t.encodings||[{}],t.encodings[0].scaleResolutionDownBy=1;try{await e.setParameters(t)}catch(r){console.warn("[RFWebRTC] Failed to set encoding parameters:",r)}}class E{constructor(e,t,r,n,a,i,c){f(this,"pc");f(this,"_localStream");f(this,"remoteStreamPromise");f(this,"pipelineId");f(this,"apiKey");f(this,"dataChannel");f(this,"reassembler");this.pc=e,this._localStream=t,this.remoteStreamPromise=r,this.pipelineId=n,this.apiKey=a,this.dataChannel=i,this.reassembler=new T,this.dataChannel.binaryType="arraybuffer",c&&(this.dataChannel.addEventListener("open",()=>{}),this.dataChannel.addEventListener("message",p=>{try{if(p.data instanceof ArrayBuffer){const{frameId:d,chunkIndex:u,totalChunks:h,payload:_}=R(p.data),S=this.reassembler.processChunk(d,u,h,_);if(S){const v=new TextDecoder("utf-8").decode(S),g=JSON.parse(v);c(g)}}else{const d=JSON.parse(p.data);c(d)}}catch(d){console.error("[RFWebRTC] Failed to parse data channel message:",d)}}),this.dataChannel.addEventListener("error",p=>{console.error("[RFWebRTC] Data channel error:",p)}),this.dataChannel.addEventListener("close",()=>{this.reassembler.clear()}))}async remoteStream(){return await this.remoteStreamPromise}localStream(){return this._localStream}async cleanup(){this.reassembler.clear(),this.pipelineId&&this.apiKey&&await s.init({apiKey:this.apiKey}).terminatePipeline({pipelineId:this.pipelineId}),this.pc&&this.pc.connectionState!=="closed"&&this.pc.close(),C(this._localStream)}reconfigureOutputs(e){const t={};e.streamOutput!==void 0&&(t.stream_output=e.streamOutput),e.dataOutput!==void 0&&(t.data_output=e.dataOutput),this.sendData(t)}sendData(e){if(this.dataChannel.readyState!=="open"){console.warn("[RFWebRTC] Data channel is not open. 
Current state:",this.dataChannel.readyState);return}try{const t=typeof e=="string"?e:JSON.stringify(e);this.dataChannel.send(t)}catch(t){console.error("[RFWebRTC] Failed to send data:",t)}}}async function L({source:o,connector:e,wrtcParams:t,onData:r,options:n={}}){var g;if(!e||typeof e.connectWrtc!="function")throw new Error("connector must have a connectWrtc method");const a=o,{pc:i,offer:c,remoteStreamPromise:p,dataChannel:d}=await N(a,t.iceServers),u=await e.connectWrtc({sdp:c.sdp,type:c.type},t),h={sdp:u.sdp,type:u.type};if(!(h!=null&&h.sdp)||!(h!=null&&h.type))throw console.error("[RFWebRTC] Invalid answer from server:",u),new Error("connector.connectWrtc must return answer with sdp and type");const _=((g=u==null?void 0:u.context)==null?void 0:g.pipeline_id)||null;await i.setRemoteDescription(h),await new Promise((k,w)=>{const m=()=>{i.connectionState==="connected"?(i.removeEventListener("connectionstatechange",m),k()):i.connectionState==="failed"&&(i.removeEventListener("connectionstatechange",m),w(new Error("WebRTC connection failed")))};i.addEventListener("connectionstatechange",m),m(),setTimeout(()=>{i.removeEventListener("connectionstatechange",m),w(new Error("WebRTC connection timeout after 30s"))},3e4)}),n.disableInputStreamDownscaling!==!1&&await U(i);const b=e._apiKey||null;return new E(i,a,p,_,b,d,r)}const x=Object.freeze(Object.defineProperty({__proto__:null,ChunkReassembler:T,RFWebRTCConnection:E,parseBinaryHeader:R,useStream:L},Symbol.toStringTag,{value:"Module"}));l.InferenceHTTPClient=s,l.connectors=y,l.streams=W,l.webrtc=x,Object.defineProperty(l,Symbol.toStringTag,{value:"Module"})});
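The UMD wrapper above registers the same exports on a `RoboflowClient` global when neither CommonJS nor AMD is available. A small sketch of consuming that global without a bundler; the script path is a placeholder:

```ts
// Loaded via a plain <script> tag (path is a placeholder):
//   <script src="/vendor/@roboflow/inference-sdk/dist/index.js"></script>
// The UMD factory attaches its exports to `globalThis.RoboflowClient`.
declare const RoboflowClient: typeof import("@roboflow/inference-sdk");

const { connectors, streams, webrtc } = RoboflowClient;

// From here the API is the same as the ES module build, e.g.:
const connector = connectors.withProxyUrl("/api/roboflow-webrtc"); // placeholder proxy endpoint
```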
package/dist/inference-api.d.ts
CHANGED
@@ -3,6 +3,40 @@ export interface WebRTCWorkerConfig {
  streamOutputNames?: string[];
  dataOutputNames?: string[];
  threadPoolWorkers?: number;
+ /**
+ * Workflow parameters to pass to the workflow execution
+ */
+ workflowsParameters?: Record<string, any>;
+ /**
+ * ICE servers for WebRTC connections (used for both client and server)
+ */
+ iceServers?: RTCIceServerConfig[];
+ /**
+ * Processing timeout in seconds (serverless only)
+ * @default 600
+ */
+ processingTimeout?: number;
+ /**
+ * Requested compute plan (serverless only)
+ * @example "webrtc-gpu-small"
+ */
+ requestedPlan?: string;
+ /**
+ * Requested region for processing (serverless only)
+ * @example "us"
+ */
+ requestedRegion?: string;
+ }
+ /**
+ * ICE server configuration for WebRTC connections
+ *
+ * Use this to configure custom STUN/TURN servers for users behind
+ * symmetric NAT or restrictive firewalls.
+ */
+ export interface RTCIceServerConfig {
+ urls: string[];
+ username?: string;
+ credential?: string;
  }
  export interface WebRTCOffer {
  sdp: string;
@@ -26,6 +60,42 @@ export interface WebRTCParams {
  streamOutputNames?: string[];
  dataOutputNames?: string[];
  threadPoolWorkers?: number;
+ /**
+ * Workflow parameters to pass to the workflow execution
+ */
+ workflowsParameters?: Record<string, any>;
+ /**
+ * ICE servers for WebRTC connections (used for both client and server)
+ *
+ * Use this to specify custom STUN/TURN servers for users behind
+ * symmetric NAT or restrictive firewalls. The same configuration is
+ * used for both the client-side RTCPeerConnection and sent to the
+ * server via webrtc_config.
+ *
+ * @example
+ * ```typescript
+ * iceServers: [
+ *   { urls: ["stun:stun.l.google.com:19302"] },
+ *   { urls: ["turn:turn.example.com:3478"], username: "user", credential: "pass" }
+ * ]
+ * ```
+ */
+ iceServers?: RTCIceServerConfig[];
+ /**
+ * Processing timeout in seconds (serverless only)
+ * @default 600
+ */
+ processingTimeout?: number;
+ /**
+ * Requested compute plan (serverless only)
+ * @example "webrtc-gpu-small"
+ */
+ requestedPlan?: string;
+ /**
+ * Requested region for processing (serverless only)
+ * @example "us"
+ */
+ requestedRegion?: string;
  }
  export interface Connector {
  connectWrtc(offer: WebRTCOffer, wrtcParams: WebRTCParams): Promise<WebRTCWorkerResponse>;
@@ -138,7 +208,12 @@ export declare const connectors: {
  * imageInputName: wrtcParams.imageInputName,
  * streamOutputNames: wrtcParams.streamOutputNames,
  * dataOutputNames: wrtcParams.dataOutputNames,
- * threadPoolWorkers: wrtcParams.threadPoolWorkers
+ * threadPoolWorkers: wrtcParams.threadPoolWorkers,
+ * workflowsParameters: wrtcParams.workflowsParameters,
+ * iceServers: wrtcParams.iceServers,
+ * processingTimeout: wrtcParams.processingTimeout,
+ * requestedPlan: wrtcParams.requestedPlan,
+ * requestedRegion: wrtcParams.requestedRegion
  * }
  * });
  * res.json(answer);
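The new optional fields on `WebRTCWorkerConfig` map onto the `workflows_parameters`, `webrtc_config`, `processing_timeout`, `requested_plan` and `requested_region` request fields built by `initializeWebrtcWorker`. A hedged sketch of a server-side call exercising them, assuming the client class is imported from the package root; the workspace, workflow, TURN server and values are placeholders:

```ts
import { InferenceHTTPClient } from "@roboflow/inference-sdk";

// Intended to run server-side (e.g. inside the endpoint behind connectors.withProxyUrl)
// so the API key never reaches the browser. Names and servers below are placeholders.
export async function createWebrtcWorker(apiKey: string, offer: { sdp: string; type: string }) {
  const client = InferenceHTTPClient.init({ apiKey });

  return client.initializeWebrtcWorker({
    offer,
    workspaceName: "my-workspace",
    workflowId: "my-workflow",
    config: {
      imageInputName: "image",
      dataOutputNames: ["string"],
      // Fields added in 0.1.3 (all optional; values are illustrative):
      workflowsParameters: { confidence: 0.4 },
      iceServers: [
        { urls: ["stun:stun.l.google.com:19302"] },
        { urls: ["turn:turn.example.com:3478"], username: "user", credential: "pass" },
      ],
      processingTimeout: 600,            // seconds (serverless only)
      requestedPlan: "webrtc-gpu-small", // serverless only
      requestedRegion: "us",             // serverless only
    },
  });
}
```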
package/dist/inference-api.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"inference-api.d.ts","sourceRoot":"","sources":["../src/inference-api.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,kBAAkB;IACjC,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;
+
{"version":3,"file":"inference-api.d.ts","sourceRoot":"","sources":["../src/inference-api.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,kBAAkB;IACjC,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC1C;;OAEG;IACH,UAAU,CAAC,EAAE,kBAAkB,EAAE,CAAC;IAClC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED;;;;;GAKG;AACH,MAAM,WAAW,kBAAkB;IACjC,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,WAAW;IAC1B,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAE/C,MAAM,WAAW,oBAAoB;IACnC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE;QACR,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;QAC1B,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;KAC5B,CAAC;CACH;AAED,MAAM,WAAW,YAAY;IAC3B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;OAEG;IACH,mBAAmB,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC1C;;;;;;;;;;;;;;;OAeG;IACH,UAAU,CAAC,EAAE,kBAAkB,EAAE,CAAC;IAClC;;;OAGG;IACH,iBAAiB,CAAC,EAAE,MAAM,CAAC;IAC3B;;;OAGG;IACH,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB;;;OAGG;IACH,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B;AAED,MAAM,WAAW,SAAS;IACxB,WAAW,CAAC,KAAK,EAAE,WAAW,EAAE,UAAU,EAAE,YAAY,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAAC;IACzF,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,mBAAmB;IAC9B,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,SAAS,CAAS;IAE1B;;;OAGG;IACH,OAAO;IAKP,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,mBAAmB;IAO/F;;;;;;;;;;;;;;;;;;;;;;;;OAwBG;IACG,sBAAsB,CAAC,EAC3B,KAAK,EACL,YAAY,EACZ,aAAa,EACb,UAAU,EACV,MAAW,EACZ,EAAE;QACD,KAAK,EAAE,WAAW,CAAC;QACnB,YAAY,CAAC,EAAE,YAAY,CAAC;QAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,MAAM,CAAC,EAAE,kBAAkB,CAAC;KAC7B,GAAG,OAAO,CAAC,oBAAoB,CAAC;IAqF3B,iBAAiB,CAAC,EAAE,UAAU,EAAE,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CAa/E;AAED;;GAEG;AACH,eAAO,MAAM,UAAU;IACrB;;;;;;;;;;;;;;;;;OAiBG;uBACgB,MAAM,YAAW;QAAE,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAAQ,SAAS;IA2C3E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA4CG;2BACoB,MAAM,YAAW,OAAO,MAAM,EAAE,GAAG,CAAC,GAAQ,SAAS;CAqB7E,CAAC"}
package/dist/webrtc-data-parsing.test.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"webrtc-data-parsing.test.d.ts","sourceRoot":"","sources":["../src/webrtc-data-parsing.test.ts"],"names":[],"mappings":""}
package/dist/webrtc.d.ts
CHANGED
@@ -1,4 +1,27 @@
  import { Connector, WebRTCParams } from "./inference-api";
+ /**
+ * Reassembles chunked binary messages from the datachannel
+ */
+ export declare class ChunkReassembler {
+ private pendingFrames;
+ /**
+ * Process an incoming chunk and return the complete message if all chunks received
+ */
+ processChunk(frameId: number, chunkIndex: number, totalChunks: number, payload: Uint8Array): Uint8Array | null;
+ /**
+ * Clear all pending frames (for cleanup)
+ */
+ clear(): void;
+ }
+ /**
+ * Parse the binary header from a datachannel message
+ */
+ export declare function parseBinaryHeader(buffer: ArrayBuffer): {
+ frameId: number;
+ chunkIndex: number;
+ totalChunks: number;
+ payload: Uint8Array;
+ };
  export interface UseStreamOptions {
  disableInputStreamDownscaling?: boolean;
  }
@@ -21,6 +44,7 @@ export declare class RFWebRTCConnection {
  private pipelineId;
  private apiKey;
  private dataChannel;
+ private reassembler;
  /** @private */
  constructor(pc: RTCPeerConnection, localStream: MediaStream, remoteStreamPromise: Promise<MediaStream>, pipelineId: string | null, apiKey: string | null, dataChannel: RTCDataChannel, onData?: (data: any) => void);
  /**
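These declarations cover the chunked data-channel protocol added in this release. Judging from the bundled implementation, each binary message carries a 12-byte header of three little-endian uint32 values (frameId, chunkIndex, totalChunks) followed by the payload, and a completed frame is UTF-8 encoded JSON. `RFWebRTCConnection` already performs this decoding when `onData` is supplied; the sketch below is only for custom receivers or debugging:

```ts
import { webrtc } from "@roboflow/inference-sdk";

// One reassembler per data channel; RFWebRTCConnection owns its own instance internally.
const reassembler = new webrtc.ChunkReassembler();

function handleBinaryMessage(buffer: ArrayBuffer): unknown {
  // Header layout (from dist/index.es.js): bytes 0-3 frameId, 4-7 chunkIndex,
  // 8-11 totalChunks, all little-endian uint32; the payload starts at byte 12.
  const { frameId, chunkIndex, totalChunks, payload } = webrtc.parseBinaryHeader(buffer);

  const complete = reassembler.processChunk(frameId, chunkIndex, totalChunks, payload);
  if (!complete) return null; // other chunks of this frame are still in flight

  // A completed frame is UTF-8 encoded JSON, matching what onData receives.
  return JSON.parse(new TextDecoder("utf-8").decode(complete));
}
```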
package/dist/webrtc.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"webrtc.d.ts","sourceRoot":"","sources":["../src/webrtc.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,SAAS,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;
+
{"version":3,"file":"webrtc.d.ts","sourceRoot":"","sources":["../src/webrtc.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,SAAS,EAAE,YAAY,EAAsB,MAAM,iBAAiB,CAAC;AASnG;;GAEG;AACH,qBAAa,gBAAgB;IAC3B,OAAO,CAAC,aAAa,CAGN;IAEf;;OAEG;IACH,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,UAAU,GAAG,UAAU,GAAG,IAAI;IAqC9G;;OAEG;IACH,KAAK,IAAI,IAAI;CAGd;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,WAAW,GAAG;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,WAAW,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,UAAU,CAAA;CAAE,CAQxI;AAED,MAAM,WAAW,gBAAgB;IAC/B,6BAA6B,CAAC,EAAE,OAAO,CAAC;CACzC;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,WAAW,CAAC;IACpB,SAAS,EAAE,SAAS,CAAC;IACrB,UAAU,EAAE,YAAY,CAAC;IACzB,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAC7B,OAAO,CAAC,EAAE,gBAAgB,CAAC;CAC5B;AAmID;;;;GAIG;AACH,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,EAAE,CAAoB;IAC9B,OAAO,CAAC,YAAY,CAAc;IAClC,OAAO,CAAC,mBAAmB,CAAuB;IAClD,OAAO,CAAC,UAAU,CAAgB;IAClC,OAAO,CAAC,MAAM,CAAgB;IAC9B,OAAO,CAAC,WAAW,CAAiB;IACpC,OAAO,CAAC,WAAW,CAAmB;IAEtC,eAAe;gBAEb,EAAE,EAAE,iBAAiB,EACrB,WAAW,EAAE,WAAW,EACxB,mBAAmB,EAAE,OAAO,CAAC,WAAW,CAAC,EACzC,UAAU,EAAE,MAAM,GAAG,IAAI,EACzB,MAAM,EAAE,MAAM,GAAG,IAAI,EACrB,WAAW,EAAE,cAAc,EAC3B,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI;IAsD9B;;;;;;;;;;;OAWG;IACG,YAAY,IAAI,OAAO,CAAC,WAAW,CAAC;IAI1C;;;;;;;;;;;OAWG;IACH,WAAW,IAAI,WAAW;IAI1B;;;;;;;;;;;;;;OAcG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAmB9B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+BG;IACH,kBAAkB,CAAC,MAAM,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,CAAA;KAAE,GAAG,IAAI;IAclG;;;OAGG;IACH,OAAO,CAAC,QAAQ;CAajB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAsB,SAAS,CAAC,EAC9B,MAAM,EACN,SAAS,EACT,UAAU,EACV,MAAM,EACN,OAAY,EACb,EAAE,eAAe,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAqE/C"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@roboflow/inference-sdk",
- "version": "0.1.
+ "version": "0.1.3",
  "description": "Lightweight client for Roboflow's hosted inference API with WebRTC streaming support",
  "keywords": [
  "roboflow",
@@ -27,7 +27,9 @@
  "dev": "vite",
  "build": "vite build",
  "preview": "vite preview",
- "clean": "rm -rf dist"
+ "clean": "rm -rf dist",
+ "test": "vitest run",
+ "test:watch": "vitest"
  },
  "author": "Roboflow",
  "license": "ISC",
@@ -38,7 +40,7 @@
  "devDependencies": {
  "typescript": "^5.3.3",
  "vite": "^5.3.3",
- "vite-plugin-dts": "^3.7.2"
-
-
+ "vite-plugin-dts": "^3.7.2",
+ "vitest": "^4.0.14"
+ }
  }