@roboflow/inference-sdk 0.1.0 → 0.1.2
This diff shows the changes between publicly released versions of this package as they appear in the supported public registries; it is provided for informational purposes only.
- package/README.md +14 -211
- package/dist/index.es.js +162 -113
- package/dist/index.js +1 -1
- package/dist/webrtc-data-parsing.test.d.ts +2 -0
- package/dist/webrtc-data-parsing.test.d.ts.map +1 -0
- package/dist/webrtc.d.ts +24 -0
- package/dist/webrtc.d.ts.map +1 -1
- package/package.json +7 -5
package/README.md
CHANGED

@@ -1,8 +1,6 @@
 # @roboflow/inference-sdk
 
-Lightweight client
-
-This package provides WebRTC streaming capabilities and hosted inference API access without bundling TensorFlow or local inference models, making it ideal for production applications.
+Lightweight client for Roboflow's hosted inference API with WebRTC streaming support for real-time computer vision in the browser.
 
 ## Installation
 
@@ -10,234 +8,39 @@ This package provides WebRTC streaming capabilities and hosted inference API acc
 npm install @roboflow/inference-sdk
 ```
 
-## Quick
-
-### Basic WebRTC Streaming Example
+## Quick Example
 
 ```typescript
-import { useStream } from '@roboflow/inference-sdk
-import { connectors } from '@roboflow/inference-sdk/api';
+import { useStream, connectors } from '@roboflow/inference-sdk';
 import { useCamera } from '@roboflow/inference-sdk/streams';
 
-
-const connector = connectors.withApiKey("your-api-key");
-
-// Get camera stream
-const stream = await useCamera({
-  video: {
-    facingMode: { ideal: "environment" }
-  }
-});
-
-// Start WebRTC connection
+const stream = await useCamera({ video: { facingMode: "environment" } });
 const connection = await useStream({
   source: stream,
-  connector,
-  wrtcParams: {
-
-    // Your workflow specification
-    version: "1.0",
-    inputs: [{ type: "InferenceImage", name: "image" }],
-    steps: [/* ... */],
-    outputs: [/* ... */]
-    },
-    imageInputName: "image",
-    streamOutputNames: ["output"],
-    dataOutputNames: ["predictions"]
-  },
-  onData: (data) => {
-    // Receive real-time inference results
-    console.log("Inference results:", data);
-  }
-});
-
-// Display processed video
-const remoteStream = await connection.remoteStream();
-videoElement.srcObject = remoteStream;
-
-// Clean up when done
-await connection.cleanup();
-```
-
-## Security Best Practices
-
-### ⚠️ API Key Security
-
-**NEVER expose your API key in frontend code for production applications.**
-
-The `connectors.withApiKey()` method is convenient for demos and testing, but it exposes your API key in the browser. For production applications, always use a backend proxy:
-
-### Using a Backend Proxy (Recommended)
-
-**Frontend:**
-```typescript
-import { useStream } from '@roboflow/inference-sdk/webrtc';
-import { connectors } from '@roboflow/inference-sdk/api';
-
-// Use proxy endpoint instead of direct API key
-const connector = connectors.withProxyUrl('/api/init-webrtc');
-
-const connection = await useStream({
-  source: stream,
-  connector,
-  wrtcParams: {
-    workflowSpec: { /* ... */ },
-    imageInputName: "image",
-    streamOutputNames: ["output"]
-  }
-});
-```
-
-**Backend (Express example):**
-```typescript
-import { InferenceHTTPClient } from '@roboflow/inference-sdk/api';
-
-app.post('/api/init-webrtc', async (req, res) => {
-  const { offer, wrtcParams } = req.body;
-
-  // API key stays secure on the server
-  const client = InferenceHTTPClient.init({
-    apiKey: process.env.ROBOFLOW_API_KEY
-  });
-
-  const answer = await client.initializeWebrtcWorker({
-    offer,
-    workflowSpec: wrtcParams.workflowSpec,
-    workspaceName: wrtcParams.workspaceName,
-    workflowId: wrtcParams.workflowId,
-    config: {
-      imageInputName: wrtcParams.imageInputName,
-      streamOutputNames: wrtcParams.streamOutputNames,
-      dataOutputNames: wrtcParams.dataOutputNames,
-      threadPoolWorkers: wrtcParams.threadPoolWorkers
-    }
-  });
-
-  res.json(answer);
-});
-```
-
-## API Reference
-
-### WebRTC Functions
-
-#### `useStream(params)`
-
-Establishes a WebRTC connection for real-time video inference.
-
-**Parameters:**
-- `source: MediaStream` - Input video stream (from camera or other source)
-- `connector: Connector` - Connection method (withApiKey or withProxyUrl)
-- `wrtcParams: WebRTCParams` - Workflow configuration
-  - `workflowSpec?: WorkflowSpec` - Workflow specification object
-  - `workspaceName?: string` - Workspace name (alternative to workflowSpec)
-  - `workflowId?: string` - Workflow ID (alternative to workflowSpec)
-  - `imageInputName?: string` - Input image name (default: "image")
-  - `streamOutputNames?: string[]` - Output stream names
-  - `dataOutputNames?: string[]` - Output data names
-  - `threadPoolWorkers?: number` - Thread pool workers (default: 4)
-- `onData?: (data: any) => void` - Callback for data output
-- `options?: UseStreamOptions` - Additional options
-
-**Returns:** `Promise<RFWebRTCConnection>`
-
-### Connection Methods
-
-#### `connection.remoteStream()`
-
-Get the processed video stream from Roboflow.
-
-**Returns:** `Promise<MediaStream>`
-
-#### `connection.localStream()`
-
-Get the local input video stream.
-
-**Returns:** `MediaStream`
-
-#### `connection.cleanup()`
-
-Close the connection and clean up resources.
-
-**Returns:** `Promise<void>`
-
-#### `connection.reconfigureOutputs(config)`
-
-Dynamically change stream and data outputs at runtime without restarting the connection.
-
-**Parameters:**
-- `config.streamOutput?: string[] | null` - Stream output names
-  - `undefined` or not provided: Unchanged
-  - `[]`: Auto-detect first valid image output
-  - `["output_name"]`: Use specified output
-  - `null`: Unchanged
-- `config.dataOutput?: string[] | null` - Data output names
-  - `undefined` or not provided: Unchanged
-  - `[]`: Disable all data outputs
-  - `["output_name"]`: Use specified outputs
-  - `null`: Enable all data outputs
-
-**Examples:**
-```typescript
-// Change to different stream output
-connection.reconfigureOutputs({
-  streamOutput: ["annotated_image"]
-});
-
-// Enable all data outputs
-connection.reconfigureOutputs({
-  dataOutput: null
-});
-
-// Disable all data outputs
-connection.reconfigureOutputs({
-  dataOutput: []
+  connector: connectors.withProxyUrl('/api/init-webrtc'), // Use backend proxy
+  wrtcParams: { workflowSpec: { /* ... */ } },
+  onData: (data) => console.log("Inference results:", data)
 });
 
-
-connection.reconfigureOutputs({
-  streamOutput: ["visualization"],
-  dataOutput: ["predictions", "metadata"]
-});
+const videoElement.srcObject = await connection.remoteStream();
 ```
 
-
-
-#### `useCamera(constraints)`
-
-Access device camera with specified constraints.
-
-**Parameters:**
-- `constraints: MediaStreamConstraints` - Media constraints
-
-**Returns:** `Promise<MediaStream>`
-
-#### `stopStream(stream)`
-
-Stop a media stream and release camera.
+See the [sample app](https://github.com/roboflow/inferenceSampleApp) for a complete working example.
 
-
-- `stream: MediaStream` - Stream to stop
+## Security Warning
 
-
+**Never expose your API key in frontend code.** Always use a backend proxy for production applications. The sample app demonstrates the recommended proxy pattern.
 
-
-- Building production web applications
-- You need WebRTC streaming inference
-- You want a smaller bundle size
-- You're deploying to browsers
+## Get Started
 
-
-
-- You want to run models offline in the browser
-- You need both local and hosted inference options
+For a complete working example with backend proxy setup, see:
+**[github.com/roboflow/inferenceSampleApp](https://github.com/roboflow/inferenceSampleApp)**
 
 ## Resources
 
 - [Roboflow Documentation](https://docs.roboflow.com/)
 - [API Authentication Guide](https://docs.roboflow.com/api-reference/authentication)
 - [Workflows Documentation](https://docs.roboflow.com/workflows)
-- [GitHub Repository](https://github.com/roboflow/inferencejs)
 
 ## License
 

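The Quick Example published in 0.1.2 ends with `const videoElement.srcObject = await connection.remoteStream();`, which is a syntax error (`const` cannot declare a property assignment). A minimal working sketch of the same flow, assuming an existing `<video id="preview">` element and a backend proxy mounted at `/api/init-webrtc`:

```typescript
// Sketch only: mirrors the 0.1.2 Quick Example with the final assignment fixed.
// Assumes a <video id="preview"> element on the page and a proxy endpoint that
// forwards the WebRTC offer to Roboflow while keeping the API key server-side.
import { useStream, connectors } from '@roboflow/inference-sdk';
import { useCamera } from '@roboflow/inference-sdk/streams';

const stream = await useCamera({ video: { facingMode: "environment" } });

const connection = await useStream({
  source: stream,
  connector: connectors.withProxyUrl('/api/init-webrtc'),
  wrtcParams: { workflowSpec: { /* ... */ } },
  onData: (data) => console.log("Inference results:", data)
});

// Attach the processed remote stream to an existing element instead of
// writing `const videoElement.srcObject = ...`, which does not parse.
const videoElement = document.getElementById("preview") as HTMLVideoElement;
videoElement.srcObject = await connection.remoteStream();

// When finished: closes the peer connection, stops the camera, and
// terminates the remote pipeline.
await connection.cleanup();
```
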
package/dist/index.es.js
CHANGED

@@ -1,20 +1,20 @@
-var
-var
-var
-class
+var R = Object.defineProperty;
+var T = (n, e, t) => e in n ? R(n, e, { enumerable: !0, configurable: !0, writable: !0, value: t }) : n[e] = t;
+var u = (n, e, t) => T(n, typeof e != "symbol" ? e + "" : e, t);
+class g {
 /**
 * @private
 * Use InferenceHTTPClient.init() instead
 */
 constructor(e, t = "https://serverless.roboflow.com") {
-
-
+u(this, "apiKey");
+u(this, "serverUrl");
 this.apiKey = e, this.serverUrl = t;
 }
 static init({ apiKey: e, serverUrl: t }) {
 if (!e)
 throw new Error("apiKey is required");
-return new
+return new g(e, t);
 }
 /**
 * Initialize a WebRTC worker pipeline
@@ -45,30 +45,30 @@ class y {
 offer: e,
 workflowSpec: t,
 workspaceName: a,
-workflowId:
-config:
+workflowId: r,
+config: o = {}
 }) {
 if (!e || !e.sdp || !e.type)
 throw new Error("offer with sdp and type is required");
-const
-if (!
+const i = !!t, l = !!(a && r);
+if (!i && !l)
 throw new Error("Either workflowSpec OR (workspaceName + workflowId) is required");
-if (
+if (i && l)
 throw new Error("Provide either workflowSpec OR (workspaceName + workflowId), not both");
 const {
 imageInputName: d = "image",
-streamOutputNames:
-dataOutputNames:
-threadPoolWorkers:
-} =
+streamOutputNames: s = [],
+dataOutputNames: c = ["string"],
+threadPoolWorkers: p = 4
+} = o, h = {
 type: "WorkflowConfiguration",
 image_input_name: d,
-workflows_thread_pool_workers:
+workflows_thread_pool_workers: p,
 cancel_thread_pool_tasks_on_exit: !0,
 video_metadata_input_name: "video_metadata"
 };
-
-const
+i ? h.workflow_specification = t : (h.workspace_name = a, h.workflow_id = r);
+const f = {
 workflow_configuration: h,
 api_key: this.apiKey,
 webrtc_realtime_processing: !0,
@@ -77,16 +77,16 @@ class y {
 type: e.type
 },
 webrtc_turn_config: null,
-stream_output:
-data_output:
+stream_output: s,
+data_output: c
 }, m = await fetch(`${this.serverUrl}/initialise_webrtc_worker`, {
 method: "POST",
 headers: { "Content-Type": "application/json" },
-body: JSON.stringify(
+body: JSON.stringify(f)
 });
 if (!m.ok) {
-const
-throw new Error(`initialise_webrtc_worker failed (${m.status}): ${
+const w = await m.text().catch(() => "");
+throw new Error(`initialise_webrtc_worker failed (${m.status}): ${w}`);
 }
 return await m.json();
 }
@@ -102,7 +102,7 @@ class y {
 );
 }
 }
-const
+const U = {
 /**
 * Create a connector that uses API key directly
 *
@@ -121,25 +121,25 @@ const N = {
 * const answer = await connector.connectWrtc(offer, wrtcParams);
 * ```
 */
-withApiKey(
+withApiKey(n, e = {}) {
 const { serverUrl: t } = e;
 return typeof window < "u" && console.warn(
 "[Security Warning] Using API key directly in browser will expose it. Use connectors.withProxyUrl() for production. See: https://docs.roboflow.com/api-reference/authentication#securing-your-api-key"
 ), {
-connectWrtc: async (a,
+connectWrtc: async (a, r) => await g.init({ apiKey: n, serverUrl: t }).initializeWebrtcWorker({
 offer: a,
-workflowSpec:
-workspaceName:
-workflowId:
+workflowSpec: r.workflowSpec,
+workspaceName: r.workspaceName,
+workflowId: r.workflowId,
 config: {
-imageInputName:
-streamOutputNames:
-dataOutputNames:
-threadPoolWorkers:
+imageInputName: r.imageInputName,
+streamOutputNames: r.streamOutputNames,
+dataOutputNames: r.dataOutputNames,
+threadPoolWorkers: r.threadPoolWorkers
 }
 }),
 // Store apiKey for cleanup
-_apiKey:
+_apiKey: n,
 _serverUrl: t
 };
 },
@@ -183,10 +183,10 @@ const N = {
 * });
 * ```
 */
-withProxyUrl(
+withProxyUrl(n, e = {}) {
 return {
 connectWrtc: async (t, a) => {
-const
+const r = await fetch(n, {
 method: "POST",
 headers: { "Content-Type": "application/json" },
 body: JSON.stringify({
@@ -194,19 +194,19 @@ const N = {
 wrtcParams: a
 })
 });
-if (!
-const
-throw new Error(`Proxy request failed (${
+if (!r.ok) {
+const o = await r.text().catch(() => "");
+throw new Error(`Proxy request failed (${r.status}): ${o}`);
 }
-return await
+return await r.json();
 }
 };
 }
 };
-async function
+async function O(n = { video: !0 }) {
 try {
-console.log("[RFStreams] requesting with",
-const e = await navigator.mediaDevices.getUserMedia(
+console.log("[RFStreams] requesting with", n);
+const e = await navigator.mediaDevices.getUserMedia(n);
 return console.log("[RFStreams] got stream", e.getVideoTracks().map((t) => ({ id: t.id, label: t.label }))), e;
 } catch (e) {
 console.warn("[RFStreams] failed, falling back", e);
@@ -214,74 +214,111 @@ async function R(r = { video: !0 }) {
 return console.log("[RFStreams] fallback stream", t.getVideoTracks().map((a) => ({ id: a.id, label: a.label }))), t;
 }
 }
-function _(
-
+function _(n) {
+n && (n.getTracks().forEach((e) => e.stop()), console.log("[RFStreams] Stream stopped"));
 }
-const
+const L = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
 __proto__: null,
 stopStream: _,
-useCamera:
-}, Symbol.toStringTag, { value: "Module" }));
-
-
+useCamera: O
+}, Symbol.toStringTag, { value: "Module" })), W = 12;
+class b {
+constructor() {
+u(this, "pendingFrames", /* @__PURE__ */ new Map());
+}
+/**
+* Process an incoming chunk and return the complete message if all chunks received
+*/
+processChunk(e, t, a, r) {
+if (a === 1)
+return r;
+this.pendingFrames.has(e) || this.pendingFrames.set(e, {
+chunks: /* @__PURE__ */ new Map(),
+totalChunks: a
+});
+const o = this.pendingFrames.get(e);
+if (o.chunks.set(t, r), o.chunks.size === a) {
+const i = Array.from(o.chunks.values()).reduce((s, c) => s + c.length, 0), l = new Uint8Array(i);
+let d = 0;
+for (let s = 0; s < a; s++) {
+const c = o.chunks.get(s);
+l.set(c, d), d += c.length;
+}
+return this.pendingFrames.delete(e), l;
+}
+return null;
+}
+/**
+* Clear all pending frames (for cleanup)
+*/
+clear() {
+this.pendingFrames.clear();
+}
+}
+function v(n) {
+const e = new DataView(n), t = e.getUint32(0, !0), a = e.getUint32(4, !0), r = e.getUint32(8, !0), o = new Uint8Array(n, W);
+return { frameId: t, chunkIndex: a, totalChunks: r, payload: o };
+}
+async function P(n, e = 6e3) {
+if (n.iceGatheringState === "complete") return;
 let t = !1;
-const a = (
-
+const a = (r) => {
+r.candidate && r.candidate.type === "srflx" && (t = !0);
 };
-
+n.addEventListener("icecandidate", a);
 try {
 await Promise.race([
-new Promise((
-const
-
+new Promise((r) => {
+const o = () => {
+n.iceGatheringState === "complete" && (n.removeEventListener("icegatheringstatechange", o), r());
 };
-
+n.addEventListener("icegatheringstatechange", o);
 }),
-new Promise((
+new Promise((r, o) => {
 setTimeout(() => {
-t ?
+t ? r() : (console.error("[ICE] timeout with NO srflx candidate! Connection may fail."), o(new Error("ICE gathering timeout without srflx candidate")));
 }, e);
 })
 ]);
 } finally {
-
+n.removeEventListener("icecandidate", a);
 }
 }
-function
+function F(n) {
 return new Promise((e) => {
-
+n.addEventListener("track", (t) => {
 t.streams && t.streams[0] && e(t.streams[0]);
 });
 });
 }
-async function
+async function I(n) {
 const e = "stun:stun.l.google.com:19302", t = new RTCPeerConnection({
 iceServers: [{ urls: [e] }]
 });
 try {
 t.addTransceiver("video", { direction: "recvonly" });
-} catch (
-console.warn("[RFWebRTC] Could not add transceiver:",
+} catch (i) {
+console.warn("[RFWebRTC] Could not add transceiver:", i);
 }
-
+n.getVideoTracks().forEach((i) => {
 try {
-
+i.contentHint = "detail";
 } catch {
 }
-t.addTrack(
+t.addTrack(i, n);
 });
-const a =
+const a = F(t), r = t.createDataChannel("roboflow-control", {
 ordered: !0
-}),
-return await t.setLocalDescription(
+}), o = await t.createOffer();
+return await t.setLocalDescription(o), await P(t), {
 pc: t,
 offer: t.localDescription,
 remoteStreamPromise: a,
-dataChannel:
+dataChannel: r
 };
 }
-async function
-const e =
+async function x(n) {
+const e = n.getSenders().find((a) => a.track && a.track.kind === "video");
 if (!e) return;
 const t = e.getParameters();
 t.encodings = t.encodings || [{}], t.encodings[0].scaleResolutionDownBy = 1;
@@ -291,26 +328,36 @@ async function P(r) {
 console.warn("[RFWebRTC] Failed to set encoding parameters:", a);
 }
 }
-class
+class C {
 /** @private */
-constructor(e, t, a,
-
-
-
-
-
-
-this
+constructor(e, t, a, r, o, i, l) {
+u(this, "pc");
+u(this, "_localStream");
+u(this, "remoteStreamPromise");
+u(this, "pipelineId");
+u(this, "apiKey");
+u(this, "dataChannel");
+u(this, "reassembler");
+this.pc = e, this._localStream = t, this.remoteStreamPromise = a, this.pipelineId = r, this.apiKey = o, this.dataChannel = i, this.reassembler = new b(), this.dataChannel.binaryType = "arraybuffer", l && (this.dataChannel.addEventListener("open", () => {
 }), this.dataChannel.addEventListener("message", (d) => {
 try {
-
-
-
-
+if (d.data instanceof ArrayBuffer) {
+const { frameId: s, chunkIndex: c, totalChunks: p, payload: h } = v(d.data), f = this.reassembler.processChunk(s, c, p, h);
+if (f) {
+const k = new TextDecoder("utf-8").decode(f), w = JSON.parse(k);
+l(w);
+}
+} else {
+const s = JSON.parse(d.data);
+l(s);
+}
+} catch (s) {
+console.error("[RFWebRTC] Failed to parse data channel message:", s);
 }
 }), this.dataChannel.addEventListener("error", (d) => {
 console.error("[RFWebRTC] Data channel error:", d);
 }), this.dataChannel.addEventListener("close", () => {
+this.reassembler.clear();
 }));
 }
 /**
@@ -359,7 +406,7 @@ class k {
 * ```
 */
 async cleanup() {
-this.pipelineId && this.apiKey && await
+this.reassembler.clear(), this.pipelineId && this.apiKey && await g.init({ apiKey: this.apiKey }).terminatePipeline({ pipelineId: this.pipelineId }), this.pc && this.pc.connectionState !== "closed" && this.pc.close(), _(this._localStream);
 }
 /**
 * Reconfigure pipeline outputs at runtime
@@ -414,42 +461,44 @@ class k {
 }
 }
 }
-async function
-source:
+async function D({
+source: n,
 connector: e,
 wrtcParams: t,
 onData: a,
-options:
+options: r = {}
 }) {
-var
+var w;
 if (!e || typeof e.connectWrtc != "function")
 throw new Error("connector must have a connectWrtc method");
-const
-{ sdp:
+const o = n, { pc: i, offer: l, remoteStreamPromise: d, dataChannel: s } = await I(o), c = await e.connectWrtc(
+{ sdp: l.sdp, type: l.type },
 t
-),
-if (!(
-throw console.error("[RFWebRTC] Invalid answer from server:",
-const h = ((
-await
-const
-
+), p = { sdp: c.sdp, type: c.type };
+if (!(p != null && p.sdp) || !(p != null && p.type))
+throw console.error("[RFWebRTC] Invalid answer from server:", c), new Error("connector.connectWrtc must return answer with sdp and type");
+const h = ((w = c == null ? void 0 : c.context) == null ? void 0 : w.pipeline_id) || null;
+await i.setRemoteDescription(p), await new Promise((E, S) => {
+const y = () => {
+i.connectionState === "connected" ? (i.removeEventListener("connectionstatechange", y), E()) : i.connectionState === "failed" && (i.removeEventListener("connectionstatechange", y), S(new Error("WebRTC connection failed")));
 };
-
-
+i.addEventListener("connectionstatechange", y), y(), setTimeout(() => {
+i.removeEventListener("connectionstatechange", y), S(new Error("WebRTC connection timeout after 30s"));
 }, 3e4);
-}),
+}), r.disableInputStreamDownscaling !== !1 && await x(i);
 const m = e._apiKey || null;
-return new
+return new C(i, o, d, h, m, s, a);
 }
-const
+const K = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
 __proto__: null,
-
-
+ChunkReassembler: b,
+RFWebRTCConnection: C,
+parseBinaryHeader: v,
+useStream: D
 }, Symbol.toStringTag, { value: "Module" }));
 export {
-
-
-
-
+g as InferenceHTTPClient,
+U as connectors,
+L as streams,
+K as webrtc
 };

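The substantive change in this build is the chunked data-channel path: `parseBinaryHeader` (the `v` function above) reads a 12-byte header of three little-endian uint32s (`frameId`, `chunkIndex`, `totalChunks`) followed by the payload, and `ChunkReassembler` buffers chunks until a frame is complete, passing single-chunk frames straight through. A rough sketch of the framing that code expects; the encoder below is illustrative and not part of the SDK:

```typescript
// Illustrative producer for the 12-byte header that parseBinaryHeader() consumes:
// bytes 0-3 frameId, 4-7 chunkIndex, 8-11 totalChunks (all little-endian uint32),
// followed by the payload bytes. The chunk size here is an arbitrary choice.
function encodeChunks(frameId: number, message: object, chunkSize = 16_000): ArrayBuffer[] {
  const bytes = new TextEncoder().encode(JSON.stringify(message));
  const totalChunks = Math.max(1, Math.ceil(bytes.length / chunkSize));
  const frames: ArrayBuffer[] = [];
  for (let i = 0; i < totalChunks; i++) {
    const payload = bytes.subarray(i * chunkSize, (i + 1) * chunkSize);
    const buffer = new ArrayBuffer(12 + payload.length);
    const view = new DataView(buffer);
    view.setUint32(0, frameId, true);     // frameId
    view.setUint32(4, i, true);           // chunkIndex
    view.setUint32(8, totalChunks, true); // totalChunks
    new Uint8Array(buffer, 12).set(payload);
    frames.push(buffer);
  }
  return frames;
}
```

On the receiving side, `RFWebRTCConnection` decodes each ArrayBuffer with `parseBinaryHeader`, feeds it to its `ChunkReassembler`, and JSON-parses the reassembled bytes before invoking the `onData` callback, matching the message handler added above.
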
package/dist/index.js
CHANGED

@@ -1 +1 @@
-(function(c,
+(function(c,s){typeof exports=="object"&&typeof module<"u"?s(exports):typeof define=="function"&&define.amd?define(["exports"],s):(c=typeof globalThis<"u"?globalThis:c||self,s(c.RoboflowClient={}))})(this,function(c){"use strict";var L=Object.defineProperty;var K=(c,s,y)=>s in c?L(c,s,{enumerable:!0,configurable:!0,writable:!0,value:y}):c[s]=y;var m=(c,s,y)=>K(c,typeof s!="symbol"?s+"":s,y);class s{constructor(e,t="https://serverless.roboflow.com"){m(this,"apiKey");m(this,"serverUrl");this.apiKey=e,this.serverUrl=t}static init({apiKey:e,serverUrl:t}){if(!e)throw new Error("apiKey is required");return new s(e,t)}async initializeWebrtcWorker({offer:e,workflowSpec:t,workspaceName:a,workflowId:n,config:o={}}){if(!e||!e.sdp||!e.type)throw new Error("offer with sdp and type is required");const i=!!t,p=!!(a&&n);if(!i&&!p)throw new Error("Either workflowSpec OR (workspaceName + workflowId) is required");if(i&&p)throw new Error("Provide either workflowSpec OR (workspaceName + workflowId), not both");const{imageInputName:u="image",streamOutputNames:l=[],dataOutputNames:d=["string"],threadPoolWorkers:h=4}=o,f={type:"WorkflowConfiguration",image_input_name:u,workflows_thread_pool_workers:h,cancel_thread_pool_tasks_on_exit:!0,video_metadata_input_name:"video_metadata"};i?f.workflow_specification=t:(f.workspace_name=a,f.workflow_id=n);const S={workflow_configuration:f,api_key:this.apiKey,webrtc_realtime_processing:!0,webrtc_offer:{sdp:e.sdp,type:e.type},webrtc_turn_config:null,stream_output:l,data_output:d},w=await fetch(`${this.serverUrl}/initialise_webrtc_worker`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(S)});if(!w.ok){const g=await w.text().catch(()=>"");throw new Error(`initialise_webrtc_worker failed (${w.status}): ${g}`)}return await w.json()}async terminatePipeline({pipelineId:e}){if(!e)throw new Error("pipelineId is required");await fetch(`${this.serverUrl}/inference_pipelines/${e}/terminate?api_key=${this.apiKey}`,{method:"POST",headers:{"Content-Type":"application/json"}})}}const y={withApiKey(r,e={}){const{serverUrl:t}=e;return typeof window<"u"&&console.warn("[Security Warning] Using API key directly in browser will expose it. Use connectors.withProxyUrl() for production. See: https://docs.roboflow.com/api-reference/authentication#securing-your-api-key"),{connectWrtc:async(a,n)=>await s.init({apiKey:r,serverUrl:t}).initializeWebrtcWorker({offer:a,workflowSpec:n.workflowSpec,workspaceName:n.workspaceName,workflowId:n.workflowId,config:{imageInputName:n.imageInputName,streamOutputNames:n.streamOutputNames,dataOutputNames:n.dataOutputNames,threadPoolWorkers:n.threadPoolWorkers}}),_apiKey:r,_serverUrl:t}},withProxyUrl(r,e={}){return{connectWrtc:async(t,a)=>{const n=await fetch(r,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({offer:t,wrtcParams:a})});if(!n.ok){const o=await n.text().catch(()=>"");throw new Error(`Proxy request failed (${n.status}): ${o}`)}return await n.json()}}}};async function E(r={video:!0}){try{console.log("[RFStreams] requesting with",r);const e=await navigator.mediaDevices.getUserMedia(r);return console.log("[RFStreams] got stream",e.getVideoTracks().map(t=>({id:t.id,label:t.label}))),e}catch(e){console.warn("[RFStreams] failed, falling back",e);const t=await navigator.mediaDevices.getUserMedia({video:!0,audio:!1});return console.log("[RFStreams] fallback stream",t.getVideoTracks().map(a=>({id:a.id,label:a.label}))),t}}function _(r){r&&(r.getTracks().forEach(e=>e.stop()),console.log("[RFStreams] Stream stopped"))}const O=Object.freeze(Object.defineProperty({__proto__:null,stopStream:_,useCamera:E},Symbol.toStringTag,{value:"Module"})),W=12;class v{constructor(){m(this,"pendingFrames",new Map)}processChunk(e,t,a,n){if(a===1)return n;this.pendingFrames.has(e)||this.pendingFrames.set(e,{chunks:new Map,totalChunks:a});const o=this.pendingFrames.get(e);if(o.chunks.set(t,n),o.chunks.size===a){const i=Array.from(o.chunks.values()).reduce((l,d)=>l+d.length,0),p=new Uint8Array(i);let u=0;for(let l=0;l<a;l++){const d=o.chunks.get(l);p.set(d,u),u+=d.length}return this.pendingFrames.delete(e),p}return null}clear(){this.pendingFrames.clear()}}function C(r){const e=new DataView(r),t=e.getUint32(0,!0),a=e.getUint32(4,!0),n=e.getUint32(8,!0),o=new Uint8Array(r,W);return{frameId:t,chunkIndex:a,totalChunks:n,payload:o}}async function P(r,e=6e3){if(r.iceGatheringState==="complete")return;let t=!1;const a=n=>{n.candidate&&n.candidate.type==="srflx"&&(t=!0)};r.addEventListener("icecandidate",a);try{await Promise.race([new Promise(n=>{const o=()=>{r.iceGatheringState==="complete"&&(r.removeEventListener("icegatheringstatechange",o),n())};r.addEventListener("icegatheringstatechange",o)}),new Promise((n,o)=>{setTimeout(()=>{t?n():(console.error("[ICE] timeout with NO srflx candidate! Connection may fail."),o(new Error("ICE gathering timeout without srflx candidate")))},e)})])}finally{r.removeEventListener("icecandidate",a)}}function F(r){return new Promise(e=>{r.addEventListener("track",t=>{t.streams&&t.streams[0]&&e(t.streams[0])})})}async function I(r){const e="stun:stun.l.google.com:19302",t=new RTCPeerConnection({iceServers:[{urls:[e]}]});try{t.addTransceiver("video",{direction:"recvonly"})}catch(i){console.warn("[RFWebRTC] Could not add transceiver:",i)}r.getVideoTracks().forEach(i=>{try{i.contentHint="detail"}catch{}t.addTrack(i,r)});const a=F(t),n=t.createDataChannel("roboflow-control",{ordered:!0}),o=await t.createOffer();return await t.setLocalDescription(o),await P(t),{pc:t,offer:t.localDescription,remoteStreamPromise:a,dataChannel:n}}async function D(r){const e=r.getSenders().find(a=>a.track&&a.track.kind==="video");if(!e)return;const t=e.getParameters();t.encodings=t.encodings||[{}],t.encodings[0].scaleResolutionDownBy=1;try{await e.setParameters(t)}catch(a){console.warn("[RFWebRTC] Failed to set encoding parameters:",a)}}class T{constructor(e,t,a,n,o,i,p){m(this,"pc");m(this,"_localStream");m(this,"remoteStreamPromise");m(this,"pipelineId");m(this,"apiKey");m(this,"dataChannel");m(this,"reassembler");this.pc=e,this._localStream=t,this.remoteStreamPromise=a,this.pipelineId=n,this.apiKey=o,this.dataChannel=i,this.reassembler=new v,this.dataChannel.binaryType="arraybuffer",p&&(this.dataChannel.addEventListener("open",()=>{}),this.dataChannel.addEventListener("message",u=>{try{if(u.data instanceof ArrayBuffer){const{frameId:l,chunkIndex:d,totalChunks:h,payload:f}=C(u.data),S=this.reassembler.processChunk(l,d,h,f);if(S){const b=new TextDecoder("utf-8").decode(S),g=JSON.parse(b);p(g)}}else{const l=JSON.parse(u.data);p(l)}}catch(l){console.error("[RFWebRTC] Failed to parse data channel message:",l)}}),this.dataChannel.addEventListener("error",u=>{console.error("[RFWebRTC] Data channel error:",u)}),this.dataChannel.addEventListener("close",()=>{this.reassembler.clear()}))}async remoteStream(){return await this.remoteStreamPromise}localStream(){return this._localStream}async cleanup(){this.reassembler.clear(),this.pipelineId&&this.apiKey&&await s.init({apiKey:this.apiKey}).terminatePipeline({pipelineId:this.pipelineId}),this.pc&&this.pc.connectionState!=="closed"&&this.pc.close(),_(this._localStream)}reconfigureOutputs(e){const t={};e.streamOutput!==void 0&&(t.stream_output=e.streamOutput),e.dataOutput!==void 0&&(t.data_output=e.dataOutput),this.sendData(t)}sendData(e){if(this.dataChannel.readyState!=="open"){console.warn("[RFWebRTC] Data channel is not open. Current state:",this.dataChannel.readyState);return}try{const t=typeof e=="string"?e:JSON.stringify(e);this.dataChannel.send(t)}catch(t){console.error("[RFWebRTC] Failed to send data:",t)}}}async function N({source:r,connector:e,wrtcParams:t,onData:a,options:n={}}){var g;if(!e||typeof e.connectWrtc!="function")throw new Error("connector must have a connectWrtc method");const o=r,{pc:i,offer:p,remoteStreamPromise:u,dataChannel:l}=await I(o),d=await e.connectWrtc({sdp:p.sdp,type:p.type},t),h={sdp:d.sdp,type:d.type};if(!(h!=null&&h.sdp)||!(h!=null&&h.type))throw console.error("[RFWebRTC] Invalid answer from server:",d),new Error("connector.connectWrtc must return answer with sdp and type");const f=((g=d==null?void 0:d.context)==null?void 0:g.pipeline_id)||null;await i.setRemoteDescription(h),await new Promise((x,R)=>{const k=()=>{i.connectionState==="connected"?(i.removeEventListener("connectionstatechange",k),x()):i.connectionState==="failed"&&(i.removeEventListener("connectionstatechange",k),R(new Error("WebRTC connection failed")))};i.addEventListener("connectionstatechange",k),k(),setTimeout(()=>{i.removeEventListener("connectionstatechange",k),R(new Error("WebRTC connection timeout after 30s"))},3e4)}),n.disableInputStreamDownscaling!==!1&&await D(i);const w=e._apiKey||null;return new T(i,o,u,f,w,l,a)}const U=Object.freeze(Object.defineProperty({__proto__:null,ChunkReassembler:v,RFWebRTCConnection:T,parseBinaryHeader:C,useStream:N},Symbol.toStringTag,{value:"Module"}));c.InferenceHTTPClient=s,c.connectors=y,c.streams=O,c.webrtc=U,Object.defineProperty(c,Symbol.toStringTag,{value:"Module"})});

package/dist/webrtc-data-parsing.test.d.ts.map
ADDED

@@ -0,0 +1 @@
+{"version":3,"file":"webrtc-data-parsing.test.d.ts","sourceRoot":"","sources":["../src/webrtc-data-parsing.test.ts"],"names":[],"mappings":""}

package/dist/webrtc.d.ts
CHANGED

@@ -1,4 +1,27 @@
 import { Connector, WebRTCParams } from "./inference-api";
+/**
+ * Reassembles chunked binary messages from the datachannel
+ */
+export declare class ChunkReassembler {
+    private pendingFrames;
+    /**
+     * Process an incoming chunk and return the complete message if all chunks received
+     */
+    processChunk(frameId: number, chunkIndex: number, totalChunks: number, payload: Uint8Array): Uint8Array | null;
+    /**
+     * Clear all pending frames (for cleanup)
+     */
+    clear(): void;
+}
+/**
+ * Parse the binary header from a datachannel message
+ */
+export declare function parseBinaryHeader(buffer: ArrayBuffer): {
+    frameId: number;
+    chunkIndex: number;
+    totalChunks: number;
+    payload: Uint8Array;
+};
 export interface UseStreamOptions {
     disableInputStreamDownscaling?: boolean;
 }
@@ -21,6 +44,7 @@ export declare class RFWebRTCConnection {
     private pipelineId;
     private apiKey;
    private dataChannel;
+    private reassembler;
     /** @private */
     constructor(pc: RTCPeerConnection, localStream: MediaStream, remoteStreamPromise: Promise<MediaStream>, pipelineId: string | null, apiKey: string | null, dataChannel: RTCDataChannel, onData?: (data: any) => void);
     /**

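These declarations correspond to the runtime exports added to the `webrtc` namespace in `dist/index.es.js`. A minimal consumer sketch, assuming that namespace import; the data-channel wiring is illustrative, and `useStream` already performs this decoding internally before calling `onData`:

```typescript
import { webrtc } from '@roboflow/inference-sdk';

const reassembler = new webrtc.ChunkReassembler();

// Hypothetical handler for an RTCDataChannel that delivers chunked JSON frames.
function onChannelMessage(event: MessageEvent): void {
  if (!(event.data instanceof ArrayBuffer)) return;
  const { frameId, chunkIndex, totalChunks, payload } = webrtc.parseBinaryHeader(event.data);
  const complete = reassembler.processChunk(frameId, chunkIndex, totalChunks, payload);
  if (complete) {
    // processChunk returns the reassembled bytes only once every chunk has arrived.
    console.log("Data output:", JSON.parse(new TextDecoder("utf-8").decode(complete)));
  }
}
```

The new `webrtc-data-parsing.test.d.ts` artifacts indicate this framing logic is now covered by the package's vitest suite.
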
package/dist/webrtc.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"webrtc.d.ts","sourceRoot":"","sources":["../src/webrtc.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,SAAS,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;
+{"version":3,"file":"webrtc.d.ts","sourceRoot":"","sources":["../src/webrtc.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,SAAS,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAS/E;;GAEG;AACH,qBAAa,gBAAgB;IAC3B,OAAO,CAAC,aAAa,CAGN;IAEf;;OAEG;IACH,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,OAAO,EAAE,UAAU,GAAG,UAAU,GAAG,IAAI;IAqC9G;;OAEG;IACH,KAAK,IAAI,IAAI;CAGd;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAAC,MAAM,EAAE,WAAW,GAAG;IAAE,OAAO,EAAE,MAAM,CAAC;IAAC,UAAU,EAAE,MAAM,CAAC;IAAC,WAAW,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,UAAU,CAAA;CAAE,CAQxI;AAED,MAAM,WAAW,gBAAgB;IAC/B,6BAA6B,CAAC,EAAE,OAAO,CAAC;CACzC;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,WAAW,CAAC;IACpB,SAAS,EAAE,SAAS,CAAC;IACrB,UAAU,EAAE,YAAY,CAAC;IACzB,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAC7B,OAAO,CAAC,EAAE,gBAAgB,CAAC;CAC5B;AA4HD;;;;GAIG;AACH,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,EAAE,CAAoB;IAC9B,OAAO,CAAC,YAAY,CAAc;IAClC,OAAO,CAAC,mBAAmB,CAAuB;IAClD,OAAO,CAAC,UAAU,CAAgB;IAClC,OAAO,CAAC,MAAM,CAAgB;IAC9B,OAAO,CAAC,WAAW,CAAiB;IACpC,OAAO,CAAC,WAAW,CAAmB;IAEtC,eAAe;gBAEb,EAAE,EAAE,iBAAiB,EACrB,WAAW,EAAE,WAAW,EACxB,mBAAmB,EAAE,OAAO,CAAC,WAAW,CAAC,EACzC,UAAU,EAAE,MAAM,GAAG,IAAI,EACzB,MAAM,EAAE,MAAM,GAAG,IAAI,EACrB,WAAW,EAAE,cAAc,EAC3B,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI;IAsD9B;;;;;;;;;;;OAWG;IACG,YAAY,IAAI,OAAO,CAAC,WAAW,CAAC;IAI1C;;;;;;;;;;;OAWG;IACH,WAAW,IAAI,WAAW;IAI1B;;;;;;;;;;;;;;OAcG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAmB9B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+BG;IACH,kBAAkB,CAAC,MAAM,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,CAAA;KAAE,GAAG,IAAI;IAclG;;;OAGG;IACH,OAAO,CAAC,QAAQ;CAajB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAsB,SAAS,CAAC,EAC9B,MAAM,EACN,SAAS,EACT,UAAU,EACV,MAAM,EACN,OAAY,EACb,EAAE,eAAe,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAkE/C"}

package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@roboflow/inference-sdk",
-  "version": "0.1.
+  "version": "0.1.2",
   "description": "Lightweight client for Roboflow's hosted inference API with WebRTC streaming support",
   "keywords": [
     "roboflow",
@@ -27,7 +27,9 @@
     "dev": "vite",
     "build": "vite build",
     "preview": "vite preview",
-    "clean": "rm -rf dist"
+    "clean": "rm -rf dist",
+    "test": "vitest run",
+    "test:watch": "vitest"
   },
   "author": "Roboflow",
   "license": "ISC",
@@ -38,7 +40,7 @@
   "devDependencies": {
     "typescript": "^5.3.3",
     "vite": "^5.3.3",
-    "vite-plugin-dts": "^3.7.2"
-
-
+    "vite-plugin-dts": "^3.7.2",
+    "vitest": "^4.0.14"
+  }
 }