@roboflow/inference-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +244 -0
- package/dist/index.d.ts +11 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.es.js +455 -0
- package/dist/index.js +1 -0
- package/dist/inference-api.d.ts +150 -0
- package/dist/inference-api.d.ts.map +1 -0
- package/dist/streams.d.ts +22 -0
- package/dist/streams.d.ts.map +1 -0
- package/dist/webrtc.d.ts +147 -0
- package/dist/webrtc.d.ts.map +1 -0
- package/package.json +44 -0
package/README.md
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
# @roboflow/inference-sdk
|
|
2
|
+
|
|
3
|
+
Lightweight client package for Roboflow inference via WebRTC streaming and hosted API.
|
|
4
|
+
|
|
5
|
+
This package provides WebRTC streaming capabilities and hosted inference API access without bundling TensorFlow or local inference models, making it ideal for production applications.
|
|
6
|
+
|
|
7
|
+
## Installation
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install @roboflow/inference-sdk
|
|
11
|
+
```
|
|
12
|
+
|
|
13
|
+
## Quick Start
|
|
14
|
+
|
|
15
|
+
### Basic WebRTC Streaming Example
|
|
16
|
+
|
|
17
|
+
```typescript
|
|
18
|
+
import { useStream } from '@roboflow/inference-sdk/webrtc';
|
|
19
|
+
import { connectors } from '@roboflow/inference-sdk/api';
|
|
20
|
+
import { useCamera } from '@roboflow/inference-sdk/streams';
|
|
21
|
+
|
|
22
|
+
// Create connector (use proxy for production!)
|
|
23
|
+
const connector = connectors.withApiKey("your-api-key");
|
|
24
|
+
|
|
25
|
+
// Get camera stream
|
|
26
|
+
const stream = await useCamera({
|
|
27
|
+
video: {
|
|
28
|
+
facingMode: { ideal: "environment" }
|
|
29
|
+
}
|
|
30
|
+
});
|
|
31
|
+
|
|
32
|
+
// Start WebRTC connection
|
|
33
|
+
const connection = await useStream({
|
|
34
|
+
source: stream,
|
|
35
|
+
connector,
|
|
36
|
+
wrtcParams: {
|
|
37
|
+
workflowSpec: {
|
|
38
|
+
// Your workflow specification
|
|
39
|
+
version: "1.0",
|
|
40
|
+
inputs: [{ type: "InferenceImage", name: "image" }],
|
|
41
|
+
steps: [/* ... */],
|
|
42
|
+
outputs: [/* ... */]
|
|
43
|
+
},
|
|
44
|
+
imageInputName: "image",
|
|
45
|
+
streamOutputNames: ["output"],
|
|
46
|
+
dataOutputNames: ["predictions"]
|
|
47
|
+
},
|
|
48
|
+
onData: (data) => {
|
|
49
|
+
// Receive real-time inference results
|
|
50
|
+
console.log("Inference results:", data);
|
|
51
|
+
}
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
// Display processed video
|
|
55
|
+
const remoteStream = await connection.remoteStream();
|
|
56
|
+
videoElement.srcObject = remoteStream;
|
|
57
|
+
|
|
58
|
+
// Clean up when done
|
|
59
|
+
await connection.cleanup();
|
|
60
|
+
```
|
|
61
|
+
|
|
62
|
+
## Security Best Practices
|
|
63
|
+
|
|
64
|
+
### ⚠️ API Key Security
|
|
65
|
+
|
|
66
|
+
**NEVER expose your API key in frontend code for production applications.**
|
|
67
|
+
|
|
68
|
+
The `connectors.withApiKey()` method is convenient for demos and testing, but it exposes your API key in the browser. For production applications, always use a backend proxy:
|
|
69
|
+
|
|
70
|
+
### Using a Backend Proxy (Recommended)
|
|
71
|
+
|
|
72
|
+
**Frontend:**
|
|
73
|
+
```typescript
|
|
74
|
+
import { useStream } from '@roboflow/inference-sdk/webrtc';
|
|
75
|
+
import { connectors } from '@roboflow/inference-sdk/api';
|
|
76
|
+
|
|
77
|
+
// Use proxy endpoint instead of direct API key
|
|
78
|
+
const connector = connectors.withProxyUrl('/api/init-webrtc');
|
|
79
|
+
|
|
80
|
+
const connection = await useStream({
|
|
81
|
+
source: stream,
|
|
82
|
+
connector,
|
|
83
|
+
wrtcParams: {
|
|
84
|
+
workflowSpec: { /* ... */ },
|
|
85
|
+
imageInputName: "image",
|
|
86
|
+
streamOutputNames: ["output"]
|
|
87
|
+
}
|
|
88
|
+
});
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
**Backend (Express example):**
|
|
92
|
+
```typescript
|
|
93
|
+
import { InferenceHTTPClient } from '@roboflow/inference-sdk/api';
|
|
94
|
+
|
|
95
|
+
app.post('/api/init-webrtc', async (req, res) => {
|
|
96
|
+
const { offer, wrtcParams } = req.body;
|
|
97
|
+
|
|
98
|
+
// API key stays secure on the server
|
|
99
|
+
const client = InferenceHTTPClient.init({
|
|
100
|
+
apiKey: process.env.ROBOFLOW_API_KEY
|
|
101
|
+
});
|
|
102
|
+
|
|
103
|
+
const answer = await client.initializeWebrtcWorker({
|
|
104
|
+
offer,
|
|
105
|
+
workflowSpec: wrtcParams.workflowSpec,
|
|
106
|
+
workspaceName: wrtcParams.workspaceName,
|
|
107
|
+
workflowId: wrtcParams.workflowId,
|
|
108
|
+
config: {
|
|
109
|
+
imageInputName: wrtcParams.imageInputName,
|
|
110
|
+
streamOutputNames: wrtcParams.streamOutputNames,
|
|
111
|
+
dataOutputNames: wrtcParams.dataOutputNames,
|
|
112
|
+
threadPoolWorkers: wrtcParams.threadPoolWorkers
|
|
113
|
+
}
|
|
114
|
+
});
|
|
115
|
+
|
|
116
|
+
res.json(answer);
|
|
117
|
+
});
|
|
118
|
+
```
|
|
119
|
+
|
|
120
|
+
## API Reference
|
|
121
|
+
|
|
122
|
+
### WebRTC Functions
|
|
123
|
+
|
|
124
|
+
#### `useStream(params)`
|
|
125
|
+
|
|
126
|
+
Establishes a WebRTC connection for real-time video inference.
|
|
127
|
+
|
|
128
|
+
**Parameters:**
|
|
129
|
+
- `source: MediaStream` - Input video stream (from camera or other source)
|
|
130
|
+
- `connector: Connector` - Connection method (withApiKey or withProxyUrl)
|
|
131
|
+
- `wrtcParams: WebRTCParams` - Workflow configuration
|
|
132
|
+
- `workflowSpec?: WorkflowSpec` - Workflow specification object
|
|
133
|
+
- `workspaceName?: string` - Workspace name (alternative to workflowSpec)
|
|
134
|
+
- `workflowId?: string` - Workflow ID (alternative to workflowSpec)
|
|
135
|
+
- `imageInputName?: string` - Input image name (default: "image")
|
|
136
|
+
- `streamOutputNames?: string[]` - Output stream names
|
|
137
|
+
- `dataOutputNames?: string[]` - Output data names
|
|
138
|
+
- `threadPoolWorkers?: number` - Thread pool workers (default: 4)
|
|
139
|
+
- `onData?: (data: any) => void` - Callback for data output
|
|
140
|
+
- `options?: UseStreamOptions` - Additional options
|
|
141
|
+
|
|
142
|
+
**Returns:** `Promise<RFWebRTCConnection>`
|
|
143
|
+
|
|
144
|
+
### Connection Methods
|
|
145
|
+
|
|
146
|
+
#### `connection.remoteStream()`
|
|
147
|
+
|
|
148
|
+
Get the processed video stream from Roboflow.
|
|
149
|
+
|
|
150
|
+
**Returns:** `Promise<MediaStream>`
|
|
151
|
+
|
|
152
|
+
#### `connection.localStream()`
|
|
153
|
+
|
|
154
|
+
Get the local input video stream.
|
|
155
|
+
|
|
156
|
+
**Returns:** `MediaStream`
|
|
157
|
+
|
|
158
|
+
#### `connection.cleanup()`
|
|
159
|
+
|
|
160
|
+
Close the connection and clean up resources.
|
|
161
|
+
|
|
162
|
+
**Returns:** `Promise<void>`
|
|
163
|
+
|
|
164
|
+
#### `connection.reconfigureOutputs(config)`
|
|
165
|
+
|
|
166
|
+
Dynamically change stream and data outputs at runtime without restarting the connection.
|
|
167
|
+
|
|
168
|
+
**Parameters:**
|
|
169
|
+
- `config.streamOutput?: string[] | null` - Stream output names
|
|
170
|
+
- `undefined` or not provided: Unchanged
|
|
171
|
+
- `[]`: Auto-detect first valid image output
|
|
172
|
+
- `["output_name"]`: Use specified output
|
|
173
|
+
- `null`: Unchanged
|
|
174
|
+
- `config.dataOutput?: string[] | null` - Data output names
|
|
175
|
+
- `undefined` or not provided: Unchanged
|
|
176
|
+
- `[]`: Disable all data outputs
|
|
177
|
+
- `["output_name"]`: Use specified outputs
|
|
178
|
+
- `null`: Enable all data outputs
|
|
179
|
+
|
|
180
|
+
**Examples:**
|
|
181
|
+
```typescript
|
|
182
|
+
// Change to different stream output
|
|
183
|
+
connection.reconfigureOutputs({
|
|
184
|
+
streamOutput: ["annotated_image"]
|
|
185
|
+
});
|
|
186
|
+
|
|
187
|
+
// Enable all data outputs
|
|
188
|
+
connection.reconfigureOutputs({
|
|
189
|
+
dataOutput: null
|
|
190
|
+
});
|
|
191
|
+
|
|
192
|
+
// Disable all data outputs
|
|
193
|
+
connection.reconfigureOutputs({
|
|
194
|
+
dataOutput: []
|
|
195
|
+
});
|
|
196
|
+
|
|
197
|
+
// Change both at once
|
|
198
|
+
connection.reconfigureOutputs({
|
|
199
|
+
streamOutput: ["visualization"],
|
|
200
|
+
dataOutput: ["predictions", "metadata"]
|
|
201
|
+
});
|
|
202
|
+
```
|
|
203
|
+
|
|
204
|
+
### Camera Functions
|
|
205
|
+
|
|
206
|
+
#### `useCamera(constraints)`
|
|
207
|
+
|
|
208
|
+
Access device camera with specified constraints.
|
|
209
|
+
|
|
210
|
+
**Parameters:**
|
|
211
|
+
- `constraints: MediaStreamConstraints` - Media constraints
|
|
212
|
+
|
|
213
|
+
**Returns:** `Promise<MediaStream>`
|
|
214
|
+
|
|
215
|
+
#### `stopStream(stream)`
|
|
216
|
+
|
|
217
|
+
Stop a media stream and release camera.
|
|
218
|
+
|
|
219
|
+
**Parameters:**
|
|
220
|
+
- `stream: MediaStream` - Stream to stop
|
|
221
|
+
|
|
222
|
+
## When to Use This Package
|
|
223
|
+
|
|
224
|
+
### Use `@roboflow/inference-sdk` when:
|
|
225
|
+
- Building production web applications
|
|
226
|
+
- You need WebRTC streaming inference
|
|
227
|
+
- You want a smaller bundle size
|
|
228
|
+
- You're deploying to browsers
|
|
229
|
+
|
|
230
|
+
### Use the full `inferencejs` package when:
|
|
231
|
+
- You need local inference with TensorFlow.js
|
|
232
|
+
- You want to run models offline in the browser
|
|
233
|
+
- You need both local and hosted inference options
|
|
234
|
+
|
|
235
|
+
## Resources
|
|
236
|
+
|
|
237
|
+
- [Roboflow Documentation](https://docs.roboflow.com/)
|
|
238
|
+
- [API Authentication Guide](https://docs.roboflow.com/api-reference/authentication)
|
|
239
|
+
- [Workflows Documentation](https://docs.roboflow.com/workflows)
|
|
240
|
+
- [GitHub Repository](https://github.com/roboflow/inferencejs)
|
|
241
|
+
|
|
242
|
+
## License
|
|
243
|
+
|
|
244
|
+
See the main repository for license information.
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Roboflow Inference Client
|
|
3
|
+
*
|
|
4
|
+
* Lightweight client library for Roboflow's hosted inference API.
|
|
5
|
+
* Provides WebRTC streaming, HTTP client, and camera utilities.
|
|
6
|
+
*/
|
|
7
|
+
export * from './inference-api';
|
|
8
|
+
import * as webrtc from './webrtc';
|
|
9
|
+
import * as streams from './streams';
|
|
10
|
+
export { webrtc, streams };
|
|
11
|
+
//# sourceMappingURL=index.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAGH,cAAc,iBAAiB,CAAC;AAGhC,OAAO,KAAK,MAAM,MAAM,UAAU,CAAC;AACnC,OAAO,KAAK,OAAO,MAAM,WAAW,CAAC;AAErC,OAAO,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC"}
|
package/dist/index.es.js
ADDED
|
@@ -0,0 +1,455 @@
|
|
|
1
|
+
var C = Object.defineProperty;
|
|
2
|
+
var E = (r, e, t) => e in r ? C(r, e, { enumerable: !0, configurable: !0, writable: !0, value: t }) : r[e] = t;
|
|
3
|
+
var l = (r, e, t) => E(r, typeof e != "symbol" ? e + "" : e, t);
|
|
4
|
+
class y {
|
|
5
|
+
/**
|
|
6
|
+
* @private
|
|
7
|
+
* Use InferenceHTTPClient.init() instead
|
|
8
|
+
*/
|
|
9
|
+
constructor(e, t = "https://serverless.roboflow.com") {
|
|
10
|
+
l(this, "apiKey");
|
|
11
|
+
l(this, "serverUrl");
|
|
12
|
+
this.apiKey = e, this.serverUrl = t;
|
|
13
|
+
}
|
|
14
|
+
static init({ apiKey: e, serverUrl: t }) {
|
|
15
|
+
if (!e)
|
|
16
|
+
throw new Error("apiKey is required");
|
|
17
|
+
return new y(e, t);
|
|
18
|
+
}
|
|
19
|
+
/**
|
|
20
|
+
* Initialize a WebRTC worker pipeline
|
|
21
|
+
*
|
|
22
|
+
* @param params - Pipeline parameters
|
|
23
|
+
* @param params.offer - WebRTC offer { sdp, type }
|
|
24
|
+
* @param params.workflowSpec - Workflow specification
|
|
25
|
+
* @param params.config - Additional configuration
|
|
26
|
+
* @param params.config.imageInputName - Input image name (default: "image")
|
|
27
|
+
* @param params.config.streamOutputNames - Output stream names for video (default: [])
|
|
28
|
+
* @param params.config.dataOutputNames - Output data names (default: ["string"])
|
|
29
|
+
* @param params.config.threadPoolWorkers - Thread pool workers (default: 4)
|
|
30
|
+
* @returns Promise resolving to answer with SDP and pipeline ID
|
|
31
|
+
*
|
|
32
|
+
* @example
|
|
33
|
+
* ```typescript
|
|
34
|
+
* const answer = await client.initializeWebrtcWorker({
|
|
35
|
+
* offer: { sdp, type },
|
|
36
|
+
* workflowSpec: { ... },
|
|
37
|
+
* config: {
|
|
38
|
+
* imageInputName: "image",
|
|
39
|
+
* streamOutputNames: ["output_image"]
|
|
40
|
+
* }
|
|
41
|
+
* });
|
|
42
|
+
* ```
|
|
43
|
+
*/
|
|
44
|
+
async initializeWebrtcWorker({
|
|
45
|
+
offer: e,
|
|
46
|
+
workflowSpec: t,
|
|
47
|
+
workspaceName: a,
|
|
48
|
+
workflowId: n,
|
|
49
|
+
config: i = {}
|
|
50
|
+
}) {
|
|
51
|
+
if (!e || !e.sdp || !e.type)
|
|
52
|
+
throw new Error("offer with sdp and type is required");
|
|
53
|
+
const o = !!t, s = !!(a && n);
|
|
54
|
+
if (!o && !s)
|
|
55
|
+
throw new Error("Either workflowSpec OR (workspaceName + workflowId) is required");
|
|
56
|
+
if (o && s)
|
|
57
|
+
throw new Error("Provide either workflowSpec OR (workspaceName + workflowId), not both");
|
|
58
|
+
const {
|
|
59
|
+
imageInputName: d = "image",
|
|
60
|
+
streamOutputNames: u = [],
|
|
61
|
+
dataOutputNames: p = ["string"],
|
|
62
|
+
threadPoolWorkers: c = 4
|
|
63
|
+
} = i, h = {
|
|
64
|
+
type: "WorkflowConfiguration",
|
|
65
|
+
image_input_name: d,
|
|
66
|
+
workflows_thread_pool_workers: c,
|
|
67
|
+
cancel_thread_pool_tasks_on_exit: !0,
|
|
68
|
+
video_metadata_input_name: "video_metadata"
|
|
69
|
+
};
|
|
70
|
+
o ? h.workflow_specification = t : (h.workspace_name = a, h.workflow_id = n);
|
|
71
|
+
const g = {
|
|
72
|
+
workflow_configuration: h,
|
|
73
|
+
api_key: this.apiKey,
|
|
74
|
+
webrtc_realtime_processing: !0,
|
|
75
|
+
webrtc_offer: {
|
|
76
|
+
sdp: e.sdp,
|
|
77
|
+
type: e.type
|
|
78
|
+
},
|
|
79
|
+
webrtc_turn_config: null,
|
|
80
|
+
stream_output: u,
|
|
81
|
+
data_output: p
|
|
82
|
+
}, m = await fetch(`${this.serverUrl}/initialise_webrtc_worker`, {
|
|
83
|
+
method: "POST",
|
|
84
|
+
headers: { "Content-Type": "application/json" },
|
|
85
|
+
body: JSON.stringify(g)
|
|
86
|
+
});
|
|
87
|
+
if (!m.ok) {
|
|
88
|
+
const f = await m.text().catch(() => "");
|
|
89
|
+
throw new Error(`initialise_webrtc_worker failed (${m.status}): ${f}`);
|
|
90
|
+
}
|
|
91
|
+
return await m.json();
|
|
92
|
+
}
|
|
93
|
+
async terminatePipeline({ pipelineId: e }) {
|
|
94
|
+
if (!e)
|
|
95
|
+
throw new Error("pipelineId is required");
|
|
96
|
+
await fetch(
|
|
97
|
+
`${this.serverUrl}/inference_pipelines/${e}/terminate?api_key=${this.apiKey}`,
|
|
98
|
+
{
|
|
99
|
+
method: "POST",
|
|
100
|
+
headers: { "Content-Type": "application/json" }
|
|
101
|
+
}
|
|
102
|
+
);
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
const N = {
|
|
106
|
+
/**
|
|
107
|
+
* Create a connector that uses API key directly
|
|
108
|
+
*
|
|
109
|
+
* **WARNING**: If you use this in the frontend, it will expose your API key.
|
|
110
|
+
* Use only for demos/testing.
|
|
111
|
+
* For production, use withProxyUrl() with a backend proxy.
|
|
112
|
+
*
|
|
113
|
+
* @param apiKey - Roboflow API key
|
|
114
|
+
* @param options - Additional options
|
|
115
|
+
* @param options.serverUrl - Custom Roboflow server URL
|
|
116
|
+
* @returns Connector with connectWrtc method
|
|
117
|
+
*
|
|
118
|
+
* @example
|
|
119
|
+
* ```typescript
|
|
120
|
+
* const connector = connectors.withApiKey("your-api-key");
|
|
121
|
+
* const answer = await connector.connectWrtc(offer, wrtcParams);
|
|
122
|
+
* ```
|
|
123
|
+
*/
|
|
124
|
+
withApiKey(r, e = {}) {
|
|
125
|
+
const { serverUrl: t } = e;
|
|
126
|
+
return typeof window < "u" && console.warn(
|
|
127
|
+
"[Security Warning] Using API key directly in browser will expose it. Use connectors.withProxyUrl() for production. See: https://docs.roboflow.com/api-reference/authentication#securing-your-api-key"
|
|
128
|
+
), {
|
|
129
|
+
connectWrtc: async (a, n) => await y.init({ apiKey: r, serverUrl: t }).initializeWebrtcWorker({
|
|
130
|
+
offer: a,
|
|
131
|
+
workflowSpec: n.workflowSpec,
|
|
132
|
+
workspaceName: n.workspaceName,
|
|
133
|
+
workflowId: n.workflowId,
|
|
134
|
+
config: {
|
|
135
|
+
imageInputName: n.imageInputName,
|
|
136
|
+
streamOutputNames: n.streamOutputNames,
|
|
137
|
+
dataOutputNames: n.dataOutputNames,
|
|
138
|
+
threadPoolWorkers: n.threadPoolWorkers
|
|
139
|
+
}
|
|
140
|
+
}),
|
|
141
|
+
// Store apiKey for cleanup
|
|
142
|
+
_apiKey: r,
|
|
143
|
+
_serverUrl: t
|
|
144
|
+
};
|
|
145
|
+
},
|
|
146
|
+
/**
|
|
147
|
+
* Create a connector that uses a backend proxy (recommended for production)
|
|
148
|
+
*
|
|
149
|
+
* Your backend receives the offer and wrtcParams, adds the secret API key,
|
|
150
|
+
* and forwards to Roboflow. This keeps your API key secure.
|
|
151
|
+
*
|
|
152
|
+
* @param proxyUrl - Backend proxy endpoint URL
|
|
153
|
+
* @param options - Additional options (reserved for future use)
|
|
154
|
+
* @returns Connector with connectWrtc method
|
|
155
|
+
*
|
|
156
|
+
* @example
|
|
157
|
+
* ```typescript
|
|
158
|
+
* const connector = connectors.withProxyUrl('/api/init-webrtc');
|
|
159
|
+
* const answer = await connector.connectWrtc(offer, wrtcParams);
|
|
160
|
+
* ```
|
|
161
|
+
*
|
|
162
|
+
* @example
|
|
163
|
+
* Backend implementation (Express):
|
|
164
|
+
* ```typescript
|
|
165
|
+
* app.post('/api/init-webrtc', async (req, res) => {
|
|
166
|
+
* const { offer, wrtcParams } = req.body;
|
|
167
|
+
* const client = InferenceHTTPClient.init({
|
|
168
|
+
* apiKey: process.env.ROBOFLOW_API_KEY
|
|
169
|
+
* });
|
|
170
|
+
* const answer = await client.initializeWebrtcWorker({
|
|
171
|
+
* offer,
|
|
172
|
+
* workflowSpec: wrtcParams.workflowSpec,
|
|
173
|
+
* workspaceName: wrtcParams.workspaceName,
|
|
174
|
+
* workflowId: wrtcParams.workflowId,
|
|
175
|
+
* config: {
|
|
176
|
+
* imageInputName: wrtcParams.imageInputName,
|
|
177
|
+
* streamOutputNames: wrtcParams.streamOutputNames,
|
|
178
|
+
* dataOutputNames: wrtcParams.dataOutputNames,
|
|
179
|
+
* threadPoolWorkers: wrtcParams.threadPoolWorkers
|
|
180
|
+
* }
|
|
181
|
+
* });
|
|
182
|
+
* res.json(answer);
|
|
183
|
+
* });
|
|
184
|
+
* ```
|
|
185
|
+
*/
|
|
186
|
+
withProxyUrl(r, e = {}) {
|
|
187
|
+
return {
|
|
188
|
+
connectWrtc: async (t, a) => {
|
|
189
|
+
const n = await fetch(r, {
|
|
190
|
+
method: "POST",
|
|
191
|
+
headers: { "Content-Type": "application/json" },
|
|
192
|
+
body: JSON.stringify({
|
|
193
|
+
offer: t,
|
|
194
|
+
wrtcParams: a
|
|
195
|
+
})
|
|
196
|
+
});
|
|
197
|
+
if (!n.ok) {
|
|
198
|
+
const i = await n.text().catch(() => "");
|
|
199
|
+
throw new Error(`Proxy request failed (${n.status}): ${i}`);
|
|
200
|
+
}
|
|
201
|
+
return await n.json();
|
|
202
|
+
}
|
|
203
|
+
};
|
|
204
|
+
}
|
|
205
|
+
};
|
|
206
|
+
/**
 * `useCamera` (exported as such) — request a camera MediaStream.
 *
 * Tries the caller's constraints first; if that fails (e.g. an unsupported
 * facingMode), retries once with a plain { video: true, audio: false }
 * fallback. The fallback's failure, if any, propagates to the caller.
 *
 * @param r - MediaStreamConstraints (default { video: true })
 * @returns Promise<MediaStream>
 */
async function R(r = { video: !0 }) {
  try {
    console.log("[RFStreams] requesting with", r);
    const stream = await navigator.mediaDevices.getUserMedia(r);
    console.log("[RFStreams] got stream", stream.getVideoTracks().map((track) => ({ id: track.id, label: track.label })));
    return stream;
  } catch (err) {
    console.warn("[RFStreams] failed, falling back", err);
    const fallback = await navigator.mediaDevices.getUserMedia({ video: !0, audio: !1 });
    console.log("[RFStreams] fallback stream", fallback.getVideoTracks().map((track) => ({ id: track.id, label: track.label })));
    return fallback;
  }
}
|
|
217
|
+
function _(r) {
|
|
218
|
+
r && (r.getTracks().forEach((e) => e.stop()), console.log("[RFStreams] Stream stopped"));
|
|
219
|
+
}
|
|
220
|
+
// Frozen namespace object standing in for the `./streams` module; exposed
// below as the `streams` named export. The null prototype plus a "Module"
// Symbol.toStringTag mimic a real ES module namespace record.
const x = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  stopStream: _,
  useCamera: R
}, Symbol.toStringTag, { value: "Module" }));
|
|
225
|
+
async function T(r, e = 6e3) {
|
|
226
|
+
if (r.iceGatheringState === "complete") return;
|
|
227
|
+
let t = !1;
|
|
228
|
+
const a = (n) => {
|
|
229
|
+
n.candidate && n.candidate.type === "srflx" && (t = !0);
|
|
230
|
+
};
|
|
231
|
+
r.addEventListener("icecandidate", a);
|
|
232
|
+
try {
|
|
233
|
+
await Promise.race([
|
|
234
|
+
new Promise((n) => {
|
|
235
|
+
const i = () => {
|
|
236
|
+
r.iceGatheringState === "complete" && (r.removeEventListener("icegatheringstatechange", i), n());
|
|
237
|
+
};
|
|
238
|
+
r.addEventListener("icegatheringstatechange", i);
|
|
239
|
+
}),
|
|
240
|
+
new Promise((n, i) => {
|
|
241
|
+
setTimeout(() => {
|
|
242
|
+
t ? n() : (console.error("[ICE] timeout with NO srflx candidate! Connection may fail."), i(new Error("ICE gathering timeout without srflx candidate")));
|
|
243
|
+
}, e);
|
|
244
|
+
})
|
|
245
|
+
]);
|
|
246
|
+
} finally {
|
|
247
|
+
r.removeEventListener("icecandidate", a);
|
|
248
|
+
}
|
|
249
|
+
}
|
|
250
|
+
function W(r) {
|
|
251
|
+
return new Promise((e) => {
|
|
252
|
+
r.addEventListener("track", (t) => {
|
|
253
|
+
t.streams && t.streams[0] && e(t.streams[0]);
|
|
254
|
+
});
|
|
255
|
+
});
|
|
256
|
+
}
|
|
257
|
+
/**
 * Build an RTCPeerConnection for a Roboflow session and create the local
 * offer: a recvonly video transceiver for the processed return stream, the
 * caller's video tracks (contentHint "detail" where supported), a
 * "roboflow-control" data channel, and a completed ICE gathering wait (T).
 *
 * @param r - local MediaStream whose video tracks are sent upstream
 * @returns { pc, offer, remoteStreamPromise, dataChannel } where `offer` is
 *   pc.localDescription after ICE gathering
 */
async function O(r) {
  const stunUrl = "stun:stun.l.google.com:19302";
  const pc = new RTCPeerConnection({
    iceServers: [{ urls: [stunUrl] }]
  });
  try {
    // Ensure the SDP advertises a slot for the processed video coming back.
    pc.addTransceiver("video", { direction: "recvonly" });
  } catch (err) {
    console.warn("[RFWebRTC] Could not add transceiver:", err);
  }
  for (const track of r.getVideoTracks()) {
    try {
      // Hint the encoder to preserve detail; best-effort (not supported everywhere).
      track.contentHint = "detail";
    } catch {
    }
    pc.addTrack(track, r);
  }
  const remoteStreamPromise = W(pc);
  const dataChannel = pc.createDataChannel("roboflow-control", { ordered: !0 });
  const offer = await pc.createOffer();
  await pc.setLocalDescription(offer);
  await T(pc);
  return {
    pc: pc,
    offer: pc.localDescription,
    remoteStreamPromise: remoteStreamPromise,
    dataChannel: dataChannel
  };
}
|
|
283
|
+
async function P(r) {
|
|
284
|
+
const e = r.getSenders().find((a) => a.track && a.track.kind === "video");
|
|
285
|
+
if (!e) return;
|
|
286
|
+
const t = e.getParameters();
|
|
287
|
+
t.encodings = t.encodings || [{}], t.encodings[0].scaleResolutionDownBy = 1;
|
|
288
|
+
try {
|
|
289
|
+
await e.setParameters(t);
|
|
290
|
+
} catch (a) {
|
|
291
|
+
console.warn("[RFWebRTC] Failed to set encoding parameters:", a);
|
|
292
|
+
}
|
|
293
|
+
}
|
|
294
|
+
/**
 * RFWebRTCConnection (exported as such) — handle for an active WebRTC
 * inference session, produced by useStream(). Wraps the peer connection,
 * the local/remote streams, the control data channel, and (when available)
 * the server pipeline id used for cleanup.
 */
class k {
  /**
   * @private — constructed by useStream(); do not instantiate directly.
   * Params (positional): pc, localStream, remoteStreamPromise, pipelineId,
   * apiKey (null when a proxy connector was used), dataChannel, onData.
   * When `onData` is provided, data-channel messages are JSON-parsed and
   * passed to it; unparseable payloads are logged and forwarded raw.
   */
  constructor(e, t, a, n, i, o, s) {
    // Field declarations via the bundler's define-property helper.
    l(this, "pc");
    l(this, "_localStream");
    l(this, "remoteStreamPromise");
    l(this, "pipelineId");
    l(this, "apiKey");
    l(this, "dataChannel");
    // Listeners are only wired when an onData callback (s) was supplied;
    // "open"/"close" handlers are intentionally empty.
    this.pc = e, this._localStream = t, this.remoteStreamPromise = a, this.pipelineId = n, this.apiKey = i, this.dataChannel = o, s && (this.dataChannel.addEventListener("open", () => {
    }), this.dataChannel.addEventListener("message", (d) => {
      try {
        const u = JSON.parse(d.data);
        s(u);
      } catch (u) {
        // Non-JSON payload: log and hand the raw data to the callback.
        console.error("[RFWebRTC] Failed to parse data channel message:", u), s(d.data);
      }
    }), this.dataChannel.addEventListener("error", (d) => {
      console.error("[RFWebRTC] Data channel error:", d);
    }), this.dataChannel.addEventListener("close", () => {
    }));
  }
  /**
   * Processed video stream from Roboflow.
   * @returns Promise<MediaStream> — resolves when the first remote track arrives
   */
  async remoteStream() {
    return await this.remoteStreamPromise;
  }
  /**
   * Original local (camera) stream.
   * @returns MediaStream
   */
  localStream() {
    return this._localStream;
  }
  /**
   * Terminate the server pipeline (when pipelineId and apiKey are known),
   * close the peer connection, and stop the local stream's tracks.
   *
   * NOTE(review): the termination client is built with the default serverUrl;
   * a custom serverUrl from connectors.withApiKey() is not forwarded here —
   * confirm whether custom-server pipelines are terminated correctly.
   *
   * @returns Promise<void>
   */
  async cleanup() {
    this.pipelineId && this.apiKey && await y.init({ apiKey: this.apiKey }).terminatePipeline({ pipelineId: this.pipelineId }), this.pc && this.pc.connectionState !== "closed" && this.pc.close(), _(this._localStream);
  }
  /**
   * Reconfigure pipeline outputs at runtime over the data channel.
   *
   * Only fields that are present (not undefined) are sent; omitted fields are
   * left unchanged. Per the package README, for `streamOutput` an empty array
   * means auto-detect the first valid image output; for `dataOutput`, `[]`
   * disables all data outputs and `null` enables all of them. The value
   * semantics beyond "forwarded as stream_output/data_output" are
   * server-defined.
   *
   * @param e.streamOutput - string[] | null, optional
   * @param e.dataOutput - string[] | null, optional
   */
  reconfigureOutputs(e) {
    const t = {};
    e.streamOutput !== void 0 && (t.stream_output = e.streamOutput), e.dataOutput !== void 0 && (t.data_output = e.dataOutput), this.sendData(t);
  }
  /**
   * Send a payload through the control data channel.
   * Non-string payloads are JSON-stringified. If the channel is not open the
   * message is dropped with a warning; send failures are logged, not thrown.
   * @private
   */
  sendData(e) {
    if (this.dataChannel.readyState !== "open") {
      console.warn("[RFWebRTC] Data channel is not open. Current state:", this.dataChannel.readyState);
      return;
    }
    try {
      const t = typeof e == "string" ? e : JSON.stringify(e);
      this.dataChannel.send(t);
    } catch (t) {
      console.error("[RFWebRTC] Failed to send data:", t);
    }
  }
}
|
|
417
|
+
/**
 * `useStream` (exported as such) — establish a WebRTC inference session.
 *
 * Flow: build the peer connection and local offer (O), exchange SDP through
 * the connector, wait (max 30s) for the connection to reach "connected",
 * optionally pin the outbound resolution (P), then wrap everything in an
 * RFWebRTCConnection (k).
 *
 * Fix vs. previous version: the 30-second connection timeout timer was never
 * cleared, so after a successful connect a stale timer stayed pending (and
 * later fired a no-op reject on the settled promise). The timer is now armed
 * before the initial state probe and cleared on every settle path.
 *
 * @param source - local MediaStream to send upstream
 * @param connector - object with connectWrtc(offer, wrtcParams) (see connectors)
 * @param wrtcParams - workflow configuration forwarded to the connector
 * @param onData - optional callback for data-channel messages
 * @param options - options.disableInputStreamDownscaling: unless explicitly
 *   false, P() pins scaleResolutionDownBy to 1 on the video sender
 *   (NOTE(review): the option name reads inverted relative to this default —
 *   confirm intended semantics)
 * @returns Promise<RFWebRTCConnection>
 * @throws {Error} on a bad connector, an invalid server answer, connection
 *   failure, or the 30s timeout
 */
async function I({
  source: r,
  connector: e,
  wrtcParams: t,
  onData: a,
  options: n = {}
}) {
  if (!e || typeof e.connectWrtc != "function")
    throw new Error("connector must have a connectWrtc method");
  const localStream = r;
  const { pc, offer, remoteStreamPromise, dataChannel } = await O(localStream);
  const rawAnswer = await e.connectWrtc(
    { sdp: offer.sdp, type: offer.type },
    t
  );
  const answer = { sdp: rawAnswer == null ? void 0 : rawAnswer.sdp, type: rawAnswer == null ? void 0 : rawAnswer.type };
  if (!answer.sdp || !answer.type) {
    console.error("[RFWebRTC] Invalid answer from server:", rawAnswer);
    throw new Error("connector.connectWrtc must return answer with sdp and type");
  }
  // Pipeline id is optional in the answer; needed later for cleanup().
  const pipelineId = (rawAnswer && rawAnswer.context && rawAnswer.context.pipeline_id) || null;
  await pc.setRemoteDescription(answer);
  await new Promise((resolve, reject) => {
    let timer;
    const settle = (fn, value) => {
      clearTimeout(timer);
      pc.removeEventListener("connectionstatechange", onStateChange);
      fn(value);
    };
    const onStateChange = () => {
      if (pc.connectionState === "connected") {
        settle(resolve, void 0);
      } else if (pc.connectionState === "failed") {
        settle(reject, new Error("WebRTC connection failed"));
      }
    };
    // Arm the timer before probing so an already-connected pc clears it.
    timer = setTimeout(() => {
      pc.removeEventListener("connectionstatechange", onStateChange);
      reject(new Error("WebRTC connection timeout after 30s"));
    }, 3e4);
    pc.addEventListener("connectionstatechange", onStateChange);
    onStateChange();
  });
  if (n.disableInputStreamDownscaling !== !1) await P(pc);
  // Only the withApiKey connector carries _apiKey; proxy connectors leave it null.
  const apiKey = e._apiKey || null;
  return new k(pc, localStream, remoteStreamPromise, pipelineId, apiKey, dataChannel, a);
}
|
|
445
|
+
// Frozen namespace object standing in for the `./webrtc` module; exposed
// below as the `webrtc` named export. The null prototype plus a "Module"
// Symbol.toStringTag mimic a real ES module namespace record.
const D = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
  __proto__: null,
  RFWebRTCConnection: k,
  useStream: I
}, Symbol.toStringTag, { value: "Module" }));
// Public API surface of the bundle, re-exported under their documented names.
export {
  y as InferenceHTTPClient,
  N as connectors,
  x as streams,
  D as webrtc
};
|
package/dist/index.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
(function(c,i){typeof exports=="object"&&typeof module<"u"?i(exports):typeof define=="function"&&define.amd?define(["exports"],i):(c=typeof globalThis<"u"?globalThis:c||self,i(c.RoboflowClient={}))})(this,function(c){"use strict";var D=Object.defineProperty;var L=(c,i,f)=>i in c?D(c,i,{enumerable:!0,configurable:!0,writable:!0,value:f}):c[i]=f;var p=(c,i,f)=>L(c,typeof i!="symbol"?i+"":i,f);class i{constructor(t,e="https://serverless.roboflow.com"){p(this,"apiKey");p(this,"serverUrl");this.apiKey=t,this.serverUrl=e}static init({apiKey:t,serverUrl:e}){if(!t)throw new Error("apiKey is required");return new i(t,e)}async initializeWebrtcWorker({offer:t,workflowSpec:e,workspaceName:a,workflowId:n,config:s={}}){if(!t||!t.sdp||!t.type)throw new Error("offer with sdp and type is required");const o=!!e,l=!!(a&&n);if(!o&&!l)throw new Error("Either workflowSpec OR (workspaceName + workflowId) is required");if(o&&l)throw new Error("Provide either workflowSpec OR (workspaceName + workflowId), not both");const{imageInputName:u="image",streamOutputNames:m=[],dataOutputNames:h=["string"],threadPoolWorkers:d=4}=s,w={type:"WorkflowConfiguration",image_input_name:u,workflows_thread_pool_workers:d,cancel_thread_pool_tasks_on_exit:!0,video_metadata_input_name:"video_metadata"};o?w.workflow_specification=e:(w.workspace_name=a,w.workflow_id=n);const k={workflow_configuration:w,api_key:this.apiKey,webrtc_realtime_processing:!0,webrtc_offer:{sdp:t.sdp,type:t.type},webrtc_turn_config:null,stream_output:m,data_output:h},y=await fetch(`${this.serverUrl}/initialise_webrtc_worker`,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify(k)});if(!y.ok){const S=await y.text().catch(()=>"");throw new Error(`initialise_webrtc_worker failed (${y.status}): ${S}`)}return await y.json()}async terminatePipeline({pipelineId:t}){if(!t)throw new Error("pipelineId is required");await 
fetch(`${this.serverUrl}/inference_pipelines/${t}/terminate?api_key=${this.apiKey}`,{method:"POST",headers:{"Content-Type":"application/json"}})}}const f={withApiKey(r,t={}){const{serverUrl:e}=t;return typeof window<"u"&&console.warn("[Security Warning] Using API key directly in browser will expose it. Use connectors.withProxyUrl() for production. See: https://docs.roboflow.com/api-reference/authentication#securing-your-api-key"),{connectWrtc:async(a,n)=>await i.init({apiKey:r,serverUrl:e}).initializeWebrtcWorker({offer:a,workflowSpec:n.workflowSpec,workspaceName:n.workspaceName,workflowId:n.workflowId,config:{imageInputName:n.imageInputName,streamOutputNames:n.streamOutputNames,dataOutputNames:n.dataOutputNames,threadPoolWorkers:n.threadPoolWorkers}}),_apiKey:r,_serverUrl:e}},withProxyUrl(r,t={}){return{connectWrtc:async(e,a)=>{const n=await fetch(r,{method:"POST",headers:{"Content-Type":"application/json"},body:JSON.stringify({offer:e,wrtcParams:a})});if(!n.ok){const s=await n.text().catch(()=>"");throw new Error(`Proxy request failed (${n.status}): ${s}`)}return await n.json()}}}};async function C(r={video:!0}){try{console.log("[RFStreams] requesting with",r);const t=await navigator.mediaDevices.getUserMedia(r);return console.log("[RFStreams] got stream",t.getVideoTracks().map(e=>({id:e.id,label:e.label}))),t}catch(t){console.warn("[RFStreams] failed, falling back",t);const e=await navigator.mediaDevices.getUserMedia({video:!0,audio:!1});return console.log("[RFStreams] fallback stream",e.getVideoTracks().map(a=>({id:a.id,label:a.label}))),e}}function _(r){r&&(r.getTracks().forEach(t=>t.stop()),console.log("[RFStreams] Stream stopped"))}const T=Object.freeze(Object.defineProperty({__proto__:null,stopStream:_,useCamera:C},Symbol.toStringTag,{value:"Module"}));async function R(r,t=6e3){if(r.iceGatheringState==="complete")return;let e=!1;const a=n=>{n.candidate&&n.candidate.type==="srflx"&&(e=!0)};r.addEventListener("icecandidate",a);try{await Promise.race([new 
Promise(n=>{const s=()=>{r.iceGatheringState==="complete"&&(r.removeEventListener("icegatheringstatechange",s),n())};r.addEventListener("icegatheringstatechange",s)}),new Promise((n,s)=>{setTimeout(()=>{e?n():(console.error("[ICE] timeout with NO srflx candidate! Connection may fail."),s(new Error("ICE gathering timeout without srflx candidate")))},t)})])}finally{r.removeEventListener("icecandidate",a)}}function E(r){return new Promise(t=>{r.addEventListener("track",e=>{e.streams&&e.streams[0]&&t(e.streams[0])})})}async function O(r){const t="stun:stun.l.google.com:19302",e=new RTCPeerConnection({iceServers:[{urls:[t]}]});try{e.addTransceiver("video",{direction:"recvonly"})}catch(o){console.warn("[RFWebRTC] Could not add transceiver:",o)}r.getVideoTracks().forEach(o=>{try{o.contentHint="detail"}catch{}e.addTrack(o,r)});const a=E(e),n=e.createDataChannel("roboflow-control",{ordered:!0}),s=await e.createOffer();return await e.setLocalDescription(s),await R(e),{pc:e,offer:e.localDescription,remoteStreamPromise:a,dataChannel:n}}async function W(r){const t=r.getSenders().find(a=>a.track&&a.track.kind==="video");if(!t)return;const e=t.getParameters();e.encodings=e.encodings||[{}],e.encodings[0].scaleResolutionDownBy=1;try{await t.setParameters(e)}catch(a){console.warn("[RFWebRTC] Failed to set encoding parameters:",a)}}class b{constructor(t,e,a,n,s,o,l){p(this,"pc");p(this,"_localStream");p(this,"remoteStreamPromise");p(this,"pipelineId");p(this,"apiKey");p(this,"dataChannel");this.pc=t,this._localStream=e,this.remoteStreamPromise=a,this.pipelineId=n,this.apiKey=s,this.dataChannel=o,l&&(this.dataChannel.addEventListener("open",()=>{}),this.dataChannel.addEventListener("message",u=>{try{const m=JSON.parse(u.data);l(m)}catch(m){console.error("[RFWebRTC] Failed to parse data channel message:",m),l(u.data)}}),this.dataChannel.addEventListener("error",u=>{console.error("[RFWebRTC] Data channel error:",u)}),this.dataChannel.addEventListener("close",()=>{}))}async 
remoteStream(){return await this.remoteStreamPromise}localStream(){return this._localStream}async cleanup(){this.pipelineId&&this.apiKey&&await i.init({apiKey:this.apiKey}).terminatePipeline({pipelineId:this.pipelineId}),this.pc&&this.pc.connectionState!=="closed"&&this.pc.close(),_(this._localStream)}reconfigureOutputs(t){const e={};t.streamOutput!==void 0&&(e.stream_output=t.streamOutput),t.dataOutput!==void 0&&(e.data_output=t.dataOutput),this.sendData(e)}sendData(t){if(this.dataChannel.readyState!=="open"){console.warn("[RFWebRTC] Data channel is not open. Current state:",this.dataChannel.readyState);return}try{const e=typeof t=="string"?t:JSON.stringify(t);this.dataChannel.send(e)}catch(e){console.error("[RFWebRTC] Failed to send data:",e)}}}async function P({source:r,connector:t,wrtcParams:e,onData:a,options:n={}}){var S;if(!t||typeof t.connectWrtc!="function")throw new Error("connector must have a connectWrtc method");const s=r,{pc:o,offer:l,remoteStreamPromise:u,dataChannel:m}=await O(s),h=await t.connectWrtc({sdp:l.sdp,type:l.type},e),d={sdp:h.sdp,type:h.type};if(!(d!=null&&d.sdp)||!(d!=null&&d.type))throw console.error("[RFWebRTC] Invalid answer from server:",h),new Error("connector.connectWrtc must return answer with sdp and type");const w=((S=h==null?void 0:h.context)==null?void 0:S.pipeline_id)||null;await o.setRemoteDescription(d),await new Promise((N,v)=>{const g=()=>{o.connectionState==="connected"?(o.removeEventListener("connectionstatechange",g),N()):o.connectionState==="failed"&&(o.removeEventListener("connectionstatechange",g),v(new Error("WebRTC connection failed")))};o.addEventListener("connectionstatechange",g),g(),setTimeout(()=>{o.removeEventListener("connectionstatechange",g),v(new Error("WebRTC connection timeout after 30s"))},3e4)}),n.disableInputStreamDownscaling!==!1&&await W(o);const y=t._apiKey||null;return new b(o,s,u,w,y,m,a)}const 
I=Object.freeze(Object.defineProperty({__proto__:null,RFWebRTCConnection:b,useStream:P},Symbol.toStringTag,{value:"Module"}));c.InferenceHTTPClient=i,c.connectors=f,c.streams=T,c.webrtc=I,Object.defineProperty(c,Symbol.toStringTag,{value:"Module"})});
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
export interface WebRTCWorkerConfig {
|
|
2
|
+
imageInputName?: string;
|
|
3
|
+
streamOutputNames?: string[];
|
|
4
|
+
dataOutputNames?: string[];
|
|
5
|
+
threadPoolWorkers?: number;
|
|
6
|
+
}
|
|
7
|
+
export interface WebRTCOffer {
|
|
8
|
+
sdp: string;
|
|
9
|
+
type: string;
|
|
10
|
+
}
|
|
11
|
+
export type WorkflowSpec = Record<string, any>;
|
|
12
|
+
export interface WebRTCWorkerResponse {
|
|
13
|
+
status?: string;
|
|
14
|
+
sdp: string;
|
|
15
|
+
type: string;
|
|
16
|
+
context?: {
|
|
17
|
+
request_id: string | null;
|
|
18
|
+
pipeline_id: string | null;
|
|
19
|
+
};
|
|
20
|
+
}
|
|
21
|
+
export interface WebRTCParams {
|
|
22
|
+
workflowSpec?: WorkflowSpec;
|
|
23
|
+
workspaceName?: string;
|
|
24
|
+
workflowId?: string;
|
|
25
|
+
imageInputName?: string;
|
|
26
|
+
streamOutputNames?: string[];
|
|
27
|
+
dataOutputNames?: string[];
|
|
28
|
+
threadPoolWorkers?: number;
|
|
29
|
+
}
|
|
30
|
+
export interface Connector {
|
|
31
|
+
connectWrtc(offer: WebRTCOffer, wrtcParams: WebRTCParams): Promise<WebRTCWorkerResponse>;
|
|
32
|
+
_apiKey?: string;
|
|
33
|
+
_serverUrl?: string;
|
|
34
|
+
}
|
|
35
|
+
export declare class InferenceHTTPClient {
|
|
36
|
+
private apiKey;
|
|
37
|
+
private serverUrl;
|
|
38
|
+
/**
|
|
39
|
+
* @private
|
|
40
|
+
* Use InferenceHTTPClient.init() instead
|
|
41
|
+
*/
|
|
42
|
+
private constructor();
|
|
43
|
+
static init({ apiKey, serverUrl }: {
|
|
44
|
+
apiKey: string;
|
|
45
|
+
serverUrl?: string;
|
|
46
|
+
}): InferenceHTTPClient;
|
|
47
|
+
/**
|
|
48
|
+
* Initialize a WebRTC worker pipeline
|
|
49
|
+
*
|
|
50
|
+
* @param params - Pipeline parameters
|
|
51
|
+
* @param params.offer - WebRTC offer { sdp, type }
|
|
52
|
+
* @param params.workflowSpec - Workflow specification
|
|
53
|
+
* @param params.config - Additional configuration
|
|
54
|
+
* @param params.config.imageInputName - Input image name (default: "image")
|
|
55
|
+
* @param params.config.streamOutputNames - Output stream names for video (default: [])
|
|
56
|
+
* @param params.config.dataOutputNames - Output data names (default: ["string"])
|
|
57
|
+
* @param params.config.threadPoolWorkers - Thread pool workers (default: 4)
|
|
58
|
+
* @returns Promise resolving to answer with SDP and pipeline ID
|
|
59
|
+
*
|
|
60
|
+
* @example
|
|
61
|
+
* ```typescript
|
|
62
|
+
* const answer = await client.initializeWebrtcWorker({
|
|
63
|
+
* offer: { sdp, type },
|
|
64
|
+
* workflowSpec: { ... },
|
|
65
|
+
* config: {
|
|
66
|
+
* imageInputName: "image",
|
|
67
|
+
* streamOutputNames: ["output_image"]
|
|
68
|
+
* }
|
|
69
|
+
* });
|
|
70
|
+
* ```
|
|
71
|
+
*/
|
|
72
|
+
initializeWebrtcWorker({ offer, workflowSpec, workspaceName, workflowId, config }: {
|
|
73
|
+
offer: WebRTCOffer;
|
|
74
|
+
workflowSpec?: WorkflowSpec;
|
|
75
|
+
workspaceName?: string;
|
|
76
|
+
workflowId?: string;
|
|
77
|
+
config?: WebRTCWorkerConfig;
|
|
78
|
+
}): Promise<WebRTCWorkerResponse>;
|
|
79
|
+
terminatePipeline({ pipelineId }: {
|
|
80
|
+
pipelineId: string;
|
|
81
|
+
}): Promise<void>;
|
|
82
|
+
}
|
|
83
|
+
/**
|
|
84
|
+
* Connectors for establishing WebRTC connections to Roboflow
|
|
85
|
+
*/
|
|
86
|
+
export declare const connectors: {
|
|
87
|
+
/**
|
|
88
|
+
* Create a connector that uses API key directly
|
|
89
|
+
*
|
|
90
|
+
* **WARNING**: If you use this in the frontend, it will expose your API key.
|
|
91
|
+
* Use only for demos/testing.
|
|
92
|
+
* For production, use withProxyUrl() with a backend proxy.
|
|
93
|
+
*
|
|
94
|
+
* @param apiKey - Roboflow API key
|
|
95
|
+
* @param options - Additional options
|
|
96
|
+
* @param options.serverUrl - Custom Roboflow server URL
|
|
97
|
+
* @returns Connector with connectWrtc method
|
|
98
|
+
*
|
|
99
|
+
* @example
|
|
100
|
+
* ```typescript
|
|
101
|
+
* const connector = connectors.withApiKey("your-api-key");
|
|
102
|
+
* const answer = await connector.connectWrtc(offer, wrtcParams);
|
|
103
|
+
* ```
|
|
104
|
+
*/
|
|
105
|
+
withApiKey(apiKey: string, options?: {
|
|
106
|
+
serverUrl?: string;
|
|
107
|
+
}): Connector;
|
|
108
|
+
/**
|
|
109
|
+
* Create a connector that uses a backend proxy (recommended for production)
|
|
110
|
+
*
|
|
111
|
+
* Your backend receives the offer and wrtcParams, adds the secret API key,
|
|
112
|
+
* and forwards to Roboflow. This keeps your API key secure.
|
|
113
|
+
*
|
|
114
|
+
* @param proxyUrl - Backend proxy endpoint URL
|
|
115
|
+
* @param options - Additional options (reserved for future use)
|
|
116
|
+
* @returns Connector with connectWrtc method
|
|
117
|
+
*
|
|
118
|
+
* @example
|
|
119
|
+
* ```typescript
|
|
120
|
+
* const connector = connectors.withProxyUrl('/api/init-webrtc');
|
|
121
|
+
* const answer = await connector.connectWrtc(offer, wrtcParams);
|
|
122
|
+
* ```
|
|
123
|
+
*
|
|
124
|
+
* @example
|
|
125
|
+
* Backend implementation (Express):
|
|
126
|
+
* ```typescript
|
|
127
|
+
* app.post('/api/init-webrtc', async (req, res) => {
|
|
128
|
+
* const { offer, wrtcParams } = req.body;
|
|
129
|
+
* const client = InferenceHTTPClient.init({
|
|
130
|
+
* apiKey: process.env.ROBOFLOW_API_KEY
|
|
131
|
+
* });
|
|
132
|
+
* const answer = await client.initializeWebrtcWorker({
|
|
133
|
+
* offer,
|
|
134
|
+
* workflowSpec: wrtcParams.workflowSpec,
|
|
135
|
+
* workspaceName: wrtcParams.workspaceName,
|
|
136
|
+
* workflowId: wrtcParams.workflowId,
|
|
137
|
+
* config: {
|
|
138
|
+
* imageInputName: wrtcParams.imageInputName,
|
|
139
|
+
* streamOutputNames: wrtcParams.streamOutputNames,
|
|
140
|
+
* dataOutputNames: wrtcParams.dataOutputNames,
|
|
141
|
+
* threadPoolWorkers: wrtcParams.threadPoolWorkers
|
|
142
|
+
* }
|
|
143
|
+
* });
|
|
144
|
+
* res.json(answer);
|
|
145
|
+
* });
|
|
146
|
+
* ```
|
|
147
|
+
*/
|
|
148
|
+
withProxyUrl(proxyUrl: string, options?: Record<string, any>): Connector;
|
|
149
|
+
};
|
|
150
|
+
//# sourceMappingURL=inference-api.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"inference-api.d.ts","sourceRoot":"","sources":["../src/inference-api.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,kBAAkB;IACjC,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B;AAED,MAAM,WAAW,WAAW;IAC1B,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAE/C,MAAM,WAAW,oBAAoB;IACnC,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE;QACR,UAAU,EAAE,MAAM,GAAG,IAAI,CAAC;QAC1B,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;KAC5B,CAAC;CACH;AAED,MAAM,WAAW,YAAY;IAC3B,YAAY,CAAC,EAAE,YAAY,CAAC;IAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,iBAAiB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC7B,eAAe,CAAC,EAAE,MAAM,EAAE,CAAC;IAC3B,iBAAiB,CAAC,EAAE,MAAM,CAAC;CAC5B;AAED,MAAM,WAAW,SAAS;IACxB,WAAW,CAAC,KAAK,EAAE,WAAW,EAAE,UAAU,EAAE,YAAY,GAAG,OAAO,CAAC,oBAAoB,CAAC,CAAC;IACzF,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,qBAAa,mBAAmB;IAC9B,OAAO,CAAC,MAAM,CAAS;IACvB,OAAO,CAAC,SAAS,CAAS;IAE1B;;;OAGG;IACH,OAAO;IAKP,MAAM,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,SAAS,EAAE,EAAE;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAAG,mBAAmB;IAO/F;;;;;;;;;;;;;;;;;;;;;;;;OAwBG;IACG,sBAAsB,CAAC,EAC3B,KAAK,EACL,YAAY,EACZ,aAAa,EACb,UAAU,EACV,MAAW,EACZ,EAAE;QACD,KAAK,EAAE,WAAW,CAAC;QACnB,YAAY,CAAC,EAAE,YAAY,CAAC;QAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;QACvB,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,MAAM,CAAC,EAAE,kBAAkB,CAAC;KAC7B,GAAG,OAAO,CAAC,oBAAoB,CAAC;IAoE3B,iBAAiB,CAAC,EAAE,UAAU,EAAE,EAAE;QAAE,UAAU,EAAE,MAAM,CAAA;KAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CAa/E;AAED;;GAEG;AACH,eAAO,MAAM,UAAU;IACrB;;;;;;;;;;;;;;;;;OAiBG;uBACgB,MAAM,YAAW;QAAE,SAAS,CAAC,EAAE,MAAM,CAAA;KAAE,GAAQ,SAAS;IAsC3E;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAuCG;2BACoB,MAAM,YAAW,OAAO,MAAM,EAAE,GAAG,CAAC,GAAQ,SAAS;CAqB7E,CAAC"}
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Get a camera stream with the given constraints.
|
|
3
|
+
*
|
|
4
|
+
* @param constraints - MediaStreamConstraints for getUserMedia
|
|
5
|
+
* @returns Promise that resolves to MediaStream
|
|
6
|
+
*
|
|
7
|
+
* @example
|
|
8
|
+
* ```typescript
|
|
9
|
+
* const stream = await useCamera({
|
|
10
|
+
* video: {
|
|
11
|
+
* facingMode: { ideal: "user" },
|
|
12
|
+
* width: { ideal: 1280 },
|
|
13
|
+
* height: { ideal: 720 },
|
|
14
|
+
* frameRate: { ideal: 30 }
|
|
15
|
+
* },
|
|
16
|
+
* audio: false
|
|
17
|
+
* });
|
|
18
|
+
* ```
|
|
19
|
+
*/
|
|
20
|
+
export declare function useCamera(constraints?: MediaStreamConstraints): Promise<MediaStream>;
|
|
21
|
+
export declare function stopStream(stream: MediaStream | null | undefined): void;
|
|
22
|
+
//# sourceMappingURL=streams.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"streams.d.ts","sourceRoot":"","sources":["../src/streams.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;GAkBG;AACH,wBAAsB,SAAS,CAAC,WAAW,GAAE,sBAAwC,GAAG,OAAO,CAAC,WAAW,CAAC,CAY3G;AAED,wBAAgB,UAAU,CAAC,MAAM,EAAE,WAAW,GAAG,IAAI,GAAG,SAAS,GAAG,IAAI,CAKvE"}
|
package/dist/webrtc.d.ts
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
import { Connector, WebRTCParams } from "./inference-api";
|
|
2
|
+
export interface UseStreamOptions {
|
|
3
|
+
disableInputStreamDownscaling?: boolean;
|
|
4
|
+
}
|
|
5
|
+
export interface UseStreamParams {
|
|
6
|
+
source: MediaStream;
|
|
7
|
+
connector: Connector;
|
|
8
|
+
wrtcParams: WebRTCParams;
|
|
9
|
+
onData?: (data: any) => void;
|
|
10
|
+
options?: UseStreamOptions;
|
|
11
|
+
}
|
|
12
|
+
/**
|
|
13
|
+
* WebRTC Connection object
|
|
14
|
+
*
|
|
15
|
+
* Represents an active WebRTC connection to Roboflow for streaming inference.
|
|
16
|
+
*/
|
|
17
|
+
export declare class RFWebRTCConnection {
|
|
18
|
+
private pc;
|
|
19
|
+
private _localStream;
|
|
20
|
+
private remoteStreamPromise;
|
|
21
|
+
private pipelineId;
|
|
22
|
+
private apiKey;
|
|
23
|
+
private dataChannel;
|
|
24
|
+
/** @private */
|
|
25
|
+
constructor(pc: RTCPeerConnection, localStream: MediaStream, remoteStreamPromise: Promise<MediaStream>, pipelineId: string | null, apiKey: string | null, dataChannel: RTCDataChannel, onData?: (data: any) => void);
|
|
26
|
+
/**
|
|
27
|
+
* Get the remote stream (processed video from Roboflow)
|
|
28
|
+
*
|
|
29
|
+
* @returns Promise resolving to the remote MediaStream
|
|
30
|
+
*
|
|
31
|
+
* @example
|
|
32
|
+
* ```typescript
|
|
33
|
+
* const conn = await useStream({ ... });
|
|
34
|
+
* const remoteStream = await conn.remoteStream();
|
|
35
|
+
* videoElement.srcObject = remoteStream;
|
|
36
|
+
* ```
|
|
37
|
+
*/
|
|
38
|
+
remoteStream(): Promise<MediaStream>;
|
|
39
|
+
/**
|
|
40
|
+
* Get the local stream (original camera)
|
|
41
|
+
*
|
|
42
|
+
* @returns The local MediaStream
|
|
43
|
+
*
|
|
44
|
+
* @example
|
|
45
|
+
* ```typescript
|
|
46
|
+
* const conn = await useStream({ ... });
|
|
47
|
+
* const localStream = conn.localStream();
|
|
48
|
+
* videoElement.srcObject = localStream;
|
|
49
|
+
* ```
|
|
50
|
+
*/
|
|
51
|
+
localStream(): MediaStream;
|
|
52
|
+
/**
|
|
53
|
+
* Cleanup and close connection
|
|
54
|
+
*
|
|
55
|
+
* Terminates the pipeline on Roboflow, closes the peer connection,
|
|
56
|
+
* and stops the local media stream.
|
|
57
|
+
*
|
|
58
|
+
* @returns Promise that resolves when cleanup is complete
|
|
59
|
+
*
|
|
60
|
+
* @example
|
|
61
|
+
* ```typescript
|
|
62
|
+
* const conn = await useStream({ ... });
|
|
63
|
+
* // ... use connection ...
|
|
64
|
+
* await conn.cleanup(); // Clean up when done
|
|
65
|
+
* ```
|
|
66
|
+
*/
|
|
67
|
+
cleanup(): Promise<void>;
|
|
68
|
+
/**
|
|
69
|
+
* Reconfigure pipeline outputs at runtime
|
|
70
|
+
*
|
|
71
|
+
* Dynamically change stream and data outputs without restarting the connection.
|
|
72
|
+
* Leave a field `undefined` (omit it) to keep it unchanged, set it to `null` to enable all outputs,
|
|
73
|
+
* or to `[]` to disable/auto-detect.
|
|
74
|
+
*
|
|
75
|
+
* @param config - Output configuration
|
|
76
|
+
* @param config.streamOutput - Stream output names (undefined = unchanged, [] = auto-detect, ["name"] = specific output)
|
|
77
|
+
* @param config.dataOutput - Data output names (undefined = unchanged, [] = disable, ["name"] = specific outputs, null = all outputs)
|
|
78
|
+
*
|
|
79
|
+
* @example
|
|
80
|
+
* ```typescript
|
|
81
|
+
* // Change to different stream output
|
|
82
|
+
* connection.reconfigureOutputs({
|
|
83
|
+
* streamOutput: ["annotated_image"],
|
|
84
|
+
* dataOutput: null // unchanged
|
|
85
|
+
* });
|
|
86
|
+
*
|
|
87
|
+
* // Enable all data outputs
|
|
88
|
+
* connection.reconfigureOutputs({
|
|
89
|
+
* streamOutput: null, // unchanged
|
|
90
|
+
* dataOutput: null // null value = all outputs
|
|
91
|
+
* });
|
|
92
|
+
*
|
|
93
|
+
* // Disable all data outputs
|
|
94
|
+
* connection.reconfigureOutputs({
|
|
95
|
+
* streamOutput: null, // unchanged
|
|
96
|
+
* dataOutput: [] // empty array = disable
|
|
97
|
+
* });
|
|
98
|
+
* ```
|
|
99
|
+
*/
|
|
100
|
+
reconfigureOutputs(config: {
|
|
101
|
+
streamOutput?: string[] | null;
|
|
102
|
+
dataOutput?: string[] | null;
|
|
103
|
+
}): void;
|
|
104
|
+
/**
|
|
105
|
+
* Send data through the data channel
|
|
106
|
+
* @private
|
|
107
|
+
*/
|
|
108
|
+
private sendData;
|
|
109
|
+
}
|
|
110
|
+
/**
|
|
111
|
+
* Main function to establish WebRTC streaming connection
|
|
112
|
+
*
|
|
113
|
+
* Creates a WebRTC connection to Roboflow for real-time inference on video streams.
|
|
114
|
+
*
|
|
115
|
+
* @param params - Connection parameters
|
|
116
|
+
* @returns Promise resolving to RFWebRTCConnection
|
|
117
|
+
*
|
|
118
|
+
* @example
|
|
119
|
+
* ```typescript
|
|
120
|
+
* import { useStream } from '@roboflow/inference-sdk/webrtc';
|
|
121
|
+
* import { connectors } from '@roboflow/inference-sdk/api';
|
|
122
|
+
* import { useCamera } from '@roboflow/inference-sdk/streams';
|
|
123
|
+
*
|
|
124
|
+
* const connector = connectors.withApiKey("your-api-key");
|
|
125
|
+
* const stream = await useCamera({ video: { facingMode: { ideal: "environment" } } });
|
|
126
|
+
* const conn = await useStream({
|
|
127
|
+
* source: stream,
|
|
128
|
+
* connector,
|
|
129
|
+
* wrtcParams: {
|
|
130
|
+
* workflowSpec: {
|
|
131
|
+
* // Your workflow specification
|
|
132
|
+
* },
|
|
133
|
+
* imageInputName: "image",
|
|
134
|
+
* streamOutputNames: ["output"],
|
|
135
|
+
* dataOutputNames: ["predictions"]
|
|
136
|
+
* },
|
|
137
|
+
* onData: (data) => {
|
|
138
|
+
* console.log("Inference results:", data);
|
|
139
|
+
* }
|
|
140
|
+
* });
|
|
141
|
+
*
|
|
142
|
+
* const remoteStream = await conn.remoteStream();
|
|
143
|
+
* videoElement.srcObject = remoteStream;
|
|
144
|
+
* ```
|
|
145
|
+
*/
|
|
146
|
+
export declare function useStream({ source, connector, wrtcParams, onData, options }: UseStreamParams): Promise<RFWebRTCConnection>;
|
|
147
|
+
//# sourceMappingURL=webrtc.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"webrtc.d.ts","sourceRoot":"","sources":["../src/webrtc.ts"],"names":[],"mappings":"AACA,OAAO,EAAuB,SAAS,EAAE,YAAY,EAAE,MAAM,iBAAiB,CAAC;AAG/E,MAAM,WAAW,gBAAgB;IAC/B,6BAA6B,CAAC,EAAE,OAAO,CAAC;CACzC;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,WAAW,CAAC;IACpB,SAAS,EAAE,SAAS,CAAC;IACrB,UAAU,EAAE,YAAY,CAAC;IACzB,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAC7B,OAAO,CAAC,EAAE,gBAAgB,CAAC;CAC5B;AA4HD;;;;GAIG;AACH,qBAAa,kBAAkB;IAC7B,OAAO,CAAC,EAAE,CAAoB;IAC9B,OAAO,CAAC,YAAY,CAAc;IAClC,OAAO,CAAC,mBAAmB,CAAuB;IAClD,OAAO,CAAC,UAAU,CAAgB;IAClC,OAAO,CAAC,MAAM,CAAgB;IAC9B,OAAO,CAAC,WAAW,CAAiB;IAEpC,eAAe;gBAEb,EAAE,EAAE,iBAAiB,EACrB,WAAW,EAAE,WAAW,EACxB,mBAAmB,EAAE,OAAO,CAAC,WAAW,CAAC,EACzC,UAAU,EAAE,MAAM,GAAG,IAAI,EACzB,MAAM,EAAE,MAAM,GAAG,IAAI,EACrB,WAAW,EAAE,cAAc,EAC3B,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,GAAG,KAAK,IAAI;IAmC9B;;;;;;;;;;;OAWG;IACG,YAAY,IAAI,OAAO,CAAC,WAAW,CAAC;IAI1C;;;;;;;;;;;OAWG;IACH,WAAW,IAAI,WAAW;IAI1B;;;;;;;;;;;;;;OAcG;IACG,OAAO,IAAI,OAAO,CAAC,IAAI,CAAC;IAgB9B;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OA+BG;IACH,kBAAkB,CAAC,MAAM,EAAE;QAAE,YAAY,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC;QAAC,UAAU,CAAC,EAAE,MAAM,EAAE,GAAG,IAAI,CAAA;KAAE,GAAG,IAAI;IAclG;;;OAGG;IACH,OAAO,CAAC,QAAQ;CAajB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAmCG;AACH,wBAAsB,SAAS,CAAC,EAC9B,MAAM,EACN,SAAS,EACT,UAAU,EACV,MAAM,EACN,OAAY,EACb,EAAE,eAAe,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAkE/C"}
|
package/package.json
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@roboflow/inference-sdk",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "Lightweight client for Roboflow's hosted inference API with WebRTC streaming support",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"roboflow",
|
|
7
|
+
"computer-vision",
|
|
8
|
+
"webrtc",
|
|
9
|
+
"inference",
|
|
10
|
+
"object-detection"
|
|
11
|
+
],
|
|
12
|
+
"files": [
|
|
13
|
+
"dist"
|
|
14
|
+
],
|
|
15
|
+
"main": "./dist/index.js",
|
|
16
|
+
"module": "./dist/index.es.js",
|
|
17
|
+
"types": "./dist/index.d.ts",
|
|
18
|
+
"sideEffects": false,
|
|
19
|
+
"exports": {
|
|
20
|
+
".": {
|
|
21
|
+
"import": "./dist/index.es.js",
|
|
22
|
+
"require": "./dist/index.js",
|
|
23
|
+
"types": "./dist/index.d.ts"
|
|
24
|
+
}
|
|
25
|
+
},
|
|
26
|
+
"scripts": {
|
|
27
|
+
"dev": "vite",
|
|
28
|
+
"build": "vite build",
|
|
29
|
+
"preview": "vite preview",
|
|
30
|
+
"clean": "rm -rf dist"
|
|
31
|
+
},
|
|
32
|
+
"author": "Roboflow",
|
|
33
|
+
"license": "ISC",
|
|
34
|
+
"repository": {
|
|
35
|
+
"type": "git",
|
|
36
|
+
"url": "https://github.com/roboflow/inference-sdk.git"
|
|
37
|
+
},
|
|
38
|
+
"devDependencies": {
|
|
39
|
+
"typescript": "^5.3.3",
|
|
40
|
+
"vite": "^5.3.3",
|
|
41
|
+
"vite-plugin-dts": "^3.7.2"
|
|
42
|
+
},
|
|
43
|
+
"dependencies": {}
|
|
44
|
+
}
|