rtmlib-ts 0.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.gitattributes +1 -0
- package/README.md +202 -0
- package/dist/core/base.d.ts +20 -0
- package/dist/core/base.d.ts.map +1 -0
- package/dist/core/base.js +40 -0
- package/dist/core/file.d.ts +11 -0
- package/dist/core/file.d.ts.map +1 -0
- package/dist/core/file.js +111 -0
- package/dist/core/modelCache.d.ts +35 -0
- package/dist/core/modelCache.d.ts.map +1 -0
- package/dist/core/modelCache.js +161 -0
- package/dist/core/posePostprocessing.d.ts +12 -0
- package/dist/core/posePostprocessing.d.ts.map +1 -0
- package/dist/core/posePostprocessing.js +76 -0
- package/dist/core/postprocessing.d.ts +10 -0
- package/dist/core/postprocessing.d.ts.map +1 -0
- package/dist/core/postprocessing.js +70 -0
- package/dist/core/preprocessing.d.ts +14 -0
- package/dist/core/preprocessing.d.ts.map +1 -0
- package/dist/core/preprocessing.js +79 -0
- package/dist/index.d.ts +27 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +31 -0
- package/dist/models/rtmpose.d.ts +25 -0
- package/dist/models/rtmpose.d.ts.map +1 -0
- package/dist/models/rtmpose.js +185 -0
- package/dist/models/rtmpose3d.d.ts +28 -0
- package/dist/models/rtmpose3d.d.ts.map +1 -0
- package/dist/models/rtmpose3d.js +184 -0
- package/dist/models/yolo12.d.ts +23 -0
- package/dist/models/yolo12.d.ts.map +1 -0
- package/dist/models/yolo12.js +165 -0
- package/dist/models/yolox.d.ts +18 -0
- package/dist/models/yolox.d.ts.map +1 -0
- package/dist/models/yolox.js +167 -0
- package/dist/solution/animalDetector.d.ts +229 -0
- package/dist/solution/animalDetector.d.ts.map +1 -0
- package/dist/solution/animalDetector.js +663 -0
- package/dist/solution/body.d.ts +16 -0
- package/dist/solution/body.d.ts.map +1 -0
- package/dist/solution/body.js +52 -0
- package/dist/solution/bodyWithFeet.d.ts +16 -0
- package/dist/solution/bodyWithFeet.d.ts.map +1 -0
- package/dist/solution/bodyWithFeet.js +52 -0
- package/dist/solution/customDetector.d.ts +137 -0
- package/dist/solution/customDetector.d.ts.map +1 -0
- package/dist/solution/customDetector.js +342 -0
- package/dist/solution/hand.d.ts +14 -0
- package/dist/solution/hand.d.ts.map +1 -0
- package/dist/solution/hand.js +20 -0
- package/dist/solution/index.d.ts +10 -0
- package/dist/solution/index.d.ts.map +1 -0
- package/dist/solution/index.js +9 -0
- package/dist/solution/objectDetector.d.ts +172 -0
- package/dist/solution/objectDetector.d.ts.map +1 -0
- package/dist/solution/objectDetector.js +606 -0
- package/dist/solution/pose3dDetector.d.ts +145 -0
- package/dist/solution/pose3dDetector.d.ts.map +1 -0
- package/dist/solution/pose3dDetector.js +611 -0
- package/dist/solution/poseDetector.d.ts +198 -0
- package/dist/solution/poseDetector.d.ts.map +1 -0
- package/dist/solution/poseDetector.js +622 -0
- package/dist/solution/poseTracker.d.ts +22 -0
- package/dist/solution/poseTracker.d.ts.map +1 -0
- package/dist/solution/poseTracker.js +106 -0
- package/dist/solution/wholebody.d.ts +19 -0
- package/dist/solution/wholebody.d.ts.map +1 -0
- package/dist/solution/wholebody.js +82 -0
- package/dist/solution/wholebody3d.d.ts +22 -0
- package/dist/solution/wholebody3d.d.ts.map +1 -0
- package/dist/solution/wholebody3d.js +75 -0
- package/dist/types/index.d.ts +52 -0
- package/dist/types/index.d.ts.map +1 -0
- package/dist/types/index.js +5 -0
- package/dist/visualization/draw.d.ts +57 -0
- package/dist/visualization/draw.d.ts.map +1 -0
- package/dist/visualization/draw.js +400 -0
- package/dist/visualization/skeleton/coco133.d.ts +350 -0
- package/dist/visualization/skeleton/coco133.d.ts.map +1 -0
- package/dist/visualization/skeleton/coco133.js +120 -0
- package/dist/visualization/skeleton/coco17.d.ts +180 -0
- package/dist/visualization/skeleton/coco17.d.ts.map +1 -0
- package/dist/visualization/skeleton/coco17.js +48 -0
- package/dist/visualization/skeleton/halpe26.d.ts +278 -0
- package/dist/visualization/skeleton/halpe26.d.ts.map +1 -0
- package/dist/visualization/skeleton/halpe26.js +70 -0
- package/dist/visualization/skeleton/hand21.d.ts +196 -0
- package/dist/visualization/skeleton/hand21.d.ts.map +1 -0
- package/dist/visualization/skeleton/hand21.js +51 -0
- package/dist/visualization/skeleton/index.d.ts +10 -0
- package/dist/visualization/skeleton/index.d.ts.map +1 -0
- package/dist/visualization/skeleton/index.js +9 -0
- package/dist/visualization/skeleton/openpose134.d.ts +357 -0
- package/dist/visualization/skeleton/openpose134.d.ts.map +1 -0
- package/dist/visualization/skeleton/openpose134.js +116 -0
- package/dist/visualization/skeleton/openpose18.d.ts +177 -0
- package/dist/visualization/skeleton/openpose18.d.ts.map +1 -0
- package/dist/visualization/skeleton/openpose18.js +47 -0
- package/docs/ANIMAL_DETECTOR.md +450 -0
- package/docs/CUSTOM_DETECTOR.md +568 -0
- package/docs/OBJECT_DETECTOR.md +373 -0
- package/docs/POSE3D_DETECTOR.md +458 -0
- package/docs/POSE_DETECTOR.md +442 -0
- package/examples/README.md +119 -0
- package/examples/index.html +746 -0
- package/package.json +51 -0
- package/playground/README.md +114 -0
- package/playground/app/favicon.ico +0 -0
- package/playground/app/globals.css +17 -0
- package/playground/app/layout.tsx +19 -0
- package/playground/app/page.tsx +1338 -0
- package/playground/eslint.config.mjs +18 -0
- package/playground/next.config.ts +34 -0
- package/playground/package-lock.json +6723 -0
- package/playground/package.json +27 -0
- package/playground/postcss.config.mjs +7 -0
- package/playground/tsconfig.json +34 -0
- package/src/core/base.ts +66 -0
- package/src/core/file.ts +141 -0
- package/src/core/modelCache.ts +189 -0
- package/src/core/posePostprocessing.ts +91 -0
- package/src/core/postprocessing.ts +93 -0
- package/src/core/preprocessing.ts +127 -0
- package/src/index.ts +69 -0
- package/src/models/rtmpose.ts +265 -0
- package/src/models/rtmpose3d.ts +289 -0
- package/src/models/yolo12.ts +220 -0
- package/src/models/yolox.ts +214 -0
- package/src/solution/animalDetector.ts +955 -0
- package/src/solution/body.ts +89 -0
- package/src/solution/bodyWithFeet.ts +89 -0
- package/src/solution/customDetector.ts +474 -0
- package/src/solution/hand.ts +52 -0
- package/src/solution/index.ts +10 -0
- package/src/solution/objectDetector.ts +816 -0
- package/src/solution/pose3dDetector.ts +890 -0
- package/src/solution/poseDetector.ts +892 -0
- package/src/solution/poseTracker.ts +172 -0
- package/src/solution/wholebody.ts +130 -0
- package/src/solution/wholebody3d.ts +125 -0
- package/src/types/index.ts +62 -0
- package/src/visualization/draw.ts +543 -0
- package/src/visualization/skeleton/coco133.ts +131 -0
- package/src/visualization/skeleton/coco17.ts +49 -0
- package/src/visualization/skeleton/halpe26.ts +71 -0
- package/src/visualization/skeleton/hand21.ts +52 -0
- package/src/visualization/skeleton/index.ts +10 -0
- package/src/visualization/skeleton/openpose134.ts +125 -0
- package/src/visualization/skeleton/openpose18.ts +48 -0
- package/tsconfig.json +32 -0
package/.gitattributes
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"models/rtmpose/end2end.onnx" filter=lfs diff=lfs merge=lfs -text
|
package/README.md
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
1
|
+
# rtmlib-ts
|
|
2
|
+
|
|
3
|
+
**Real-time Multi-Person Pose Estimation & Object Detection Library**
|
|
4
|
+
|
|
5
|
+
TypeScript port of [rtmlib](https://github.com/Tau-J/rtmlib) with YOLO12 support for browser-based AI inference.
|
|
6
|
+
|
|
7
|
+
## 🚀 Features
|
|
8
|
+
|
|
9
|
+
- 🎯 **Object Detection** - 80 COCO classes with YOLO12n
|
|
10
|
+
- 🧘 **Pose Estimation** - 17 keypoints skeleton tracking
|
|
11
|
+
- 📹 **Video Support** - Real-time camera & video file processing
|
|
12
|
+
- 🌐 **Browser-based** - Pure WebAssembly, no backend required
|
|
13
|
+
- ⚡ **Fast** - Optimized for ~200ms inference (416×416)
|
|
14
|
+
- 🎨 **Beautiful UI** - Modern gradient design
|
|
15
|
+
|
|
16
|
+
## 📦 Installation
|
|
17
|
+
|
|
18
|
+
```bash
|
|
19
|
+
npm install rtmlib-ts
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
## 🎮 Quick Start
|
|
23
|
+
|
|
24
|
+
### 1. Try the Playground
|
|
25
|
+
|
|
26
|
+
```bash
|
|
27
|
+
cd playground
|
|
28
|
+
npm install
|
|
29
|
+
npm run dev
|
|
30
|
+
|
|
31
|
+
# Open http://localhost:3000
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
### 2. Object Detection
|
|
35
|
+
|
|
36
|
+
```typescript
|
|
37
|
+
import { ObjectDetector, drawResultsOnCanvas } from 'rtmlib-ts';
|
|
38
|
+
|
|
39
|
+
// Initialize
|
|
40
|
+
const detector = new ObjectDetector({
|
|
41
|
+
model: 'https://huggingface.co/demon2233/rtmlib-ts/resolve/main/yolo/yolov12n.onnx',
|
|
42
|
+
classes: ['person', 'car', 'dog'], // Filter classes or null for all
|
|
43
|
+
confidence: 0.5,
|
|
44
|
+
inputSize: [416, 416], // 416 for speed, 640 for accuracy
|
|
45
|
+
backend: 'wasm',
|
|
46
|
+
});
|
|
47
|
+
|
|
48
|
+
await detector.init();
|
|
49
|
+
|
|
50
|
+
// Detect from canvas
|
|
51
|
+
const canvas = document.getElementById('canvas') as HTMLCanvasElement;
|
|
52
|
+
const results = await detector.detectFromCanvas(canvas);
|
|
53
|
+
|
|
54
|
+
// Draw results
|
|
55
|
+
const ctx = canvas.getContext('2d')!;
|
|
56
|
+
drawResultsOnCanvas(ctx, results, 'object');
|
|
57
|
+
|
|
58
|
+
console.log(`Found ${results.length} objects`);
|
|
59
|
+
results.forEach(obj => {
|
|
60
|
+
console.log(`${obj.className}: ${(obj.confidence * 100).toFixed(1)}%`);
|
|
61
|
+
});
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
### 3. Pose Estimation
|
|
65
|
+
|
|
66
|
+
```typescript
|
|
67
|
+
import { PoseDetector, drawResultsOnCanvas } from 'rtmlib-ts';
|
|
68
|
+
|
|
69
|
+
// Initialize
|
|
70
|
+
const poseDetector = new PoseDetector({
|
|
71
|
+
detModel: 'https://huggingface.co/demon2233/rtmlib-ts/resolve/main/yolo/yolov12n.onnx',
|
|
72
|
+
poseModel: 'https://huggingface.co/demon2233/rtmlib-ts/resolve/main/rtmpose/end2end.onnx',
|
|
73
|
+
detInputSize: [416, 416],
|
|
74
|
+
detConfidence: 0.5,
|
|
75
|
+
poseConfidence: 0.3,
|
|
76
|
+
backend: 'wasm',
|
|
77
|
+
});
|
|
78
|
+
|
|
79
|
+
await poseDetector.init();
|
|
80
|
+
|
|
81
|
+
// Detect poses
|
|
82
|
+
const results = await poseDetector.detectFromCanvas(canvas);
|
|
83
|
+
|
|
84
|
+
// Draw skeleton
|
|
85
|
+
const ctx = canvas.getContext('2d')!;
|
|
86
|
+
drawResultsOnCanvas(ctx, results, 'pose');
|
|
87
|
+
|
|
88
|
+
console.log(`Found ${results.length} people`);
|
|
89
|
+
results.forEach(person => {
|
|
90
|
+
const visibleKpts = person.keypoints.filter(k => k.visible).length;
|
|
91
|
+
console.log(`Person: ${visibleKpts}/17 keypoints visible`);
|
|
92
|
+
});
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
### 4. Real-time Video
|
|
96
|
+
|
|
97
|
+
```typescript
|
|
98
|
+
import { ObjectDetector } from 'rtmlib-ts';
|
|
99
|
+
|
|
100
|
+
const detector = new ObjectDetector({
|
|
101
|
+
model: 'https://huggingface.co/demon2233/rtmlib-ts/resolve/main/yolo/yolov12n.onnx',
|
|
102
|
+
inputSize: [416, 416], // Faster for video
|
|
103
|
+
});
|
|
104
|
+
await detector.init();
|
|
105
|
+
|
|
106
|
+
// Camera stream
|
|
107
|
+
const video = document.querySelector('video')!;
|
|
108
|
+
const stream = await navigator.mediaDevices.getUserMedia({ video: true });
|
|
109
|
+
video.srcObject = stream;
|
|
110
|
+
|
|
111
|
+
// Detection loop (every 500ms)
|
|
112
|
+
setInterval(async () => {
|
|
113
|
+
const results = await detector.detectFromVideo(video);
|
|
114
|
+
console.log(`Detected: ${results.map(r => r.className).join(', ')}`);
|
|
115
|
+
}, 500);
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
### 5. Image Upload
|
|
119
|
+
|
|
120
|
+
```typescript
|
|
121
|
+
// File input
|
|
122
|
+
<input type="file" accept="image/*" onChange={handleFile} />
|
|
123
|
+
|
|
124
|
+
// Handler
|
|
125
|
+
const handleFile = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
|
126
|
+
const file = e.target.files?.[0];
|
|
127
|
+
if (!file) return;
|
|
128
|
+
|
|
129
|
+
const results = await detector.detectFromFile(file);
|
|
130
|
+
console.log(`Found ${results.length} objects`);
|
|
131
|
+
};
|
|
132
|
+
```
|
|
133
|
+
|
|
134
|
+
## 📊 Performance
|
|
135
|
+
|
|
136
|
+
| Model | Input | Time | Use Case |
|
|
137
|
+
|-------|-------|------|----------|
|
|
138
|
+
| YOLO12n | 416×416 | ~200ms | Real-time video |
|
|
139
|
+
| YOLO12n | 640×640 | ~500ms | High accuracy |
|
|
140
|
+
| RTMW Pose | 384×288 | ~100ms | Per person |
|
|
141
|
+
|
|
142
|
+
**Optimization Tips:**
|
|
143
|
+
- Use 416×416 for video/real-time
|
|
144
|
+
- Use 640×640 for static images
|
|
145
|
+
- First run is slower (WASM compilation)
|
|
146
|
+
- Filter classes to reduce processing
|
|
147
|
+
|
|
148
|
+
## 🎯 Supported Classes (COCO 80)
|
|
149
|
+
|
|
150
|
+
**Common:** person, car, dog, cat, bicycle, bus, truck
|
|
151
|
+
**Objects:** bottle, chair, couch, potted plant
|
|
152
|
+
**Animals:** bird, horse, sheep, cow, elephant
|
|
153
|
+
**Full list:** See `COCO_CLASSES` export
|
|
154
|
+
|
|
155
|
+
## 🎨 Drawing Utilities
|
|
156
|
+
|
|
157
|
+
```typescript
|
|
158
|
+
import {
|
|
159
|
+
drawDetectionsOnCanvas,
|
|
160
|
+
drawPoseOnCanvas,
|
|
161
|
+
drawResultsOnCanvas // Universal
|
|
162
|
+
} from 'rtmlib-ts';
|
|
163
|
+
|
|
164
|
+
// Universal drawer — pass 'object' or 'pose' as the mode argument
|
|
165
|
+
drawResultsOnCanvas(ctx, results, 'object'); // or 'pose'
|
|
166
|
+
|
|
167
|
+
// Custom drawing
|
|
168
|
+
drawDetectionsOnCanvas(ctx, detections, '#00ff00');
|
|
169
|
+
drawPoseOnCanvas(ctx, people, 0.3); // 0.3 confidence threshold
|
|
170
|
+
```
|
|
171
|
+
|
|
172
|
+
## 📁 Project Structure
|
|
173
|
+
|
|
174
|
+
```
|
|
175
|
+
rtmlib-ts/
|
|
176
|
+
├── src/
|
|
177
|
+
│ ├── solution/
|
|
178
|
+
│ │ ├── objectDetector.ts # Object detection
|
|
179
|
+
│ │ └── poseDetector.ts # Pose estimation
|
|
180
|
+
│ └── visualization/
|
|
181
|
+
│ └── draw.ts # Canvas utilities
|
|
182
|
+
├── playground/ # Next.js demo
|
|
183
|
+
└── models/
|
|
184
|
+
├── yolo/yolov12n.onnx # Detector
|
|
185
|
+
└── rtmpose/end2end.onnx # Pose model
|
|
186
|
+
```
|
|
187
|
+
|
|
188
|
+
## 🐛 Known Issues
|
|
189
|
+
|
|
190
|
+
- **YOLOv26n**: Requires model re-export (format mismatch)
|
|
191
|
+
- **First run**: Slow due to WASM compilation
|
|
192
|
+
- **Mobile**: Performance varies by device
|
|
193
|
+
|
|
194
|
+
## 📝 License
|
|
195
|
+
|
|
196
|
+
Apache 2.0
|
|
197
|
+
|
|
198
|
+
## 🙏 Credits
|
|
199
|
+
|
|
200
|
+
Based on [rtmlib](https://github.com/Tau-J/rtmlib) by Tao Jiang
|
|
201
|
+
YOLO12 by [Ultralytics](https://ultralytics.com)
|
|
202
|
+
RTMW by [OpenMMLab](https://openmmlab.com)
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* BaseTool - Abstract base class for all models
|
|
3
|
+
* Handles ONNX model loading and inference
|
|
4
|
+
* Compatible with onnxruntime-web (browser) and onnxruntime-node
|
|
5
|
+
*/
|
|
6
|
+
import * as ort from 'onnxruntime-web';
|
|
7
|
+
import { BackendType } from '../types/index.js';
|
|
8
|
+
export declare abstract class BaseTool {
|
|
9
|
+
protected session: ort.InferenceSession | null;
|
|
10
|
+
protected modelPath: string;
|
|
11
|
+
protected modelInputSize: [number, number];
|
|
12
|
+
protected mean: number[] | null;
|
|
13
|
+
protected std: number[] | null;
|
|
14
|
+
protected backend: BackendType;
|
|
15
|
+
constructor(modelPath: string, modelInputSize: [number, number], mean?: number[] | null, std?: number[] | null, backend?: BackendType);
|
|
16
|
+
protected init(): Promise<void>;
|
|
17
|
+
protected inference(img: Float32Array, inputSize?: [number, number]): Promise<any[]>;
|
|
18
|
+
abstract call(...args: unknown[]): Promise<unknown>;
|
|
19
|
+
}
|
|
20
|
+
//# sourceMappingURL=base.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["../../src/core/base.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,KAAK,GAAG,MAAM,iBAAiB,CAAC;AACvC,OAAO,EAAE,WAAW,EAAE,MAAM,mBAAmB,CAAC;AAEhD,8BAAsB,QAAQ;IAC5B,SAAS,CAAC,OAAO,EAAE,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAQ;IACtD,SAAS,CAAC,SAAS,EAAE,MAAM,CAAC;IAC5B,SAAS,CAAC,cAAc,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAC3C,SAAS,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC;IAChC,SAAS,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC;IAC/B,SAAS,CAAC,OAAO,EAAE,WAAW,CAAC;gBAG7B,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,EAChC,IAAI,GAAE,MAAM,EAAE,GAAG,IAAW,EAC5B,GAAG,GAAE,MAAM,EAAE,GAAG,IAAW,EAC3B,OAAO,GAAE,WAAsB;cASjB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;cAerB,SAAS,CAAC,GAAG,EAAE,YAAY,EAAE,SAAS,CAAC,EAAE,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,OAAO,CAAC,GAAG,EAAE,CAAC;IAkB1F,QAAQ,CAAC,IAAI,CAAC,GAAG,IAAI,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC;CACpD"}
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
/**
 * BaseTool - Abstract base class for all models
 * Handles ONNX model loading and inference
 * Compatible with onnxruntime-web (browser) and onnxruntime-node
 */
import * as ort from 'onnxruntime-web';
export class BaseTool {
    /**
     * @param {string} modelPath - Path or URL of the ONNX model file.
     * @param {[number, number]} modelInputSize - Default [height, width] fed to the model.
     * @param {number[]|null} mean - Per-channel mean for normalization (null = none).
     * @param {number[]|null} std - Per-channel std for normalization (null = none).
     * @param {string} backend - Requested backend. NOTE(review): currently only stored;
     *   init() always registers the 'wasm' execution provider regardless of this value.
     */
    constructor(modelPath, modelInputSize, mean = null, std = null, backend = 'webgpu') {
        this.session = null;
        this.modelPath = modelPath;
        this.modelInputSize = modelInputSize;
        this.mean = mean;
        this.std = std;
        this.backend = backend;
    }
    /**
     * Create the ONNX Runtime inference session for this model.
     * Must be awaited before the first call to inference().
     */
    async init() {
        // Configure ONNX Runtime Web - use CDN for WASM files
        ort.env.wasm.wasmPaths = 'https://cdn.jsdelivr.net/npm/onnxruntime-web@1.23.0/dist/';
        ort.env.wasm.simd = true;
        ort.env.wasm.proxy = false;
        // Load model from path/URL
        this.session = await ort.InferenceSession.create(this.modelPath, {
            executionProviders: ['wasm'],
            graphOptimizationLevel: 'all',
        });
        console.log(`Loaded model: ${this.modelPath}`);
    }
    /**
     * Run the model on a preprocessed CHW float image.
     * @param {Float32Array} img - Flattened (3, H, W) image data.
     * @param {[number, number]} [inputSize] - Optional [height, width] override.
     * @returns {Promise<any[]>} Output tensors in the model's output order.
     * @throws {Error} When init() has not been awaited yet.
     */
    async inference(img, inputSize) {
        if (!this.session) {
            throw new Error('Session not initialized. Call init() first.');
        }
        const [h, w] = inputSize || this.modelInputSize;
        // Build input tensor (1, 3, H, W). Use the statically imported module
        // instead of dynamically re-importing 'onnxruntime-web' on every call.
        const inputTensor = new ort.Tensor('float32', img, [1, 3, h, w]);
        const feeds = {};
        feeds[this.session.inputNames[0]] = inputTensor;
        const results = await this.session.run(feeds);
        return this.session.outputNames.map((name) => results[name]);
    }
}
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File utilities for downloading and loading models
|
|
3
|
+
*/
|
|
4
|
+
export declare function downloadCheckpoint(url: string, localPath?: string): Promise<string>;
|
|
5
|
+
export declare function fileExists(filePath: string): boolean;
|
|
6
|
+
export declare function resolveModelPath(modelPath: string): string;
|
|
7
|
+
/**
|
|
8
|
+
* Extract local zip file and return onnx path
|
|
9
|
+
*/
|
|
10
|
+
export declare function extractLocalZip(zipPath: string): Promise<string>;
|
|
11
|
+
//# sourceMappingURL=file.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"file.d.ts","sourceRoot":"","sources":["../../src/core/file.ts"],"names":[],"mappings":"AAAA;;GAEG;AASH,wBAAsB,kBAAkB,CAAC,GAAG,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CA+BzF;AAkDD,wBAAgB,UAAU,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAEpD;AAED,wBAAgB,gBAAgB,CAAC,SAAS,EAAE,MAAM,GAAG,MAAM,CAK1D;AAED;;GAEG;AACH,wBAAsB,eAAe,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAkCtE"}
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* File utilities for downloading and loading models
|
|
3
|
+
*/
|
|
4
|
+
import * as fs from 'fs';
|
|
5
|
+
import * as path from 'path';
|
|
6
|
+
import * as https from 'https';
|
|
7
|
+
import JSZip from 'jszip';
|
|
8
|
+
const CACHE_DIR = path.join(process.env.HOME || process.env.USERPROFILE || '.', '.rtmlib', 'models');
|
|
9
|
+
/**
 * Resolve a model checkpoint to a local .onnx path, downloading it on demand.
 * Resolution order: explicit local path -> on-disk cache -> network download
 * (zips are unpacked and removed after extraction).
 * @param {string} url - Remote checkpoint URL (.onnx or .zip).
 * @param {string} [localPath] - Optional pre-downloaded file to use directly.
 * @returns {Promise<string>} Path of the usable .onnx file.
 */
export async function downloadCheckpoint(url, localPath) {
    // An explicitly supplied local file wins when it exists.
    if (localPath && fs.existsSync(localPath)) {
        console.log(`Using local model: ${localPath}`);
        return localPath;
    }
    const fileName = path.basename(url);
    const cachePath = path.join(CACHE_DIR, fileName.replace('.zip', '.onnx'));
    // Serve from the on-disk cache when this model was fetched before.
    if (fs.existsSync(cachePath)) {
        console.log(`Using cached model: ${cachePath}`);
        return cachePath;
    }
    console.log(`Downloading model from ${url}`);
    // recursive mkdir is a no-op when the directory already exists
    fs.mkdirSync(CACHE_DIR, { recursive: true });
    const archivePath = path.join(CACHE_DIR, fileName);
    await downloadFile(url, archivePath);
    if (fileName.endsWith('.zip')) {
        await extractZip(archivePath, CACHE_DIR);
        fs.unlinkSync(archivePath);
    }
    return cachePath;
}
|
|
33
|
+
/**
 * Download a URL to a local file over HTTPS, following 301/302 redirects.
 * @param {string} url - Source URL.
 * @param {string} dest - Destination file path.
 * @returns {Promise<void>} Resolves once the file is fully written.
 * @throws {Error} On network errors, redirects without a Location header,
 *   or HTTP error responses (previously an error page was silently saved).
 */
async function downloadFile(url, dest) {
    return new Promise((resolve, reject) => {
        const file = fs.createWriteStream(dest);
        // Surface write-stream failures (disk full, permissions) as rejections.
        file.on('error', reject);
        const download = (currentUrl) => {
            https.get(currentUrl, (response) => {
                if (response.statusCode === 302 || response.statusCode === 301) {
                    const location = response.headers.location;
                    if (!location) {
                        reject(new Error(`Redirect from ${currentUrl} has no Location header`));
                        return;
                    }
                    download(location);
                    return;
                }
                if (response.statusCode && response.statusCode >= 400) {
                    reject(new Error(`Download failed: HTTP ${response.statusCode} for ${currentUrl}`));
                    return;
                }
                response.pipe(file);
                file.on('finish', () => {
                    file.close();
                    resolve();
                });
            }).on('error', reject);
        };
        download(url);
    });
}
|
|
52
|
+
/**
 * Extract the first .onnx entry from a zip archive into `dest`.
 * @param {string} zipPath - Path of the downloaded zip archive.
 * @param {string} dest - Directory the .onnx file is written into.
 * @throws {Error} When the archive contains no .onnx entry.
 */
async function extractZip(zipPath, dest) {
    const data = fs.readFileSync(zipPath);
    const zip = await JSZip.loadAsync(data);
    // Find .onnx file in zip
    for (const [filename, file] of Object.entries(zip.files)) {
        if (filename.endsWith('.onnx')) {
            const content = await file.async('nodebuffer');
            const onnxPath = path.join(dest, filename);
            // Create directory if needed (zip entries may contain subdirectories)
            const dir = path.dirname(onnxPath);
            if (!fs.existsSync(dir)) {
                fs.mkdirSync(dir, { recursive: true });
            }
            fs.writeFileSync(onnxPath, content);
            // Fixed: log the actual entry name (was the broken literal `$(unknown)`).
            console.log(`Extracted: ${filename}`);
            return;
        }
    }
    throw new Error('No .onnx file found in zip');
}
|
|
72
|
+
/**
 * Report whether a path exists on the local filesystem.
 * @param {string} filePath - Path to test.
 * @returns {boolean} True when the path exists.
 */
export function fileExists(filePath) {
    return Boolean(fs.existsSync(filePath));
}
|
|
75
|
+
/**
 * Normalize a model path: remote URLs pass through untouched, local
 * paths are resolved to absolute filesystem paths.
 * @param {string} modelPath - URL or local path of a model file.
 * @returns {string} The URL itself, or an absolute local path.
 */
export function resolveModelPath(modelPath) {
    const isRemote = modelPath.startsWith('http://') || modelPath.startsWith('https://');
    return isRemote ? modelPath : path.resolve(modelPath);
}
|
|
81
|
+
/**
 * Extract local zip file and return onnx path
 * @param {string} zipPath - Path of a zip archive already on disk.
 * @returns {Promise<string>} Path of the extracted (or previously extracted) .onnx file.
 * @throws {Error} When the zip is missing or contains no .onnx entry.
 */
export async function extractLocalZip(zipPath) {
    if (!fs.existsSync(zipPath)) {
        throw new Error(`Zip file not found: ${zipPath}`);
    }
    const destDir = path.dirname(zipPath);
    const data = fs.readFileSync(zipPath);
    const zip = await JSZip.loadAsync(data);
    // Find .onnx file in zip
    for (const [filename, file] of Object.entries(zip.files)) {
        if (filename.endsWith('.onnx')) {
            const onnxPath = path.join(destDir, filename);
            // Reuse a previous extraction instead of rewriting the file.
            if (fs.existsSync(onnxPath)) {
                console.log(`Using extracted model: ${onnxPath}`);
                return onnxPath;
            }
            const content = await file.async('nodebuffer');
            // Create directory if needed (zip entries may contain subdirectories)
            const dir = path.dirname(onnxPath);
            if (!fs.existsSync(dir)) {
                fs.mkdirSync(dir, { recursive: true });
            }
            fs.writeFileSync(onnxPath, content);
            // Fixed: log the actual entry name (was the broken literal `$(unknown)`).
            console.log(`Extracted: ${filename} -> ${onnxPath}`);
            return onnxPath;
        }
    }
    throw new Error('No .onnx file found in zip');
}
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Model caching utility using Cache API
|
|
3
|
+
* Caches ONNX models in browser to avoid repeated downloads
|
|
4
|
+
*/
|
|
5
|
+
/**
|
|
6
|
+
* Check if model is available in cache
|
|
7
|
+
*/
|
|
8
|
+
export declare function isModelCached(url: string): Promise<boolean>;
|
|
9
|
+
/**
|
|
10
|
+
* Get model from cache or fetch from network
|
|
11
|
+
* @param url - Model URL
|
|
12
|
+
* @param forceRefresh - Force refresh from network
|
|
13
|
+
*/
|
|
14
|
+
export declare function getCachedModel(url: string, forceRefresh?: boolean): Promise<ArrayBuffer>;
|
|
15
|
+
/**
|
|
16
|
+
* Preload and cache multiple models
|
|
17
|
+
*/
|
|
18
|
+
export declare function preloadModels(urls: string[]): Promise<void>;
|
|
19
|
+
/**
|
|
20
|
+
* Clear all cached models
|
|
21
|
+
*/
|
|
22
|
+
export declare function clearModelCache(): Promise<void>;
|
|
23
|
+
/**
|
|
24
|
+
* Get cache size in bytes
|
|
25
|
+
*/
|
|
26
|
+
export declare function getCacheSize(): Promise<number>;
|
|
27
|
+
/**
|
|
28
|
+
* Get cache info
|
|
29
|
+
*/
|
|
30
|
+
export declare function getCacheInfo(): Promise<{
|
|
31
|
+
cachedModels: string[];
|
|
32
|
+
totalSize: number;
|
|
33
|
+
totalSizeFormatted: string;
|
|
34
|
+
}>;
|
|
35
|
+
//# sourceMappingURL=modelCache.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"modelCache.d.ts","sourceRoot":"","sources":["../../src/core/modelCache.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAIH;;GAEG;AACH,wBAAsB,aAAa,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAcjE;AAED;;;;GAIG;AACH,wBAAsB,cAAc,CAAC,GAAG,EAAE,MAAM,EAAE,YAAY,GAAE,OAAe,GAAG,OAAO,CAAC,WAAW,CAAC,CAsCrG;AAeD;;GAEG;AACH,wBAAsB,aAAa,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC,CAiBjE;AAED;;GAEG;AACH,wBAAsB,eAAe,IAAI,OAAO,CAAC,IAAI,CAAC,CAWrD;AAED;;GAEG;AACH,wBAAsB,YAAY,IAAI,OAAO,CAAC,MAAM,CAAC,CAuBpD;AAED;;GAEG;AACH,wBAAsB,YAAY,IAAI,OAAO,CAAC;IAC5C,YAAY,EAAE,MAAM,EAAE,CAAC;IACvB,SAAS,EAAE,MAAM,CAAC;IAClB,kBAAkB,EAAE,MAAM,CAAC;CAC5B,CAAC,CAoBD"}
|
|
@@ -0,0 +1,161 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Model caching utility using Cache API
|
|
3
|
+
* Caches ONNX models in browser to avoid repeated downloads
|
|
4
|
+
*/
|
|
5
|
+
const CACHE_NAME = 'rtmlib-ts-models-v1';
|
|
6
|
+
/**
 * Check if model is available in cache
 * @param {string} url - Model URL used as the cache key.
 * @returns {Promise<boolean>} True when a cached response exists for the URL.
 */
export async function isModelCached(url) {
    // The Cache API is browser-only; report a miss everywhere else (e.g. Node.js).
    if (typeof caches === 'undefined') {
        return false;
    }
    try {
        const store = await caches.open(CACHE_NAME);
        const hit = await store.match(url);
        return hit !== undefined;
    }
    catch (error) {
        console.warn(`[ModelCache] Failed to check cache for ${url}:`, error);
        return false;
    }
}
|
|
24
|
+
/**
 * Get model from cache or fetch from network
 * @param url - Model URL
 * @param forceRefresh - Force refresh from network
 * @returns {Promise<ArrayBuffer>} The raw model bytes.
 */
export async function getCachedModel(url, forceRefresh = false) {
    // Without the Cache API (e.g. Node.js) fall back to a plain network fetch.
    if (typeof caches === 'undefined') {
        console.log(`[ModelCache] Cache API not available, fetching from network`);
        return fetchModelFromNetwork(url);
    }
    try {
        const store = await caches.open(CACHE_NAME);
        if (!forceRefresh) {
            const hit = await store.match(url);
            if (hit) {
                console.log(`[ModelCache] ✅ Hit for ${url}`);
                return await hit.arrayBuffer();
            }
            console.log(`[ModelCache] ❌ Miss for ${url}, fetching from network...`);
        }
        const buffer = await fetchModelFromNetwork(url);
        // Store a copy so the next call is served from cache.
        const responseToCache = new Response(buffer, {
            headers: {
                'Content-Type': 'application/octet-stream',
            },
        });
        await store.put(url, responseToCache);
        console.log(`[ModelCache] 💾 Cached ${url}`);
        return buffer;
    }
    catch (error) {
        console.error(`[ModelCache] Failed to get/cache model ${url}:`, error);
        throw error;
    }
}
|
|
63
|
+
/**
 * Fetch model from network with progress tracking
 * @param {string} url - Model URL.
 * @returns {Promise<ArrayBuffer>} The downloaded model bytes.
 * @throws {Error} On non-OK HTTP responses.
 */
async function fetchModelFromNetwork(url) {
    const response = await fetch(url);
    if (response.ok) {
        return await response.arrayBuffer();
    }
    throw new Error(`Failed to fetch model: HTTP ${response.status} ${response.statusText}`);
}
|
|
73
|
+
/**
 * Preload and cache multiple models
 * @param {string[]} urls - Model URLs to warm the cache with.
 * @returns {Promise<void>} Resolves after every URL has settled.
 */
export async function preloadModels(urls) {
    console.log(`[ModelCache] Preloading ${urls.length} model(s)...`);
    const outcomes = await Promise.allSettled(urls.map((url) => getCachedModel(url)));
    let success = 0;
    let failed = 0;
    for (const outcome of outcomes) {
        if (outcome.status === 'fulfilled') {
            success += 1;
        }
        else {
            failed += 1;
        }
    }
    console.log(`[ModelCache] Preload complete: ${success} succeeded, ${failed} failed`);
    outcomes.forEach((outcome, index) => {
        if (outcome.status === 'rejected') {
            console.error(`[ModelCache] Failed to preload ${urls[index]}:`, outcome.reason);
        }
    });
}
|
|
88
|
+
/**
 * Clear all cached models
 * @returns {Promise<void>} Resolves once the cache bucket is deleted (no-op outside browsers).
 */
export async function clearModelCache() {
    const cacheApiAvailable = typeof caches !== 'undefined';
    if (!cacheApiAvailable) {
        return;
    }
    try {
        await caches.delete(CACHE_NAME);
        console.log('[ModelCache] Cache cleared');
    }
    catch (error) {
        console.error('[ModelCache] Failed to clear cache:', error);
    }
}
|
|
103
|
+
/**
 * Get cache size in bytes
 * @returns {Promise<number>} Total size of all cached model responses, or 0
 *   when the Cache/Storage APIs are unavailable or the lookup fails.
 */
export async function getCacheSize() {
    // Short-circuit keeps `navigator` unevaluated on platforms without `caches`.
    if (typeof caches === 'undefined' || !navigator.storage) {
        return 0;
    }
    try {
        const store = await caches.open(CACHE_NAME);
        const requests = await store.keys();
        let total = 0;
        for (const request of requests) {
            const response = await store.match(request);
            if (!response) {
                continue;
            }
            const body = await response.blob();
            total += body.size;
        }
        return total;
    }
    catch (error) {
        console.warn('[ModelCache] Failed to get cache size:', error);
        return 0;
    }
}
|
|
128
|
+
/**
 * Get cache info
 * @returns {Promise<{cachedModels: string[], totalSize: number, totalSizeFormatted: string}>}
 *   Cached model URLs plus total size in bytes and as a human-readable string.
 */
export async function getCacheInfo() {
    const emptyInfo = () => ({ cachedModels: [], totalSize: 0, totalSizeFormatted: '0 B' });
    if (typeof caches === 'undefined') {
        return emptyInfo();
    }
    try {
        const store = await caches.open(CACHE_NAME);
        const requests = await store.keys();
        const totalSize = await getCacheSize();
        return {
            cachedModels: requests.map((request) => request.url),
            totalSize,
            totalSizeFormatted: formatBytes(totalSize),
        };
    }
    catch (error) {
        console.warn('[ModelCache] Failed to get cache info:', error);
        return emptyInfo();
    }
}
|
|
151
|
+
/**
 * Format bytes to human-readable string
 * @param {number} bytes - Byte count (non-negative).
 * @returns {string} e.g. "1.50 KB", "0 B".
 */
function formatBytes(bytes) {
    if (bytes === 0) {
        return '0 B';
    }
    const units = ['B', 'KB', 'MB', 'GB'];
    const exponent = Math.floor(Math.log(bytes) / Math.log(1024));
    const value = (bytes / 1024 ** exponent).toFixed(2);
    return `${value} ${units[exponent]}`;
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Post-processing utilities for pose estimation
|
|
3
|
+
*/
|
|
4
|
+
export declare function getSimccMaximum(simccX: Float32Array, simccY: Float32Array): {
|
|
5
|
+
locations: number[];
|
|
6
|
+
scores: number[];
|
|
7
|
+
};
|
|
8
|
+
export declare function convertCocoToOpenpose(keypoints: number[][], scores: number[]): {
|
|
9
|
+
keypoints: number[][];
|
|
10
|
+
scores: number[];
|
|
11
|
+
};
|
|
12
|
+
//# sourceMappingURL=posePostprocessing.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"posePostprocessing.d.ts","sourceRoot":"","sources":["../../src/core/posePostprocessing.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,wBAAgB,eAAe,CAC7B,MAAM,EAAE,YAAY,EACpB,MAAM,EAAE,YAAY,GACnB;IAAE,SAAS,EAAE,MAAM,EAAE,CAAC;IAAC,MAAM,EAAE,MAAM,EAAE,CAAA;CAAE,CAkC3C;AAED,wBAAgB,qBAAqB,CACnC,SAAS,EAAE,MAAM,EAAE,EAAE,EACrB,MAAM,EAAE,MAAM,EAAE,GACf;IAAE,SAAS,EAAE,MAAM,EAAE,EAAE,CAAC;IAAC,MAAM,EAAE,MAAM,EAAE,CAAA;CAAE,CA4C7C"}
|
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Post-processing utilities for pose estimation
|
|
3
|
+
*/
|
|
4
|
+
/**
 * Decode SimCC classification vectors into per-keypoint (x, y) bin indices
 * and confidence scores.
 *
 * NOTE(review): this implementation treats every keypoint as having exactly
 * 2 SimCC bins per axis (numKeypoints = length / 2, inner scan over 2 values).
 * SimCC heads typically emit W*splitRatio bins per keypoint — confirm the
 * expected layout against the model's actual output shape.
 *
 * @param {Float32Array} simccX - Flattened x-axis SimCC logits.
 * @param {Float32Array} simccY - Flattened y-axis SimCC logits.
 * @returns {{locations: number[], scores: number[]}} Flat [x0, y0, x1, y1, ...]
 *   argmax indices and one averaged score per keypoint.
 */
export function getSimccMaximum(simccX, simccY) {
    const count = simccX.length / 2; // Assuming split_ratio = 2
    const locations = [];
    const scores = [];
    // Argmax over the 2-bin window starting at `base`; returns [index, value].
    const argmaxPair = (values, base) => {
        let best = -Infinity;
        let bestIdx = 0;
        for (let j = 0; j < 2; j++) {
            const candidate = values[base + j];
            if (candidate > best) {
                best = candidate;
                bestIdx = j;
            }
        }
        return [bestIdx, best];
    };
    for (let i = 0; i < count; i++) {
        const [xIdx, xMax] = argmaxPair(simccX, i * 2);
        const [yIdx, yMax] = argmaxPair(simccY, i * 2);
        locations.push(xIdx, yIdx);
        scores.push((xMax + yMax) / 2);
    }
    return { locations, scores };
}
|
|
34
|
+
/**
 * Convert COCO-17 keypoints/scores into OpenPose-18 order, synthesizing the
 * neck joint as the midpoint of the two shoulders.
 *
 * Fixes: the previous mapping array was the identity 0..17, so (a) no joint
 * reordering actually happened, (b) index 17 read keypoints[17], which does
 * not exist for 17-keypoint COCO input (spread of undefined throws), and
 * (c) the neck was averaged from COCO indices 2/5 instead of the shoulders 5/6.
 *
 * @param {number[][]} keypoints - 17 [x, y] pairs in COCO order.
 * @param {number[]} scores - 17 per-keypoint confidences in COCO order.
 * @returns {{keypoints: number[][], scores: number[]}} 18 entries in OpenPose order.
 */
export function convertCocoToOpenpose(keypoints, scores) {
    // COCO-17 source index for each OpenPose-18 joint; -1 marks the synthesized neck.
    const cocoToOpenpose = [
        0,  // nose
        -1, // neck (average of the two shoulders)
        6,  // right_shoulder
        8,  // right_elbow
        10, // right_wrist
        5,  // left_shoulder
        7,  // left_elbow
        9,  // left_wrist
        12, // right_hip
        14, // right_knee
        16, // right_ankle
        11, // left_hip
        13, // left_knee
        15, // left_ankle
        2,  // right_eye
        1,  // left_eye
        4,  // right_ear
        3,  // left_ear
    ];
    const openposeKeypoints = [];
    const openposeScores = [];
    for (let i = 0; i < 18; i++) {
        if (i === 1) {
            // Neck is the shoulder midpoint; COCO indices 5/6 are left/right shoulder.
            const leftShoulder = keypoints[5];
            const rightShoulder = keypoints[6];
            openposeKeypoints.push([
                (rightShoulder[0] + leftShoulder[0]) / 2,
                (rightShoulder[1] + leftShoulder[1]) / 2,
            ]);
            openposeScores.push((scores[5] + scores[6]) / 2);
        }
        else {
            const cocoIdx = cocoToOpenpose[i];
            openposeKeypoints.push([...keypoints[cocoIdx]]);
            openposeScores.push(scores[cocoIdx]);
        }
    }
    return { keypoints: openposeKeypoints, scores: openposeScores };
}
|