@vonage/ml-transformers 7.0.0-alpha.1 → 7.0.0-alpha.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. package/dist/assets/processor-worker-CR7IjkAn.js.map +1 -0
  2. package/dist/ml-transformers.es.js +3371 -22011
  3. package/dist/ml-transformers.es.js.map +1 -1
  4. package/dist/ml-transformers.min.js +1 -1
  5. package/dist/ml-transformers.static.js +3371 -22011
  6. package/dist/ml-transformers.umd.js +32 -151
  7. package/dist/ml-transformers.umd.js.map +1 -1
  8. package/dist/types/lib/main.d.ts +3 -121
  9. package/dist/types/lib/src/renderers/canvas2d/renderers/canvas2d-renderer.d.ts +0 -2
  10. package/dist/types/lib/src/renderers/webgl/pipelines/improve-segmentation-mask.d.ts +0 -12
  11. package/dist/types/lib/src/renderers/webgl/programs/blur/blur.d.ts +0 -3
  12. package/dist/types/lib/src/transformers/background-transformer.d.ts +2 -2
  13. package/dist/types/lib/src/types.d.ts +0 -56
  14. package/dist/types/lib/src/utils/webgl.d.ts +1 -16
  15. package/package.json +4 -11
  16. package/dist/assets/processor-worker-Ct1XXlKJ.js.map +0 -1
  17. package/dist/types/lib/src/renderers/webgl/programs/expend/expend.d.ts +0 -5
  18. package/dist/types/lib/src/renderers/webgl/programs/flickering/flickering.d.ts +0 -5
  19. package/dist/types/lib/src/renderers/webgl/programs/highpass/highpass.d.ts +0 -5
  20. package/dist/types/lib/src/renderers/webgl/programs/minimum-surface/minimum-surface.d.ts +0 -5
  21. package/dist/types/lib/src/renderers/webgl/programs/power/power.d.ts +0 -5
  22. package/dist/types/lib/src/renderers/webgl/programs/refine-mask/refine-mask.d.ts +0 -5
  23. package/dist/types/lib/src/renderers/webgl/programs/smooth/smooth.d.ts +0 -5
  24. package/dist/types/lib/src/renderers/webgl/programs/track/track.d.ts +0 -5
  25. package/dist/types/lib/src/renderers/webgl/webgl-pipeline-program-double-buffer.d.ts +0 -15
  26. package/dist/types/lib/src/utils/error.d.ts +0 -1
  27. package/dist/types/lib/src/utils/mediapipe.d.ts +0 -11
@@ -1,126 +1,8 @@
1
1
  import { WarningType, ErrorFunction, PipelineInfoData } from "@vonage/media-processor";
2
- import { BlurRadius, BackgroundBlurConfig, VirtualBackgroundConfig, AssetBackgroundConfig, VideoBackgroundConfig, MediaProcessorBaseConfig, SilhouetteBlurConfig, MediapipeConfig, MediaPipeResults, MediaPipeResultsListener, MediaPipeModelType, FaceDetectionResults, FaceDetectionOptions, FaceMeshResults, FaceMeshOptions, HandsResults, HandsOptions, HolisticResults, HolisticOptions, ObjectronResults, ObjectronOptions, PoseResults, PoseOptions, SelfieSegmentationResults, SelfieSegmentationOptions, MediaPipeModelConfig, SupportedBrowserFeatures, FeatureSupport, WebglSelfieSegmentationType, BackgroundOptions, BackgroundTransformerType, RenderingOptions, RenderingType, SegmentationOptions } from "./src/types";
2
+ import { BlurRadius, BackgroundBlurConfig, VirtualBackgroundConfig, AssetBackgroundConfig, VideoBackgroundConfig, MediaProcessorBaseConfig, SilhouetteBlurConfig, SupportedBrowserFeatures, FeatureSupport, WebglSelfieSegmentationType, BackgroundOptions, BackgroundTransformerType, RenderingOptions, RenderingType, SegmentationOptions } from "./src/types";
3
3
  import { isSupported, getSupportedBrowserFeatures } from "./src/utils/support";
4
- import { MediapipeHelper } from "./src/utils/mediapipe";
5
4
  import { VonageMediaProcessor } from "./src/facades/vonage-media-processor";
6
5
  import { BackgroundTransformer } from "./src/transformers/background-transformer";
7
- import VonageSelfieSegmentation from "./libs/selfie-segmentation-helper";
8
- /**
9
- * MediaPipe Selfie Segmentation static metadata.
10
- * @returns
11
- * An object containing MediaPipe Selfie Segmentation metadata:
12
- * - VERSION
13
- */
14
- declare function getVonageSelfieSegmentation(): VonageSelfieSegmentation;
15
- import VonageHands from "./libs/hands-helper";
16
- /**
17
- * Vonage Mediapipe objects info (it will be used by the @mediapipe/drawing_utils package which is not included by this library).
18
- * @param VonageSelfiesegmentation:
19
- * - VERSION
20
- * @param VonageHands:
21
- * - VERSION
22
- * - HAND_CONNECTIONS
23
- */
24
- declare function getVonageHands(): VonageHands;
25
- import VonageHolistic from "./libs/holistic-helper";
26
- /**
27
- * MediaPipe Holistic static metadata.
28
- * This metadata is needed while using MediaPipe.
29
- * [drawing utils](https://www.npmjs.com/package/@mediapipe/drawing_utils).
30
- * And [control utils](https://www.npmjs.com/package/@mediapipe/control_utils).
31
- * @returns
32
- * An object containing MediaPipe Holistic metadata:
33
- * - VERSION
34
- * - FACE_GEOMETRY
35
- * - FACEMESH_LIPS
36
- * - FACEMESH_LEFT_EYE
37
- * - FACEMESH_LEFT_EYEBROW
38
- * - FACEMESH_LEFT_IRIS
39
- * - FACEMESH_RIGHT_EYE
40
- * - FACEMESH_RIGHT_EYEBROW
41
- * - FACEMESH_RIGHT_IRIS
42
- * - FACEMESH_FACE_OVAL
43
- * - FACEMESH_CONTOURS
44
- * - FACEMESH_TESSELATION
45
- * - HAND_CONNECTIONS
46
- * - POSE_CONNECTIONS
47
- * - POSE_LANDMARKS
48
- * - POSE_LANDMARKS_LEFT
49
- * - POSE_LANDMARKS_RIGHT
50
- * - POSE_LANDMARKS_NEUTRAL
51
- */
52
- declare function getVonageHolistic(): VonageHolistic;
53
- import VonageFaceDetection from "./libs/face-detection-helper";
54
- /**
55
- * MediaPipe Face Detection static metadata.
56
- * This metadata is needed while using MediaPipe.
57
- * [drawing utils](https://www.npmjs.com/package/@mediapipe/drawing_utils).
58
- * And [control utils](https://www.npmjs.com/package/@mediapipe/control_utils).
59
- * @returns
60
- * An object containing MediaPipe Face Detection metadata:
61
- * - VERSION
62
- * - FACEDETECTION_LIPS
63
- * - FACEDETECTION_LEFT_EYE
64
- * - FACEDETECTION_LEFT_EYEBROW
65
- * - FACEDETECTION_RIGHT_EYE
66
- * - FACEDETECTION_RIGHT_EYEBROW
67
- * - FACEDETECTION_FACE_OVAL
68
- * - FACEDETECTION_CONTOURS
69
- * - FACEDETECTION_TESSELATION
70
- */
71
- declare function getVonageFaceDetection(): VonageFaceDetection;
72
- import VonageFaceMash from "./libs/face-mash-helper";
73
- /**
74
- * MediaPipe Face Mash static metadata.
75
- * This metadata is needed while using MediaPipe.
76
- * [drawing utils](https://www.npmjs.com/package/@mediapipe/drawing_utils).
77
- * And [control utils](https://www.npmjs.com/package/@mediapipe/control_utils).
78
- * @returns
79
- * An object containing MediaPipe Face Mash metadata:
80
- * - VERSION
81
- * - FACE_GEOMETRY
82
- * - FACEMESH_LIPS
83
- * - FACEMESH_LEFT_EYE
84
- * - FACEMESH_LEFT_EYEBROW
85
- * - FACEMESH_LEFT_IRIS
86
- * - FACEMESH_RIGHT_EYE
87
- * - FACEMESH_RIGHT_EYEBROW
88
- * - FACEMESH_RIGHT_IRIS
89
- * - FACEMESH_FACE_OVAL
90
- * - FACEMESH_CONTOURS
91
- * - FACEMESH_TESSELATION
92
- */
93
- declare function getVonageFaceMash(): VonageFaceMash;
94
- import VonageObjectron from "./libs/objectron-helper";
95
- /**
96
- * MediaPipe Objectron static metadata.
97
- * This metadata is needed while using MediaPipe.
98
- * [drawing utils](https://www.npmjs.com/package/@mediapipe/drawing_utils).
99
- * And [control utils](https://www.npmjs.com/package/@mediapipe/control_utils).
100
- * @returns
101
- * An object containing MediaPipe Objectron metadata:
102
- * - VERSION
103
- * - VERSION
104
- * - BOX_CONNECTIONS
105
- * - BOX_KEYPOINTS
106
- */
107
- declare function getVonageObjectron(): VonageObjectron;
108
- import VonagePose from "./libs/pose-helper";
109
- /**
110
- * MediaPipe Pose static metadata.
111
- * This metadata is needed while using MediaPipe.
112
- * [drawing utils](https://www.npmjs.com/package/@mediapipe/drawing_utils).
113
- * And [control utils](https://www.npmjs.com/package/@mediapipe/control_utils).
114
- * @returns
115
- * An object containing MediaPipe Pose metadata:
116
- * - VERSION
117
- * - POSE_CONNECTIONS
118
- * - POSE_LANDMARKS
119
- * - POSE_LANDMARKS_LEFT
120
- * - POSE_LANDMARKS_RIGHT
121
- * - POSE_LANDMARKS_NEUTRAL
122
- */
123
- declare function getVonagePose(): VonagePose;
124
6
  /**
125
7
  * MediaProcessorConfig specifies the transformer logic to be performed.
126
8
  */
@@ -132,5 +14,5 @@ type MediaProcessorConfig = BackgroundOptions;
132
14
  */
133
15
  export declare function createVonageMediaProcessor(config: MediaProcessorConfig): Promise<VonageMediaProcessor>;
134
16
  export type { EventDataMap, WarnData, ErrorData, EventMetaData, DropInfo, } from "@vonage/media-processor";
135
- export type { MediaProcessorBaseConfig, BackgroundBlurConfig, VirtualBackgroundConfig, VideoBackgroundConfig, AssetBackgroundConfig, SilhouetteBlurConfig, MediaProcessorConfig, BackgroundOptions, MediapipeConfig, MediaPipeResults, MediaPipeModelType, MediaPipeResultsListener, FaceDetectionResults, FaceDetectionOptions, FaceMeshResults, FaceMeshOptions, HandsResults, HandsOptions, HolisticResults, HolisticOptions, ObjectronResults, ObjectronOptions, SelfieSegmentationOptions, SelfieSegmentationResults, PoseResults, PoseOptions, MediaPipeModelConfig, SupportedBrowserFeatures, FeatureSupport, RenderingOptions, SegmentationOptions, };
136
- export { VonageMediaProcessor, BlurRadius, isSupported, getSupportedBrowserFeatures, MediapipeHelper, getVonageSelfieSegmentation, getVonageFaceDetection, getVonageFaceMash, getVonageHands, getVonageHolistic, getVonageObjectron, getVonagePose, WarningType, PipelineInfoData, ErrorFunction, BackgroundTransformer, WebglSelfieSegmentationType, BackgroundTransformerType, RenderingType, };
17
+ export type { MediaProcessorBaseConfig, BackgroundBlurConfig, VirtualBackgroundConfig, VideoBackgroundConfig, AssetBackgroundConfig, SilhouetteBlurConfig, MediaProcessorConfig, BackgroundOptions, SupportedBrowserFeatures, FeatureSupport, RenderingOptions, SegmentationOptions, };
18
+ export { VonageMediaProcessor, BlurRadius, isSupported, getSupportedBrowserFeatures, WarningType, PipelineInfoData, ErrorFunction, BackgroundTransformer, WebglSelfieSegmentationType, BackgroundTransformerType, RenderingType, };
@@ -1,11 +1,9 @@
1
1
  import { RendererInterface } from "../../interfaces/renderer";
2
- import { FlickeringOptions } from "../../webgl/pipelines/improve-segmentation-mask";
3
2
  import { ResolvedWebglQuery } from "../../webgl/webgl-profiler";
4
3
  export declare abstract class Canvas2dRenderer implements RendererInterface {
5
4
  protected canvas: OffscreenCanvas;
6
5
  protected context: OffscreenCanvasRenderingContext2D;
7
6
  constructor();
8
- setFlickeringOptions(options: FlickeringOptions): void;
9
7
  enablePostProcessing(): void;
10
8
  disablePostProcessing(): void;
11
9
  profileWebgl(duration: number): Promise<ResolvedWebglQuery[]>;
@@ -1,17 +1,5 @@
1
1
  import { TextureSource } from "../../../types";
2
2
  import { WebglPipeline } from "../webgl-pipeline";
3
- /**
4
- * @internal
5
- */
6
- export interface FlickeringOptions {
7
- currentThresholdMinimum: number;
8
- currentThresholdMaximum: number;
9
- previousThresholdMinimum: number;
10
- previousThresholdMaximum: number;
11
- thresholdMinimum: number;
12
- thresholdMaximum: number;
13
- thresholdFactor: number;
14
- }
15
3
  export declare class ImproveSegmentationMaskPipeline extends WebglPipeline {
16
4
  private readonly context;
17
5
  inputImage?: ImageBitmap;
@@ -14,7 +14,4 @@ export declare class Blur extends WebglPipelineProgram<BlurOptions> {
14
14
  protected getFragmentShader(): string;
15
15
  protected getVertexShader(): string;
16
16
  private createLoopFunction;
17
- private getFactor;
18
- private getLinearFactor;
19
- private getGaussianFactor;
20
17
  }
@@ -3,13 +3,13 @@ import { ResolvedWebglQuery } from "../renderers/webgl/webgl-profiler";
3
3
  export declare class BackgroundTransformer {
4
4
  private selfieSegmentation?;
5
5
  private backgroundFilter?;
6
+ private backgroundOptions?;
6
7
  private reporter;
7
8
  private isEnabled;
8
9
  init(id: string, config: MediaProcessorConfig): Promise<void>;
9
10
  transform?(frame: VideoFrame, controller: TransformStreamDefaultController): Promise<void>;
10
- private backgroundOptions?;
11
11
  setBackgroundOptions(options: BackgroundOptions): Promise<void>;
12
- setVideoBGReadable(stream: ReadableStream): Promise<void>;
12
+ setVideoBGReadable(stream: ReadableStream): void;
13
13
  setVirtualBGImage(image: ImageBitmap): Promise<void>;
14
14
  profile(duration: number): Promise<ResolvedWebglQuery[]>;
15
15
  getTransformerType(): string;
@@ -1,10 +1,3 @@
1
- import { Options as FaceDetectionOptions, Results as FaceDetectionResults } from "@mediapipe/face_detection";
2
- import { Options as FaceMeshOptions, Results as FaceMeshResults } from "@mediapipe/face_mesh";
3
- import { Options as HandsOptions, Results as HandsResults } from "@mediapipe/hands";
4
- import { Options as HolisticOptions, Results as HolisticResults } from "@mediapipe/holistic";
5
- import { Options as ObjectronOptions, Results as ObjectronResults } from "@mediapipe/objectron";
6
- import { Options as PoseOptions, Results as PoseResults } from "@mediapipe/pose";
7
- import { Options as SelfieSegmentationOptions, Results as SelfieSegmentationResults } from "@mediapipe/selfie_segmentation";
8
1
  import { TextureFunc } from "twgl.js";
9
2
  /**
10
3
  * BlurRadius specifies how much bluring filter to apply by a given transformer.
@@ -153,55 +146,6 @@ export interface SilhouetteBlurConfig extends MediaProcessorBaseConfig {
153
146
  * BackgroundOptions background options
154
147
  */
155
148
  export type BackgroundOptions = BackgroundBlurConfig | VirtualBackgroundConfig | VideoBackgroundConfig | SilhouetteBlurConfig;
156
- export type { FaceDetectionResults, FaceDetectionOptions, FaceMeshResults, FaceMeshOptions, HandsResults, HandsOptions, HolisticResults, HolisticOptions, ObjectronResults, ObjectronOptions, SelfieSegmentationResults, SelfieSegmentationOptions, PoseResults, PoseOptions, };
157
- /**
158
- * MediaPipeResults types of results object of MediaPipe
159
- */
160
- export type MediaPipeResults = FaceDetectionResults | FaceMeshResults | HandsResults | HolisticResults | ObjectronResults | SelfieSegmentationResults | PoseResults;
161
- /**
162
- * ResultsListener callback function from MediaPipe process
163
- * @results - The results object from MediaPipe
164
- * @returns - can return a promise of void
165
- */
166
- export type MediaPipeResultsListener = <T extends MediaPipeResults>(results: T) => Promise<void> | void;
167
- /**
168
- * MediaPipeModelType supported models types
169
- */
170
- export type MediaPipeModelType = "face_mesh" | "face_detection" | "hands" | "holistic" | "objectron" | "selfie_segmentation" | "pose";
171
- /**
172
- * defines one mediapipe model config
173
- * @modelType - which model is required
174
- * @listener - callback function from the model
175
- * @options - define options for the mediapipe model that is used. for more info check https://google.github.io/mediapipe/getting_started/javascript.html
176
- * @assetsUri (optional) - can be set to get wasm/tflite/js/binarypb assets. Vonage provides static assets.
177
- * *** WARNING *** - using this option moves the responsibility of the assets and the versioning to the user.
178
- * please keep in mind that the assets list can change between versions!
179
- * assets are different between mediapipe models.
180
- */
181
- export type MediaPipeModelConfig = {
182
- modelType: MediaPipeModelType;
183
- listener: MediaPipeResultsListener;
184
- options: FaceDetectionOptions | FaceMeshOptions | HandsOptions | HolisticOptions | ObjectronOptions | SelfieSegmentationOptions | PoseOptions;
185
- assetsUri?: string;
186
- };
187
- /**
188
- * MediapipeConfig specified config of mediapipe helper:
189
- * this structure allows the user to create few mediapipe models that will run in parallel.
190
- * @modelTypesArray - array of mediapipe models to be loaded and called.
191
- */
192
- export type MediapipeConfig = {
193
- mediaPipeModelConfigArray: Array<MediaPipeModelConfig>;
194
- };
195
- /**
196
- * Vector containing 3 numbers
197
- * @internal
198
- */
199
- export type vec3 = [number, number, number];
200
- /**
201
- * Vector containing 4 numbers
202
- * @internal
203
- */
204
- export type vec4 = [number, number, number, number];
205
149
  /**
206
150
  * Valid data to feed webgl texture
207
151
  * @internal
@@ -1,10 +1,4 @@
1
- import { TextureSource, vec4 } from "../types";
2
- /**
3
- * Render a VideoFrame within a canvas
4
- * @param frame VideoFrame to render
5
- * @param context Canvas context to render in
6
- */
7
- export declare function frameToCanvas(frame: VideoFrame, context: OffscreenCanvasRenderingContext2D): void;
1
+ import { TextureSource } from "../types";
8
2
  /**
9
3
  * Create and fill a webgl texture
10
4
  * If source is a texture, it will just be returned without creating a new texture
@@ -13,15 +7,6 @@ export declare function frameToCanvas(frame: VideoFrame, context: OffscreenCanva
13
7
  * @returns Created texture, or source if it's a WebGLTexture
14
8
  */
15
9
  export declare function texture(context: WebGLRenderingContext, source?: TextureSource | WebGLTexture): () => WebGLTexture;
16
- /**
17
- * Create a webgl texture and fill it with a color
18
- * @param context Webgl context to create the texture on
19
- * @param width Width of the texture
20
- * @param height Height of the texture
21
- * @param color Color to fill the texture with
22
- * @returns Created texture
23
- */
24
- export declare function textureWithColor(context: WebGLRenderingContext, width: number, height: number, color: vec4): () => WebGLTexture;
25
10
  /**
26
11
  * Minimum size for a webgl texture
27
12
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vonage/ml-transformers",
3
- "version": "7.0.0-alpha.1",
3
+ "version": "7.0.0-alpha.3",
4
4
  "author": "Vonage <vcp_webrtc@vonage.com>",
5
5
  "keywords": [
6
6
  "vonage",
@@ -32,24 +32,17 @@
32
32
  "version:published": "npm show @vonage/ml-transformers version"
33
33
  },
34
34
  "dependencies": {
35
- "@mediapipe/face_detection": "0.4.1646425229",
36
- "@mediapipe/face_mesh": "0.4.1633559619",
37
- "@mediapipe/hands": "0.4.1646424915",
38
- "@mediapipe/holistic": "0.5.1635989137",
39
- "@mediapipe/objectron": "0.4.1636596145",
40
- "@mediapipe/pose": "0.5.1635988162",
41
- "@mediapipe/selfie_segmentation": "0.1.1632777926",
35
+ "@mediapipe/tasks-vision": "0.10.20",
42
36
  "@types/dom-mediacapture-transform": "^0.1.2",
43
37
  "@types/emscripten": "^1.39.6",
44
38
  "@types/offscreencanvas": "^2019.6.4",
45
39
  "@vonage/js-onewebrtc-telemetry": "1.1.2",
46
40
  "@vonage/js-workerizer": "^1.1.2",
47
- "@vonage/media-processor": "3.0.0-alpha.2",
41
+ "@vonage/media-processor": "3.0.1-alpha.0",
48
42
  "twgl.js": "^5.1.0",
49
43
  "typescript-optional": "3.0.0-alpha.3",
50
44
  "uuid": "^13.0.0",
51
- "wasm-feature-detect": "^1.2.11",
52
- "@mediapipe/tasks-vision": "0.10.20"
45
+ "wasm-feature-detect": "^1.2.11"
53
46
  },
54
47
  "files": [
55
48
  "dist"