@vonage/ml-transformers 5.0.1 → 5.1.0

This diff shows the differences between two publicly released versions of the package as published to a supported registry. The information in this diff is provided for informational purposes only and reflects the package contents as they appear in the respective public registry.
@@ -124,7 +124,7 @@ declare function getVonagePose(): VonagePose;
124
124
  /**
125
125
  * MediaProcessorConfig specifies the transformer logic to be performed.
126
126
  */
127
- declare type MediaProcessorConfig = BackgroundOptions;
127
+ type MediaProcessorConfig = BackgroundOptions;
128
128
  /**
129
129
  * builder function to create MediaProcessor
130
130
  * @param config - see `MediaProcessorConfig` definition
@@ -23,12 +23,6 @@ export declare class VonageMediaProcessor extends Emittery<EventDataMap> {
23
23
  * @internal
24
24
  */
25
25
  private constructor();
26
- /**
27
- * Initialize the processor worker an events
28
- * @param config Configuration to initialize with
29
- * @internal
30
- */
31
- private init;
32
26
  /**
33
27
  * change the background option during run time using this function.
34
28
  * while using this function the media-processor will not be destroyed.
@@ -36,6 +30,14 @@ export declare class VonageMediaProcessor extends Emittery<EventDataMap> {
36
30
  * @param backgroundOptions - see `BackgroundOptions` definition
37
31
  */
38
32
  setBackgroundOptions(options: BackgroundOptions): Promise<void>;
33
+ /**
34
+ * Enable the processing
35
+ */
36
+ enable(): Promise<void>;
37
+ /**
38
+ * Disable the processing
39
+ */
40
+ disable(): Promise<void>;
39
41
  /**
40
42
  * Sets the expected rate of the track per second.
41
43
  * The media processor will use this number for calculating drops in the rate.
@@ -24,12 +24,19 @@ export declare class ProcessorMain extends Emittery<EventDataMap> implements Med
24
24
  private rate;
25
25
  private worker?;
26
26
  private static initCount;
27
- init(config: MediaProcessorConfig): Promise<void>;
27
+ private config;
28
+ private backgroundOptions?;
29
+ private isEnabled;
30
+ constructor(config: MediaProcessorConfig);
28
31
  setTrackExpectedRate(rate: number): Promise<void>;
29
32
  setBackgroundOptions(options: BackgroundOptions): Promise<void>;
30
33
  transform(readable: ReadableStream<any>, writable: WritableStream<any>): Promise<void>;
31
34
  destroy(): Promise<void>;
32
35
  setFlickeringOptions(options: FlickeringOptions): Promise<void>;
33
36
  profile(duration: number): Promise<ResolvedWebglQuery[]>;
37
+ enable(): Promise<void>;
38
+ disable(): Promise<void>;
34
39
  private listenWorker;
40
+ private startWorker;
41
+ private updateBackgroundOptions;
35
42
  }
@@ -12,6 +12,8 @@ export declare class ProcessorWorker {
12
12
  private resolveOnMediaProcessor?;
13
13
  private eventsQueue;
14
14
  init(id: string, config: MediaProcessorConfig): Promise<void>;
15
+ enable(): void;
16
+ disable(): void;
15
17
  onMediaProcessorEvent(): Promise<MediaProcessorEvent>;
16
18
  setTrackExpectedRate(rate: number): Promise<void>;
17
19
  transform(readable: ReadableStream<any>, writable: WritableStream<any>): Promise<void>;
@@ -1,5 +1,3 @@
1
- /// <reference types="dom-webcodecs" />
2
- /// <reference types="offscreencanvas" />
3
1
  import { RendererInterface } from "../../interfaces/renderer";
4
2
  import { FlickeringOptions } from "../../webgl/pipelines/improve-segmentation-mask";
5
3
  import { ResolvedWebglQuery } from "../../webgl/webgl-profiler";
@@ -6,6 +6,7 @@ export declare class BackgroundTransformer {
6
6
  private selfieSegmentation;
7
7
  private backgroundFilter?;
8
8
  private reporter;
9
+ private isEnabled;
9
10
  init(id: string, config: MediaProcessorConfig): Promise<void>;
10
11
  transform?(frame: VideoFrame, controller: TransformStreamDefaultController): Promise<void>;
11
12
  private backgroundOptions?;
@@ -15,5 +16,7 @@ export declare class BackgroundTransformer {
15
16
  setFlickeringOptions(options: FlickeringOptions): void;
16
17
  profile(duration: number): Promise<ResolvedWebglQuery[]>;
17
18
  getTransformerType(): string;
19
+ enable(): void;
20
+ disable(): void;
18
21
  private report;
19
22
  }
@@ -69,7 +69,7 @@ export interface MediaProcessorBaseConfig {
69
69
  tfliteAssetUriPath?: string;
70
70
  renderingOptions?: RenderingOptions;
71
71
  }
72
- export declare type RenderingOptions = Canvas2dOptions | WebglOptions;
72
+ export type RenderingOptions = Canvas2dOptions | WebglOptions;
73
73
  /**
74
74
  * AssetBackgroundConfig specifies the absolute URL to the image or video file used for background replacement.
75
75
  */
@@ -126,22 +126,22 @@ export interface SilhouetteBlurConfig extends MediaProcessorBaseConfig {
126
126
  /**
127
127
  * BackgroundOptions background options
128
128
  */
129
- export declare type BackgroundOptions = BackgroundBlurConfig | VirtualBackgroundConfig | VideoBackgroundConfig | SilhouetteBlurConfig;
129
+ export type BackgroundOptions = BackgroundBlurConfig | VirtualBackgroundConfig | VideoBackgroundConfig | SilhouetteBlurConfig;
130
130
  export type { FaceDetectionResults, FaceDetectionOptions, FaceMeshResults, FaceMeshOptions, HandsResults, HandsOptions, HolisticResults, HolisticOptions, ObjectronResults, ObjectronOptions, SelfieSegmentationResults, SelfieSegmentationOptions, PoseResults, PoseOptions, };
131
131
  /**
132
132
  * MediaPipeResults types of results object of MediaPipe
133
133
  */
134
- export declare type MediaPipeResults = FaceDetectionResults | FaceMeshResults | HandsResults | HolisticResults | ObjectronResults | SelfieSegmentationResults | PoseResults;
134
+ export type MediaPipeResults = FaceDetectionResults | FaceMeshResults | HandsResults | HolisticResults | ObjectronResults | SelfieSegmentationResults | PoseResults;
135
135
  /**
136
136
  * ResultsListener callback function from MediaPipe process
137
137
  * @results - The results object from MediaPipe
138
138
  * @returns - can return a promise of void
139
139
  */
140
- export declare type MediaPipeResultsListener = <T extends MediaPipeResults>(results: T) => Promise<void> | void;
140
+ export type MediaPipeResultsListener = <T extends MediaPipeResults>(results: T) => Promise<void> | void;
141
141
  /**
142
142
  * MediaPipeModelType supported models types
143
143
  */
144
- export declare type MediaPipeModelType = "face_mesh" | "face_detection" | "hands" | "holistic" | "objectron" | "selfie_segmentation" | "pose";
144
+ export type MediaPipeModelType = "face_mesh" | "face_detection" | "hands" | "holistic" | "objectron" | "selfie_segmentation" | "pose";
145
145
  /**
146
146
  * defines one mediapipe model config
147
147
  * @modelType - which model is required
@@ -152,7 +152,7 @@ export declare type MediaPipeModelType = "face_mesh" | "face_detection" | "hands
152
152
  * please keep in mind that the assets list can change between versions!
153
153
  * assets are different between mediapipe models.
154
154
  */
155
- export declare type MediaPipeModelConfig = {
155
+ export type MediaPipeModelConfig = {
156
156
  modelType: MediaPipeModelType;
157
157
  listener: MediaPipeResultsListener;
158
158
  options: FaceDetectionOptions | FaceMeshOptions | HandsOptions | HolisticOptions | ObjectronOptions | SelfieSegmentationOptions | PoseOptions;
@@ -163,29 +163,29 @@ export declare type MediaPipeModelConfig = {
163
163
  * this structure allows the user to create few mediapipe models that will run in parallel.
164
164
  * @modelTypesArray - array of mediapipe models to be loaded and called.
165
165
  */
166
- export declare type MediapipeConfig = {
166
+ export type MediapipeConfig = {
167
167
  mediaPipeModelConfigArray: Array<MediaPipeModelConfig>;
168
168
  };
169
169
  /**
170
170
  * Vector containing 3 numbers
171
171
  * @internal
172
172
  */
173
- export declare type vec3 = [number, number, number];
173
+ export type vec3 = [number, number, number];
174
174
  /**
175
175
  * Vector containing 4 numbers
176
176
  * @internal
177
177
  */
178
- export declare type vec4 = [number, number, number, number];
178
+ export type vec4 = [number, number, number, number];
179
179
  /**
180
180
  * Valid data to feed webgl texture
181
181
  * @internal
182
182
  */
183
- export declare type TextureSource = string | number[] | ArrayBufferView | TexImageSource | TexImageSource[] | string[] | TextureFunc | undefined;
183
+ export type TextureSource = string | number[] | ArrayBufferView | TexImageSource | TexImageSource[] | string[] | TextureFunc | undefined;
184
184
  /**
185
185
  * Valid uniform data map
186
186
  * @internal
187
187
  */
188
- export declare type UniformDataMap = {
188
+ export type UniformDataMap = {
189
189
  [key: string]: number | number[] | WebGLTexture;
190
190
  };
191
191
  /**
@@ -1,6 +1,4 @@
1
1
  /// <reference types="dom-webcodecs" />
2
- /// <reference types="dom-webcodecs" />
3
- /// <reference types="offscreencanvas" />
4
2
  import { TextureSource, vec4 } from "../types";
5
3
  /**
6
4
  * Render a VideoFrame within a canvas
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@vonage/ml-transformers",
3
- "version": "5.0.1",
3
+ "version": "5.1.0",
4
4
  "author": "Guy Mininberg <guy.mininberg@vonage.com>",
5
5
  "contributors": [
6
6
  "Guy Mininberg <guy.mininberg@vonage.com>",
@@ -31,7 +31,7 @@
31
31
  "e2e:app": "vite ./tests/e2e/apps/vite",
32
32
  "e2e:app:run:vite": "vite ./tests/e2e/apps/vite",
33
33
  "e2e:app:run:webpack": "vite ./tests/e2e/apps/webpack/app",
34
- "e2e:app:install": "rm -f ./vonage-ml-transformers-* && npm run build && npm pack && node ./tests/e2e/scripts/install-app.js && node ./tests/e2e/scripts/build-app.js",
34
+ "e2e:app:install": "npm run build && npm pack && node ./tests/e2e/scripts/install-app.js && node ./tests/e2e/scripts/build-app.js",
35
35
  "test": "vitest --run",
36
36
  "test:install": "npm install && npm run build && npm pack && npm run e2e:app:install",
37
37
  "version:local": "node -pe 'require(\"./package.json\").version'",
@@ -86,7 +86,7 @@
86
86
  "puppeteer": "^19.0.0",
87
87
  "rimraf": "^3.0.2",
88
88
  "typescript": "^4.8.4",
89
- "vite": "3.1.8",
89
+ "vite": "^3.2.7",
90
90
  "vitest": "^0.28.5"
91
91
  },
92
92
  "repository": "https://github.com/Vonage/vonage-media-transformers-samples",