@designcombo/video 0.0.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as published in their respective public registries.
- package/LICENSE +63 -0
- package/dist/SharedSystems-BSw9neqH.js +2691 -0
- package/dist/WebGLRenderer-BrabW-VK.js +2639 -0
- package/dist/WebGPURenderer-BKwBKkzk.js +1655 -0
- package/dist/browserAll-C7HVZtqZ.js +1876 -0
- package/dist/clips/audio-clip.d.ts +132 -0
- package/dist/clips/base-clip.d.ts +86 -0
- package/dist/clips/caption-clip.d.ts +257 -0
- package/dist/clips/iclip.d.ts +120 -0
- package/dist/clips/image-clip.d.ts +110 -0
- package/dist/clips/index.d.ts +8 -0
- package/dist/clips/text-clip.d.ts +192 -0
- package/dist/clips/video-clip.d.ts +200 -0
- package/dist/colorToUniform-Du0ROyNd.js +274 -0
- package/dist/compositor.d.ts +111 -0
- package/dist/index-CjzowIhV.js +28270 -0
- package/dist/index.d.ts +14 -0
- package/dist/index.es.js +20 -0
- package/dist/index.umd.js +1295 -0
- package/dist/internal-utils/event-tool.d.ts +50 -0
- package/dist/internal-utils/index.d.ts +14 -0
- package/dist/internal-utils/log.d.ts +34 -0
- package/dist/internal-utils/meta-box.d.ts +1 -0
- package/dist/internal-utils/recodemux.d.ts +65 -0
- package/dist/internal-utils/stream-utils.d.ts +43 -0
- package/dist/internal-utils/worker-timer.d.ts +8 -0
- package/dist/json-serialization.d.ts +142 -0
- package/dist/mp4-utils/index.d.ts +31 -0
- package/dist/mp4-utils/mp4box-utils.d.ts +36 -0
- package/dist/mp4-utils/sample-transform.d.ts +23 -0
- package/dist/sprite/base-sprite.d.ts +147 -0
- package/dist/sprite/pixi-sprite-renderer.d.ts +48 -0
- package/dist/studio.d.ts +142 -0
- package/dist/transfomer/parts/handle.d.ts +17 -0
- package/dist/transfomer/parts/wireframe.d.ts +5 -0
- package/dist/transfomer/transformer.d.ts +21 -0
- package/dist/utils/audio.d.ts +82 -0
- package/dist/utils/chromakey.d.ts +24 -0
- package/dist/utils/color.d.ts +4 -0
- package/dist/utils/common.d.ts +7 -0
- package/dist/utils/dom.d.ts +48 -0
- package/dist/utils/fonts.d.ts +16 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/srt-parser.d.ts +15 -0
- package/dist/utils/video.d.ts +18 -0
- package/dist/webworkerAll-DsE6HIYE.js +2497 -0
- package/package.json +53 -0
package/dist/clips/index.d.ts
@@ -0,0 +1,8 @@
+export * from './audio-clip';
+export * from './caption-clip';
+export * from './iclip';
+export * from './image-clip';
+export * from './video-clip';
+export { VideoClip } from './video-clip';
+export type { IMP4ClipOpts } from './video-clip';
+export * from './text-clip';
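Note: the clips barrel above defines the public clip surface of the package. A minimal consumer sketch, assuming the root entry (dist/index.d.ts, not expanded in this diff) re-exports this barrel under the '@designcombo/video' specifier:

    // Sketch only: the '@designcombo/video' specifier and the root re-export are assumptions.
    import { VideoClip, TextClip } from '@designcombo/video';
    import type { IMP4ClipOpts, ITextClipOpts } from '@designcombo/video';

    // IMP4ClipOpts comes from video-clip.d.ts and ITextClipOpts from text-clip.d.ts (both shown below).
    const videoOpts: IMP4ClipOpts = { audio: { volume: 0.8 } };
    const textOpts: ITextClipOpts = { fontSize: 40, fontFamily: 'Arial' };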
package/dist/clips/text-clip.d.ts
@@ -0,0 +1,192 @@
+import { Application, Texture } from 'pixi.js';
+import { BaseClip } from './base-clip';
+import { IClip } from './iclip';
+import { TextClipJSON } from '../json-serialization';
+export interface ITextClipOpts {
+    /**
+     * Font size in pixels
+     * @default 40
+     */
+    fontSize?: number;
+    /**
+     * Font family
+     * @default 'Arial'
+     */
+    fontFamily?: string;
+    /**
+     * Font weight (e.g., 'normal', 'bold', '400', '700')
+     * @default 'normal'
+     */
+    fontWeight?: string;
+    /**
+     * Font style (e.g., 'normal', 'italic')
+     * @default 'normal'
+     */
+    fontStyle?: string;
+    /**
+     * Text color (hex string, color name, or gradient object)
+     * @default '#ffffff'
+     */
+    fill?: string | number | {
+        type: 'gradient';
+        x0: number;
+        y0: number;
+        x1: number;
+        y1: number;
+        colors: Array<{
+            ratio: number;
+            color: string | number;
+        }>;
+    };
+    /**
+     * Stroke color (hex string or color name) or stroke object with advanced options
+     */
+    stroke?: string | number | {
+        color: string | number;
+        width: number;
+        join?: 'miter' | 'round' | 'bevel';
+    };
+    /**
+     * Stroke width in pixels (used when stroke is a simple color)
+     * @default 0
+     */
+    strokeWidth?: number;
+    /**
+     * Text alignment ('left', 'center', 'right')
+     * @default 'left'
+     */
+    align?: 'left' | 'center' | 'right';
+    /**
+     * Drop shadow configuration
+     */
+    dropShadow?: {
+        color?: string | number;
+        alpha?: number;
+        blur?: number;
+        angle?: number;
+        distance?: number;
+    };
+    /**
+     * Word wrap width (0 = no wrap)
+     * @default 0
+     */
+    wordWrapWidth?: number;
+    /**
+     * Word wrap mode ('break-word' or 'normal')
+     * @default 'break-word'
+     */
+    wordWrap?: boolean;
+    /**
+     * Line height (multiplier)
+     * @default 1
+     */
+    lineHeight?: number;
+    /**
+     * Letter spacing in pixels
+     * @default 0
+     */
+    letterSpacing?: number;
+}
+/**
+ * Text clip using PixiJS Text for rendering
+ *
+ * @example
+ * const textClip = new TextClip('Hello World', {
+ *   fontSize: 48,
+ *   fill: '#ffffff',
+ *   stroke: '#000000',
+ *   strokeWidth: 2,
+ *   dropShadow: {
+ *     color: '#000000',
+ *     alpha: 0.5,
+ *     blur: 4,
+ *     distance: 2,
+ *   },
+ * });
+ * textClip.duration = 5e6; // 5 seconds
+ */
+export declare class TextClip extends BaseClip {
+    ready: IClip['ready'];
+    private _meta;
+    get meta(): {
+        duration: number;
+        width: number;
+        height: number;
+    };
+    /**
+     * Text content (hybrid JSON structure)
+     */
+    text: string;
+    /**
+     * Text styling (hybrid JSON structure)
+     * Provides direct access to styling properties
+     */
+    get style(): {
+        fontSize: number | undefined;
+        fontFamily: string | undefined;
+        fontWeight: string | undefined;
+        fontStyle: string | undefined;
+        color: string | number | {
+            type: "gradient";
+            x0: number;
+            y0: number;
+            x1: number;
+            y1: number;
+            colors: Array<{
+                ratio: number;
+                color: string | number;
+            }>;
+        } | undefined;
+        align: "center" | "left" | "right" | undefined;
+        stroke: {
+            color: string | number;
+            width: number;
+        } | undefined;
+        shadow: {
+            color: string | number;
+            alpha: number;
+            blur: number;
+            offsetX: number;
+            offsetY: number;
+        } | undefined;
+    };
+    private pixiText;
+    private textStyle;
+    private renderTexture;
+    private externalRenderer;
+    private pixiApp;
+    private originalOpts;
+    constructor(text: string, opts?: ITextClipOpts, renderer?: Application['renderer']);
+    /**
+     * Set an external renderer (e.g., from Studio) to avoid creating our own Pixi App
+     * This is an optimization for Studio preview
+     * Can be called before ready() completes
+     */
+    setRenderer(renderer: Application['renderer']): void;
+    /**
+     * Get the renderer for rendering text to RenderTexture
+     * Creates a minimal renderer as fallback if no external renderer is provided
+     */
+    private getRenderer;
+    /**
+     * Get the PixiJS Texture (RenderTexture) for optimized rendering in Studio
+     * This avoids ImageBitmap → Canvas → Texture conversion
+     *
+     * @returns The RenderTexture containing the rendered text, or null if not ready
+     */
+    getTexture(): Promise<Texture | null>;
+    tick(_time: number): Promise<{
+        video: ImageBitmap;
+        state: 'success';
+    }>;
+    split(_time: number): Promise<[this, this]>;
+    clone(): Promise<this>;
+    destroy(): void;
+    toJSON(main?: boolean): TextClipJSON;
+    /**
+     * Create a TextClip instance from a JSON object (fabric.js pattern)
+     * @param json The JSON object representing the clip
+     * @returns Promise that resolves to a TextClip instance
+     */
+    static fromObject(json: TextClipJSON): Promise<TextClip>;
+}
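Note: a usage sketch for the TextClip declarations above, built from the documented constructor options and the toJSON/fromObject pair. The '@designcombo/video' specifier and the behaviour of `ready` (awaited here as a readiness promise) are assumptions, not spelled out in this diff:

    import { TextClip } from '@designcombo/video'; // assumed specifier

    async function makeTitle(): Promise<TextClip> {
      const clip = new TextClip('Hello World', {
        fontSize: 48,
        fill: '#ffffff',
        stroke: { color: '#000000', width: 2, join: 'round' },
        dropShadow: { color: '#000000', alpha: 0.5, blur: 4, distance: 2 },
        align: 'center',
      });
      await clip.ready;       // assumed: resolves once the text has been rasterized
      console.log(clip.meta); // { duration, width, height } per the getter above

      // fabric.js-style round trip declared above: toJSON() -> fromObject()
      const restored = await TextClip.fromObject(clip.toJSON());
      return restored;
    }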
package/dist/clips/video-clip.d.ts
@@ -0,0 +1,200 @@
+import { MP4Sample } from '@webav/mp4box.js';
+import { file } from 'opfs-tools';
+import { BaseClip } from './base-clip';
+import { IClip, IPlaybackCapable } from './iclip';
+import { VideoClipJSON } from '../json-serialization';
+type OPFSToolFile = ReturnType<typeof file>;
+type MPClipCloneArgs = Awaited<ReturnType<typeof mp4FileToSamples>> & {
+    localFile: OPFSToolFile;
+};
+interface MP4DecoderConf {
+    video: VideoDecoderConfig | null;
+    audio: AudioDecoderConfig | null;
+}
+export interface IMP4ClipOpts {
+    audio?: boolean | {
+        volume: number;
+    };
+    /**
+     * Unsafe, may be deprecated at any time
+     */
+    __unsafe_hardwareAcceleration__?: HardwarePreference;
+}
+type ExtMP4Sample = Omit<MP4Sample, 'data'> & {
+    is_idr: boolean;
+    deleted?: boolean;
+    data: null | Uint8Array;
+};
+/**
+ * Video clip, parses MP4 files, uses {@link VideoClip.tick} to decode image frames at specified time on demand
+ *
+ * Can be used to implement video frame extraction, thumbnail generation, video editing and other functions
+ *
+ * @example
+ * // Load video clip asynchronously
+ * const videoClip = await VideoClip.fromUrl("clip.mp4", {
+ *   x: 0,
+ *   y: 0,
+ *   width: 1920,
+ *   height: 1080,
+ * });
+ *
+ * // Set timeline position
+ * videoClip.set({
+ *   display: {
+ *     from: 150, // frames
+ *     to: 450, // frames (10 seconds at 30fps)
+ *   },
+ * });
+ *
+ * @example
+ * // Direct constructor (for advanced use)
+ * new VideoClip((await fetch('<mp4 url>')).body)
+ * new VideoClip(mp4File.stream())
+ *
+ * @see {@link Compositor}
+ * @see [AVCanvas](../../av-canvas/classes/AVCanvas.html)
+ *
+ * @see [Decode and play video](https://webav-tech.github.io/WebAV/demo/1_1-decode-video)
+ */
+export declare class VideoClip extends BaseClip implements IPlaybackCapable {
+    private insId;
+    private logger;
+    ready: IClip['ready'];
+    private _meta;
+    get meta(): {
+        duration: number;
+        width: number;
+        height: number;
+        audioSampleRate: number;
+        audioChanCount: number;
+    };
+    private localFile;
+    /** Store binary data of video header (box: ftyp, moov) */
+    private headerBoxPos;
+    /**
+     * Provide binary data of video header (box: ftyp, moov)
+     * Use any mp4 demuxer to parse and get detailed video information
+     * Unit tests include sample code using mp4box.js
+     */
+    getFileHeaderBinData(): Promise<ArrayBuffer>;
+    /** Store video transform and rotation info, currently only restores rotation */
+    private parsedMatrix;
+    private vfRotater;
+    private volume;
+    private videoSamples;
+    private audioSamples;
+    private videoFrameFinder;
+    private audioFrameFinder;
+    private decoderConf;
+    private opts;
+    /**
+     * Whether to include audio track (hybrid JSON structure)
+     */
+    audio: boolean;
+    /**
+     * Load a video clip from a URL
+     * @param url Video URL
+     * @param opts Position and size options
+     * @returns Promise that resolves to a video clip
+     *
+     * @example
+     * const videoClip = await VideoClip.fromUrl("clip.mp4", {
+     *   x: 0,
+     *   y: 0,
+     *   width: 1920,
+     *   height: 1080,
+     * });
+     */
+    static fromUrl(url: string, opts?: {
+        x?: number;
+        y?: number;
+        width?: number;
+        height?: number;
+    }): Promise<VideoClip>;
+    constructor(source: OPFSToolFile | ReadableStream<Uint8Array> | MPClipCloneArgs, opts?: IMP4ClipOpts, src?: string);
+    /**
+     * Intercept data returned by {@link VideoClip.tick} method for secondary processing of image and audio data
+     * @param time Time when tick was called
+     * @param tickRet Data returned by tick
+     *
+     * @see [Remove video green screen background](https://webav-tech.github.io/WebAV/demo/3_2-chromakey-video)
+     */
+    tickInterceptor: <T extends Awaited<ReturnType<VideoClip['tick']>>>(time: number, tickRet: T) => Promise<T>;
+    /**
+     * Get image frame and audio data at specified time
+     * @param time Time in microseconds
+     */
+    tick(time: number): Promise<{
+        video?: VideoFrame;
+        audio: Float32Array[];
+        state: 'success' | 'done';
+    }>;
+    split(time: number): Promise<[this, this]>;
+    clone(): Promise<this>;
+    /**
+     * Split VideoClip into VideoClips containing only video track and audio track
+     * @returns VideoClip[]
+     */
+    splitTrack(): Promise<VideoClip[]>;
+    destroy(): void;
+    toJSON(main?: boolean): VideoClipJSON;
+    /**
+     * Create a VideoClip instance from a JSON object (fabric.js pattern)
+     * @param json The JSON object representing the clip
+     * @returns Promise that resolves to a VideoClip instance
+     */
+    static fromObject(json: VideoClipJSON): Promise<VideoClip>;
+    /**
+     * Create HTMLVideoElement for playback
+     */
+    createPlaybackElement(): Promise<{
+        element: HTMLVideoElement;
+        objectUrl?: string;
+    }>;
+    play(element: HTMLVideoElement | HTMLAudioElement, timeSeconds: number): Promise<void>;
+    pause(element: HTMLVideoElement | HTMLAudioElement): void;
+    seek(element: HTMLVideoElement | HTMLAudioElement, timeSeconds: number): Promise<void>;
+    syncPlayback(element: HTMLVideoElement | HTMLAudioElement, isPlaying: boolean, timeSeconds: number): void;
+    cleanupPlayback(element: HTMLVideoElement | HTMLAudioElement, objectUrl?: string): void;
+    /**
+     * Scale clip to fit within the scene dimensions while maintaining aspect ratio
+     * Similar to fabric.js scaleToFit
+     * @param sceneWidth Scene width
+     * @param sceneHeight Scene height
+     */
+    scaleToFit(sceneWidth: number, sceneHeight: number): Promise<void>;
+    /**
+     * Scale clip to fill the scene dimensions while maintaining aspect ratio
+     * May crop parts of the clip. Similar to fabric.js scaleToFill
+     * @param sceneWidth Scene width
+     * @param sceneHeight Scene height
+     */
+    scaleToFill(sceneWidth: number, sceneHeight: number): Promise<void>;
+    /**
+     * Center the clip within the scene dimensions
+     * Similar to fabric.js center
+     * @param sceneWidth Scene width
+     * @param sceneHeight Scene height
+     */
+    centerInScene(sceneWidth: number, sceneHeight: number): void;
+}
+declare function mp4FileToSamples(otFile: OPFSToolFile, opts?: IMP4ClipOpts): Promise<{
+    videoSamples: ExtMP4Sample[];
+    audioSamples: ExtMP4Sample[];
+    decoderConf: MP4DecoderConf;
+    headerBoxPos: {
+        start: number;
+        size: number;
+    }[];
+    parsedMatrix: {
+        perspective: number;
+        rotationRad: number;
+        rotationDeg: number;
+        scaleX: number;
+        scaleY: number;
+        translateX: number;
+        translateY: number;
+    };
+}>;
+export {};
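Note: a sketch of the frame-extraction flow declared above (fromUrl → ready → tick); times follow the documented microsecond unit for tick(). The '@designcombo/video' specifier and the `ready` await are assumptions:

    import { VideoClip } from '@designcombo/video'; // assumed specifier

    async function grabThumbnail(url: string): Promise<ImageBitmap | null> {
      const clip = await VideoClip.fromUrl(url, { x: 0, y: 0, width: 1920, height: 1080 });
      await clip.ready; // assumed: resolves once the MP4 header has been parsed

      // tick() takes microseconds and returns the decoded frame plus PCM audio for that slice.
      const { video, state } = await clip.tick(3e6); // frame at ~3 s
      if (state === 'success' && video) {
        const bitmap = await createImageBitmap(video);
        video.close(); // a VideoFrame must be closed to release decoder memory
        clip.destroy();
        return bitmap;
      }
      clip.destroy();
      return null;
    }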
package/dist/colorToUniform-Du0ROyNd.js
@@ -0,0 +1,274 @@
+import { S as M, G as x, a as F } from "./index-CjzowIhV.js";
+const T = {
+  normal: 0,
+  add: 1,
+  multiply: 2,
+  screen: 3,
+  overlay: 4,
+  erase: 5,
+  "normal-npm": 6,
+  "add-npm": 7,
+  "screen-npm": 8,
+  min: 9,
+  max: 10
+}, o = 0, a = 1, n = 2, l = 3, d = 4, u = 5, c = class b {
+  constructor() {
+    this.data = 0, this.blendMode = "normal", this.polygonOffset = 0, this.blend = !0, this.depthMask = !0;
+  }
+  /**
+   * Activates blending of the computed fragment color values.
+   * @default true
+   */
+  get blend() {
+    return !!(this.data & 1 << o);
+  }
+  set blend(t) {
+    !!(this.data & 1 << o) !== t && (this.data ^= 1 << o);
+  }
+  /**
+   * Activates adding an offset to depth values of polygon's fragments
+   * @default false
+   */
+  get offsets() {
+    return !!(this.data & 1 << a);
+  }
+  set offsets(t) {
+    !!(this.data & 1 << a) !== t && (this.data ^= 1 << a);
+  }
+  /** The culling settings for this state none - No culling back - Back face culling front - Front face culling */
+  set cullMode(t) {
+    if (t === "none") {
+      this.culling = !1;
+      return;
+    }
+    this.culling = !0, this.clockwiseFrontFace = t === "front";
+  }
+  get cullMode() {
+    return this.culling ? this.clockwiseFrontFace ? "front" : "back" : "none";
+  }
+  /**
+   * Activates culling of polygons.
+   * @default false
+   */
+  get culling() {
+    return !!(this.data & 1 << n);
+  }
+  set culling(t) {
+    !!(this.data & 1 << n) !== t && (this.data ^= 1 << n);
+  }
+  /**
+   * Activates depth comparisons and updates to the depth buffer.
+   * @default false
+   */
+  get depthTest() {
+    return !!(this.data & 1 << l);
+  }
+  set depthTest(t) {
+    !!(this.data & 1 << l) !== t && (this.data ^= 1 << l);
+  }
+  /**
+   * Enables or disables writing to the depth buffer.
+   * @default true
+   */
+  get depthMask() {
+    return !!(this.data & 1 << u);
+  }
+  set depthMask(t) {
+    !!(this.data & 1 << u) !== t && (this.data ^= 1 << u);
+  }
+  /**
+   * Specifies whether or not front or back-facing polygons can be culled.
+   * @default false
+   */
+  get clockwiseFrontFace() {
+    return !!(this.data & 1 << d);
+  }
+  set clockwiseFrontFace(t) {
+    !!(this.data & 1 << d) !== t && (this.data ^= 1 << d);
+  }
+  /**
+   * The blend mode to be applied when this state is set. Apply a value of `normal` to reset the blend mode.
+   * Setting this mode to anything other than NO_BLEND will automatically switch blending on.
+   * @default 'normal'
+   */
+  get blendMode() {
+    return this._blendMode;
+  }
+  set blendMode(t) {
+    this.blend = t !== "none", this._blendMode = t, this._blendModeId = T[t] || 0;
+  }
+  /**
+   * The polygon offset. Setting this property to anything other than 0 will automatically enable polygon offset fill.
+   * @default 0
+   */
+  get polygonOffset() {
+    return this._polygonOffset;
+  }
+  set polygonOffset(t) {
+    this.offsets = !!t, this._polygonOffset = t;
+  }
+  toString() {
+    return `[pixi.js/core:State blendMode=${this.blendMode} clockwiseFrontFace=${this.clockwiseFrontFace} culling=${this.culling} depthMask=${this.depthMask} polygonOffset=${this.polygonOffset}]`;
+  }
+  /**
+   * A quickly getting an instance of a State that is configured for 2d rendering.
+   * @returns a new State with values set for 2d rendering
+   */
+  static for2d() {
+    const t = new b();
+    return t.depthTest = !1, t.blend = !0, t;
+  }
+};
+c.default2d = c.for2d();
+let _ = c;
+const p = class f extends M {
+  /**
+   * @param options - The optional parameters of this filter.
+   */
+  constructor(t) {
+    t = { ...f.defaultOptions, ...t }, super(t), this.enabled = !0, this._state = _.for2d(), this.blendMode = t.blendMode, this.padding = t.padding, typeof t.antialias == "boolean" ? this.antialias = t.antialias ? "on" : "off" : this.antialias = t.antialias, this.resolution = t.resolution, this.blendRequired = t.blendRequired, this.clipToViewport = t.clipToViewport, this.addResource("uTexture", 0, 1), t.blendRequired && this.addResource("uBackTexture", 0, 3);
+  }
+  /**
+   * Applies the filter
+   * @param filterManager - The renderer to retrieve the filter from
+   * @param input - The input render target.
+   * @param output - The target to output to.
+   * @param clearMode - Should the output be cleared before rendering to it
+   */
+  apply(t, e, i, r) {
+    t.applyFilter(this, e, i, r);
+  }
+  /**
+   * Get the blend mode of the filter.
+   * @default "normal"
+   */
+  get blendMode() {
+    return this._state.blendMode;
+  }
+  /** Sets the blend mode of the filter. */
+  set blendMode(t) {
+    this._state.blendMode = t;
+  }
+  /**
+   * A short hand function to create a filter based of a vertex and fragment shader src.
+   * @param options
+   * @returns A shiny new PixiJS filter!
+   */
+  static from(t) {
+    const { gpu: e, gl: i, ...r } = t;
+    let m, g;
+    return e && (m = x.from(e)), i && (g = F.from(i)), new f({
+      gpuProgram: m,
+      glProgram: g,
+      ...r
+    });
+  }
+};
+p.defaultOptions = {
+  blendMode: "normal",
+  resolution: 1,
+  padding: 0,
+  antialias: "off",
+  blendRequired: !1,
+  clipToViewport: !0
+};
+let S = p;
+const h = {
+  name: "local-uniform-bit",
+  vertex: {
+    header: (
+      /* wgsl */
+      `
+
+      struct LocalUniforms {
+        uTransformMatrix:mat3x3<f32>,
+        uColor:vec4<f32>,
+        uRound:f32,
+      }
+
+      @group(1) @binding(0) var<uniform> localUniforms : LocalUniforms;
+      `
+    ),
+    main: (
+      /* wgsl */
+      `
+      vColor *= localUniforms.uColor;
+      modelMatrix *= localUniforms.uTransformMatrix;
+      `
+    ),
+    end: (
+      /* wgsl */
+      `
+      if(localUniforms.uRound == 1)
+      {
+        vPosition = vec4(roundPixels(vPosition.xy, globalUniforms.uResolution), vPosition.zw);
+      }
+      `
+    )
+  }
+}, k = {
+  ...h,
+  vertex: {
+    ...h.vertex,
+    // replace the group!
+    header: h.vertex.header.replace("group(1)", "group(2)")
+  }
+}, P = {
+  name: "local-uniform-bit",
+  vertex: {
+    header: (
+      /* glsl */
+      `
+
+      uniform mat3 uTransformMatrix;
+      uniform vec4 uColor;
+      uniform float uRound;
+      `
+    ),
+    main: (
+      /* glsl */
+      `
+      vColor *= uColor;
+      modelMatrix = uTransformMatrix;
+      `
+    ),
+    end: (
+      /* glsl */
+      `
+      if(uRound == 1.)
+      {
+        gl_Position.xy = roundPixels(gl_Position.xy, uResolution);
+      }
+      `
+    )
+  }
+};
+class w {
+  constructor() {
+    this.batcherName = "default", this.topology = "triangle-list", this.attributeSize = 4, this.indexSize = 6, this.packAsQuad = !0, this.roundPixels = 0, this._attributeStart = 0, this._batcher = null, this._batch = null;
+  }
+  get blendMode() {
+    return this.renderable.groupBlendMode;
+  }
+  get color() {
+    return this.renderable.groupColorAlpha;
+  }
+  reset() {
+    this.renderable = null, this.texture = null, this._batcher = null, this._batch = null, this.bounds = null;
+  }
+  destroy() {
+  }
+}
+function R(s, t, e) {
+  const i = (s >> 24 & 255) / 255;
+  t[e++] = (s & 255) / 255 * i, t[e++] = (s >> 8 & 255) / 255 * i, t[e++] = (s >> 16 & 255) / 255 * i, t[e++] = i;
+}
+export {
+  w as B,
+  S as F,
+  _ as S,
+  h as a,
+  P as b,
+  R as c,
+  k as l
+};
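Note on the chunk above: it is mangled PixiJS bundle output. The State class packs its boolean render flags into single bits of `this.data` (bit 0 = blend, 1 = offsets, 2 = culling, 3 = depthTest, 4 = clockwiseFrontFace, 5 = depthMask), and the exported helper `R` (re-exported as `c`) expands a 32-bit color with alpha in the top byte into four premultiplied-alpha floats. A readable re-expression of `R`, with names chosen for illustration only:

    // Illustrative equivalent of the minified helper R(s, t, e); names are not from the bundle.
    function colorToPremultipliedRgba(
      color: number,                // 0xAABBGGRR layout: alpha in the top byte, red in the lowest
      out: Float32Array | number[],
      offset: number,
    ): void {
      const alpha = ((color >>> 24) & 0xff) / 255;
      out[offset++] = ((color & 0xff) / 255) * alpha;          // red, premultiplied
      out[offset++] = (((color >>> 8) & 0xff) / 255) * alpha;  // green, premultiplied
      out[offset++] = (((color >>> 16) & 0xff) / 255) * alpha; // blue, premultiplied
      out[offset] = alpha;                                     // straight alpha
    }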