ugcinc-render 1.3.13 → 1.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +94 -2
- package/dist/index.d.ts +94 -2
- package/dist/index.js +302 -55
- package/dist/index.mjs +289 -55
- package/package.json +1 -1
package/dist/index.d.mts
CHANGED
@@ -457,6 +457,15 @@ interface VideoEditorNodeConfig {
     previewUrls?: Record<string, string>;
     previewTextValues?: Record<string, string>;
 }
+/**
+ * Segment position on timeline (calculated from segment timing properties)
+ */
+interface SegmentTimelinePosition {
+    /** Start time in milliseconds */
+    startMs: number;
+    /** Duration in milliseconds */
+    durationMs: number;
+}
 
 /**
  * Segment types for the rendering system
@@ -756,6 +765,8 @@ interface ImageEditorCompositionProps {
     imageUrls?: Record<string, string | null>;
     /** Text values keyed by textInputId (when using elements, for autoWidth calculation) */
     textValues?: Record<string, string>;
+    /** Dynamic crop configuration */
+    dynamicCrop?: DynamicCropConfig;
 }
 /**
  * ImageEditorComposition renders a complete image editor configuration.
@@ -790,7 +801,7 @@ interface ImageEditorCompositionProps {
  * />
  * ```
  */
-declare function ImageEditorComposition({ config, sources, scale, elements, width, height, backgroundType, backgroundColor, backgroundFit, backgroundUrl, imageUrls, textValues, }: ImageEditorCompositionProps): react_jsx_runtime.JSX.Element;
+declare function ImageEditorComposition({ config, sources, scale, elements, width, height, backgroundType, backgroundColor, backgroundFit, backgroundUrl, imageUrls, textValues, dynamicCrop, }: ImageEditorCompositionProps): react_jsx_runtime.JSX.Element;
 
 interface VideoEditorCompositionProps {
     /** The editor configuration to render */
@@ -1144,6 +1155,87 @@ declare function getReferenceElementX(elements: ImageEditorElement[], elementId:
  */
 declare function getReferenceElementY(elements: ImageEditorElement[], elementId: string): ImageEditorElement | null;
 
+/**
+ * Utility functions for calculating dynamic crop bounds
+ */
+
+/**
+ * Calculate dynamic crop bounds based on element positions
+ *
+ * @param elements - Array of resolved elements with absolute positions
+ * @param dynamicCrop - Crop configuration
+ * @param canvasWidth - Original canvas width
+ * @param canvasHeight - Original canvas height
+ * @returns CropBounds with x, y, width, height
+ */
+declare function calculateCropBounds(elements: ImageEditorElement[], dynamicCrop: DynamicCropConfig | undefined, canvasWidth: number, canvasHeight: number): CropBounds;
+/**
+ * Check if dynamic crop is enabled (either vertical or horizontal)
+ */
+declare function isDynamicCropEnabled(dynamicCrop: DynamicCropConfig | undefined): boolean;
+
+/**
+ * Timeline utility functions for video editor
+ *
+ * These functions calculate segment positions on the timeline based on
+ * the segment/overlay timing model. Used by both the webapp UI and
+ * the VideoEditorComposition for rendering.
+ */
+
+/**
+ * Create a default TimeValue for segment offsets
+ */
+declare function defaultOffset(mode?: TimeMode): TimeValue;
+/**
+ * Get base segments (no parentId) for a channel
+ * Base segments are the primary timeline elements that overlays attach to
+ */
+declare function getBaseSegments(channel: VideoEditorChannel): VideoEditorSegment[];
+/**
+ * Get overlays for a specific parent segment (or global overlays if parentId is null)
+ */
+declare function getOverlays(channel: VideoEditorChannel, parentId: string | null): VideoEditorSegment[];
+/**
+ * Calculate segment position on timeline
+ *
+ * For base segments:
+ * - Position is calculated from offset and previous segments
+ * - Duration is from the duration property or default (5 seconds)
+ *
+ * For overlay segments:
+ * - Position is calculated relative to parent using relativeStart/relativeEnd
+ * - relativeStart/relativeEnd are fractions (0-1) of parent duration
+ */
+declare function getSegmentTimelinePosition(segment: VideoEditorSegment, baseSegments: VideoEditorSegment[], channel: VideoEditorChannel): SegmentTimelinePosition;
+/**
+ * Check if a segment is visible at a given time
+ */
+declare function isSegmentVisibleAtTime(segment: VideoEditorSegment, time: number, channel: VideoEditorChannel): boolean;
+/**
+ * Calculate estimated total duration based on segments
+ */
+declare function calculateEstimatedDuration(channels: VideoEditorChannel[]): number;
+/**
+ * Calculate the timeline content end time (used for both ruler and scroll width)
+ */
+declare function calculateTimelineContentEnd(channel: VideoEditorChannel): number;
+/**
+ * Format time in mm:ss.ms
+ */
+declare function formatTime(ms: number): string;
+/**
+ * Parse time string to milliseconds
+ */
+declare function parseTime(timeStr: string): number;
+/**
+ * Generate a unique segment ID
+ */
+declare function generateSegmentId(): string;
+/**
+ * Generate a unique overlay ID
+ */
+declare function generateOverlayId(): string;
+
 /**
  * Hook exports for ugcinc-render
  *
@@ -1203,4 +1295,4 @@ declare function useResolvedPositions(elements: ImageEditorElement[], textValues
 
 declare const RenderRoot: React.FC;
 
-export { type AudioSegment, type BaseEditorConfig, type BaseSegment, type BorderRadiusConfig, type Channel, type CropAxisConfig, type CropBoundary, type CropBounds, DIMENSION_PRESETS, type DimensionPreset, type DimensionPresetKey, type DynamicCropConfig, type EditorConfig, type EditorSegment, FONT_FAMILIES, FONT_URLS, type FitDimensions, type FitMode, type FontType, type FontWeight, type HorizontalAnchor, type HorizontalSelfAnchor, type Hyphenation, IMAGE_DEFAULTS, ImageEditorComposition, type ImageEditorCompositionProps, type ImageEditorConfig, type ImageEditorElement, type ImageEditorNodeConfig, ImageElement, type ImageElementProps, type ImageSegment, type PictureSegment, type PositionResolutionError, type PositionResolutionResult, type RelativePositionConfigX, type RelativePositionConfigY, RenderRoot, type Segment, type SegmentType, type StaticSegment, TEXT_DEFAULTS, type TextAlignment, type TextDirection, TextElement, type TextElementProps, type TextOverflow, type TextSegment, type TextWrap, type TimeMode, type TimeValue, VIDEO_DEFAULTS, VISUAL_DEFAULTS, type VerticalAlignment, type VerticalAnchor, type VerticalSelfAnchor, type VideoEditorAudioSegment, type VideoEditorBaseSegment, type VideoEditorChannel, VideoEditorComposition, type VideoEditorCompositionProps, type VideoEditorConfig, type VideoEditorImageSegment, type VideoEditorNodeConfig, type VideoEditorSegment, type VideoEditorTextSegment, type VideoEditorVideoSegment, type VideoEditorVisualSegment, VideoElement, type VideoElementProps, type VideoSegment, type VisualSegment, type VisualSegmentUnion, type WordBreak, applyImageDefaults, applyTextDefaults, applyVideoDefaults, areFontsLoaded, buildFontString, calculateAutoWidthDimensions, calculateFitDimensions, calculateLineWidth, canSetAsReference, getBorderRadii, getDependentElements, getFontFamily, getReferenceElementX, getReferenceElementY, hexToRgba, parseHexColor, preloadFonts, resolveElementPositions, useFontsLoaded, useImageLoader, useImagePreloader, useResolvedPositions, wrapText };
+export { type AudioSegment, type BaseEditorConfig, type BaseSegment, type BorderRadiusConfig, type Channel, type CropAxisConfig, type CropBoundary, type CropBounds, DIMENSION_PRESETS, type DimensionPreset, type DimensionPresetKey, type DynamicCropConfig, type EditorConfig, type EditorSegment, FONT_FAMILIES, FONT_URLS, type FitDimensions, type FitMode, type FontType, type FontWeight, type HorizontalAnchor, type HorizontalSelfAnchor, type Hyphenation, IMAGE_DEFAULTS, ImageEditorComposition, type ImageEditorCompositionProps, type ImageEditorConfig, type ImageEditorElement, type ImageEditorNodeConfig, ImageElement, type ImageElementProps, type ImageSegment, type PictureSegment, type PositionResolutionError, type PositionResolutionResult, type RelativePositionConfigX, type RelativePositionConfigY, RenderRoot, type Segment, type SegmentTimelinePosition, type SegmentType, type StaticSegment, TEXT_DEFAULTS, type TextAlignment, type TextDirection, TextElement, type TextElementProps, type TextOverflow, type TextSegment, type TextWrap, type TimeMode, type TimeValue, VIDEO_DEFAULTS, VISUAL_DEFAULTS, type VerticalAlignment, type VerticalAnchor, type VerticalSelfAnchor, type VideoEditorAudioSegment, type VideoEditorBaseSegment, type VideoEditorChannel, VideoEditorComposition, type VideoEditorCompositionProps, type VideoEditorConfig, type VideoEditorImageSegment, type VideoEditorNodeConfig, type VideoEditorSegment, type VideoEditorTextSegment, type VideoEditorVideoSegment, type VideoEditorVisualSegment, VideoElement, type VideoElementProps, type VideoSegment, type VisualSegment, type VisualSegmentUnion, type WordBreak, applyImageDefaults, applyTextDefaults, applyVideoDefaults, areFontsLoaded, buildFontString, calculateAutoWidthDimensions, calculateCropBounds, calculateEstimatedDuration, calculateFitDimensions, calculateLineWidth, calculateTimelineContentEnd, canSetAsReference, defaultOffset, formatTime, generateOverlayId, generateSegmentId, getBaseSegments, getBorderRadii, getDependentElements, getFontFamily, getOverlays, getReferenceElementX, getReferenceElementY, getSegmentTimelinePosition, hexToRgba, isDynamicCropEnabled, isSegmentVisibleAtTime, parseHexColor, parseTime, preloadFonts, resolveElementPositions, useFontsLoaded, useImageLoader, useImagePreloader, useResolvedPositions, wrapText };
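Note: for consumers of the declarations above, the new surface amounts to one extra composition prop plus the crop and timeline helpers. A minimal sketch of passing the new prop, assuming element ids and padding values that are purely illustrative:

    import { ImageEditorComposition, type DynamicCropConfig } from "ugcinc-render";

    // Crop vertically to the band between two named elements (ids are made up here),
    // with 40px of breathing room on each side. Field names follow the shipped
    // implementation of calculateCropBounds; exact optionality is not visible in this diff.
    const dynamicCrop = {
      vertical: {
        enabled: true,
        mode: "between-elements",
        startBoundary: { elementId: "headline" },
        endBoundary: { elementId: "cta" },
        paddingStart: 40,
        paddingEnd: 40,
      },
    } as DynamicCropConfig;

    // Threaded straight through the new prop:
    // <ImageEditorComposition config={config} sources={sources} scale={1} dynamicCrop={dynamicCrop} />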
package/dist/index.d.ts
CHANGED
The changes to index.d.ts are identical to those shown above for package/dist/index.d.mts: the same five hunks (@@ -457,6 +457,15 @@, @@ -756,6 +765,8 @@, @@ -790,7 +801,7 @@, @@ -1144,6 +1155,87 @@, @@ -1203,4 +1295,4 @@) adding the SegmentTimelinePosition interface, the dynamicCrop prop on ImageEditorCompositionProps, the crop-bounds and timeline utility declarations, and the expanded export list.
package/dist/index.js
CHANGED
@@ -39,16 +39,29 @@ __export(index_exports, {
   areFontsLoaded: () => areFontsLoaded,
   buildFontString: () => buildFontString,
   calculateAutoWidthDimensions: () => calculateAutoWidthDimensions,
+  calculateCropBounds: () => calculateCropBounds,
+  calculateEstimatedDuration: () => calculateEstimatedDuration,
   calculateFitDimensions: () => calculateFitDimensions,
   calculateLineWidth: () => calculateLineWidth,
+  calculateTimelineContentEnd: () => calculateTimelineContentEnd,
   canSetAsReference: () => canSetAsReference,
+  defaultOffset: () => defaultOffset,
+  formatTime: () => formatTime,
+  generateOverlayId: () => generateOverlayId,
+  generateSegmentId: () => generateSegmentId,
+  getBaseSegments: () => getBaseSegments,
   getBorderRadii: () => getBorderRadii,
   getDependentElements: () => getDependentElements,
   getFontFamily: () => getFontFamily,
+  getOverlays: () => getOverlays,
   getReferenceElementX: () => getReferenceElementX,
   getReferenceElementY: () => getReferenceElementY,
+  getSegmentTimelinePosition: () => getSegmentTimelinePosition,
   hexToRgba: () => hexToRgba,
+  isDynamicCropEnabled: () => isDynamicCropEnabled,
+  isSegmentVisibleAtTime: () => isSegmentVisibleAtTime,
   parseHexColor: () => parseHexColor,
+  parseTime: () => parseTime,
   preloadFonts: () => preloadFonts,
   resolveElementPositions: () => resolveElementPositions,
   useFontsLoaded: () => useFontsLoaded,
@@ -799,6 +812,113 @@ function getReferenceElementY(elements, elementId) {
   return elements.find((e) => e.id === element.relativePositionY.elementId) ?? null;
 }
 
+// src/utils/cropBounds.ts
+function calculateCropBounds(elements, dynamicCrop, canvasWidth, canvasHeight) {
+  if (!dynamicCrop) {
+    return { x: 0, y: 0, width: canvasWidth, height: canvasHeight };
+  }
+  const elementMap = /* @__PURE__ */ new Map();
+  for (const elem of elements) {
+    elementMap.set(elem.id, elem);
+  }
+  const resolveBoundary = (boundary) => {
+    if (!boundary) return void 0;
+    if (boundary.elementId) return boundary.elementId;
+    return void 0;
+  };
+  let cropY = 0;
+  let cropHeight = canvasHeight;
+  if (dynamicCrop.vertical?.enabled) {
+    const vCrop = dynamicCrop.vertical;
+    const paddingStart = vCrop.paddingStart ?? 0;
+    const paddingEnd = vCrop.paddingEnd ?? 0;
+    if (vCrop.mode === "all-elements") {
+      let minY = canvasHeight;
+      let maxY = 0;
+      for (const elem of elements) {
+        minY = Math.min(minY, elem.y);
+        maxY = Math.max(maxY, elem.y + elem.height);
+      }
+      if (elements.length > 0) {
+        cropY = Math.max(0, minY - paddingStart);
+        const bottomY = Math.min(canvasHeight, maxY + paddingEnd);
+        cropHeight = bottomY - cropY;
+      }
+    } else if (vCrop.mode === "between-elements") {
+      const startElementId = resolveBoundary(vCrop.startBoundary);
+      const endElementId = resolveBoundary(vCrop.endBoundary);
+      let topY = 0;
+      let bottomY = canvasHeight;
+      if (startElementId) {
+        const startElem = elementMap.get(startElementId);
+        if (startElem) {
+          topY = startElem.y;
+        }
+      }
+      if (endElementId) {
+        const endElem = elementMap.get(endElementId);
+        if (endElem) {
+          bottomY = endElem.y + endElem.height;
+        }
+      }
+      cropY = Math.max(0, topY - paddingStart);
+      const adjustedBottom = Math.min(canvasHeight, bottomY + paddingEnd);
+      cropHeight = adjustedBottom - cropY;
+    }
+    if (vCrop.minSize && cropHeight < vCrop.minSize) {
+      cropHeight = vCrop.minSize;
+    }
+  }
+  let cropX = 0;
+  let cropWidth = canvasWidth;
+  if (dynamicCrop.horizontal?.enabled) {
+    const hCrop = dynamicCrop.horizontal;
+    const paddingStart = hCrop.paddingStart ?? 0;
+    const paddingEnd = hCrop.paddingEnd ?? 0;
+    if (hCrop.mode === "all-elements") {
+      let minX = canvasWidth;
+      let maxX = 0;
+      for (const elem of elements) {
+        minX = Math.min(minX, elem.x);
+        maxX = Math.max(maxX, elem.x + elem.width);
+      }
+      if (elements.length > 0) {
+        cropX = Math.max(0, minX - paddingStart);
+        const rightX = Math.min(canvasWidth, maxX + paddingEnd);
+        cropWidth = rightX - cropX;
+      }
+    } else if (hCrop.mode === "between-elements") {
+      const startElementId = resolveBoundary(hCrop.startBoundary);
+      const endElementId = resolveBoundary(hCrop.endBoundary);
+      let leftX = 0;
+      let rightX = canvasWidth;
+      if (startElementId) {
+        const startElem = elementMap.get(startElementId);
+        if (startElem) {
+          leftX = startElem.x;
+        }
+      }
+      if (endElementId) {
+        const endElem = elementMap.get(endElementId);
+        if (endElem) {
+          rightX = endElem.x + endElem.width;
+        }
+      }
+      cropX = Math.max(0, leftX - paddingStart);
+      const adjustedRight = Math.min(canvasWidth, rightX + paddingEnd);
+      cropWidth = adjustedRight - cropX;
+    }
+    if (hCrop.minSize && cropWidth < hCrop.minSize) {
+      cropWidth = hCrop.minSize;
+    }
+  }
+  return { x: cropX, y: cropY, width: cropWidth, height: cropHeight };
+}
+function isDynamicCropEnabled(dynamicCrop) {
+  if (!dynamicCrop) return false;
+  return !!(dynamicCrop.vertical?.enabled || dynamicCrop.horizontal?.enabled);
+}
+
 // src/compositions/ImageEditorComposition.tsx
 var import_jsx_runtime3 = require("react/jsx-runtime");
 function getSortedSegments(config) {
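Note: a quick sanity check of the new helper, under made-up element positions (the element objects are trimmed to the fields calculateCropBounds actually reads; a real ImageEditorElement carries more):

    import { calculateCropBounds } from "ugcinc-render";

    // Hypothetical resolved elements on a 1080x1920 canvas.
    const elements = [
      { id: "a", x: 100, y: 300, width: 880, height: 200 },
      { id: "b", x: 100, y: 900, width: 880, height: 400 },
    ] as any[]; // trimmed for illustration

    const bounds = calculateCropBounds(
      elements,
      { vertical: { enabled: true, mode: "all-elements", paddingStart: 50, paddingEnd: 50 } } as any,
      1080,
      1920
    );
    // minY = 300, maxY = 1300; padded to 250..1350 and clamped to the canvas:
    // bounds => { x: 0, y: 250, width: 1080, height: 1100 }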
@@ -879,8 +999,11 @@ function ImageEditorComposition({
   backgroundFit = "cover",
   backgroundUrl,
   imageUrls = {},
-  textValues = {}
+  textValues = {},
+  dynamicCrop
 }) {
+  const canvasWidth = width ?? config?.width ?? 1080;
+  const canvasHeight = height ?? config?.height ?? 1920;
   const resolvedElements = (0, import_react3.useMemo)(() => {
     if (!elements) return null;
     const result = resolveElementPositions(elements, textValues);
@@ -889,6 +1012,10 @@ function ImageEditorComposition({
     }
     return result.elements;
   }, [elements, textValues]);
+  const cropBounds = (0, import_react3.useMemo)(() => {
+    if (!isDynamicCropEnabled(dynamicCrop) || !resolvedElements) return null;
+    return calculateCropBounds(resolvedElements, dynamicCrop, canvasWidth, canvasHeight);
+  }, [resolvedElements, dynamicCrop, canvasWidth, canvasHeight]);
   const segmentsFromElements = (0, import_react3.useMemo)(() => {
     if (!resolvedElements) return null;
     const segments = [];
@@ -905,10 +1032,10 @@ function ImageEditorComposition({
     }
     return segments.sort((a, b) => (a.zIndex ?? 0) - (b.zIndex ?? 0));
   }, [resolvedElements, imageUrls, textValues]);
-  const canvasWidth = width ?? config?.width ?? 1080;
-  const canvasHeight = height ?? config?.height ?? 1920;
   const bgFit = backgroundFit ?? "cover";
   const bgUrl = backgroundUrl ?? sources.background;
+  const cropOffsetX = cropBounds?.x ?? 0;
+  const cropOffsetY = cropBounds?.y ?? 0;
   const contentSegments = segmentsFromElements ?? (() => {
     if (!config) return [];
     const sorted = getSortedSegments(config);
@@ -929,61 +1056,73 @@ function ImageEditorComposition({
     return void 0;
   };
   const containerBgColor = backgroundType === "color" && backgroundColor ? backgroundColor : "#000000";
 [the old return expression was only partially captured in the source diff; the recoverable removed lines are:]
-  return /* @__PURE__ */ (0, import_jsx_runtime3.
-  }
-  ),
-  legacyBackgroundSegment && !segmentsFromElements && /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
-  BackgroundImage,
-  {
-  segment: legacyBackgroundSegment,
-  src: getSource(legacyBackgroundSegment),
-  width: canvasWidth,
-  height: canvasHeight,
-  scale
-  }
-  ),
-  contentSegments.map((segment) => {
-  if (segment.type === "text") {
-  return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
-  TextElement,
-  {
-  ImageElement,
-  {
-  segment,
-  src,
+  return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(import_remotion2.AbsoluteFill, { style: { backgroundColor: containerBgColor }, children: /* @__PURE__ */ (0, import_jsx_runtime3.jsxs)(
+    "div",
+    {
+      style: {
+        position: "absolute",
+        left: -cropOffsetX * scale,
+        top: -cropOffsetY * scale,
+        width: canvasWidth * scale,
+        height: canvasHeight * scale
+      },
+      children: [
+        backgroundType === "image" && bgUrl && segmentsFromElements && /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
+          import_remotion2.Img,
+          {
+            src: bgUrl,
+            style: {
+              position: "absolute",
+              left: 0,
+              top: 0,
+              width: canvasWidth * scale,
+              height: canvasHeight * scale,
+              objectFit: bgFit
+            }
+          }
+        ),
+        legacyBackgroundSegment && !segmentsFromElements && /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
+          BackgroundImage,
+          {
+            segment: legacyBackgroundSegment,
+            src: getSource(legacyBackgroundSegment),
+            width: canvasWidth,
+            height: canvasHeight,
+            scale
+          }
+        ),
+        contentSegments.map((segment) => {
+          if (segment.type === "text") {
+            return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
+              TextElement,
+              {
+                segment,
+                scale
+              },
+              segment.id
+            );
+          }
+          if (segment.type === "image") {
+            const src = segment.source || getSource(segment);
+            if (!src) {
+              console.warn(`No source found for image segment: ${segment.id}`);
+              return null;
+            }
+            return /* @__PURE__ */ (0, import_jsx_runtime3.jsx)(
+              ImageElement,
+              {
+                segment,
+                src,
+                scale
+              },
+              segment.id
+            );
+          }
+          return null;
+        })
+      ]
+    }
+  ) });
 }
 function BackgroundImage({
   segment,
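Note: the crop is applied purely with CSS offsets. Content is still laid out at full canvas size, but the wrapper div is shifted by the negated crop origin so the cropped region starts at (0, 0) of the output frame. With assumed values:

    // Assumed values, mirroring the wrapper style computed above.
    const cropBounds = { x: 0, y: 250, width: 1080, height: 1100 };
    const scale = 1;
    const wrapperStyle = {
      position: "absolute" as const,
      left: -cropBounds.x * scale, //    0
      top: -cropBounds.y * scale,  // -250: canvas row 250 becomes the top of the frame
      width: 1080 * scale,
      height: 1920 * scale,
    };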
@@ -1333,6 +1472,101 @@ function calculateFitDimensions({
   };
 }
 
+// src/utils/timeline.ts
+function defaultOffset(mode = "flexible") {
+  return mode === "flexible" ? { type: "relative", value: 0 } : { type: "absolute", value: 0 };
+}
+function getBaseSegments(channel) {
+  return channel.segments.filter((s) => s.parentId === void 0);
+}
+function getOverlays(channel, parentId) {
+  return channel.segments.filter((s) => s.parentId === (parentId ?? void 0));
+}
+function getSegmentTimelinePosition(segment, baseSegments, channel) {
+  if (segment.parentId) {
+    const parent = channel.segments.find((s) => s.id === segment.parentId);
+    if (parent) {
+      const parentPos = getSegmentTimelinePosition(parent, baseSegments, channel);
+      const relStart = segment.relativeStart ?? 0;
+      const relEnd = segment.relativeEnd ?? 1;
+      return {
+        startMs: parentPos.startMs + parentPos.durationMs * relStart,
+        durationMs: parentPos.durationMs * (relEnd - relStart)
+      };
+    }
+  }
+  const baseIndex = baseSegments.findIndex((s) => s.id === segment.id);
+  let accumulatedTime = 0;
+  for (let i = 0; i < baseIndex; i++) {
+    const prev = baseSegments[i];
+    if (prev) {
+      accumulatedTime += prev.duration?.type === "absolute" ? prev.duration.value : 5e3;
+    }
+  }
+  const startMs = segment.offset.type === "absolute" ? segment.offset.value : accumulatedTime;
+  const durationMs = segment.duration?.type === "absolute" ? segment.duration.value : 5e3;
+  return { startMs, durationMs };
+}
+function isSegmentVisibleAtTime(segment, time, channel) {
+  const baseSegments = getBaseSegments(channel);
+  const { startMs, durationMs } = getSegmentTimelinePosition(segment, baseSegments, channel);
+  const endMs = startMs + durationMs;
+  return time >= startMs && time < endMs;
+}
+function calculateEstimatedDuration(channels) {
+  let maxDuration = 5e3;
+  for (const channel of channels) {
+    let channelTime = 0;
+    for (const segment of channel.segments) {
+      if (segment.parentId) continue;
+      if (segment.offset.type === "absolute") {
+        channelTime = segment.offset.value;
+      } else {
+        channelTime += 5e3;
+      }
+      if (segment.duration?.type === "absolute") {
+        channelTime += segment.duration.value;
+      } else {
+        channelTime += 5e3;
+      }
+    }
+    maxDuration = Math.max(maxDuration, channelTime);
+  }
+  return maxDuration;
+}
+function calculateTimelineContentEnd(channel) {
+  const baseSegments = getBaseSegments(channel);
+  let lastEnd = 0;
+  for (const segment of baseSegments) {
+    const { startMs, durationMs } = getSegmentTimelinePosition(segment, baseSegments, channel);
+    lastEnd = Math.max(lastEnd, startMs + durationMs);
+  }
+  return Math.ceil((lastEnd + 2e3) / 1e3) * 1e3;
+}
+function formatTime(ms) {
+  const totalSeconds = Math.floor(ms / 1e3);
+  const minutes = Math.floor(totalSeconds / 60);
+  const seconds = totalSeconds % 60;
+  const milliseconds = Math.floor(ms % 1e3 / 10);
+  return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}.${milliseconds.toString().padStart(2, "0")}`;
+}
+function parseTime(timeStr) {
+  const parts = timeStr.split(":");
+  if (parts.length !== 2) return 0;
+  const [minStr, secPart] = parts;
+  const minutes = parseInt(minStr ?? "0", 10) || 0;
+  const secParts = (secPart ?? "0").split(".");
+  const seconds = parseInt(secParts[0] ?? "0", 10) || 0;
+  const ms = parseInt((secParts[1] ?? "0").padEnd(2, "0").slice(0, 2), 10) * 10 || 0;
+  return (minutes * 60 + seconds) * 1e3 + ms;
+}
+function generateSegmentId() {
+  return `segment-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;
+}
+function generateOverlayId() {
+  return `overlay-${Date.now()}-${Math.random().toString(36).slice(2, 9)}`;
+}
+
 // src/hooks/index.ts
 var import_react6 = require("react");
 function useFontsLoaded() {
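Note: how the timing model behaves, sketched against a hypothetical channel (segment objects are trimmed to the timing fields these helpers read; real VideoEditorSegment values carry more):

    import {
      getBaseSegments,
      getSegmentTimelinePosition,
      isSegmentVisibleAtTime,
      formatTime,
    } from "ugcinc-render";

    // One 8-second base clip with an overlay pinned to its middle half.
    const channel = {
      segments: [
        { id: "clip-1", offset: { type: "relative", value: 0 }, duration: { type: "absolute", value: 8000 } },
        { id: "sticker", parentId: "clip-1", relativeStart: 0.25, relativeEnd: 0.75 },
      ],
    } as any; // trimmed for illustration

    const base = getBaseSegments(channel); // [clip-1]
    const pos = getSegmentTimelinePosition(channel.segments[1], base, channel);
    // pos => { startMs: 2000, durationMs: 4000 }  (25%..75% of the 8s parent)
    isSegmentVisibleAtTime(channel.segments[1], 3000, channel); // true
    formatTime(pos.startMs); // "00:02.00"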
@@ -1489,16 +1723,29 @@ var RenderRoot = () => {
   areFontsLoaded,
   buildFontString,
   calculateAutoWidthDimensions,
+  calculateCropBounds,
+  calculateEstimatedDuration,
   calculateFitDimensions,
   calculateLineWidth,
+  calculateTimelineContentEnd,
   canSetAsReference,
+  defaultOffset,
+  formatTime,
+  generateOverlayId,
+  generateSegmentId,
+  getBaseSegments,
   getBorderRadii,
   getDependentElements,
   getFontFamily,
+  getOverlays,
   getReferenceElementX,
   getReferenceElementY,
+  getSegmentTimelinePosition,
   hexToRgba,
+  isDynamicCropEnabled,
+  isSegmentVisibleAtTime,
   parseHexColor,
+  parseTime,
   preloadFonts,
   resolveElementPositions,
   useFontsLoaded,
package/dist/index.mjs
CHANGED
The ESM bundle picks up the same source changes as package/dist/index.js, expressed with ESM bindings (jsx3/jsxs, AbsoluteFill, Img2 and useMemo3 instead of the CJS require wrappers, and a named export list at the end of the file instead of the __export map): the new src/utils/cropBounds.ts block (@@ -738,6 +738,113 @@), the dynamicCrop and canvas-size changes in ImageEditorComposition (@@ -818,8 +925,11 @@, @@ -828,6 +938,10 @@, @@ -844,10 +958,10 @@), the rewritten return expression with the crop-offset wrapper div (@@ -868,61 +982,73 @@), the new src/utils/timeline.ts block (@@ -1272,6 +1398,101 @@), and the same thirteen new entries in the export list (@@ -1427,16 +1648,29 @@).
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ugcinc-render",
-  "version": "1.3.13",
+  "version": "1.4.0",
   "description": "Unified rendering package for UGC Inc - shared types, components, and compositions for pixel-perfect client/server rendering",
   "main": "dist/index.js",
   "module": "dist/index.mjs",