scrolltube 2.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENTS.md +46 -0
- package/CLAUDE.md +1 -0
- package/README.md +148 -0
- package/dist/cli/_not_used_api.d.ts +1 -0
- package/dist/cli/_not_used_api.js +21 -0
- package/dist/core/CoreEngine.d.ts +74 -0
- package/dist/core/CoreOrchestrator.d.ts +39 -0
- package/dist/core/WebGLRenderer.d.ts +20 -0
- package/dist/core/index.d.ts +10 -0
- package/dist/core/scrolltube.umd.min.js +14 -0
- package/dist/core/types.d.ts +102 -0
- package/dist/core/types.js +8 -0
- package/dist/pipeline/browser-driver.d.ts +31 -0
- package/dist/pipeline/browser-driver.js +184 -0
- package/dist/pipeline/cloud-service.d.ts +17 -0
- package/dist/pipeline/cloud-service.js +109 -0
- package/dist/pipeline/index.d.ts +21 -0
- package/dist/pipeline/index.js +227 -0
- package/dist/pipeline/node-driver.d.ts +18 -0
- package/dist/pipeline/node-driver.js +108 -0
- package/dist/pipeline/types.d.ts +43 -0
- package/dist/pipeline/types.js +2 -0
- package/dist/react/ScrollTubeProvider.d.ts +49 -0
- package/dist/react/index.d.ts +1 -0
- package/dist/react/index.js +14 -0
- package/docs/ai-integration.md +72 -0
- package/docs/architecture.md +55 -0
- package/docs/asset-pipeline.md +105 -0
- package/docs/react-integration.md +89 -0
- package/package.json +99 -0
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
/**
 * SCROLLTUBE - DECLARATIVE SCHEMA
 *
 * This file defines the core data structures that allow an AI Agent
 * to describe a scroll experience in one step.
 */
/**
 * Root configuration object for one scroll experience:
 * global settings, the asset library, and the scroll timeline.
 */
export interface ProjectConfiguration {
    /** Schema/config version string. NOTE(review): exact format (semver?) not shown here — confirm against producer. */
    version: string;
    /** Global rendering/scroll settings. */
    settings: ProjectSettings;
    /** All frame-sequence assets referenced by the timeline's scenes (via `assetId`). */
    assets: SequenceAsset[];
    /** Ordered scene layout mapped onto scroll progress. */
    timeline: TimelineDefinition;
}
/** Project-wide settings. */
export interface ProjectSettings {
    /** Design-time reference resolution in pixels. */
    baseResolution: {
        width: number;
        height: number;
    };
    /** Unit used for scroll distances: viewport-heights ('vh') or raw pixels ('px'). */
    scrollMode: 'vh' | 'px';
    /** Optional path prefix prepended to asset paths. */
    basePath?: string;
}
/**
 * ASSET SYSTEM
 */
/** One logical asset with one or more concrete variants (e.g. per orientation/resolution). */
export interface SequenceAsset {
    /** Unique id referenced by SceneDefinition.assetId. */
    id: string;
    /** Variant-selection strategy. NOTE(review): selection semantics of 'adaptive' vs 'fixed' are defined by the runtime, not here. */
    strategy: 'adaptive' | 'fixed';
    /** Concrete renditions of this asset. */
    variants: AssetVariant[];
}
/** A concrete rendition of a frame-sequence asset. */
export interface AssetVariant {
    id: string;
    /** Media descriptor. NOTE(review): presumably a CSS media query or media kind — confirm against pipeline output. */
    media: string;
    /** Path (relative to ProjectSettings.basePath, presumably) where the frames live. */
    path: string;
    /** Aspect ratio as a string (e.g. "16:9" — format not enforced here). */
    aspectRatio: string;
    /** Number of frames in the sequence. */
    frameCount: number;
    /** Frame width in pixels. */
    width: number;
    /** Frame height in pixels. */
    height: number;
    orientation: 'portrait' | 'landscape';
    /** True when a depth-map companion sequence exists for this variant. */
    hasDepthMap?: boolean;
    /** Tracked subject identifiers, if AI subject tracking was run. */
    subjects?: string[];
}
/** Per-frame tracking sample for a subject (see CloudService.trackSubject). */
export interface SubjectFrameData {
    /** Zero-based frame index. */
    frame: number;
    /** Subject x position. NOTE(review): units (normalized 0-1 vs pixels) depend on the tracking backend — confirm. */
    x: number;
    /** Subject y position (same unit caveat as `x`). */
    y: number;
    /** Optional relative size of the subject in this frame. */
    scale?: number;
}
/**
 * TIMELINE SYSTEM
 */
/** The scroll timeline: total length plus an ordered list of scenes. */
export interface TimelineDefinition {
    /** Total scroll length; number or string (presumably with a unit per ProjectSettings.scrollMode). */
    totalDuration: string | number;
    scenes: SceneDefinition[];
}
/** One scene: a window of scroll progress bound to a slice of an asset's frames. */
export interface SceneDefinition {
    id: string;
    /** References SequenceAsset.id. */
    assetId: string;
    /** Scroll progress at which the scene begins. NOTE(review): presumably normalized 0-1 — confirm against the engine. */
    startProgress: number;
    /** Scene length in the same progress unit as startProgress. */
    duration: number;
    /** [firstFrame, lastFrame] slice of the asset played across this scene. */
    assetRange: [number, number];
    /** Overlays rendered during this scene. */
    layers: LayerDefinition[];
}
/**
 * LAYER SYSTEM
 */
/** Discriminated union over `type`: 'html' | 'canvas'. */
export type LayerDefinition = HTMLLayer | CanvasLayer;
/** Fields shared by every layer kind. */
export interface BaseLayer {
    id: string;
    /** Discriminant; narrowed to a literal in each subtype. */
    type: string;
    /** Stacking order; unset means engine default. */
    zIndex?: number;
    /** Whether `position` is relative to the viewport or to a tracked subject. */
    anchor: 'viewport' | 'subject';
    /** Breakpoint-aware placement of the layer. */
    position: ResponsiveCoordinate;
    /** Scroll-driven property animations applied to this layer. */
    animations: LayerAnimation[];
}
/** A layer rendered as HTML markup. */
export interface HTMLLayer extends BaseLayer {
    type: 'html';
    /** Raw HTML content for the layer. */
    content: string;
    /** Optional inline CSS property map. */
    style?: Record<string, string>;
}
/** A layer rendered on the canvas by the engine itself. */
export interface CanvasLayer extends BaseLayer {
    type: 'canvas';
}
/**
 * UTILITY TYPES
 */
/** A coordinate with optional per-breakpoint overrides; `default` is the fallback. */
export interface ResponsiveCoordinate {
    default: Point;
    mobile?: Point;
    tablet?: Point;
    desktop?: Point;
}
/** 2D point; string allows unit-bearing values (e.g. '50%'), number is unitless. */
export interface Point {
    x: string | number;
    y: string | number;
}
/** One scroll-driven tween on a single layer property. */
export interface LayerAnimation {
    /** Name of the animated property (engine-interpreted). */
    property: string;
    from: number | string;
    to: number | string;
    /** Animation start, in scene progress. NOTE(review): presumably normalized 0-1 within the scene — confirm. */
    start: number;
    /** Animation end, same unit as `start`. */
    end: number;
    /** Optional easing function name (engine-interpreted). */
    easing?: string;
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
import { IPipelineDriver, VariantConfig } from './types';
/**
 * Browser implementation of the pipeline driver.
 *
 * File-system operations are backed by an in-memory Map (see the compiled
 * implementation) rather than a real disk; heavy media work is delegated to
 * ffmpeg.wasm, the Canvas API, and JSZip.
 */
export declare class BrowserDriver implements IPipelineDriver {
    /** In-memory virtual FS: path -> file contents. */
    private files;
    constructor();
    /** Reads a file from the virtual FS; rejects if the path is absent. */
    readFile(path: string): Promise<Uint8Array>;
    /** Stores data under `path` in the virtual FS (overwrites silently). */
    writeFile(path: string, data: Uint8Array | string): Promise<void>;
    /** No-op: the Map-backed FS has no real directories. */
    mkdir(path: string): Promise<void>;
    /** True if `path` exists as an exact key in the virtual FS. */
    exists(path: string): Promise<boolean>;
    /** Lists direct children of `dirPath` (non-recursive). */
    readdir(dirPath: string): Promise<string[]>;
    /** Deletes `path` and everything under `path/` from the virtual FS. */
    remove(path: string): Promise<void>;
    /** Joins parts with '/' and collapses duplicate slashes. */
    join(...parts: string[]): string;
    /** Alias of join(); there is no real cwd to resolve against. */
    resolve(...parts: string[]): string;
    /**
     * EXTRACT FRAMES (via ffmpeg.wasm)
     * Note: Requires SharedArrayBuffer & specific Headers if using multithreading.
     */
    extractFrames(videoSource: string | File | Blob, outputDir: string, onProgress?: (percent: number) => void): Promise<void>;
    /**
     * PROCESS IMAGE (via Canvas API)
     * High-performance resizing and cropping using the browser's hardware-accelerated Canvas.
     */
    processImage(input: Uint8Array | string, config: VariantConfig, options?: {
        grayscale?: boolean;
        blur?: number;
    }): Promise<Uint8Array>;
    /**
     * ZIP PROJECT (via JSZip)
     * Bundles all processed assets into a single file for upload or download.
     */
    zipProject(outDir: string): Promise<Uint8Array>;
}
|
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
"use strict";
// --- TypeScript-emitted CommonJS interop helpers (compiler-generated) ---
// These implement `import * as ns` semantics for require()'d modules.
// Do not hand-edit: bundlers and the dynamic imports below rely on this exact shape.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.BrowserDriver = void 0;
|
|
37
|
+
/**
 * Browser pipeline driver.
 *
 * "Files" live in an in-memory Map keyed by path (values: Uint8Array | string);
 * nothing is persisted. Frame extraction uses ffmpeg.wasm, image processing uses
 * OffscreenCanvas, and zipping uses JSZip — all loaded lazily.
 */
class BrowserDriver {
    // Virtual FS: path -> Uint8Array | string.
    files = new Map();
    constructor() {
        console.log('🌐 BrowserDriver initialized');
    }
    // Returns file bytes; string contents are UTF-8 encoded on the way out.
    async readFile(path) {
        const data = this.files.get(path);
        if (!data)
            throw new Error(`File not found: ${path}`);
        if (typeof data === 'string')
            return new TextEncoder().encode(data);
        return data;
    }
    // Overwrites silently; no directory needs to exist first.
    async writeFile(path, data) {
        this.files.set(path, data);
    }
    async mkdir(path) {
        // Virtual folders - no-op for simple Map implementation
    }
    // Exact-key lookup only; a "directory" prefix with children does not count as existing.
    async exists(path) {
        return this.files.has(path);
    }
    // Lists the direct children of dirPath (entries containing a further / or \ are skipped).
    async readdir(dirPath) {
        const results = [];
        for (const key of this.files.keys()) {
            if (key.startsWith(dirPath)) {
                // Simple relative path extraction
                const relative = key.replace(dirPath, '').replace(/^[\\\/]/, '');
                if (relative && !relative.includes('/') && !relative.includes('\\')) {
                    results.push(relative);
                }
            }
        }
        return results;
    }
    async remove(path) {
        // 1. Delete the exact file/folder key
        this.files.delete(path);
        // 2. Delete all children (recursive cleanup for virtual folders)
        const prefix = path.endsWith('/') ? path : `${path}/`;
        for (const key of this.files.keys()) {
            if (key.startsWith(prefix)) {
                this.files.delete(key);
            }
        }
    }
    // Joins with '/' and collapses runs of slashes.
    // NOTE(review): this also collapses the '//' in URLs ('https://x' -> 'https:/x') — confirm callers never join URLs.
    join(...parts) {
        return parts.join('/').replace(/\/+/g, '/');
    }
    // No real cwd in the browser, so resolve() is just join().
    resolve(...parts) {
        return this.join(...parts);
    }
    /**
     * EXTRACT FRAMES (via ffmpeg.wasm)
     * Note: Requires SharedArrayBuffer & specific Headers if using multithreading.
     */
    async extractFrames(videoSource, outputDir, onProgress) {
        try {
            // Dynamic import to keep core bundle small
            const { FFmpeg } = await Promise.resolve().then(() => __importStar(require('@ffmpeg/ffmpeg')));
            const { fetchFile, toBlobURL } = await Promise.resolve().then(() => __importStar(require('@ffmpeg/util')));
            const ffmpeg = new FFmpeg();
            // Forward ffmpeg's 0..1 progress as a rounded percentage.
            ffmpeg.on('progress', ({ progress }) => {
                if (onProgress)
                    onProgress(Math.round(progress * 100));
            });
            // Load FFmpeg WASM
            // You'll need to provide the correct URL for the core/worker files in your WP plugin
            // NOTE(review): core is fetched from the unpkg CDN, pinned to @ffmpeg/core 0.12.6 — offline use will fail here.
            await ffmpeg.load({
                coreURL: await toBlobURL(`https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm/ffmpeg-core.js`, 'text/javascript'),
                wasmURL: await toBlobURL(`https://unpkg.com/@ffmpeg/core@0.12.6/dist/esm/ffmpeg-core.wasm`, 'application/wasm'),
            });
            const inputName = 'input.mp4';
            await ffmpeg.writeFile(inputName, await fetchFile(videoSource));
            // Extract as PNGs/WebPs (WebP might be faster if supported in the WASM build)
            // NOTE(review): outputDir is never created in the FFmpeg VFS before exec — confirm ffmpeg can write to a nested path here.
            await ffmpeg.exec(['-i', inputName, `${outputDir}/frame_%04d.png`]);
            // Move files from FFmpeg VFS to our Map FS
            const files = await ffmpeg.listDir(outputDir);
            for (const file of files) {
                if (file.name.startsWith('frame_')) {
                    const data = await ffmpeg.readFile(`${outputDir}/${file.name}`);
                    await this.writeFile(this.join(outputDir, file.name), data);
                }
            }
            await ffmpeg.terminate();
        }
        catch (err) {
            // NOTE(review): ffmpeg is not terminated on the failure path — the worker may leak. Original error is logged, then replaced.
            console.error('FFmpeg WASM Error:', err);
            throw new Error('Failed to extract frames in browser. Did you enable SharedArrayBuffer headers?');
        }
    }
    /**
     * PROCESS IMAGE (via Canvas API)
     * High-performance resizing and cropping using the browser's hardware-accelerated Canvas.
     */
    async processImage(input, config, options = {}) {
        // 1. Load image into a bitmap
        let blob;
        if (typeof input === 'string') {
            // String input is treated as a virtual-FS path, not a URL.
            const data = await this.readFile(input);
            blob = new Blob([data]);
        }
        else {
            blob = new Blob([input]);
        }
        const img = await createImageBitmap(blob);
        // 2. Setup Canvas
        const canvas = new OffscreenCanvas(config.width, config.height);
        const ctx = canvas.getContext('2d');
        if (!ctx)
            throw new Error('Could not get Canvas context');
        // 3. Smart Crop Logic (simplified to cover/center)
        // Scale so the image covers the target, then center it (CSS object-fit: cover equivalent).
        const scale = Math.max(config.width / img.width, config.height / img.height);
        const x = (config.width - img.width * scale) / 2;
        const y = (config.height - img.height * scale) / 2;
        // Apply filters
        let filters = '';
        if (options.grayscale)
            filters += 'grayscale(100%) ';
        if (options.blur)
            filters += `blur(${options.blur}px) `;
        if (filters)
            ctx.filter = filters.trim();
        ctx.drawImage(img, x, y, img.width * scale, img.height * scale);
        // 4. Encode to WebP
        const outputBlob = await canvas.convertToBlob({
            type: 'image/webp',
            quality: 0.8
        });
        return new Uint8Array(await outputBlob.arrayBuffer());
    }
    /**
     * ZIP PROJECT (via JSZip)
     * Bundles all processed assets into a single file for upload or download.
     */
    async zipProject(outDir) {
        const { default: JSZip } = await Promise.resolve().then(() => __importStar(require('jszip')));
        const zip = new JSZip();
        // Add every virtual-FS entry under outDir, with outDir stripped off the archive path.
        for (const [path, data] of this.files.entries()) {
            if (path.startsWith(outDir)) {
                const relativePath = path.replace(outDir, '').replace(/^[\\\/]/, '');
                zip.file(relativePath, data);
            }
        }
        return await zip.generateAsync({ type: 'uint8array' });
    }
}
exports.BrowserDriver = BrowserDriver;
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { IPipelineDriver } from './types';
import { SubjectFrameData } from '../core/types';
/** Options for CloudService; all optional — the key may come from the environment instead. */
export interface CloudOptions {
    /** API key; a 'stube_'-prefixed key marks ScrollTube-managed billing (see implementation). */
    apiKey?: string;
    /** NOTE(review): declared but not read by the current implementation — confirm intended use. */
    baseUrl?: string;
    /** Optional proxy endpoint; when set, a missing API key is tolerated. */
    proxyUrl?: string;
}
/**
 * Thin wrapper around the fal.ai client for AI-assisted asset analysis:
 * subject tracking (SAM 3) and depth-map generation (Video Depth Anything).
 */
export declare class CloudService {
    private options;
    /** True when running against a ScrollTube-issued key (env SCROLLTUBE_KEY or 'stube_' prefix). */
    private isScrollTube;
    constructor(options?: CloudOptions);
    /** Currently returns {}; reserved for future proxy-auth headers. */
    private getAuthHeaders;
    /** Tracks `prompt` through a video and returns one sample per frame. */
    trackSubject(input: string | File | Blob, driver: IPipelineDriver, prompt?: string): Promise<SubjectFrameData[]>;
    /** Generates a depth-map video and resolves to its hosted URL. */
    generateDepthMap(input: string | File | Blob, driver: IPipelineDriver): Promise<string>;
    /** Reads a local file via the driver and uploads it to fal storage. */
    private uploadFile;
    /** Normalizes per-frame bounding boxes into SubjectFrameData. */
    private mapBoxesToTrackingData;
}
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CloudService = void 0;
const client_1 = require("@fal-ai/client");
/**
 * Wrapper around the fal.ai client: subject tracking (SAM 3) and
 * depth-map generation (Video Depth Anything).
 */
class CloudService {
    options;
    isScrollTube = false;
    constructor(options = {}) {
        this.options = options;
        // Prioritize SCROLLTUBE_KEY from environment or options
        const envStube = typeof process !== 'undefined' ? process.env?.SCROLLTUBE_KEY : '';
        const envFal = typeof process !== 'undefined' ? process.env?.FAL_KEY : '';
        // NOTE(review): `key` is computed for the presence check below but never stored or passed to the fal client — confirm the client picks the key up from the environment itself.
        const key = options.apiKey || envStube || envFal;
        // A ScrollTube-issued key is signalled by the SCROLLTUBE_KEY env var or a 'stube_' prefix.
        if (envStube || (options.apiKey && options.apiKey.startsWith('stube_'))) {
            this.isScrollTube = true;
        }
        if (!key && !options.proxyUrl) {
            // Don't throw yet, only when a cloud method is called
        }
    }
    getAuthHeaders() {
        // For now, fal-ai/client uses the env variable FE_FAL_KEY or the key provided to it.
        // (NOTE(review): the fal client conventionally reads FAL_KEY — confirm this comment's variable name.)
        // In the future, once we use a proxy, we'll manually set headers here.
        return {};
    }
    /**
     * Tracks the subject described by `prompt` through a video via SAM 3.
     * Strings are treated as local paths (uploaded via the driver) when they
     * exist in the driver's FS, otherwise as already-hosted URLs.
     */
    async trackSubject(input, driver, prompt = "main subject") {
        let videoUrl;
        if (typeof input === 'string') {
            // Local path or URL
            if (await driver.exists(input)) {
                videoUrl = await this.uploadFile(input, driver);
            }
            else {
                videoUrl = input;
            }
        }
        else {
            // File or Blob
            videoUrl = await client_1.fal.storage.upload(input);
        }
        console.log(`🤖 AI is tracking "${prompt}" via SAM 3...`);
        const result = await client_1.fal.subscribe("fal-ai/sam-3/video-rle", {
            input: {
                video_url: videoUrl,
                prompt: prompt,
            },
            logs: true,
        });
        // Some client versions wrap the payload in `.data`; tolerate both shapes.
        const payload = result.data || result;
        const boxes = payload.boxes;
        if (!boxes || !Array.isArray(boxes) || boxes.length === 0) {
            throw new Error(`AI tracking returned no data.`);
        }
        return this.mapBoxesToTrackingData(boxes);
    }
    /**
     * Generates a depth-map video via Video Depth Anything (VDA-Base) and
     * returns the hosted URL of the result. Same string-vs-Blob input handling
     * as trackSubject.
     */
    async generateDepthMap(input, driver) {
        let videoUrl;
        if (typeof input === 'string') {
            if (await driver.exists(input)) {
                videoUrl = await this.uploadFile(input, driver);
            }
            else {
                videoUrl = input;
            }
        }
        else {
            videoUrl = await client_1.fal.storage.upload(input);
        }
        console.log(`🤖 AI is generating Depth Map...`);
        const result = await client_1.fal.subscribe("fal-ai/video-depth-anything", {
            input: {
                video_url: videoUrl,
                model_size: "VDA-Base",
            },
            logs: true
        });
        const payload = result.data || result;
        if (!payload.video || !payload.video.url) {
            throw new Error(`AI Depth Map generation failed.`);
        }
        return payload.video.url;
    }
    // Reads bytes from the driver's FS and uploads them to fal storage; resolves to the hosted URL.
    async uploadFile(filePath, driver) {
        const data = await driver.readFile(filePath);
        return await client_1.fal.storage.upload(new Blob([data]));
    }
    /**
     * Normalizes per-frame box data into {frame, x, y, scale} samples.
     * Tolerates three frame shapes: a flat [a,b,c,d] number array, a nested
     * [[a,b,c,d],...] array (first box wins), or objects with a `box_2d` field.
     * Frames without a usable box reuse the last known position, starting from
     * center (0.5, 0.5) with scale 0.
     */
    mapBoxesToTrackingData(boxes) {
        let lastKnown = { x: 0.5, y: 0.5, scale: 0 };
        return boxes.map((frameBoxes, i) => {
            if (frameBoxes && Array.isArray(frameBoxes)) {
                let box = null;
                if (typeof frameBoxes[0] === 'number' && frameBoxes.length >= 4) {
                    box = frameBoxes;
                }
                else if (Array.isArray(frameBoxes[0]) && frameBoxes[0].length >= 4) {
                    box = frameBoxes[0];
                }
                else if (typeof frameBoxes[0] === 'object' && frameBoxes[0].box_2d) {
                    box = frameBoxes[0].box_2d;
                }
                if (box) {
                    // scale = box[2] * box[3] — presumably width*height (box area); confirm against the SAM 3 output format.
                    lastKnown = { x: box[0], y: box[1], scale: box[2] * box[3] };
                }
            }
            return { frame: i, ...lastKnown };
        });
    }
}
exports.CloudService = CloudService;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import { PipelineOptions, CreateCommandOptions } from './types';
import { ProjectConfiguration, AssetVariant } from '../core/types';
/**
 * End-to-end asset pipeline: picks a driver for the current environment,
 * processes source media into variants, and emits a project configuration
 * (or a zipped bundle — see create()).
 */
export declare class AssetPipeline {
    /** Environment-specific driver (browser or node), set by init(). */
    private driver;
    private options;
    /** Cloud AI helper (subject tracking / depth maps). */
    private cloud;
    constructor(options?: PipelineOptions);
    /**
     * INITIALIZE DRIVER
     * Detects environment and loads the appropriate driver dynamically.
     */
    init(): Promise<void>;
    /** Progress/status reporting hook. */
    private report;
    /**
     * THE MAIN ORCHESTRATOR
     * Returns either the project configuration or zipped bytes —
     * NOTE(review): which one presumably depends on the options/environment; confirm in the implementation.
     */
    create(opts: CreateCommandOptions): Promise<ProjectConfiguration | Uint8Array<ArrayBufferLike>>;
    private normalizeVariants;
    private processVariants;
    /** Writes the final config for the given variants into outDir and returns it. */
    saveConfig(variants: AssetVariant[], outDir: string): Promise<ProjectConfiguration>;
}
|