scrollcraft 2.0.8 → 2.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +7 -7
- package/dist/core/CoreEngine.d.ts +4 -0
- package/dist/core/CoreOrchestrator.d.ts +39 -0
- package/dist/core/WebGLRenderer.d.ts +5 -1
- package/dist/core/index.d.ts +8 -2
- package/dist/core/scrollcraft.umd.min.js +1 -1
- package/dist/react/ScrollCraftProvider.d.ts +24 -5
- package/dist/react/index.d.ts +1 -1
- package/dist/react/index.js +1 -1
- package/package.json +77 -76
- package/dist/cli/api.d.ts +0 -1
- package/dist/cli/api.js +0 -21
- package/dist/cli/fal-service.d.ts +0 -38
- package/dist/cli/fal-service.js +0 -190
- package/dist/cli/processor.d.ts +0 -25
- package/dist/cli/processor.js +0 -175
package/dist/cli/fal-service.js
DELETED
|
@@ -1,190 +0,0 @@
|
|
|
1
|
-
"use strict";
// ---------------------------------------------------------------------------
// TypeScript-emitted CommonJS interop helpers (generated by tsc; do not edit).
// ---------------------------------------------------------------------------
// Copies property `k` of module `m` onto `o` as `k2`, preserving live getters.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches a value as the `default` property of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns from '...'` semantics over a CommonJS require.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.FalService = void 0;
// Runtime dependencies: fal.ai client, fs-extra, path; `dotenv/config` loads
// the .env file (where FAL_KEY is expected) as a side effect of the require.
const client_1 = require("@fal-ai/client");
const fs = __importStar(require("fs-extra"));
const path = __importStar(require("path"));
require("dotenv/config");
|
|
41
|
-
/**
 * FAL.AI SERVICE
 *
 * Handles all cloud-based AI processing for the asset pipeline:
 * subject tracking (SAM 3), depth-map generation, and image refinement.
 */
class FalService {
    /**
     * @throws {Error} When FAL_KEY is not present in the environment
     *   (loaded from .env via `dotenv/config`).
     */
    constructor() {
        if (!process.env.FAL_KEY) {
            throw new Error('FAL_KEY not found in environment. Please add it to your .env file.');
        }
    }
    /**
     * SUBJECT TRACKING (SAM 3)
     *
     * Analyzes a video and returns frame-by-frame (x,y) coordinates of the subject.
     *
     * @param {string} videoPathOrUrl - Local file path or remote URL of the video.
     * @param {string} [prompt="main subject"] - Text prompt naming the object to track.
     * @returns {Promise<Array<{frame:number,x:number,y:number,scale:number}>>}
     * @throws {Error} When the model returns no box data.
     */
    async trackSubject(videoPathOrUrl, prompt = "main subject") {
        let videoUrl = videoPathOrUrl;
        // Auto-upload if the input is a local file
        if (fs.existsSync(videoPathOrUrl) && fs.statSync(videoPathOrUrl).isFile()) {
            videoUrl = await this.uploadFile(videoPathOrUrl);
        }
        console.log(`🤖 AI is tracking "${prompt}" in the video via SAM 3...`);
        const result = await client_1.fal.subscribe("fal-ai/sam-3/video-rle", {
            input: {
                video_url: videoUrl,
                prompt: prompt,
            },
            logs: true,
            onQueueUpdate: (update) => {
                if (update.status === "IN_PROGRESS") {
                    // BUGFIX: `logs` can be absent on a queue update; guard before iterating.
                    (update.logs ?? []).forEach(l => console.log(`⏳ AI Tracking: ${l.message}`));
                }
            }
        });
        // fal client v1 wraps model output in `.data`; fall back for older shapes.
        const payload = result.data || result;
        const boxes = payload.boxes;
        if (!boxes || !Array.isArray(boxes) || boxes.length === 0) {
            throw new Error(`AI tracking returned no box data. Check if your FAL_KEY is active and the prompt "${prompt}" matches an object in the video.`);
        }
        console.log(`✅ AI identified tracking data for ${boxes.length} frames.`);
        return this.mapBoxesToTrackingData(boxes, payload);
    }
    /**
     * AUTO-UPLOAD HELPER
     * Uploads a local file to fal.ai temporary storage.
     *
     * @param {string} filePath - Path of the local file to upload.
     * @returns {Promise<string>} Public URL of the uploaded blob.
     */
    async uploadFile(filePath) {
        console.log(`☁️ Uploading local file to AI Cloud: ${path.basename(filePath)}...`);
        const fileBuffer = await fs.readFile(filePath);
        const url = await client_1.fal.storage.upload(new Blob([fileBuffer]));
        console.log(`✅ Upload complete: ${url}`);
        return url;
    }
    /**
     * DEPTH MAP GENERATION (Video Depth Anything)
     * Creates a temporally consistent grayscale depth video.
     *
     * @param {string} videoPathOrUrl - Local file path or remote URL of the video.
     * @returns {Promise<string>} URL of the generated depth-map video.
     * @throws {Error} When the model response contains no video URL.
     */
    async generateDepthMap(videoPathOrUrl) {
        let videoUrl = videoPathOrUrl;
        // Auto-upload if the input is a local file
        if (fs.existsSync(videoPathOrUrl) && fs.statSync(videoPathOrUrl).isFile()) {
            videoUrl = await this.uploadFile(videoPathOrUrl);
        }
        console.log(`🤖 AI is generating Depth Map Video using Video Depth Anything...`);
        const result = await client_1.fal.subscribe("fal-ai/video-depth-anything", {
            input: {
                video_url: videoUrl,
                model_size: "VDA-Base", // Small, Base, or Large. Base is a good balance.
            },
            logs: true,
            onQueueUpdate: (update) => {
                if (update.status === "IN_PROGRESS") {
                    // BUGFIX: `logs` can be absent on a queue update; guard before iterating.
                    (update.logs ?? []).forEach(l => console.log(`⏳ AI Depth Map: ${l.message}`));
                }
            }
        });
        // Debug output to see what Fal is actually returning
        //await fs.writeFile('debug_fal.json', JSON.stringify(result, null, 2));
        const payload = result.data || result;
        if (!payload.video || !payload.video.url) {
            // BUGFIX: the old message claimed the response was saved to debug_fal.json,
            // but that write is commented out above — don't promise a file that isn't there.
            throw new Error(`AI Depth Map generation failed: no video URL was returned in the response.`);
        }
        console.log(`✅ Depth Map Video Generated: ${payload.video.url}`);
        return payload.video.url;
    }
    /**
     * IMAGE REFINEMENT (Upscale / BG Remove)
     *
     * @param {string} imageUrl - Source image URL.
     * @param {{removeBg?: boolean, upscale?: boolean}} options - Which refinement steps to run.
     * @returns {Promise<string>} URL of the refined image (unchanged if no options set).
     */
    async refineImage(imageUrl, options) {
        let currentUrl = imageUrl;
        if (options.removeBg) {
            const bgResult = await client_1.fal.subscribe("fal-ai/bria/background-removal", {
                input: { image_url: currentUrl }
            });
            // BUGFIX: unwrap `.data` like the other endpoints — the fal client
            // returns { data, requestId }, so `bgResult.image` was undefined.
            currentUrl = (bgResult.data || bgResult).image.url;
        }
        if (options.upscale) {
            const upscaleResult = await client_1.fal.subscribe("fal-ai/esrgan", {
                input: { image_url: currentUrl, scale: 2 }
            });
            // BUGFIX: same `.data` unwrap as above.
            currentUrl = (upscaleResult.data || upscaleResult).image.url;
        }
        return currentUrl;
    }
    /**
     * Converts raw SAM-3 per-frame box data into tracking records.
     * Frames with no detection reuse the last known position (frame center,
     * 0.5/0.5 with scale 0, before the first detection).
     *
     * @param {Array} boxes - One entry per frame; each entry may be
     *   `[cx,cy,w,h]`, `[[cx,cy,w,h]]`, `[{box_2d:[...]}]`, or null/empty.
     * @param {Object} [payload] - Full model payload (currently unused; kept for
     *   interface stability).
     * @returns {Array<{frame:number,x:number,y:number,scale:number}>}
     */
    mapBoxesToTrackingData(boxes, payload = {}) {
        let lastKnown = { x: 0.5, y: 0.5, scale: 0 };
        let detectedCount = 0;
        const mapped = boxes.map((frameBoxes, i) => {
            // SAM-3 video-rle returns frames as [null, [cx,cy,w,h], [cx,cy,w,h], ...]
            // Or sometimes [[cx,cy,w,h]] if it's an array of objects
            if (frameBoxes && Array.isArray(frameBoxes)) {
                let box = null;
                // Case 1: frameBoxes is [cx, cy, w, h] directly
                if (typeof frameBoxes[0] === 'number' && frameBoxes.length >= 4) {
                    box = frameBoxes;
                }
                // Case 2: frameBoxes is [[cx, cy, w, h]]
                else if (Array.isArray(frameBoxes[0]) && frameBoxes[0].length >= 4) {
                    box = frameBoxes[0];
                }
                // Case 3: frameBoxes is [{box_2d: [...]}]
                // BUGFIX: typeof null === 'object', so a [null] frame used to throw
                // on `frameBoxes[0].box_2d`; require a truthy element first.
                else if (frameBoxes[0] && typeof frameBoxes[0] === 'object' && frameBoxes[0].box_2d) {
                    box = frameBoxes[0].box_2d;
                }
                if (box) {
                    lastKnown = {
                        x: box[0],
                        y: box[1],
                        scale: box[2] * box[3]
                    };
                    detectedCount++;
                }
            }
            return {
                frame: i,
                ...lastKnown
            };
        });
        if (detectedCount === 0) {
            console.warn('⚠️ AI found frames but NO objects were detected with the logic. All coordinates defaulted to 0.5.');
        }
        else {
            console.log(`🎯 Successfully extracted unique coordinates for ${detectedCount} frames.`);
        }
        return mapped;
    }
}
|
|
190
|
-
// CommonJS export of the service class (compiled from `export class FalService`).
exports.FalService = FalService;
|
package/dist/cli/processor.d.ts
DELETED
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
import { ProjectConfiguration, AssetVariant, SubjectFrameData } from '../core/types';
/**
 * LOCAL ASSET PROCESSOR
 *
 * Handles cropping, resizing, and variant generation.
 */
export declare class AssetProcessor {
    /** Destination directory that receives one sub-folder per generated variant. */
    private outDir;
    constructor(outDir: string);
    /**
     * GENERATE VARIANTS
     *
     * Creates folders for Mobile, Tablet, Desktop with optimized images.
     *
     * @param sourceFramesDir Directory containing the extracted source frames.
     * @param trackingData Per-frame subject coordinates used for smart cropping.
     * @param options `step`: keep every Nth frame; `hasDepth`: also process
     *   matching depth frames; `variants`: custom variant configs that replace
     *   the built-in defaults.
     */
    processVariants(sourceFramesDir: string, trackingData: SubjectFrameData[], options?: {
        step?: number;
        hasDepth?: boolean;
        variants?: any[];
    }): Promise<AssetVariant[]>;
    private subjectToSharpPosition;
    /**
     * SAVE PROJECT FILE
     *
     * Writes the project configuration to disk and returns it.
     */
    saveConfig(variants: AssetVariant[]): Promise<ProjectConfiguration>;
}
|
package/dist/cli/processor.js
DELETED
|
@@ -1,175 +0,0 @@
|
|
|
1
|
-
"use strict";
// ---------------------------------------------------------------------------
// TypeScript-emitted CommonJS interop helpers (generated by tsc; do not edit).
// ---------------------------------------------------------------------------
// Copies property `k` of module `m` onto `o` as `k2`, preserving live getters.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches a value as the `default` property of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Implements `import * as ns from '...'` semantics over a CommonJS require.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
// Wraps a CommonJS export so it can be consumed as a default import.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AssetProcessor = void 0;
// Runtime dependencies: fs-extra, path, and sharp for image processing.
const fs = __importStar(require("fs-extra"));
const path = __importStar(require("path"));
const sharp_1 = __importDefault(require("sharp"));
// Used to stamp generated scrollcraft.json files with the package version.
const pkg = require('../../package.json');
|
|
44
|
-
/**
 * LOCAL ASSET PROCESSOR
 *
 * Handles cropping, resizing, and variant generation for extracted video
 * frames (plus optional matching depth frames).
 */
class AssetProcessor {
    // Output directory that receives one sub-folder per variant.
    outDir;
    /**
     * @param {string} outDir - Destination directory for generated assets.
     */
    constructor(outDir) {
        this.outDir = outDir;
    }
    /**
     * GENERATE VARIANTS
     *
     * Creates folders for Mobile, Tablet, Desktop with optimized images.
     *
     * @param {string} sourceFramesDir - Directory containing `frame_*` (and optional `depth_*`) images.
     * @param {Array<{frame:number,x:number,y:number,scale:number}>} trackingData - Per-frame subject coordinates.
     * @param {{step?:number, hasDepth?:boolean, variants?:any[]}} [options] - step keeps every Nth frame;
     *   variants overrides the built-in mobile/desktop configs.
     * @returns {Promise<any[]>} Metadata describing each generated variant.
     */
    async processVariants(sourceFramesDir, trackingData, options = {}) {
        const step = options.step || 1;
        const allFiles = await fs.readdir(sourceFramesDir);
        // Ensure we only process regular frames for the main loop
        const allFrames = allFiles.filter(f => f.startsWith('frame_'));
        // Sort frames numerically to ensure consistent indexing (e.g. 1, 2, 10 instead of 1, 10, 2)
        allFrames.sort((a, b) => a.localeCompare(b, undefined, { numeric: true, sensitivity: 'base' }));
        const framesToProcess = allFrames.filter((_, i) => i % step === 0);
        // PERF: index tracking data once instead of scanning the array per frame
        // per variant (was accidental O(n^2) via Array.find in the loop).
        const trackingByFrame = new Map(trackingData.map(f => [f.frame, f]));
        const variants = [];
        // Define our target variants
        const configs = options.variants || [
            { id: 'mobile', width: 720, height: 1280, media: '(max-width: 600px)' },
            { id: 'desktop', width: 1920, height: 1080, media: '(min-width: 601px)' }
        ];
        for (const config of configs) {
            const variantDir = path.join(this.outDir, config.id);
            await fs.ensureDir(variantDir);
            console.log(`🎨 Generating ${config.id} variant (${config.width}x${config.height}) for ${framesToProcess.length} images...`);
            const variantTracking = [];
            for (let i = 0; i < framesToProcess.length; i++) {
                const originalIndex = i * step;
                const frameName = framesToProcess[i];
                const framePath = path.join(sourceFramesDir, frameName);
                const targetPath = path.join(variantDir, `index_${i}.webp`);
                const subject = trackingByFrame.get(originalIndex) || { frame: originalIndex, x: 0.5, y: 0.5, scale: 0 };
                // SMART CROP: Center on the subject (x,y)
                // This logic would calculate the top/left based on subject relative position
                await (0, sharp_1.default)(framePath)
                    .resize(config.width, config.height, {
                        fit: 'cover',
                        position: this.subjectToSharpPosition(subject)
                    })
                    .webp({ quality: 80 })
                    .toFile(targetPath);
                if (options.hasDepth) {
                    const numStr = frameName.match(/(\d+)/)?.[1] || "";
                    // Look for a depth file that matches the same numeric index
                    const depthFrameName = allFiles.find(f => f.startsWith('depth_') && f.includes(numStr));
                    const depthFramePath = depthFrameName ? path.join(sourceFramesDir, depthFrameName) : '';
                    if (depthFramePath && fs.existsSync(depthFramePath)) {
                        const depthTargetPath = path.join(variantDir, `index_${i}_depth.webp`);
                        await (0, sharp_1.default)(depthFramePath)
                            .resize(config.width, config.height, {
                                fit: 'cover',
                                position: this.subjectToSharpPosition(subject)
                            })
                            // Grayscale, then blur slightly to prevent "staircase" effects in displacement
                            .grayscale()
                            .blur(2)
                            .webp({ quality: 80 })
                            .toFile(depthTargetPath);
                    }
                }
                // Add to variant tracking (using relative frame 0...N)
                variantTracking.push({
                    ...subject,
                    frame: i
                });
            }
            // Extract tracking data into its own file
            const trackingPath = path.join(variantDir, '000_tracking-main.json');
            await fs.writeJson(trackingPath, variantTracking, { spaces: 2 });
            variants.push({
                id: config.id,
                media: config.media,
                width: config.width,
                height: config.height,
                // NOTE(review): orientation/aspectRatio are only defined when the caller
                // passes custom `options.variants`; the built-in defaults omit them, so
                // JSON.stringify drops these fields from the written config in that case.
                orientation: config.orientation,
                path: `./${config.id}`, // Relative path in the final output
                aspectRatio: config.aspectRatio,
                frameCount: framesToProcess.length,
                hasDepthMap: options.hasDepth,
                subjects: ['main']
            });
        }
        return variants;
    }
    /**
     * Maps a normalized (0..1) subject position to a sharp `position` value.
     * Currently always returns 'center'; real subject-centered cropping would
     * need manual math with `.extract()`.
     */
    subjectToSharpPosition(subject) {
        // TODO: use subject.x / subject.y for true positional cropping
        // (the previous percentage computation was dead code and was removed).
        return 'center'; // Placeholder for now
    }
    /**
     * SAVE PROJECT FILE
     *
     * Writes `scrollcraft.json` describing the generated variants plus a
     * default single-scene timeline, and returns the config object.
     *
     * @param {any[]} variants - Non-empty list returned by `processVariants`.
     * @throws {Error} When `variants` is empty (previously crashed with a
     *   TypeError on `variants[0].frameCount`).
     */
    async saveConfig(variants) {
        // BUGFIX: fail with a clear message instead of a TypeError below.
        if (!Array.isArray(variants) || variants.length === 0) {
            throw new Error('Cannot save config: no variants were generated.');
        }
        const config = {
            version: pkg.version,
            settings: {
                baseResolution: { width: 1920, height: 1080 },
                scrollMode: 'vh'
            },
            assets: [{
                    id: "main-sequence",
                    strategy: "adaptive",
                    variants: variants
                }],
            timeline: {
                totalDuration: "300vh",
                scenes: [{
                        id: "scene-1",
                        assetId: "main-sequence",
                        startProgress: 0,
                        duration: 1,
                        assetRange: [0, variants[0].frameCount - 1],
                        layers: []
                    }]
            }
        };
        await fs.writeJson(path.join(this.outDir, 'scrollcraft.json'), config, { spaces: 2 });
        return config;
    }
}
|
|
175
|
-
// CommonJS export of the processor class (compiled from `export class AssetProcessor`).
exports.AssetProcessor = AssetProcessor;
|