scrollcraft 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +58 -0
- package/dist/cli/_not_used_api.d.ts +1 -0
- package/dist/cli/_not_used_api.js +21 -0
- package/dist/cli/api.d.ts +1 -0
- package/dist/cli/api.js +21 -0
- package/dist/cli/fal-service.d.ts +38 -0
- package/dist/cli/fal-service.js +190 -0
- package/dist/cli/index.d.ts +2 -0
- package/dist/cli/index.js +164 -0
- package/dist/cli/processor.d.ts +24 -0
- package/dist/cli/processor.js +168 -0
- package/dist/core/CoreEngine.d.ts +52 -0
- package/dist/core/WebGLRenderer.d.ts +16 -0
- package/dist/core/index.d.ts +1 -0
- package/dist/core/scrollcraft.umd.min.js +14 -0
- package/dist/core/types.d.ts +172 -0
- package/dist/core/types.js +8 -0
- package/dist/react/ScrollCraftProvider.d.ts +29 -0
- package/dist/react/index.d.ts +1 -0
- package/dist/react/index.js +14 -0
- package/package.json +100 -0
package/README.md
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
# 🎞️ ScrollCraft 2.0
|
|
2
|
+
|
|
3
|
+
**Transform cinematic motion into interactive web experiences.**
|
|
4
|
+
|
|
5
|
+
ScrollCraft 2.0 is a modern animation SDK built for the era of high-performance, agent-driven development. It allows you to transform standard video or image sequences into "Intelligent Assets" that precisely track subjects and depth.
|
|
6
|
+
|
|
7
|
+
---
|
|
8
|
+
|
|
9
|
+
## 🚀 Quick Start
|
|
10
|
+
|
|
11
|
+
```bash
|
|
12
|
+
# 1. Transform your video into an intelligent asset sequence
|
|
13
|
+
npx scft create "examples/sample-media/jabko.mp4" --cloud --depth --prompt "apple"
|
|
14
|
+
|
|
15
|
+
# this command will output a folder named scrollcraft-project
|
|
16
|
+
```
|
|
17
|
+
|
|
18
|
+
```tsx
|
|
19
|
+
// 2. Drop it into your React app
|
|
20
|
+
import project from './scrollcraft-project/scrollcraft.json';
|
|
21
|
+
import { ScrollCraftProvider, ScrollCraftCanvas, SubjectLayer } from 'scrollcraft';
|
|
22
|
+
|
|
23
|
+
const App = () => (
|
|
24
|
+
<ScrollCraftProvider project={project}>
|
|
25
|
+
<ScrollCraftCanvas />
|
|
26
|
+
<SubjectLayer offset={{ x: 10, y: -5 }}>
|
|
27
|
+
<h2>Pin UI to moving objects.</h2>
|
|
28
|
+
</SubjectLayer>
|
|
29
|
+
</ScrollCraftProvider>
|
|
30
|
+
);
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
---
|
|
34
|
+
|
|
35
|
+
## 📖 Documentation & Guides
|
|
36
|
+
|
|
37
|
+
Choose your path based on your role:
|
|
38
|
+
|
|
39
|
+
### 👤 For Humans
|
|
40
|
+
- [**Core Architecture**](docs/architecture.md): Understand the state-snapshot engine.
|
|
41
|
+
- [**Asset Pipeline**](docs/asset-pipeline.md): Learn how to use the CLI and AI tracking.
|
|
42
|
+
- [**React Hooks**](docs/react-integration.md): Build custom interactive components.
|
|
43
|
+
|
|
44
|
+
### 🤖 For AI Agents
|
|
45
|
+
- [**AGENTS.md**](AGENTS.md): Technical standard operating procedures for the repository.
|
|
46
|
+
- [**AI Integration Protocol**](docs/ai-integration.md): How to prompt agents to build scenes for you.
|
|
47
|
+
|
|
48
|
+
---
|
|
49
|
+
|
|
50
|
+
## 🛠️ Performance & Tech
|
|
51
|
+
- **WebGL Accelerated**: High-FPS rendering even for 4K sequences.
|
|
52
|
+
- **AI Subject Tracking**: Automatic (x,y) pinning via SAM 3.
|
|
53
|
+
- **Mouse-Interactive Parallax**: Automatic 3D depth map generation and rendering.
|
|
54
|
+
- **Object-Fit Support**: Responsive "Cover" and "Contain" logic built into the shader.
|
|
55
|
+
|
|
56
|
+
---
|
|
57
|
+
|
|
58
|
+
*Dedicated to my family — Svetla, Verca, Natalka, Alex, and Agatka — and our daughter Agatka, who stays in our hearts.*
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function sendRequest(endpoint: string, method?: 'GET' | 'POST', body?: any, token?: string): Promise<any>;
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.sendRequest = sendRequest;
|
|
4
|
+
const API_BASE_URL = 'https://api.scrollcraft.com';
|
|
5
|
+
async function sendRequest(endpoint, method = 'GET', body, token) {
|
|
6
|
+
const headers = {
|
|
7
|
+
'Content-Type': 'application/json',
|
|
8
|
+
};
|
|
9
|
+
if (token) {
|
|
10
|
+
headers['Authorization'] = `Bearer ${token}`;
|
|
11
|
+
}
|
|
12
|
+
const response = await fetch(`${API_BASE_URL}${endpoint}`, {
|
|
13
|
+
method,
|
|
14
|
+
headers,
|
|
15
|
+
body: body ? JSON.stringify(body) : undefined,
|
|
16
|
+
});
|
|
17
|
+
if (!response.ok) {
|
|
18
|
+
throw new Error(`API request failed: ${response.statusText}`);
|
|
19
|
+
}
|
|
20
|
+
return response.json();
|
|
21
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function sendRequest(endpoint: string, method?: 'GET' | 'POST', body?: any, token?: string): Promise<any>;
|
package/dist/cli/api.js
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.sendRequest = sendRequest;
|
|
4
|
+
const API_BASE_URL = 'https://api.scrollcraft.com';
|
|
5
|
+
async function sendRequest(endpoint, method = 'GET', body, token) {
|
|
6
|
+
const headers = {
|
|
7
|
+
'Content-Type': 'application/json',
|
|
8
|
+
};
|
|
9
|
+
if (token) {
|
|
10
|
+
headers['Authorization'] = `Bearer ${token}`;
|
|
11
|
+
}
|
|
12
|
+
const response = await fetch(`${API_BASE_URL}${endpoint}`, {
|
|
13
|
+
method,
|
|
14
|
+
headers,
|
|
15
|
+
body: body ? JSON.stringify(body) : undefined,
|
|
16
|
+
});
|
|
17
|
+
if (!response.ok) {
|
|
18
|
+
throw new Error(`API request failed: ${response.statusText}`);
|
|
19
|
+
}
|
|
20
|
+
return response.json();
|
|
21
|
+
}
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import 'dotenv/config';
|
|
2
|
+
/**
|
|
3
|
+
* FAL.AI SERVICE
|
|
4
|
+
*
|
|
5
|
+
* Handles all cloud-based AI processing for the asset pipeline.
|
|
6
|
+
*/
|
|
7
|
+
export declare class FalService {
|
|
8
|
+
constructor();
|
|
9
|
+
/**
|
|
10
|
+
* SUBJECT TRACKING (SAM 3)
|
|
11
|
+
*
|
|
12
|
+
* Analyzes a video and returns frame-by-frame (x,y) coordinates of the subject.
|
|
13
|
+
*/
|
|
14
|
+
trackSubject(videoPathOrUrl: string, prompt?: string): Promise<{
|
|
15
|
+
x: number;
|
|
16
|
+
y: number;
|
|
17
|
+
scale: number;
|
|
18
|
+
frame: number;
|
|
19
|
+
}[]>;
|
|
20
|
+
/**
|
|
21
|
+
* AUTO-UPLOAD HELPER
|
|
22
|
+
* Uploads a local file to fal.ai temporary storage.
|
|
23
|
+
*/
|
|
24
|
+
private uploadFile;
|
|
25
|
+
/**
|
|
26
|
+
* DEPTH MAP GENERATION (Video Depth Anything)
|
|
27
|
+
* Creates a temporally consistent grayscale depth video.
|
|
28
|
+
*/
|
|
29
|
+
generateDepthMap(videoPathOrUrl: string): Promise<any>;
|
|
30
|
+
/**
|
|
31
|
+
* IMAGE REFINEMENT (Upscale / BG Remove)
|
|
32
|
+
*/
|
|
33
|
+
refineImage(imageUrl: string, options: {
|
|
34
|
+
upscale?: boolean;
|
|
35
|
+
removeBg?: boolean;
|
|
36
|
+
}): Promise<string>;
|
|
37
|
+
private mapBoxesToTrackingData;
|
|
38
|
+
}
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
36
|
+
exports.FalService = void 0;
|
|
37
|
+
const client_1 = require("@fal-ai/client");
|
|
38
|
+
const fs = __importStar(require("fs-extra"));
|
|
39
|
+
const path = __importStar(require("path"));
|
|
40
|
+
require("dotenv/config");
|
|
41
|
+
/**
|
|
42
|
+
* FAL.AI SERVICE
|
|
43
|
+
*
|
|
44
|
+
* Handles all cloud-based AI processing for the asset pipeline.
|
|
45
|
+
*/
|
|
46
|
+
class FalService {
|
|
47
|
+
constructor() {
|
|
48
|
+
if (!process.env.FAL_KEY) {
|
|
49
|
+
throw new Error('FAL_KEY not found in environment. Please add it to your .env file.');
|
|
50
|
+
}
|
|
51
|
+
}
|
|
52
|
+
/**
|
|
53
|
+
* SUBJECT TRACKING (SAM 3)
|
|
54
|
+
*
|
|
55
|
+
* Analyzes a video and returns frame-by-frame (x,y) coordinates of the subject.
|
|
56
|
+
*/
|
|
57
|
+
async trackSubject(videoPathOrUrl, prompt = "main subject") {
|
|
58
|
+
let videoUrl = videoPathOrUrl;
|
|
59
|
+
// Auto-upload if the input is a local file
|
|
60
|
+
if (fs.existsSync(videoPathOrUrl) && fs.statSync(videoPathOrUrl).isFile()) {
|
|
61
|
+
videoUrl = await this.uploadFile(videoPathOrUrl);
|
|
62
|
+
}
|
|
63
|
+
console.log(`🤖 AI is tracking "${prompt}" in the video via SAM 3...`);
|
|
64
|
+
const result = await client_1.fal.subscribe("fal-ai/sam-3/video-rle", {
|
|
65
|
+
input: {
|
|
66
|
+
video_url: videoUrl,
|
|
67
|
+
prompt: prompt,
|
|
68
|
+
},
|
|
69
|
+
logs: true,
|
|
70
|
+
onQueueUpdate: (update) => {
|
|
71
|
+
if (update.status === "IN_PROGRESS") {
|
|
72
|
+
update.logs.forEach(l => console.log(`⏳ AI Tracking: ${l.message}`));
|
|
73
|
+
}
|
|
74
|
+
}
|
|
75
|
+
});
|
|
76
|
+
// SAM 3 Video RLE structure parsing
|
|
77
|
+
const payload = result.data || result;
|
|
78
|
+
const boxes = payload.boxes;
|
|
79
|
+
if (!boxes || !Array.isArray(boxes) || boxes.length === 0) {
|
|
80
|
+
throw new Error(`AI tracking returned no box data. Check if your FAL_KEY is active and the prompt "${prompt}" matches an object in the video.`);
|
|
81
|
+
}
|
|
82
|
+
console.log(`✅ AI identified tracking data for ${boxes.length} frames.`);
|
|
83
|
+
return this.mapBoxesToTrackingData(boxes, payload);
|
|
84
|
+
}
|
|
85
|
+
/**
|
|
86
|
+
* AUTO-UPLOAD HELPER
|
|
87
|
+
* Uploads a local file to fal.ai temporary storage.
|
|
88
|
+
*/
|
|
89
|
+
async uploadFile(filePath) {
|
|
90
|
+
console.log(`☁️ Uploading local file to AI Cloud: ${path.basename(filePath)}...`);
|
|
91
|
+
const fileBuffer = await fs.readFile(filePath);
|
|
92
|
+
const url = await client_1.fal.storage.upload(new Blob([fileBuffer]));
|
|
93
|
+
console.log(`✅ Upload complete: ${url}`);
|
|
94
|
+
return url;
|
|
95
|
+
}
|
|
96
|
+
/**
|
|
97
|
+
* DEPTH MAP GENERATION (Video Depth Anything)
|
|
98
|
+
* Creates a temporally consistent grayscale depth video.
|
|
99
|
+
*/
|
|
100
|
+
async generateDepthMap(videoPathOrUrl) {
|
|
101
|
+
let videoUrl = videoPathOrUrl;
|
|
102
|
+
// Auto-upload if the input is a local file
|
|
103
|
+
if (fs.existsSync(videoPathOrUrl) && fs.statSync(videoPathOrUrl).isFile()) {
|
|
104
|
+
videoUrl = await this.uploadFile(videoPathOrUrl);
|
|
105
|
+
}
|
|
106
|
+
console.log(`🤖 AI is generating Depth Map Video using Video Depth Anything...`);
|
|
107
|
+
const result = await client_1.fal.subscribe("fal-ai/video-depth-anything", {
|
|
108
|
+
input: {
|
|
109
|
+
video_url: videoUrl,
|
|
110
|
+
model_size: "VDA-Base", // Small, Base, or Large. Base is a good balance.
|
|
111
|
+
},
|
|
112
|
+
logs: true,
|
|
113
|
+
onQueueUpdate: (update) => {
|
|
114
|
+
if (update.status === "IN_PROGRESS") {
|
|
115
|
+
update.logs.forEach(l => console.log(`⏳ AI Depth Map: ${l.message}`));
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
});
|
|
119
|
+
// Debug output to see what Fal is actually returning
|
|
120
|
+
await fs.writeFile('debug_fal.json', JSON.stringify(result, null, 2));
|
|
121
|
+
const payload = result.data || result;
|
|
122
|
+
if (!payload.video || !payload.video.url) {
|
|
123
|
+
throw new Error(`AI Depth Map generation failed. No video URL returned. Saved response to debug_fal.json`);
|
|
124
|
+
}
|
|
125
|
+
console.log(`✅ Depth Map Video Generated: ${payload.video.url}`);
|
|
126
|
+
return payload.video.url;
|
|
127
|
+
}
|
|
128
|
+
/**
|
|
129
|
+
* IMAGE REFINEMENT (Upscale / BG Remove)
|
|
130
|
+
*/
|
|
131
|
+
async refineImage(imageUrl, options) {
|
|
132
|
+
let currentUrl = imageUrl;
|
|
133
|
+
if (options.removeBg) {
|
|
134
|
+
const bgResult = await client_1.fal.subscribe("fal-ai/bria/background-removal", {
|
|
135
|
+
input: { image_url: currentUrl }
|
|
136
|
+
});
|
|
137
|
+
currentUrl = bgResult.image.url;
|
|
138
|
+
}
|
|
139
|
+
if (options.upscale) {
|
|
140
|
+
const upscaleResult = await client_1.fal.subscribe("fal-ai/esrgan", {
|
|
141
|
+
input: { image_url: currentUrl, scale: 2 }
|
|
142
|
+
});
|
|
143
|
+
currentUrl = upscaleResult.image.url;
|
|
144
|
+
}
|
|
145
|
+
return currentUrl;
|
|
146
|
+
}
|
|
147
|
+
mapBoxesToTrackingData(boxes, payload = {}) {
|
|
148
|
+
let lastKnown = { x: 0.5, y: 0.5, scale: 0 };
|
|
149
|
+
let detectedCount = 0;
|
|
150
|
+
const mapped = boxes.map((frameBoxes, i) => {
|
|
151
|
+
// SAM-3 video-rle returns frames as [null, [cx,cy,w,h], [cx,cy,w,h], ...]
|
|
152
|
+
// Or sometimes [[cx,cy,w,h]] if it's an array of objects
|
|
153
|
+
if (frameBoxes && Array.isArray(frameBoxes)) {
|
|
154
|
+
let box = null;
|
|
155
|
+
// Case 1: frameBoxes is [cx, cy, w, h] directly
|
|
156
|
+
if (typeof frameBoxes[0] === 'number' && frameBoxes.length >= 4) {
|
|
157
|
+
box = frameBoxes;
|
|
158
|
+
}
|
|
159
|
+
// Case 2: frameBoxes is [[cx, cy, w, h]]
|
|
160
|
+
else if (Array.isArray(frameBoxes[0]) && frameBoxes[0].length >= 4) {
|
|
161
|
+
box = frameBoxes[0];
|
|
162
|
+
}
|
|
163
|
+
// Case 3: frameBoxes is [{box_2d: [...]}]
|
|
164
|
+
else if (typeof frameBoxes[0] === 'object' && frameBoxes[0].box_2d) {
|
|
165
|
+
box = frameBoxes[0].box_2d;
|
|
166
|
+
}
|
|
167
|
+
if (box) {
|
|
168
|
+
lastKnown = {
|
|
169
|
+
x: box[0],
|
|
170
|
+
y: box[1],
|
|
171
|
+
scale: box[2] * box[3]
|
|
172
|
+
};
|
|
173
|
+
detectedCount++;
|
|
174
|
+
}
|
|
175
|
+
}
|
|
176
|
+
return {
|
|
177
|
+
frame: i,
|
|
178
|
+
...lastKnown
|
|
179
|
+
};
|
|
180
|
+
});
|
|
181
|
+
if (detectedCount === 0) {
|
|
182
|
+
console.warn('⚠️ AI found frames but NO objects were detected with the logic. All coordinates defaulted to 0.5.');
|
|
183
|
+
}
|
|
184
|
+
else {
|
|
185
|
+
console.log(`🎯 Successfully extracted unique coordinates for ${detectedCount} frames.`);
|
|
186
|
+
}
|
|
187
|
+
return mapped;
|
|
188
|
+
}
|
|
189
|
+
}
|
|
190
|
+
exports.FalService = FalService;
|
|
@@ -0,0 +1,164 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
"use strict";
|
|
3
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
4
|
+
if (k2 === undefined) k2 = k;
|
|
5
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
6
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
7
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
8
|
+
}
|
|
9
|
+
Object.defineProperty(o, k2, desc);
|
|
10
|
+
}) : (function(o, m, k, k2) {
|
|
11
|
+
if (k2 === undefined) k2 = k;
|
|
12
|
+
o[k2] = m[k];
|
|
13
|
+
}));
|
|
14
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
15
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
16
|
+
}) : function(o, v) {
|
|
17
|
+
o["default"] = v;
|
|
18
|
+
});
|
|
19
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
20
|
+
var ownKeys = function(o) {
|
|
21
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
22
|
+
var ar = [];
|
|
23
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
24
|
+
return ar;
|
|
25
|
+
};
|
|
26
|
+
return ownKeys(o);
|
|
27
|
+
};
|
|
28
|
+
return function (mod) {
|
|
29
|
+
if (mod && mod.__esModule) return mod;
|
|
30
|
+
var result = {};
|
|
31
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
32
|
+
__setModuleDefault(result, mod);
|
|
33
|
+
return result;
|
|
34
|
+
};
|
|
35
|
+
})();
|
|
36
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
37
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
38
|
+
};
|
|
39
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
40
|
+
const commander_1 = require("commander");
|
|
41
|
+
const chalk_1 = __importDefault(require("chalk"));
|
|
42
|
+
const fs = __importStar(require("fs-extra"));
|
|
43
|
+
const path = __importStar(require("path"));
|
|
44
|
+
const child_process_1 = require("child_process");
|
|
45
|
+
const ffmpeg_static_1 = __importDefault(require("ffmpeg-static"));
|
|
46
|
+
const fal_service_1 = require("./fal-service");
|
|
47
|
+
const processor_1 = require("./processor");
|
|
48
|
+
/**
|
|
49
|
+
* Robust FFmpeg Detection
|
|
50
|
+
* Prioritizes bundled static binary, then system PATH.
|
|
51
|
+
*/
|
|
52
|
+
/**
 * Robust FFmpeg Detection
 * Prioritizes bundled static binary, then system PATH.
 * Returns an executable path/name, or null when FFmpeg is unavailable.
 */
function getFFmpegPath() {
    // Prefer the binary shipped with ffmpeg-static when it resolved to a path.
    const bundled = ffmpeg_static_1.default;
    if (bundled) {
        return bundled;
    }
    // Fall back to probing the system PATH; a throwing probe means no ffmpeg.
    try {
        (0, child_process_1.execSync)('ffmpeg -version', { stdio: 'ignore' });
    }
    catch {
        return null;
    }
    return 'ffmpeg';
}
|
|
65
|
+
// CLI entry point: defines the `scft` program and its one-step `create` command.
const program = new commander_1.Command();
program
    .name('scft')
    .description('ScrollCraft CLI 2.0 - Immersive Web SDK')
    .version('2.0.1');
program
    .command('create')
    .description('ONE-STEP: Transform video/images into a responsive ScrollCraft')
    .argument('<input>', 'Path to input video or directory of images')
    .option('-o, --output <dir>', 'Output directory', './scrollcraft-project')
    .option('-p, --prompt <text>', 'Text prompt for subject tracking', 'main subject')
    .option('-s, --step <number>', 'Process every Nth frame (default: 1)', '1')
    .option('--cloud', 'Use Fal.ai for tracking and refinement', false)
    .option('--depth', 'Generate a 3D depth map for the displacement effect (Requires --cloud)', false)
    .action(async (input, opts) => {
    console.log(chalk_1.default.bold.blue('\n🎞️ ScrollCraft 2.0 Asset Pipeline\n'));
    // 0. PRE-FLIGHT CHECK
    const ffmpegPath = getFFmpegPath();
    if (!ffmpegPath) {
        console.error(chalk_1.default.red('\n❌ FFmpeg not found!'));
        console.log(chalk_1.default.yellow('This CLI requires FFmpeg to process videos.'));
        console.log('Please install it manually or ensure regular npm install was successful.');
        process.exit(1);
    }
    const outDir = path.resolve(opts.output);
    // Intermediate frames live under the output dir and are removed on success.
    const tempDir = path.join(outDir, '.temp-frames');
    // NaN or 0 from a bad --step value falls back to 1 (process every frame).
    const step = parseInt(opts.step) || 1;
    try {
        await fs.ensureDir(outDir);
        await fs.ensureDir(tempDir);
        // 1. FRAME EXTRACTION
        if (fs.statSync(input).isFile()) {
            console.log(chalk_1.default.yellow(`📦 Extracting frames from video: ${input}`));
            // Extract 30 frames per second (matching our default)
            // Using the robust path discovered in pre-flight
            // NOTE(review): input/tempDir are interpolated into a shell string;
            // they are quoted but not escaped, so a path containing `"` would
            // break the command — consider execFileSync with an args array.
            (0, child_process_1.execSync)(`"${ffmpegPath}" -i "${input}" -vf "fps=30" "${tempDir}/frame_%04d.png"`, { stdio: 'inherit' });
        }
        else {
            console.log(chalk_1.default.yellow(`📂 Using images from: ${input}`));
            // Cloud tracking/depth both expect a single video file upstream.
            if (opts.cloud || opts.depth) {
                console.error(chalk_1.default.red('\n❌ AI Cloud features (tracking/depth) currently require a video file as input.'));
                console.log(chalk_1.default.yellow('To use a directory of images, please use local mode (disable --cloud and --depth).'));
                process.exit(1);
            }
            // Natural-numeric sort so img2 precedes img10.
            const files = (await fs.readdir(input))
                .filter(f => /\.(png|jpg|jpeg|webp)$/i.test(f))
                .sort((a, b) => a.localeCompare(b, undefined, { numeric: true, sensitivity: 'base' }));
            if (files.length === 0) {
                throw new Error(`No compatible images (png, jpg, webp) found in ${input}`);
            }
            console.log(chalk_1.default.dim(`📦 Standardizing ${files.length} images...`));
            // Copy into the frame_0001-style naming the processor expects.
            for (let i = 0; i < files.length; i++) {
                const ext = path.extname(files[i]);
                const frameName = `frame_${(i + 1).toString().padStart(4, '0')}${ext}`;
                await fs.copy(path.join(input, files[i]), path.join(tempDir, frameName));
            }
        }
        // 2. SUBJECT TRACKING & DEPTH MAP
        let trackingData = [];
        let hasDepth = false;
        if (opts.cloud) {
            const fal = new fal_service_1.FalService();
            // Tracking
            trackingData = await fal.trackSubject(input, opts.prompt);
            // Depth Map
            if (opts.depth) {
                console.log(chalk_1.default.yellow(`\n🕳️ Generating Depth Map via AI...`));
                const depthUrl = await fal.generateDepthMap(input);
                console.log(chalk_1.default.yellow(`📥 Downloading Depth Map Video...`));
                const res = await fetch(depthUrl);
                const arrayBuffer = await res.arrayBuffer();
                const depthVideoPath = path.join(tempDir, 'depth_video.mp4');
                await fs.writeFile(depthVideoPath, Buffer.from(arrayBuffer));
                console.log(chalk_1.default.yellow(`📦 Extracting depth frames...`));
                // Same fps as the main extraction so depth_NNNN aligns with frame_NNNN.
                (0, child_process_1.execSync)(`"${ffmpegPath}" -i "${depthVideoPath}" -vf "fps=30" "${tempDir}/depth_%04d.png"`, { stdio: 'inherit' });
                hasDepth = true;
            }
        }
        else {
            // Without --cloud, every frame is pinned to the center (0.5, 0.5).
            console.log(chalk_1.default.dim('ℹ️ Local tracking not yet implemented. Using center-pinned defaults.'));
            const frames = (await fs.readdir(tempDir)).filter(f => f.startsWith('frame_'));
            trackingData = frames.map((_, i) => ({ frame: i, x: 0.5, y: 0.5, scale: 0 }));
        }
        // 3. VARIANT GENERATION (Mobile/Desktop)
        const processor = new processor_1.AssetProcessor(outDir);
        const variants = await processor.processVariants(tempDir, trackingData, { step, hasDepth });
        // 4. CLEANUP & SAVE
        await processor.saveConfig(variants);
        await fs.remove(tempDir);
        console.log(chalk_1.default.bold.green(`\n✅ Project Created Successfully!`));
        console.log(chalk_1.default.white(`📍 Output: ${outDir}`));
        console.log(chalk_1.default.white(`📜 Config: scrollcraft.json`));
        console.log(chalk_1.default.cyan(`\nNext: Import the .json into your <ScrollCraftProvider />\n`));
    }
    catch (err) {
        // Any pipeline failure exits non-zero; tempDir is intentionally left
        // behind here so partial output can be inspected.
        console.error(chalk_1.default.red(`\n❌ Error: ${err.message}`));
        process.exit(1);
    }
});
program.parse(process.argv);
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { ProjectConfiguration, AssetVariant, SubjectFrameData } from '../core/types';
|
|
2
|
+
/**
|
|
3
|
+
* LOCAL ASSET PROCESSOR
|
|
4
|
+
*
|
|
5
|
+
* Handles cropping, resizing, and variant generation.
|
|
6
|
+
*/
|
|
7
|
+
export declare class AssetProcessor {
|
|
8
|
+
private outDir;
|
|
9
|
+
constructor(outDir: string);
|
|
10
|
+
/**
|
|
11
|
+
* GENERATE VARIANTS
|
|
12
|
+
*
|
|
13
|
+
* Creates folders for Mobile, Tablet, Desktop with optimized images.
|
|
14
|
+
*/
|
|
15
|
+
processVariants(sourceFramesDir: string, trackingData: SubjectFrameData[], options?: {
|
|
16
|
+
step?: number;
|
|
17
|
+
hasDepth?: boolean;
|
|
18
|
+
}): Promise<AssetVariant[]>;
|
|
19
|
+
private subjectToSharpPosition;
|
|
20
|
+
/**
|
|
21
|
+
* SAVE PROJECT FILE
|
|
22
|
+
*/
|
|
23
|
+
saveConfig(variants: AssetVariant[]): Promise<ProjectConfiguration>;
|
|
24
|
+
}
|
|
@@ -0,0 +1,168 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
+
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
+
}) : function(o, v) {
|
|
16
|
+
o["default"] = v;
|
|
17
|
+
});
|
|
18
|
+
var __importStar = (this && this.__importStar) || (function () {
|
|
19
|
+
var ownKeys = function(o) {
|
|
20
|
+
ownKeys = Object.getOwnPropertyNames || function (o) {
|
|
21
|
+
var ar = [];
|
|
22
|
+
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
|
23
|
+
return ar;
|
|
24
|
+
};
|
|
25
|
+
return ownKeys(o);
|
|
26
|
+
};
|
|
27
|
+
return function (mod) {
|
|
28
|
+
if (mod && mod.__esModule) return mod;
|
|
29
|
+
var result = {};
|
|
30
|
+
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
|
31
|
+
__setModuleDefault(result, mod);
|
|
32
|
+
return result;
|
|
33
|
+
};
|
|
34
|
+
})();
|
|
35
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
36
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
37
|
+
};
|
|
38
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
39
|
+
exports.AssetProcessor = void 0;
|
|
40
|
+
const fs = __importStar(require("fs-extra"));
|
|
41
|
+
const path = __importStar(require("path"));
|
|
42
|
+
const sharp_1 = __importDefault(require("sharp"));
|
|
43
|
+
/**
|
|
44
|
+
* LOCAL ASSET PROCESSOR
|
|
45
|
+
*
|
|
46
|
+
* Handles cropping, resizing, and variant generation.
|
|
47
|
+
*/
|
|
48
|
+
class AssetProcessor {
|
|
49
|
+
outDir;
|
|
50
|
+
constructor(outDir) {
|
|
51
|
+
this.outDir = outDir;
|
|
52
|
+
}
|
|
53
|
+
/**
|
|
54
|
+
* GENERATE VARIANTS
|
|
55
|
+
*
|
|
56
|
+
* Creates folders for Mobile, Tablet, Desktop with optimized images.
|
|
57
|
+
*/
|
|
58
|
+
async processVariants(sourceFramesDir, trackingData, options = {}) {
|
|
59
|
+
const step = options.step || 1;
|
|
60
|
+
const allFiles = await fs.readdir(sourceFramesDir);
|
|
61
|
+
// Ensure we only process regular frames for the main loop
|
|
62
|
+
const allFrames = allFiles.filter(f => f.startsWith('frame_'));
|
|
63
|
+
// Sort frames numerically to ensure consistent indexing (e.g. 1, 2, 10 instead of 1, 10, 2)
|
|
64
|
+
allFrames.sort((a, b) => a.localeCompare(b, undefined, { numeric: true, sensitivity: 'base' }));
|
|
65
|
+
const framesToProcess = allFrames.filter((_, i) => i % step === 0);
|
|
66
|
+
const variants = [];
|
|
67
|
+
// Define our target variants
|
|
68
|
+
const configs = [
|
|
69
|
+
{ id: 'mobile', width: 720, height: 1280, media: '(max-width: 600px)' },
|
|
70
|
+
{ id: 'desktop', width: 1920, height: 1080, media: '(min-width: 601px)' }
|
|
71
|
+
];
|
|
72
|
+
for (const config of configs) {
|
|
73
|
+
const variantDir = path.join(this.outDir, config.id);
|
|
74
|
+
await fs.ensureDir(variantDir);
|
|
75
|
+
console.log(`🎨 Generating ${config.id} variant (${config.width}x${config.height}) for ${framesToProcess.length} images...`);
|
|
76
|
+
const variantTracking = [];
|
|
77
|
+
for (let i = 0; i < framesToProcess.length; i++) {
|
|
78
|
+
const originalIndex = i * step;
|
|
79
|
+
const frameName = framesToProcess[i];
|
|
80
|
+
const framePath = path.join(sourceFramesDir, frameName);
|
|
81
|
+
const targetPath = path.join(variantDir, `index_${i}.webp`);
|
|
82
|
+
const subject = trackingData.find(f => f.frame === originalIndex) || { frame: originalIndex, x: 0.5, y: 0.5, scale: 0 };
|
|
83
|
+
// SMART CROP: Center on the subject (x,y)
|
|
84
|
+
// This logic would calculate the top/left based on subject relative position
|
|
85
|
+
await (0, sharp_1.default)(framePath)
|
|
86
|
+
.resize(config.width, config.height, {
|
|
87
|
+
fit: 'cover',
|
|
88
|
+
position: this.subjectToSharpPosition(subject)
|
|
89
|
+
})
|
|
90
|
+
.webp({ quality: 80 })
|
|
91
|
+
.toFile(targetPath);
|
|
92
|
+
if (options.hasDepth) {
|
|
93
|
+
const numStr = frameName.match(/(\d+)/)?.[1] || "";
|
|
94
|
+
// Look for a depth file that matches the same numeric index
|
|
95
|
+
const depthFrameName = allFiles.find(f => f.startsWith('depth_') && f.includes(numStr));
|
|
96
|
+
const depthFramePath = depthFrameName ? path.join(sourceFramesDir, depthFrameName) : '';
|
|
97
|
+
if (depthFramePath && fs.existsSync(depthFramePath)) {
|
|
98
|
+
const depthTargetPath = path.join(variantDir, `index_${i}_depth.webp`);
|
|
99
|
+
await (0, sharp_1.default)(depthFramePath)
|
|
100
|
+
.resize(config.width, config.height, {
|
|
101
|
+
fit: 'cover',
|
|
102
|
+
position: this.subjectToSharpPosition(subject)
|
|
103
|
+
})
|
|
104
|
+
// We grayscale and save as webp
|
|
105
|
+
.grayscale()
|
|
106
|
+
.webp({ quality: 80 })
|
|
107
|
+
.toFile(depthTargetPath);
|
|
108
|
+
}
|
|
109
|
+
}
|
|
110
|
+
// Add to variant tracking (using relative frame 0...N)
|
|
111
|
+
variantTracking.push({
|
|
112
|
+
...subject,
|
|
113
|
+
frame: i
|
|
114
|
+
});
|
|
115
|
+
}
|
|
116
|
+
variants.push({
|
|
117
|
+
id: config.id,
|
|
118
|
+
media: config.media,
|
|
119
|
+
path: `./${config.id}`, // Relative path in the final output
|
|
120
|
+
aspectRatio: config.id === 'mobile' ? '9:16' : '16:9',
|
|
121
|
+
frameCount: framesToProcess.length,
|
|
122
|
+
hasDepthMap: options.hasDepth,
|
|
123
|
+
subjectTracking: variantTracking
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
return variants;
|
|
127
|
+
}
|
|
128
|
+
subjectToSharpPosition(subject) {
|
|
129
|
+
// Map 0-1 to percentages for sharp
|
|
130
|
+
const xPercent = Math.round(subject.x * 100);
|
|
131
|
+
const yPercent = Math.round(subject.y * 100);
|
|
132
|
+
// Return a string sharp understands or use its gravity system
|
|
133
|
+
// For custom positioning, we'd need more complex math with .extract()
|
|
134
|
+
return 'center'; // Placeholder for now
|
|
135
|
+
}
|
|
136
|
+
/**
|
|
137
|
+
* SAVE PROJECT FILE
|
|
138
|
+
*/
|
|
139
|
+
async saveConfig(variants) {
|
|
140
|
+
const config = {
|
|
141
|
+
version: "2.0.1",
|
|
142
|
+
settings: {
|
|
143
|
+
fps: 30,
|
|
144
|
+
baseResolution: { width: 1920, height: 1080 },
|
|
145
|
+
scrollMode: 'vh'
|
|
146
|
+
},
|
|
147
|
+
assets: [{
|
|
148
|
+
id: "main-sequence",
|
|
149
|
+
strategy: "adaptive",
|
|
150
|
+
variants: variants
|
|
151
|
+
}],
|
|
152
|
+
timeline: {
|
|
153
|
+
totalDuration: "300vh",
|
|
154
|
+
scenes: [{
|
|
155
|
+
id: "scene-1",
|
|
156
|
+
assetId: "main-sequence",
|
|
157
|
+
startProgress: 0,
|
|
158
|
+
duration: 1,
|
|
159
|
+
assetRange: [0, variants[0].frameCount - 1],
|
|
160
|
+
layers: []
|
|
161
|
+
}]
|
|
162
|
+
}
|
|
163
|
+
};
|
|
164
|
+
await fs.writeJson(path.join(this.outDir, 'scrollcraft.json'), config, { spaces: 2 });
|
|
165
|
+
return config;
|
|
166
|
+
}
|
|
167
|
+
}
|
|
168
|
+
exports.AssetProcessor = AssetProcessor;
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import { ProjectConfiguration } from './types';
|
|
2
|
+
/**
|
|
3
|
+
* SCROLLCRAFT 2.0 CORE ENGINE
|
|
4
|
+
*
|
|
5
|
+
* A declarative, performant engine that maps scroll progress
|
|
6
|
+
* to high-performance image sequence rendering.
|
|
7
|
+
*/
|
|
8
|
+
export declare class CoreEngine {
|
|
9
|
+
private config;
|
|
10
|
+
private currentFrame;
|
|
11
|
+
private activeVariant;
|
|
12
|
+
private canvas;
|
|
13
|
+
private ctx;
|
|
14
|
+
private renderer;
|
|
15
|
+
private imageCache;
|
|
16
|
+
private depthCache;
|
|
17
|
+
private scrollTimeout;
|
|
18
|
+
constructor(config: ProjectConfiguration);
|
|
19
|
+
/**
|
|
20
|
+
* ATTACH CANVAS
|
|
21
|
+
* Connects the engine to a DOM element for rendering.
|
|
22
|
+
*/
|
|
23
|
+
attachCanvas(canvas: HTMLCanvasElement): void;
|
|
24
|
+
private resizeCanvas;
|
|
25
|
+
/**
|
|
26
|
+
* ADAPTIVE RENDERING
|
|
27
|
+
* Selects the best image folder based on current browser media queries.
|
|
28
|
+
*/
|
|
29
|
+
private detectBestVariant;
|
|
30
|
+
private clearCache;
|
|
31
|
+
private preloadInitial;
|
|
32
|
+
/**
|
|
33
|
+
* THE PLAYER ENGINE
|
|
34
|
+
* Maps global scroll progress (0-1) to local scene frames.
|
|
35
|
+
*/
|
|
36
|
+
update(progress: number): {
|
|
37
|
+
frame: number;
|
|
38
|
+
subjectCoords: {
|
|
39
|
+
x: number;
|
|
40
|
+
y: number;
|
|
41
|
+
};
|
|
42
|
+
} | undefined;
|
|
43
|
+
private getSubjectCoords;
|
|
44
|
+
/**
|
|
45
|
+
* RENDER LOOP
|
|
46
|
+
* Draws the image to the canvas with object-fit: cover logic.
|
|
47
|
+
*/
|
|
48
|
+
private render;
|
|
49
|
+
private getImage;
|
|
50
|
+
private loadDepthMap;
|
|
51
|
+
private getDepthImage;
|
|
52
|
+
}
|
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
export declare class WebGLRenderer {
|
|
2
|
+
private gl;
|
|
3
|
+
private program;
|
|
4
|
+
private positionBuffer;
|
|
5
|
+
private texture;
|
|
6
|
+
private depthTexture;
|
|
7
|
+
private targetMouse;
|
|
8
|
+
private currentMouse;
|
|
9
|
+
private animationFrameId;
|
|
10
|
+
constructor(canvas: HTMLCanvasElement);
|
|
11
|
+
private createProgram;
|
|
12
|
+
private animate;
|
|
13
|
+
render(img: HTMLImageElement, depthImg: HTMLImageElement | null, width: number, height: number): void;
|
|
14
|
+
private draw;
|
|
15
|
+
destroy(): void;
|
|
16
|
+
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export * from './types';
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Venovani:
|
|
3
|
+
Tento plugin je venovan me rodine - mamince Svetle,
|
|
4
|
+
manzelce Verce, detem Natalce a Alexovi
|
|
5
|
+
a nasi dceri Agatce, ktera navzdy zustane v nasich srdcich.
|
|
6
|
+
|
|
7
|
+
Dedication:
|
|
8
|
+
This plugin is dedicated to my family - my mother Svetla,
|
|
9
|
+
my wife Verca, my children Natalka and Alex,
|
|
10
|
+
and our daughter Agatka, who will forever remain in our hearts.
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
!function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.ScrollCraft=t():e.ScrollCraft=t()}(this,()=>(()=>{"use strict";var e={};return e=e.default})());
|
|
@@ -0,0 +1,172 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SCROLLCRAFT 2.0 - DECLARATIVE SCHEMA
|
|
3
|
+
*
|
|
4
|
+
* This file defines the core data structures that allow an AI Agent
|
|
5
|
+
* to describe a scroll experience in one step.
|
|
6
|
+
*/
|
|
7
|
+
export interface ProjectConfiguration {
|
|
8
|
+
version: string;
|
|
9
|
+
settings: ProjectSettings;
|
|
10
|
+
assets: SequenceAsset[];
|
|
11
|
+
timeline: TimelineDefinition;
|
|
12
|
+
}
|
|
13
|
+
export interface ProjectSettings {
|
|
14
|
+
fps: number;
|
|
15
|
+
baseResolution: {
|
|
16
|
+
width: number;
|
|
17
|
+
height: number;
|
|
18
|
+
};
|
|
19
|
+
scrollMode: 'vh' | 'px';
|
|
20
|
+
}
|
|
21
|
+
/**
|
|
22
|
+
* ASSET SYSTEM
|
|
23
|
+
*/
|
|
24
|
+
export interface SequenceAsset {
|
|
25
|
+
id: string;
|
|
26
|
+
strategy: 'adaptive' | 'fixed';
|
|
27
|
+
variants: AssetVariant[];
|
|
28
|
+
}
|
|
29
|
+
export interface AssetVariant {
|
|
30
|
+
id: string;
|
|
31
|
+
media: string;
|
|
32
|
+
path: string;
|
|
33
|
+
aspectRatio: string;
|
|
34
|
+
frameCount: number;
|
|
35
|
+
hasDepthMap?: boolean;
|
|
36
|
+
subjectTracking?: SubjectFrameData[];
|
|
37
|
+
}
|
|
38
|
+
export interface SubjectFrameData {
|
|
39
|
+
frame: number;
|
|
40
|
+
x: number;
|
|
41
|
+
y: number;
|
|
42
|
+
scale?: number;
|
|
43
|
+
}
|
|
44
|
+
/**
|
|
45
|
+
* TIMELINE SYSTEM
|
|
46
|
+
*/
|
|
47
|
+
export interface TimelineDefinition {
|
|
48
|
+
totalDuration: string | number;
|
|
49
|
+
scenes: SceneDefinition[];
|
|
50
|
+
}
|
|
51
|
+
export interface SceneDefinition {
|
|
52
|
+
id: string;
|
|
53
|
+
assetId: string;
|
|
54
|
+
startProgress: number;
|
|
55
|
+
duration: number;
|
|
56
|
+
assetRange: [number, number];
|
|
57
|
+
layers: LayerDefinition[];
|
|
58
|
+
}
|
|
59
|
+
/**
|
|
60
|
+
* LAYER SYSTEM
|
|
61
|
+
*/
|
|
62
|
+
export type LayerDefinition = HTMLLayer | CanvasLayer;
|
|
63
|
+
export interface BaseLayer {
|
|
64
|
+
id: string;
|
|
65
|
+
type: string;
|
|
66
|
+
zIndex?: number;
|
|
67
|
+
anchor: 'viewport' | 'subject';
|
|
68
|
+
position: ResponsiveCoordinate;
|
|
69
|
+
animations: LayerAnimation[];
|
|
70
|
+
}
|
|
71
|
+
export interface HTMLLayer extends BaseLayer {
|
|
72
|
+
type: 'html';
|
|
73
|
+
content: string;
|
|
74
|
+
style?: Record<string, string>;
|
|
75
|
+
}
|
|
76
|
+
export interface CanvasLayer extends BaseLayer {
|
|
77
|
+
type: 'canvas';
|
|
78
|
+
}
|
|
79
|
+
/**
|
|
80
|
+
* UTILITY TYPES
|
|
81
|
+
*/
|
|
82
|
+
export interface ResponsiveCoordinate {
|
|
83
|
+
default: Point;
|
|
84
|
+
mobile?: Point;
|
|
85
|
+
tablet?: Point;
|
|
86
|
+
desktop?: Point;
|
|
87
|
+
}
|
|
88
|
+
export interface Point {
|
|
89
|
+
x: string | number;
|
|
90
|
+
y: string | number;
|
|
91
|
+
}
|
|
92
|
+
export interface LayerAnimation {
|
|
93
|
+
property: string;
|
|
94
|
+
from: number | string;
|
|
95
|
+
to: number | string;
|
|
96
|
+
start: number;
|
|
97
|
+
end: number;
|
|
98
|
+
easing?: string;
|
|
99
|
+
}
|
|
100
|
+
/**
|
|
101
|
+
* LEGACY TYPES (V1 Compatibility)
|
|
102
|
+
* These are required for existing modules to compile.
|
|
103
|
+
*/
|
|
104
|
+
export interface BlockInstanceInterface {
|
|
105
|
+
destroy(): void;
|
|
106
|
+
resize(params: {
|
|
107
|
+
wiWidth: number;
|
|
108
|
+
wiHeight: number;
|
|
109
|
+
}): void;
|
|
110
|
+
}
|
|
111
|
+
export type MediaGroupPositionAndSize = {
|
|
112
|
+
bgSize?: 'contain' | 'cover' | 'custom';
|
|
113
|
+
bgPosition?: {
|
|
114
|
+
x: number;
|
|
115
|
+
y: number;
|
|
116
|
+
};
|
|
117
|
+
};
|
|
118
|
+
export type ImagesUrlListArray = Array<string | {
|
|
119
|
+
i: string;
|
|
120
|
+
dur?: number;
|
|
121
|
+
}>;
|
|
122
|
+
export type ImageGroupUrlList = MediaGroupPositionAndSize & {
|
|
123
|
+
type: 'urlList';
|
|
124
|
+
prefix?: string;
|
|
125
|
+
suffix?: string;
|
|
126
|
+
images: ImagesUrlListArray;
|
|
127
|
+
duration?: number;
|
|
128
|
+
};
|
|
129
|
+
export type ImageGroupGap = {
|
|
130
|
+
type: 'gap';
|
|
131
|
+
duration: number;
|
|
132
|
+
};
|
|
133
|
+
export type VideoGroupUrl = MediaGroupPositionAndSize & {
|
|
134
|
+
type: 'urlVideo';
|
|
135
|
+
video?: string;
|
|
136
|
+
duration?: number;
|
|
137
|
+
};
|
|
138
|
+
export type ImageGroupWp = MediaGroupPositionAndSize & {
|
|
139
|
+
type: 'wpMedia';
|
|
140
|
+
images: number[];
|
|
141
|
+
duration?: number;
|
|
142
|
+
};
|
|
143
|
+
export type VideoGroupWp = MediaGroupPositionAndSize & {
|
|
144
|
+
type: 'wpVideo';
|
|
145
|
+
video?: number;
|
|
146
|
+
duration?: number;
|
|
147
|
+
};
|
|
148
|
+
export type MediaGroup = ImageGroupGap | ImageGroupUrlList | VideoGroupUrl | ImageGroupWp | VideoGroupWp;
|
|
149
|
+
export type MediaGroups = MediaGroup[];
|
|
150
|
+
export type AttributesCanvas2d = {
|
|
151
|
+
mediaGroups: MediaGroups;
|
|
152
|
+
markers: boolean;
|
|
153
|
+
scrub: number;
|
|
154
|
+
triggerStart: number;
|
|
155
|
+
triggerEnd: number;
|
|
156
|
+
pin: boolean;
|
|
157
|
+
pinSpacing: boolean;
|
|
158
|
+
};
|
|
159
|
+
export type AttributesScene = {
|
|
160
|
+
scenePosition: 'flow' | 'sticky' | 'fixed';
|
|
161
|
+
stickyType: 'css' | 'js';
|
|
162
|
+
height: number;
|
|
163
|
+
heightUnit: string;
|
|
164
|
+
duration: number;
|
|
165
|
+
durationUnit: string;
|
|
166
|
+
top: number;
|
|
167
|
+
topUnit: string;
|
|
168
|
+
markers: boolean;
|
|
169
|
+
scrub: number;
|
|
170
|
+
triggerStart: number;
|
|
171
|
+
triggerEnd: number;
|
|
172
|
+
};
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
import React from 'react';
|
|
2
|
+
import { CoreEngine } from '../core/CoreEngine';
|
|
3
|
+
import { ProjectConfiguration } from '../core/types';
|
|
4
|
+
interface SCFTContext {
|
|
5
|
+
progress: number;
|
|
6
|
+
frame: number;
|
|
7
|
+
subjectCoords: {
|
|
8
|
+
x: number;
|
|
9
|
+
y: number;
|
|
10
|
+
};
|
|
11
|
+
engine: CoreEngine | null;
|
|
12
|
+
}
|
|
13
|
+
export declare const ScrollCraftProvider: React.FC<{
|
|
14
|
+
project: ProjectConfiguration;
|
|
15
|
+
children: React.ReactNode;
|
|
16
|
+
}>;
|
|
17
|
+
export declare const ScrollCraftCanvas: React.FC<{
|
|
18
|
+
assetId?: string;
|
|
19
|
+
style?: React.CSSProperties;
|
|
20
|
+
}>;
|
|
21
|
+
export declare const SubjectLayer: React.FC<{
|
|
22
|
+
offset?: {
|
|
23
|
+
x: number;
|
|
24
|
+
y: number;
|
|
25
|
+
};
|
|
26
|
+
children: React.ReactNode;
|
|
27
|
+
}>;
|
|
28
|
+
export declare const useScrollCraft: () => SCFTContext;
|
|
29
|
+
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export { ScrollCraftProvider, ScrollCraftCanvas, SubjectLayer, useScrollCraft } from './ScrollCraftProvider';
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
/*
|
|
2
|
+
Venovani:
|
|
3
|
+
Tento plugin je venovan me rodine - mamince Svetle,
|
|
4
|
+
manzelce Verce, detem Natalce a Alexovi
|
|
5
|
+
a nasi dceri Agatce, ktera navzdy zustane v nasich srdcich.
|
|
6
|
+
|
|
7
|
+
Dedication:
|
|
8
|
+
This plugin is dedicated to my family - my mother Svetla,
|
|
9
|
+
my wife Verca, my children Natalka and Alex,
|
|
10
|
+
and our daughter Agatka, who will forever remain in our hearts.
|
|
11
|
+
*/
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
!function(t,e){"object"==typeof exports&&"object"==typeof module?module.exports=e(require("react")):"function"==typeof define&&define.amd?define(["react"],e):"object"==typeof exports?exports.ScrollCraftReact=e(require("react")):t.ScrollCraftReact=e(t.react)}(this,t=>(()=>{"use strict";var e={326(t,e){var i=Symbol.for("react.transitional.element");function r(t,e,r){var s=null;if(void 0!==r&&(s=""+r),void 0!==e.key&&(s=""+e.key),"key"in e)for(var a in r={},e)"key"!==a&&(r[a]=e[a]);else r=e;return e=r.ref,{$$typeof:i,type:t,key:s,ref:void 0!==e?e:null,props:r}}Symbol.for("react.fragment"),e.jsx=r},540(t,e,i){t.exports=i(326)},155(e){e.exports=t}},i={};function r(t){var s=i[t];if(void 0!==s)return s.exports;var a=i[t]={exports:{}};return e[t](a,a.exports,r),a.exports}r.d=(t,e)=>{for(var i in e)r.o(e,i)&&!r.o(t,i)&&Object.defineProperty(t,i,{enumerable:!0,get:e[i]})},r.o=(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r.r=t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})};var s={};r.r(s),r.d(s,{ScrollCraftCanvas:()=>u,ScrollCraftProvider:()=>c,SubjectLayer:()=>g,useScrollCraft:()=>d});var a=r(540),n=r(155);class o{constructor(t){if(this.targetMouse={x:0,y:0},this.currentMouse={x:0,y:0},this.animationFrameId=0,this.animate=()=>{if(this.currentMouse.x+=.1*(this.targetMouse.x-this.currentMouse.x),this.currentMouse.y+=.1*(this.targetMouse.y-this.currentMouse.y),this.gl&&this.program){this.gl.useProgram(this.program);const t=this.gl.getUniformLocation(this.program,"u_mouse");this.gl.uniform2f(t,this.currentMouse.x,this.currentMouse.y),this.draw()}this.animationFrameId=requestAnimationFrame(this.animate)},this.gl=t.getContext("webgl",{alpha:!1,antialias:!1}),!this.gl)throw new Error("WebGL not supported");this.program=this.createProgram("\n attribute vec2 a_position;\n varying vec2 v_texCoord;\n void main() {\n gl_Position = vec4(a_position, 0.0, 1.0);\n // Convert 
-1 -> 1 to 0 -> 1 for UVs\n v_texCoord = a_position * 0.5 + 0.5;\n v_texCoord.y = 1.0 - v_texCoord.y;\n }\n ","\n precision mediump float;\n uniform sampler2D u_image;\n uniform sampler2D u_depthMap;\n uniform vec2 u_resolution;\n uniform vec2 u_imageResolution;\n uniform vec2 u_mouse;\n uniform bool u_hasDepth;\n varying vec2 v_texCoord;\n\n void main() {\n // object-fit: cover math\n vec2 ratio = vec2(\n min((u_resolution.x / u_resolution.y) / (u_imageResolution.x / u_imageResolution.y), 1.0),\n min((u_resolution.y / u_resolution.x) / (u_imageResolution.y / u_imageResolution.x), 1.0)\n );\n vec2 uv = vec2(\n v_texCoord.x * ratio.x + (1.0 - ratio.x) * 0.5,\n v_texCoord.y * ratio.y + (1.0 - ratio.y) * 0.5\n );\n\n if (u_hasDepth) {\n float depth = texture2D(u_depthMap, uv).r;\n // White is close (1), Black is far (0). We move close objects more.\n vec2 parallax = u_mouse * depth * 0.04;\n uv += parallax;\n }\n \n gl_FragColor = texture2D(u_image, uv);\n }\n "),this.gl.useProgram(this.program),this.positionBuffer=this.gl.createBuffer(),this.gl.bindBuffer(this.gl.ARRAY_BUFFER,this.positionBuffer),this.gl.bufferData(this.gl.ARRAY_BUFFER,new Float32Array([-1,-1,1,-1,-1,1,-1,1,1,-1,1,1]),this.gl.STATIC_DRAW),this.texture=this.gl.createTexture(),this.depthTexture=this.gl.createTexture(),window.addEventListener("mousemove",t=>{this.targetMouse.x=t.clientX/window.innerWidth*2-1,this.targetMouse.y=-t.clientY/window.innerHeight*2+1}),this.animate()}createProgram(t,e){const i=this.gl.createShader(this.gl.VERTEX_SHADER);this.gl.shaderSource(i,t),this.gl.compileShader(i);const r=this.gl.createShader(this.gl.FRAGMENT_SHADER);this.gl.shaderSource(r,e),this.gl.compileShader(r);const s=this.gl.createProgram();return 
this.gl.attachShader(s,i),this.gl.attachShader(s,r),this.gl.linkProgram(s),s}render(t,e,i,r){this.gl.useProgram(this.program),this.gl.activeTexture(this.gl.TEXTURE0),this.gl.bindTexture(this.gl.TEXTURE_2D,this.texture),this.gl.texImage2D(this.gl.TEXTURE_2D,0,this.gl.RGBA,this.gl.RGBA,this.gl.UNSIGNED_BYTE,t),this.gl.texParameteri(this.gl.TEXTURE_2D,this.gl.TEXTURE_WRAP_S,this.gl.CLAMP_TO_EDGE),this.gl.texParameteri(this.gl.TEXTURE_2D,this.gl.TEXTURE_WRAP_T,this.gl.CLAMP_TO_EDGE),this.gl.texParameteri(this.gl.TEXTURE_2D,this.gl.TEXTURE_MIN_FILTER,this.gl.LINEAR),this.gl.activeTexture(this.gl.TEXTURE1),this.gl.bindTexture(this.gl.TEXTURE_2D,this.depthTexture),e&&(this.gl.texImage2D(this.gl.TEXTURE_2D,0,this.gl.RGBA,this.gl.RGBA,this.gl.UNSIGNED_BYTE,e),this.gl.texParameteri(this.gl.TEXTURE_2D,this.gl.TEXTURE_WRAP_S,this.gl.CLAMP_TO_EDGE),this.gl.texParameteri(this.gl.TEXTURE_2D,this.gl.TEXTURE_WRAP_T,this.gl.CLAMP_TO_EDGE),this.gl.texParameteri(this.gl.TEXTURE_2D,this.gl.TEXTURE_MIN_FILTER,this.gl.LINEAR)),this.gl.uniform1i(this.gl.getUniformLocation(this.program,"u_image"),0),this.gl.uniform1i(this.gl.getUniformLocation(this.program,"u_depthMap"),1),this.gl.uniform1i(this.gl.getUniformLocation(this.program,"u_hasDepth"),e?1:0),this.gl.uniform2f(this.gl.getUniformLocation(this.program,"u_resolution"),i,r),this.gl.uniform2f(this.gl.getUniformLocation(this.program,"u_imageResolution"),t.naturalWidth,t.naturalHeight);const s=this.gl.getAttribLocation(this.program,"a_position");this.gl.enableVertexAttribArray(s),this.gl.bindBuffer(this.gl.ARRAY_BUFFER,this.positionBuffer),this.gl.vertexAttribPointer(s,2,this.gl.FLOAT,!1,0,0),this.gl.viewport(0,0,i,r),this.draw()}draw(){this.gl.drawArrays(this.gl.TRIANGLES,0,6)}destroy(){cancelAnimationFrame(this.animationFrameId)}}class h{constructor(t){this.currentFrame=-1,this.activeVariant=null,this.canvas=null,this.ctx=null,this.renderer=null,this.imageCache=new Map,this.depthCache=new 
Map,this.scrollTimeout=null,this.config=t,this.detectBestVariant(),window.addEventListener("resize",()=>{this.detectBestVariant(),this.resizeCanvas(),this.render()})}attachCanvas(t){this.canvas=t;try{this.renderer=new o(t)}catch(e){console.warn("WebGL failed, falling back to 2D",e),this.ctx=t.getContext("2d",{alpha:!1})}this.resizeCanvas(),this.render()}resizeCanvas(){if(!this.canvas)return;const t=window.innerWidth,e=window.innerHeight,i=window.devicePixelRatio||1;this.canvas.width=t*i,this.canvas.height=e*i,this.ctx&&this.ctx.scale(i,i)}detectBestVariant(){var t;const e=this.config.assets[0];if(!e)return;const i=e.variants.find(t=>window.matchMedia(t.media).matches)||e.variants[0];i?(null===(t=this.activeVariant)||void 0===t?void 0:t.id)!==i.id&&(console.log(`🎯 Variant Switched: ${i.id}`),this.activeVariant=i,console.log("[CoreEngine] Variant hasDepthMap:",this.activeVariant.hasDepthMap),this.clearCache(),this.preloadInitial()):console.warn("[CoreEngine] No best match found")}clearCache(){this.imageCache.clear(),this.depthCache.clear()}preloadInitial(){for(let t=0;t<15;t++)this.getImage(t)}update(t){const e=this.config.timeline.scenes[0];if(!e)return;const i=e.assetRange[1]-e.assetRange[0],r=Math.floor(e.assetRange[0]+t*i);return r!==this.currentFrame&&(this.currentFrame=r,this.render(),this.getImage(this.currentFrame+5),this.getImage(this.currentFrame+10),this.scrollTimeout&&clearTimeout(this.scrollTimeout),this.scrollTimeout=setTimeout(()=>{this.loadDepthMap(this.currentFrame)},100)),{frame:this.currentFrame,subjectCoords:this.getSubjectCoords(this.currentFrame)}}getSubjectCoords(t){var e;if(!(null===(e=this.activeVariant)||void 0===e?void 0:e.subjectTracking))return{x:.5,y:.5};const i=this.activeVariant.subjectTracking.find(e=>e.frame===t);return i?{x:i.x,y:i.y}:{x:.5,y:.5}}render(){var t;if(!this.canvas||-1===this.currentFrame)return;const e=this.getImage(this.currentFrame);if(!e||!e.complete)return;const i=window.innerWidth,r=window.innerHeight;let 
s=null;if((null===(t=this.activeVariant)||void 0===t?void 0:t.hasDepthMap)&&(s=this.getDepthImage(this.currentFrame),s&&!s.complete&&(s=null)),this.renderer)this.renderer.render(e,s,i*(window.devicePixelRatio||1),r*(window.devicePixelRatio||1));else if(this.ctx){const t=e.naturalWidth/e.naturalHeight;let s,a,n,o;t>i/r?(a=r,s=r*t,n=(i-s)/2,o=0):(s=i,a=i/t,n=0,o=(r-a)/2),this.ctx.clearRect(0,0,i,r),this.ctx.drawImage(e,n,o,s,a)}}getImage(t){if(!this.activeVariant)return null;if(t<0||t>=this.activeVariant.frameCount)return null;const e=`${this.activeVariant.id}_${t}`;if(this.imageCache.has(e))return this.imageCache.get(e);const i=new Image;return i.crossOrigin="anonymous",i.src=`${this.activeVariant.path}/index_${t}.webp`,i.onload=()=>{this.currentFrame===t&&this.render()},this.imageCache.set(e,i),i}loadDepthMap(t){var e;if(!(null===(e=this.activeVariant)||void 0===e?void 0:e.hasDepthMap))return void console.log("[CoreEngine] activeVariant does not define hasDepthMap=true");console.log(`[CoreEngine] Lazy requesting depth map for frame: ${t}`);this.getDepthImage(t)}getDepthImage(t){var e;if(!(null===(e=this.activeVariant)||void 0===e?void 0:e.hasDepthMap))return null;if(t<0||t>=this.activeVariant.frameCount)return null;const i=`${this.activeVariant.id}_depth_${t}`;if(this.depthCache.has(i))return this.depthCache.get(i);console.log(`[CoreEngine] Downloading: ${this.activeVariant.path}/index_${t}_depth.webp`);const r=new Image;return r.crossOrigin="anonymous",r.src=`${this.activeVariant.path}/index_${t}_depth.webp`,r.onload=()=>{console.log(`[CoreEngine] Depth map loaded for frame: ${t}`),this.currentFrame===t&&this.render()},r.onerror=e=>{console.error(`[CoreEngine] Depth map failed to load for frame: ${t}`,e)},this.depthCache.set(i,r),r}}const l=(0,n.createContext)(null),c=({project:t,children:e})=>{const[i,r]=(0,n.useState)({progress:0,frame:-1,subjectCoords:{x:.5,y:.5}}),s=(0,n.useRef)(null);return(0,n.useEffect)(()=>{const e=new h(t);s.current=e;const i=()=>{const 
t=window.scrollY,i=document.body.scrollHeight-window.innerHeight,s=i<=0?0:Math.max(0,Math.min(1,t/i)),a=e.update(s);a&&r({progress:s,frame:a.frame,subjectCoords:a.subjectCoords})};return window.addEventListener("scroll",i,{passive:!0}),setTimeout(i,100),()=>window.removeEventListener("scroll",i)},[t]),(0,a.jsx)(l.Provider,{value:Object.assign(Object.assign({},i),{engine:s.current}),children:(0,a.jsx)("div",{className:"scft-wrapper",style:{position:"relative"},children:e})})},u=({style:t})=>{const e=(0,n.useContext)(l),i=(0,n.useRef)(null);return(0,n.useEffect)(()=>{i.current&&(null==e?void 0:e.engine)&&e.engine.attachCanvas(i.current)},[null==e?void 0:e.engine]),(0,a.jsx)("canvas",{ref:i,style:Object.assign({width:"100%",height:"100%",display:"block"},t)})},g=({offset:t={x:0,y:0},children:e})=>{const i=(0,n.useContext)(l);if(!i||-1===i.frame)return null;const r={position:"fixed",left:`${100*i.subjectCoords.x+t.x}%`,top:`${100*i.subjectCoords.y+t.y}%`,transform:"translate(-50%, -50%)",pointerEvents:"auto",zIndex:10,transition:"left 0.1s linear, top 0.1s linear"};return(0,a.jsx)("div",{style:r,children:e})},d=()=>{const t=(0,n.useContext)(l);if(!t)throw new Error("useScrollCraft must be used within a ScrollCraftProvider");return t};return s})());
|
package/package.json
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "scrollcraft",
|
|
3
|
+
"version": "2.0.1",
|
|
4
|
+
"description": "ScrollCraft is a web-based tool for scroll-triggered animations.",
|
|
5
|
+
"main": "dist/core/scrollcraft.umd.min.js",
|
|
6
|
+
"module": "dist/core/scrollcraft.umd.min.js",
|
|
7
|
+
"exports": {
|
|
8
|
+
".": {
|
|
9
|
+
"types": "./dist/core/index.d.ts",
|
|
10
|
+
"import": "./dist/core/scrollcraft.umd.min.js",
|
|
11
|
+
"require": "./dist/core/scrollcraft.umd.min.js"
|
|
12
|
+
},
|
|
13
|
+
"./core": {
|
|
14
|
+
"types": "./dist/core/index.d.ts",
|
|
15
|
+
"import": "./dist/core/scrollcraft.umd.min.js",
|
|
16
|
+
"require": "./dist/core/scrollcraft.umd.min.js"
|
|
17
|
+
},
|
|
18
|
+
"./react": {
|
|
19
|
+
"types": "./dist/react/index.d.ts",
|
|
20
|
+
"require": "./dist/react/index.js",
|
|
21
|
+
"import": "./dist/react/index.js"
|
|
22
|
+
},
|
|
23
|
+
"./cli": "./dist/cli/index.js"
|
|
24
|
+
},
|
|
25
|
+
"types": "dist/core/index.d.ts",
|
|
26
|
+
"bin": {
|
|
27
|
+
"scft": "dist/cli/index.js"
|
|
28
|
+
},
|
|
29
|
+
"files": [
|
|
30
|
+
"dist"
|
|
31
|
+
],
|
|
32
|
+
"scripts": {
|
|
33
|
+
"prepublishOnly": "npm run build",
|
|
34
|
+
"build": "npm run build:js && npm run build:types && npm run build:cli",
|
|
35
|
+
"build:js": "webpack --mode production",
|
|
36
|
+
"build:types": "tsc --emitDeclarationOnly",
|
|
37
|
+
"build:cli": "tsc --project tsconfig.cli.json && node scripts/make-executable.cjs",
|
|
38
|
+
"dev": "concurrently -n JS,CLI,TYP -c \"blue,yellow,magenta\" \"npm:dev:js\" \"npm:dev:cli\" \"npm:dev:types\"",
|
|
39
|
+
"dev:js": "webpack --mode development --watch",
|
|
40
|
+
"dev:cli": "tsc --project tsconfig.cli.json --watch --preserveWatchOutput",
|
|
41
|
+
"dev:types": "tsc --emitDeclarationOnly --watch --preserveWatchOutput",
|
|
42
|
+
"test": "echo \"Error: no test specified\" && exit 1"
|
|
43
|
+
},
|
|
44
|
+
"peerDependencies": {
|
|
45
|
+
"react": ">=16.8.0",
|
|
46
|
+
"react-dom": ">=16.8.0"
|
|
47
|
+
},
|
|
48
|
+
"devDependencies": {
|
|
49
|
+
"@types/fluent-ffmpeg": "^2.1.28",
|
|
50
|
+
"@types/node": "^20.0.0",
|
|
51
|
+
"@types/react": "^18.0.0",
|
|
52
|
+
"@types/react-dom": "^18.0.0",
|
|
53
|
+
"chalk": "^4.1.2",
|
|
54
|
+
"clean-webpack-plugin": "^4.0.0",
|
|
55
|
+
"commander": "^11.1.0",
|
|
56
|
+
"concurrently": "^8.2.0",
|
|
57
|
+
"css-loader": "^6.8.0",
|
|
58
|
+
"style-loader": "^3.3.0",
|
|
59
|
+
"terser-webpack-plugin": "^5.3.10",
|
|
60
|
+
"ts-loader": "^9.5.0",
|
|
61
|
+
"typescript": "^5.3.0",
|
|
62
|
+
"webpack": "^5.90.0",
|
|
63
|
+
"webpack-cli": "^5.1.0"
|
|
64
|
+
},
|
|
65
|
+
"repository": {
|
|
66
|
+
"type": "git",
|
|
67
|
+
"url": "git+https://github.com/aleskozelsky/scrollcraft.git"
|
|
68
|
+
},
|
|
69
|
+
"keywords": [
|
|
70
|
+
"scroll",
|
|
71
|
+
"animation",
|
|
72
|
+
"video",
|
|
73
|
+
"scroll",
|
|
74
|
+
"image",
|
|
75
|
+
"sequence",
|
|
76
|
+
"scroll",
|
|
77
|
+
"sequence"
|
|
78
|
+
],
|
|
79
|
+
"author": "Ales Kozelsky",
|
|
80
|
+
"license": "ISC",
|
|
81
|
+
"bugs": {
|
|
82
|
+
"url": "https://github.com/aleskozelsky/scrollcraft/issues"
|
|
83
|
+
},
|
|
84
|
+
"homepage": "https://github.com/aleskozelsky/scrollcraft#readme",
|
|
85
|
+
"dependencies": {
|
|
86
|
+
"@fal-ai/client": "^1.9.4",
|
|
87
|
+
"@types/fs-extra": "^11.0.4",
|
|
88
|
+
"@types/sharp": "^0.31.1",
|
|
89
|
+
"dotenv": "^17.3.1",
|
|
90
|
+
"ffmpeg-static": "^5.3.0",
|
|
91
|
+
"fluent-ffmpeg": "^2.1.3",
|
|
92
|
+
"fs-extra": "^11.3.4",
|
|
93
|
+
"motion": "^12.35.1",
|
|
94
|
+
"sharp": "^0.34.5"
|
|
95
|
+
},
|
|
96
|
+
"directories": {
|
|
97
|
+
"doc": "docs",
|
|
98
|
+
"example": "examples"
|
|
99
|
+
}
|
|
100
|
+
}
|