restyle-sprites 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +3 -0
- package/LICENSE +21 -0
- package/README.md +156 -0
- package/dist/AssetPackWriter.d.ts +14 -0
- package/dist/AssetPackWriter.js +41 -0
- package/dist/BatchGenerator.d.ts +28 -0
- package/dist/BatchGenerator.js +214 -0
- package/dist/ImageProcessor.d.ts +15 -0
- package/dist/ImageProcessor.js +126 -0
- package/dist/OpenAIImageClient.d.ts +38 -0
- package/dist/OpenAIImageClient.js +335 -0
- package/dist/PixelArtPostProcessor.d.ts +33 -0
- package/dist/PixelArtPostProcessor.js +404 -0
- package/dist/StyleExplorer.d.ts +24 -0
- package/dist/StyleExplorer.js +134 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +228 -0
- package/dist/config.d.ts +8 -0
- package/dist/config.js +91 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.js +8 -0
- package/dist/types.d.ts +54 -0
- package/dist/types.js +1 -0
- package/package.json +58 -0
|
@@ -0,0 +1,404 @@
|
|
|
1
|
+
import sharp from 'sharp';
|
|
2
|
+
import * as iq from 'image-q';
|
|
3
|
+
export class PixelArtPostProcessor {
    /** Default maximum number of colors kept after palette quantization. */
    static DEFAULT_MAX_COLORS = 24;
    /** Default alpha cutoff: values below it become 0, values at/above become 255. */
    static DEFAULT_ALPHA_THRESHOLD = 128;

    /**
     * Run the full pixel-art cleanup pipeline on a generated image:
     * binarize alpha, (for non-layout-sensitive categories) tight-crop to the
     * opaque content, quantize colors, resize with nearest-neighbor, re-binarize
     * alpha, optionally mask by the source reference, and finally zero RGB on
     * fully transparent pixels.
     *
     * @param {Buffer} rawBuffer - encoded input image.
     * @param {number} targetWidth - output width in pixels.
     * @param {number} targetHeight - output height in pixels.
     * @param {{ alphaThreshold?: number, maxColors?: number, category?: string, sourceReference?: Buffer }} [options]
     *   Categories 'font' and 'scene' are layout-sensitive: they skip cropping
     *   and reference-geometry placement so the pixel layout is preserved.
     * @returns {Promise<Buffer>} processed PNG buffer.
     */
    async process(rawBuffer, targetWidth, targetHeight, options) {
        const alphaThreshold = options?.alphaThreshold ?? PixelArtPostProcessor.DEFAULT_ALPHA_THRESHOLD;
        const maxColors = options?.maxColors ?? PixelArtPostProcessor.DEFAULT_MAX_COLORS;
        const isLayoutSensitiveCategory = options?.category === 'font' || options?.category === 'scene';
        const binarized = await this.binarizeAlpha(rawBuffer, alphaThreshold);
        // Layout-sensitive sprites must not be cropped: their placement matters.
        const processedBase = isLayoutSensitiveCategory ? binarized : await this.alphaTightCrop(binarized);
        const quantized = await this.quantizeColors(processedBase, maxColors);
        const resized = isLayoutSensitiveCategory
            ? await this.resizeNearest(quantized, targetWidth, targetHeight)
            : await this.resizeWithReferenceGeometry({
                content: quantized,
                targetWidth,
                targetHeight,
                sourceReference: options?.sourceReference,
                alphaThreshold,
            });
        const cleaned = await this.finalAlphaCleanup(resized, alphaThreshold);
        if (!isLayoutSensitiveCategory && options?.sourceReference) {
            const masked = await this.applySourceMask(cleaned, options.sourceReference, targetWidth, targetHeight);
            return this.zeroRgbOnTransparent(masked);
        }
        return this.zeroRgbOnTransparent(cleaned);
    }

    /**
     * Quantize the image and collect up to `maxColors` distinct opaque colors.
     * @param {Buffer} sourceBuffer - encoded image.
     * @param {number} [maxColors] - palette size cap.
     * @returns {Promise<string[]>} colors as '#rrggbb' hex strings.
     */
    async extractPalette(sourceBuffer, maxColors = PixelArtPostProcessor.DEFAULT_MAX_COLORS) {
        const quantized = await this.quantizeColors(sourceBuffer, maxColors);
        const raw = await this.toRawImage(quantized);
        const colors = new Set();
        for (let index = 0; index < raw.data.length; index += 4) {
            // Fully transparent pixels carry no palette information.
            if (raw.data[index + 3] === 0) {
                continue;
            }
            colors.add(this.rgbToHex(raw.data[index], raw.data[index + 1], raw.data[index + 2]));
            if (colors.size >= maxColors) {
                break;
            }
        }
        return Array.from(colors);
    }

    /**
     * Remove a solid legacy background from a fully opaque image by color-keying
     * against the estimated border color. Images that already contain any
     * transparency are passed through (with RGB zeroed on transparent pixels).
     * @param {Buffer} sourceBuffer - encoded image.
     * @param {number} [tolerance=34] - max Euclidean RGB distance to the key color.
     * @returns {Promise<Buffer>} PNG buffer with the keyed background made transparent.
     */
    async stripLegacyBackground(sourceBuffer, tolerance = 34) {
        const raw = await this.toRawImage(sourceBuffer);
        if (!this.isFullyOpaque(raw)) {
            return this.zeroRgbOnTransparent(sourceBuffer);
        }
        const keyColor = this.estimateBackgroundColor(raw.data, raw.width, raw.height);
        for (let index = 0; index < raw.data.length; index += 4) {
            const distance = this.colorDistance(raw.data[index], raw.data[index + 1], raw.data[index + 2], keyColor);
            if (distance > tolerance) {
                raw.data[index + 3] = 255;
                continue;
            }
            // Background pixel: fully transparent with zeroed RGB.
            raw.data[index + 3] = 0;
            raw.data[index] = 0;
            raw.data[index + 1] = 0;
            raw.data[index + 2] = 0;
        }
        return this.fromRawImage(raw);
    }

    /**
     * Snap every alpha value to 0 or 255 using `threshold` as the cutoff.
     * @returns {Promise<Buffer>} PNG buffer with hard-edged alpha.
     */
    async binarizeAlpha(buffer, threshold = PixelArtPostProcessor.DEFAULT_ALPHA_THRESHOLD) {
        const raw = await this.toRawImage(buffer);
        for (let index = 3; index < raw.data.length; index += 4) {
            raw.data[index] = raw.data[index] < threshold ? 0 : 255;
        }
        return this.fromRawImage(raw);
    }

    /**
     * Crop the image to the bounding box of its non-transparent pixels.
     * Returns a 1x1 transparent pixel when the image is fully transparent.
     */
    async alphaTightCrop(buffer) {
        const raw = await this.toRawImage(buffer);
        const bounds = this.getAlphaBounds(raw);
        if (!bounds) {
            return this.createTransparentPixel();
        }
        return sharp(buffer)
            .extract({
                left: bounds.x,
                top: bounds.y,
                width: bounds.width,
                height: bounds.height,
            })
            .png()
            .toBuffer();
    }

    /**
     * Reduce the image to at most `maxColors` colors using image-q
     * (Wu quantization + nearest-color remap, Euclidean distance).
     */
    async quantizeColors(buffer, maxColors = PixelArtPostProcessor.DEFAULT_MAX_COLORS) {
        const raw = await this.toRawImage(buffer);
        const pointContainer = iq.utils.PointContainer.fromUint8Array(raw.data, raw.width, raw.height);
        const palette = iq.buildPaletteSync([pointContainer], {
            colors: maxColors,
            paletteQuantization: 'wuquant',
            colorDistanceFormula: 'euclidean',
        });
        const quantized = iq.applyPaletteSync(pointContainer, palette, {
            imageQuantization: 'nearest',
            colorDistanceFormula: 'euclidean',
        });
        return this.fromRawImage({
            data: Buffer.from(quantized.toUint8Array()),
            width: raw.width,
            height: raw.height,
        });
    }

    /**
     * Resize with nearest-neighbor sampling, letterboxing onto a transparent
     * canvas ('contain' fit, centered) so pixel edges stay crisp.
     */
    async resizeNearest(buffer, width, height) {
        return sharp(buffer)
            .resize(width, height, {
                fit: 'contain',
                position: 'centre',
                background: { r: 0, g: 0, b: 0, alpha: 0 },
                kernel: sharp.kernel.nearest,
            })
            .png()
            .toBuffer();
    }

    /** Final pass: re-binarize alpha after resizing may have blended edges. */
    async finalAlphaCleanup(buffer, threshold = PixelArtPostProcessor.DEFAULT_ALPHA_THRESHOLD) {
        return this.binarizeAlpha(buffer, threshold);
    }

    /**
     * Place `content` on a transparent target canvas so it occupies the same
     * relative position and proportions that the content occupied inside
     * `sourceReference`. Falls back to a plain nearest resize when no usable
     * reference geometry is available.
     * @param {{ content: Buffer, targetWidth: number, targetHeight: number, sourceReference?: Buffer, alphaThreshold: number }} params
     */
    async resizeWithReferenceGeometry(params) {
        if (!params.sourceReference) {
            return this.resizeNearest(params.content, params.targetWidth, params.targetHeight);
        }
        const sourceBinarized = await this.binarizeAlpha(params.sourceReference, params.alphaThreshold);
        const sourceRaw = await this.toRawImage(sourceBinarized);
        const sourceBounds = this.getSourceContentBounds(sourceRaw);
        if (!sourceBounds) {
            return this.resizeNearest(params.content, params.targetWidth, params.targetHeight);
        }
        // Scale the content to the same fraction of the target that it filled in the source.
        const desiredWidth = Math.max(1, Math.round((sourceBounds.width / sourceRaw.width) * params.targetWidth));
        const desiredHeight = Math.max(1, Math.round((sourceBounds.height / sourceRaw.height) * params.targetHeight));
        const xRatio = sourceBounds.x / sourceRaw.width;
        const yRatio = sourceBounds.y / sourceRaw.height;
        const fitted = await sharp(params.content)
            .resize(desiredWidth, desiredHeight, {
                fit: 'fill',
                kernel: sharp.kernel.nearest,
            })
            .png()
            .toBuffer();
        const left = this.clamp(Math.round(xRatio * params.targetWidth), 0, Math.max(0, params.targetWidth - desiredWidth));
        const top = this.clamp(Math.round(yRatio * params.targetHeight), 0, Math.max(0, params.targetHeight - desiredHeight));
        return sharp({
            create: {
                width: params.targetWidth,
                height: params.targetHeight,
                channels: 4,
                background: { r: 0, g: 0, b: 0, alpha: 0 },
            },
        })
            .composite([{ input: fitted, left, top }])
            .png()
            .toBuffer();
    }

    /**
     * Bounding box of all pixels with non-zero alpha, or null when the image
     * is fully transparent.
     * @returns {{ x: number, y: number, width: number, height: number } | null}
     */
    getAlphaBounds(raw) {
        return this.scanBounds(raw, (pixelOffset) => raw.data[pixelOffset + 3] !== 0);
    }

    /**
     * Bounding box of the "real" content of a source image. Prefers the alpha
     * bounds; for fully opaque images (no alpha information) falls back to
     * color-keying against the estimated background color.
     */
    getSourceContentBounds(raw) {
        const alphaBounds = this.getAlphaBounds(raw);
        if (!alphaBounds) {
            return null;
        }
        if (!this.isFullyOpaque(raw)) {
            return alphaBounds;
        }
        const keyColor = this.estimateBackgroundColor(raw.data, raw.width, raw.height);
        const keyedBounds = this.getColorKeyBounds(raw, keyColor, 16);
        return keyedBounds ?? alphaBounds;
    }

    /** True when every pixel's alpha is exactly 255. */
    isFullyOpaque(raw) {
        for (let index = 3; index < raw.data.length; index += 4) {
            if (raw.data[index] !== 255) {
                return false;
            }
        }
        return true;
    }

    /**
     * Bounding box of pixels whose RGB distance from `keyColor` exceeds
     * `tolerance` (i.e. pixels that are NOT background), or null when every
     * pixel matches the key color.
     */
    getColorKeyBounds(raw, keyColor, tolerance) {
        return this.scanBounds(raw, (pixelOffset) => {
            const distance = this.colorDistance(raw.data[pixelOffset], raw.data[pixelOffset + 1], raw.data[pixelOffset + 2], keyColor);
            return distance > tolerance;
        });
    }

    /**
     * Shared bounding-box scan: visits every pixel and collects the min/max
     * coordinates of those for which `isContent(pixelOffset)` is true.
     * Returns null when no pixel qualifies.
     * @param {{ data: Uint8Array|Buffer, width: number, height: number }} raw
     * @param {(pixelOffset: number) => boolean} isContent - offset points at the R byte.
     * @returns {{ x: number, y: number, width: number, height: number } | null}
     */
    scanBounds(raw, isContent) {
        let minX = raw.width;
        let minY = raw.height;
        let maxX = -1;
        let maxY = -1;
        for (let y = 0; y < raw.height; y += 1) {
            for (let x = 0; x < raw.width; x += 1) {
                const pixelOffset = (y * raw.width + x) * 4;
                if (!isContent(pixelOffset)) {
                    continue;
                }
                if (x < minX) {
                    minX = x;
                }
                if (y < minY) {
                    minY = y;
                }
                if (x > maxX) {
                    maxX = x;
                }
                if (y > maxY) {
                    maxY = y;
                }
            }
        }
        if (maxX < minX || maxY < minY) {
            return null;
        }
        return {
            x: minX,
            y: minY,
            width: maxX - minX + 1,
            height: maxY - minY + 1,
        };
    }

    /** Clamp `value` into the inclusive range [min, max]. */
    clamp(value, min, max) {
        return Math.max(min, Math.min(max, value));
    }

    /** Euclidean distance between an RGB triple and a {r, g, b} key color. */
    colorDistance(red, green, blue, keyColor) {
        return Math.sqrt((red - keyColor.r) ** 2 + (green - keyColor.g) ** 2 + (blue - keyColor.b) ** 2);
    }

    /**
     * Force the rendered image's alpha to match the source's silhouette.
     * When the source has transparent pixels, its alpha is copied directly;
     * otherwise background pixels are identified by color-keying against the
     * estimated background color. RGB channels of `rendered` are kept.
     * @returns {Promise<Buffer>} masked PNG buffer at target dimensions.
     */
    async applySourceMask(rendered, sourceReference, targetWidth, targetHeight) {
        const resizedSource = await sharp(sourceReference)
            .resize(targetWidth, targetHeight, {
                fit: 'fill',
                kernel: sharp.kernel.nearest,
            })
            .ensureAlpha()
            .raw()
            .toBuffer({ resolveWithObject: true });
        const renderedRaw = await sharp(rendered)
            .resize(targetWidth, targetHeight, {
                fit: 'fill',
                kernel: sharp.kernel.nearest,
            })
            .ensureAlpha()
            .raw()
            .toBuffer({ resolveWithObject: true });
        let hasTransparentSourcePixels = false;
        for (let index = 3; index < resizedSource.data.length; index += 4) {
            if (resizedSource.data[index] === 0) {
                hasTransparentSourcePixels = true;
                break;
            }
        }
        const keyColor = this.estimateBackgroundColor(resizedSource.data, resizedSource.info.width, resizedSource.info.height);
        const tolerance = 34;
        const output = Buffer.from(renderedRaw.data);
        for (let index = 0; index < resizedSource.data.length; index += 4) {
            if (hasTransparentSourcePixels) {
                // Source carries real alpha: copy its silhouette verbatim.
                output[index + 3] = resizedSource.data[index + 3] === 0 ? 0 : 255;
                continue;
            }
            // Fully opaque source: treat key-colored pixels as background.
            const distance = this.colorDistance(resizedSource.data[index], resizedSource.data[index + 1], resizedSource.data[index + 2], keyColor);
            output[index + 3] = distance <= tolerance ? 0 : 255;
        }
        return sharp(output, {
            raw: {
                width: resizedSource.info.width,
                height: resizedSource.info.height,
                channels: 4,
            },
        })
            .png()
            .toBuffer();
    }

    /**
     * Estimate the background color by averaging eight border samples
     * (four corners plus the midpoint of each edge).
     * @returns {{ r: number, g: number, b: number }}
     */
    estimateBackgroundColor(data, width, height) {
        const samples = [];
        const points = [
            [0, 0],
            [width - 1, 0],
            [0, height - 1],
            [width - 1, height - 1],
            [Math.floor(width / 2), 0],
            [Math.floor(width / 2), height - 1],
            [0, Math.floor(height / 2)],
            [width - 1, Math.floor(height / 2)],
        ];
        for (const [x, y] of points) {
            const index = (y * width + x) * 4;
            samples.push({ r: data[index], g: data[index + 1], b: data[index + 2] });
        }
        const r = Math.round(samples.reduce((sum, color) => sum + color.r, 0) / samples.length);
        const g = Math.round(samples.reduce((sum, color) => sum + color.g, 0) / samples.length);
        const b = Math.round(samples.reduce((sum, color) => sum + color.b, 0) / samples.length);
        return { r, g, b };
    }

    /**
     * Zero the RGB channels of fully transparent pixels so PNG encoding is
     * deterministic and no stray color hides under alpha 0.
     */
    async zeroRgbOnTransparent(buffer) {
        const raw = await this.toRawImage(buffer);
        for (let index = 0; index < raw.data.length; index += 4) {
            if (raw.data[index + 3] !== 0) {
                continue;
            }
            raw.data[index] = 0;
            raw.data[index + 1] = 0;
            raw.data[index + 2] = 0;
        }
        return this.fromRawImage(raw);
    }

    /** Decode an encoded image into raw RGBA bytes plus dimensions. */
    async toRawImage(buffer) {
        const result = await sharp(buffer)
            .ensureAlpha()
            .raw()
            .toBuffer({ resolveWithObject: true });
        return {
            data: result.data,
            width: result.info.width,
            height: result.info.height,
        };
    }

    /** Encode raw RGBA bytes back into a PNG buffer. */
    async fromRawImage(image) {
        return sharp(image.data, {
            raw: {
                width: image.width,
                height: image.height,
                channels: 4,
            },
        })
            .png()
            .toBuffer();
    }

    /** A 1x1 fully transparent PNG, used when an image has no content at all. */
    async createTransparentPixel() {
        return sharp({
            create: {
                width: 1,
                height: 1,
                channels: 4,
                background: { r: 0, g: 0, b: 0, alpha: 0 },
            },
        })
            .png()
            .toBuffer();
    }

    /** Format an RGB triple as a '#rrggbb' hex string. */
    rgbToHex(red, green, blue) {
        return `#${this.toHex(red)}${this.toHex(green)}${this.toHex(blue)}`;
    }

    /** Two-digit lowercase hex for a 0-255 channel value. */
    toHex(value) {
        return value.toString(16).padStart(2, '0');
    }
}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { ImageProcessor } from './ImageProcessor.js';
|
|
2
|
+
import { OpenAIImageClient } from './OpenAIImageClient.js';
|
|
3
|
+
/** Outcome of an approved style-exploration session. */
export interface StyleExplorerResult {
    /** The final prompt text that produced the approved style reference. */
    prompt: string;
    /** Filesystem path where the approved style reference image was written. */
    styleReferencePath: string;
}
/** Construction options for {@link StyleExplorer}. */
interface StyleExplorerOptions {
    /** Root directory against which `sampleSprites` paths are resolved. */
    workspaceRoot: string;
    /** Workspace-relative paths of sprites used to build the sample sheet. */
    sampleSprites: string[];
    /** Processor used to fit each sprite into a fixed-size sample-sheet cell. */
    imageProcessor: ImageProcessor;
}
/**
 * Interactive helper that iterates with the user toward an approved
 * "style reference" image generated from a grid sheet of sample sprites.
 */
export declare class StyleExplorer {
    private readonly client;
    private readonly options;
    private static readonly SAMPLE_CELL_SIZE;
    private static readonly DEFAULT_SAMPLE_COLS;
    constructor(client: OpenAIImageClient, options: StyleExplorerOptions);
    /** Compose the given sample sprites into a single grid-sheet PNG buffer. */
    static buildSampleSheetFromSources(workspaceRoot: string, imageProcessor: ImageProcessor, sampleSprites: string[]): Promise<Buffer>;
    /** Run the approve/refine/retry loop; resolves once the user approves a preview. */
    runInteractive(packDir: string): Promise<StyleExplorerResult>;
    /** Build the image-generation prompt for the given style direction. */
    static buildStyleReferencePrompt(styleDirection: string): string;
    private writeSampleSheet;
    private askInitialStyleDirection;
}
export {};
|
|
@@ -0,0 +1,134 @@
|
|
|
1
|
+
import fs from 'node:fs/promises';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import inquirer from 'inquirer';
|
|
4
|
+
import sharp from 'sharp';
|
|
5
|
+
export class StyleExplorer {
    client;
    options;
    /** Side length, in pixels, of one square cell on the sample sheet. */
    static SAMPLE_CELL_SIZE = 128;
    /** Maximum number of cells per sample-sheet row. */
    static DEFAULT_SAMPLE_COLS = 3;

    /**
     * @param client - image client used to generate style reference previews.
     * @param options - workspace root, sample sprite paths, and image processor.
     */
    constructor(client, options) {
        this.client = client;
        this.options = options;
    }

    /**
     * Compose the given sprites into one grid sheet. Every sprite is fitted
     * into a fixed square cell; rows wrap after DEFAULT_SAMPLE_COLS columns.
     * @returns PNG buffer of the assembled sheet.
     * @throws {Error} when sampleSprites is empty.
     */
    static async buildSampleSheetFromSources(workspaceRoot, imageProcessor, sampleSprites) {
        if (sampleSprites.length === 0) {
            throw new Error('sampleSprites must contain at least one source path.');
        }
        const cell = StyleExplorer.SAMPLE_CELL_SIZE;
        const cols = Math.min(StyleExplorer.DEFAULT_SAMPLE_COLS, sampleSprites.length);
        const rows = Math.ceil(sampleSprites.length / cols);
        // Fit each sprite into its cell and compute its grid position inline.
        const overlays = await Promise.all(sampleSprites.map(async (sourceFile, index) => ({
            input: await imageProcessor.fitToExactSize(path.join(workspaceRoot, sourceFile), cell, cell),
            left: (index % cols) * cell,
            top: Math.floor(index / cols) * cell,
        })));
        return sharp({
            create: {
                width: cell * cols,
                height: cell * rows,
                channels: 4,
                background: { r: 0, g: 0, b: 0, alpha: 0 },
            },
        })
            .composite(overlays)
            .png()
            .toBuffer();
    }

    /**
     * Interactive approve/refine/retry loop: generate a style preview, save it,
     * show it to the user, and repeat until a preview is approved.
     * @returns the approved prompt and the path of the saved style reference.
     */
    async runInteractive(packDir) {
        let styleDirection = await this.askInitialStyleDirection();
        let currentPrompt = StyleExplorer.buildStyleReferencePrompt(styleDirection);
        const sampleSheetPath = await this.writeSampleSheet(packDir);
        for (let attempt = 1; ; attempt += 1) {
            const styleResult = await this.client.generateStyleReference(currentPrompt, sampleSheetPath);
            const previewPath = path.join(packDir, `style-preview-${attempt}.png`);
            await fs.mkdir(packDir, { recursive: true });
            await fs.writeFile(previewPath, styleResult.image);
            console.log(`Preview saved to ${previewPath}`);
            if (styleResult.revisedPrompt) {
                console.log(`Revised prompt: ${styleResult.revisedPrompt}`);
            }
            const { action } = await inquirer.prompt([
                {
                    type: 'list',
                    name: 'action',
                    message: 'Approve style reference?',
                    choices: [
                        { name: 'Approve and continue', value: 'approve' },
                        { name: 'Refine prompt', value: 'refine' },
                        { name: 'Retry same prompt', value: 'retry' },
                    ],
                },
            ]);
            if (action === 'approve') {
                const styleReferencePath = path.join(packDir, 'style-reference.png');
                await fs.writeFile(styleReferencePath, styleResult.image);
                return { prompt: currentPrompt, styleReferencePath };
            }
            if (action === 'refine') {
                const { refinement } = await inquirer.prompt([
                    {
                        type: 'input',
                        name: 'refinement',
                        message: 'How should the style direction be changed?',
                        validate: (value) => (value.trim().length > 0 ? true : 'Please provide a refinement hint.'),
                    },
                ]);
                styleDirection = `${styleDirection}. Refinement: ${refinement.trim()}`;
                currentPrompt = StyleExplorer.buildStyleReferencePrompt(styleDirection);
            }
            // 'retry' falls through: same prompt, next attempt.
        }
    }

    /** Build the full image-generation prompt for the given style direction. */
    static buildStyleReferencePrompt(styleDirection) {
        const parts = [
            'The attached image is a sample sheet of actual game sprites arranged in a grid.',
            'Each sprite is upscaled with nearest-neighbor so the original pixel structure is clearly visible.',
            'TASK: Redraw this exact sample sheet in a new consistent style.',
            `STYLE DIRECTION: ${styleDirection}.`,
            'RULES:',
            '- Keep the same grid layout with the same subjects in the same positions.',
            '- Preserve each sprite silhouette, proportions, and identity exactly.',
            '- Apply the style direction uniformly across all sprites.',
            '- Pixel art constraints: hard edges, no anti-aliasing, no blur, no gradients, 16-24 colors max.',
            '- Transparent background.',
            '- Never draw a checkerboard, grid, or fake transparency pattern into the image.',
            '- No text, no labels, no UI elements.',
            '- Every sprite must stay readable at small final sizes.',
            'Return exactly one PNG image.',
        ];
        return parts.join(' ');
    }

    /** Build the source sample sheet and write it into packDir. */
    async writeSampleSheet(packDir) {
        const sheet = await StyleExplorer.buildSampleSheetFromSources(this.options.workspaceRoot, this.options.imageProcessor, this.options.sampleSprites);
        const sampleSheetPath = path.join(packDir, 'style-source-sample.png');
        await fs.mkdir(packDir, { recursive: true });
        await fs.writeFile(sampleSheetPath, sheet);
        return sampleSheetPath;
    }

    /** Prompt the user for a non-empty initial style direction. */
    async askInitialStyleDirection() {
        const { stylePrompt } = await inquirer.prompt([
            {
                type: 'input',
                name: 'stylePrompt',
                message: 'Describe your target sprite style',
                validate: (value) => (value.trim().length > 0 ? true : 'A style prompt is required.'),
            },
        ]);
        return stylePrompt.trim();
    }
}
|
package/dist/cli.d.ts
ADDED