pabal-resource-mcp 1.5.1 → 1.5.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/mcp-server.js +130 -35
- package/package.json +1 -1
package/dist/bin/mcp-server.js
CHANGED
|
@@ -3428,9 +3428,10 @@ function scanLocaleScreenshots(slug, locale) {
|
|
|
3428
3428
|
}
|
|
3429
3429
|
|
|
3430
3430
|
// src/tools/aso/utils/localize-screenshots/gemini-image-translator.util.ts
|
|
3431
|
-
import { GoogleGenAI
|
|
3431
|
+
import { GoogleGenAI } from "@google/genai";
|
|
3432
3432
|
import fs10 from "fs";
|
|
3433
3433
|
import path10 from "path";
|
|
3434
|
+
import sharp from "sharp";
|
|
3434
3435
|
var LANGUAGE_NAMES = {
|
|
3435
3436
|
"en-US": "English (US)",
|
|
3436
3437
|
"en-GB": "English (UK)",
|
|
@@ -3506,11 +3507,29 @@ function readImageAsBase64(imagePath) {
|
|
|
3506
3507
|
}
|
|
3507
3508
|
return { data: base64, mimeType };
|
|
3508
3509
|
}
|
|
3510
|
+
// Reads the pixel dimensions of an image file via sharp's metadata.
// Falls back to a 1080x1920 portrait default when sharp cannot report
// a width/height for the file.
async function getImageDimensions(imagePath) {
  const metadata = await sharp(imagePath).metadata();
  const width = metadata.width || 1080;
  const height = metadata.height || 1920;
  return { width, height };
}
|
|
3517
|
+
// Maps a width/height pair onto the nearest well-known aspect-ratio
// label. Candidates are checked in a fixed order with an absolute
// tolerance of 0.1 on the width/height ratio; anything that matches
// no preset falls back to portrait "9:16" or landscape "16:9".
function calculateAspectRatio(width, height) {
  const ratio = width / height;
  // Order matters: the first preset within tolerance wins.
  const presets = [
    ["1:1", 1],
    ["9:16", 9 / 16],
    ["16:9", 16 / 9],
    ["3:4", 3 / 4],
    ["4:3", 4 / 3]
  ];
  for (const [label, target] of presets) {
    if (Math.abs(ratio - target) < 0.1) {
      return label;
    }
  }
  return ratio < 1 ? "9:16" : "16:9";
}
|
|
3509
3526
|
async function translateImage(sourcePath, sourceLocale, targetLocale, outputPath) {
|
|
3510
3527
|
try {
|
|
3511
3528
|
const client = getGeminiClient();
|
|
3512
3529
|
const sourceLanguage = getLanguageName(sourceLocale);
|
|
3513
3530
|
const targetLanguage = getLanguageName(targetLocale);
|
|
3531
|
+
const { width, height } = await getImageDimensions(sourcePath);
|
|
3532
|
+
const aspectRatio = calculateAspectRatio(width, height);
|
|
3514
3533
|
const { data: imageData, mimeType } = readImageAsBase64(sourcePath);
|
|
3515
3534
|
const prompt = `This is an app screenshot with text in ${sourceLanguage}.
|
|
3516
3535
|
Please translate ONLY the text/words in this image to ${targetLanguage}.
|
|
@@ -3522,24 +3541,27 @@ IMPORTANT INSTRUCTIONS:
|
|
|
3522
3541
|
- Do NOT add any new elements or remove existing design elements
|
|
3523
3542
|
- The output should look identical except the text language is ${targetLanguage}
|
|
3524
3543
|
- Preserve all icons, images, and graphical elements exactly as they are`;
|
|
3525
|
-
const
|
|
3526
|
-
model: "
|
|
3527
|
-
|
|
3544
|
+
const chat = client.chats.create({
|
|
3545
|
+
model: "gemini-3-pro-image-preview",
|
|
3546
|
+
config: {
|
|
3547
|
+
responseModalities: ["TEXT", "IMAGE"]
|
|
3548
|
+
}
|
|
3549
|
+
});
|
|
3550
|
+
const response = await chat.sendMessage({
|
|
3551
|
+
message: [
|
|
3552
|
+
{ text: prompt },
|
|
3528
3553
|
{
|
|
3529
|
-
|
|
3530
|
-
|
|
3531
|
-
|
|
3532
|
-
|
|
3533
|
-
inlineData: {
|
|
3534
|
-
mimeType,
|
|
3535
|
-
data: imageData
|
|
3536
|
-
}
|
|
3537
|
-
}
|
|
3538
|
-
]
|
|
3554
|
+
inlineData: {
|
|
3555
|
+
mimeType,
|
|
3556
|
+
data: imageData
|
|
3557
|
+
}
|
|
3539
3558
|
}
|
|
3540
3559
|
],
|
|
3541
3560
|
config: {
|
|
3542
|
-
responseModalities: [
|
|
3561
|
+
responseModalities: ["TEXT", "IMAGE"],
|
|
3562
|
+
imageConfig: {
|
|
3563
|
+
aspectRatio
|
|
3564
|
+
}
|
|
3543
3565
|
}
|
|
3544
3566
|
});
|
|
3545
3567
|
const candidates = response.candidates;
|
|
@@ -3563,7 +3585,7 @@ IMPORTANT INSTRUCTIONS:
|
|
|
3563
3585
|
if (!fs10.existsSync(outputDir)) {
|
|
3564
3586
|
fs10.mkdirSync(outputDir, { recursive: true });
|
|
3565
3587
|
}
|
|
3566
|
-
|
|
3588
|
+
await sharp(imageBuffer).png().toFile(outputPath);
|
|
3567
3589
|
return {
|
|
3568
3590
|
success: true,
|
|
3569
3591
|
outputPath
|
|
@@ -3586,13 +3608,18 @@ async function translateImagesWithProgress(translations, onProgress) {
|
|
|
3586
3608
|
let successful = 0;
|
|
3587
3609
|
let failed = 0;
|
|
3588
3610
|
const errors = [];
|
|
3589
|
-
|
|
3611
|
+
const total = translations.length;
|
|
3612
|
+
for (let i = 0; i < translations.length; i++) {
|
|
3613
|
+
const translation = translations[i];
|
|
3614
|
+
const current = i + 1;
|
|
3590
3615
|
const progress = {
|
|
3591
3616
|
sourceLocale: translation.sourceLocale,
|
|
3592
3617
|
targetLocale: translation.targetLocale,
|
|
3593
3618
|
deviceType: translation.deviceType,
|
|
3594
3619
|
filename: translation.filename,
|
|
3595
|
-
status: "translating"
|
|
3620
|
+
status: "translating",
|
|
3621
|
+
current,
|
|
3622
|
+
total
|
|
3596
3623
|
};
|
|
3597
3624
|
onProgress?.(progress);
|
|
3598
3625
|
const result = await translateImage(
|
|
@@ -3608,7 +3635,10 @@ async function translateImagesWithProgress(translations, onProgress) {
|
|
|
3608
3635
|
failed++;
|
|
3609
3636
|
progress.status = "failed";
|
|
3610
3637
|
progress.error = result.error;
|
|
3611
|
-
errors.push({
|
|
3638
|
+
errors.push({
|
|
3639
|
+
path: translation.sourcePath,
|
|
3640
|
+
error: result.error || "Unknown error"
|
|
3641
|
+
});
|
|
3612
3642
|
}
|
|
3613
3643
|
onProgress?.(progress);
|
|
3614
3644
|
await new Promise((resolve) => setTimeout(resolve, 500));
|
|
@@ -3617,10 +3647,10 @@ async function translateImagesWithProgress(translations, onProgress) {
|
|
|
3617
3647
|
}
|
|
3618
3648
|
|
|
3619
3649
|
// src/tools/aso/utils/localize-screenshots/image-resizer.util.ts
|
|
3620
|
-
import
|
|
3650
|
+
import sharp2 from "sharp";
|
|
3621
3651
|
import fs11 from "fs";
|
|
3622
|
-
async function
|
|
3623
|
-
const metadata = await
|
|
3652
|
+
async function getImageDimensions2(imagePath) {
|
|
3653
|
+
const metadata = await sharp2(imagePath).metadata();
|
|
3624
3654
|
if (!metadata.width || !metadata.height) {
|
|
3625
3655
|
throw new Error(`Unable to read dimensions from ${imagePath}`);
|
|
3626
3656
|
}
|
|
@@ -3629,18 +3659,63 @@ async function getImageDimensions(imagePath) {
|
|
|
3629
3659
|
height: metadata.height
|
|
3630
3660
|
};
|
|
3631
3661
|
}
|
|
3662
|
+
// Detects the dominant color along the image border so resized output
// can be letterboxed with a matching background instead of plain white.
// Returns an { r, g, b } object with each channel in 0-255.
async function detectEdgeColor(imagePath) {
  // Decode once and take the raster dimensions from the decoded buffer
  // (`info`) rather than from metadata, so pixel indexing can never go
  // out of sync with the buffer actually being read.
  const { data, info } = await sharp2(imagePath).raw().toBuffer({ resolveWithObject: true });
  const width = info.width;
  const height = info.height;
  const channels = info.channels;
  const colorCounts = /* @__PURE__ */ new Map();
  // Quantize each channel into 16-step buckets, clamped to 255:
  // Math.round(255 / 16) * 16 would otherwise produce 256, which is
  // not a valid color channel value for sharp's background option.
  const quantize = (v) => Math.min(255, Math.round(v / 16) * 16);
  const sampleEdgePixel = (x, y) => {
    const idx = (y * width + x) * channels;
    const qr = quantize(data[idx]);
    const qg = quantize(data[idx + 1]);
    const qb = quantize(data[idx + 2]);
    const key = `${qr},${qg},${qb}`;
    const existing = colorCounts.get(key);
    if (existing) {
      existing.count++;
    } else {
      colorCounts.set(key, { count: 1, color: { r: qr, g: qg, b: qb } });
    }
  };
  // Sample every other pixel along all four edges of the image.
  for (let x = 0; x < width; x += 2) {
    sampleEdgePixel(x, 0);
    sampleEdgePixel(x, height - 1);
  }
  for (let y = 0; y < height; y += 2) {
    sampleEdgePixel(0, y);
    sampleEdgePixel(width - 1, y);
  }
  // Pick the most frequent quantized edge color; default to white
  // if no pixels were sampled (degenerate 0-sized image).
  let maxCount = 0;
  let dominantColor = { r: 255, g: 255, b: 255 };
  for (const { count, color } of colorCounts.values()) {
    if (count > maxCount) {
      maxCount = count;
      dominantColor = color;
    }
  }
  return dominantColor;
}
|
|
3632
3704
|
// Resizes an image to the exact target dimensions, letterboxing with
// the dominant edge color of the source so any padding blends in.
// Writes to a temp file and renames into place, so inputPath and
// outputPath may safely refer to the same file.
async function resizeImage(inputPath, outputPath, targetDimensions) {
  const bgColor = await detectEdgeColor(inputPath);
  const tmpPath = outputPath + ".tmp";
  const pipeline = sharp2(inputPath)
    .resize(targetDimensions.width, targetDimensions.height, {
      fit: "contain",
      // Preserve aspect ratio; pad the remainder with background
      withoutEnlargement: false,
      // Allow enlargement if needed
      background: bgColor
      // Use detected edge color
    })
    .flatten({ background: bgColor })
    .png();
  await pipeline.toFile(tmpPath);
  fs11.renameSync(tmpPath, outputPath);
}
|
|
3641
3716
|
async function validateAndResizeImage(sourcePath, translatedPath) {
|
|
3642
|
-
const sourceDimensions = await
|
|
3643
|
-
const translatedDimensions = await
|
|
3717
|
+
const sourceDimensions = await getImageDimensions2(sourcePath);
|
|
3718
|
+
const translatedDimensions = await getImageDimensions2(translatedPath);
|
|
3644
3719
|
const needsResize = sourceDimensions.width !== translatedDimensions.width || sourceDimensions.height !== translatedDimensions.height;
|
|
3645
3720
|
if (needsResize) {
|
|
3646
3721
|
await resizeImage(translatedPath, translatedPath, sourceDimensions);
|
|
@@ -3687,7 +3762,10 @@ var localizeScreenshotsInputSchema = z7.object({
|
|
|
3687
3762
|
),
|
|
3688
3763
|
deviceTypes: z7.array(z7.enum(["phone", "tablet"])).optional().default(["phone", "tablet"]).describe("Device types to process (default: both phone and tablet)"),
|
|
3689
3764
|
dryRun: z7.boolean().optional().default(false).describe("Preview mode - shows what would be translated without actually translating"),
|
|
3690
|
-
skipExisting: z7.boolean().optional().default(true).describe("Skip translation if target file already exists (default: true)")
|
|
3765
|
+
skipExisting: z7.boolean().optional().default(true).describe("Skip translation if target file already exists (default: true)"),
|
|
3766
|
+
screenshotNumbers: z7.array(z7.number().int().positive()).optional().describe(
|
|
3767
|
+
"Specific screenshot numbers to process (e.g., [1, 3, 5]). If not provided, all screenshots will be processed."
|
|
3768
|
+
)
|
|
3691
3769
|
});
|
|
3692
3770
|
var jsonSchema7 = zodToJsonSchema7(localizeScreenshotsInputSchema, {
|
|
3693
3771
|
name: "LocalizeScreenshotsInput",
|
|
@@ -3802,7 +3880,8 @@ async function handleLocalizeScreenshots(input) {
|
|
|
3802
3880
|
targetLocales: requestedTargetLocales,
|
|
3803
3881
|
deviceTypes = ["phone", "tablet"],
|
|
3804
3882
|
dryRun = false,
|
|
3805
|
-
skipExisting = true
|
|
3883
|
+
skipExisting = true,
|
|
3884
|
+
screenshotNumbers
|
|
3806
3885
|
} = input;
|
|
3807
3886
|
const results = [];
|
|
3808
3887
|
let appInfo;
|
|
@@ -3854,9 +3933,20 @@ async function handleLocalizeScreenshots(input) {
|
|
|
3854
3933
|
}
|
|
3855
3934
|
results.push(`\u{1F3AF} Target locales: ${targetLocales.join(", ")}`);
|
|
3856
3935
|
const sourceScreenshots = scanLocaleScreenshots(appInfo.slug, primaryLocale);
|
|
3857
|
-
|
|
3936
|
+
let filteredScreenshots = sourceScreenshots.filter(
|
|
3858
3937
|
(s) => deviceTypes.includes(s.type)
|
|
3859
3938
|
);
|
|
3939
|
+
if (screenshotNumbers && screenshotNumbers.length > 0) {
|
|
3940
|
+
filteredScreenshots = filteredScreenshots.filter((s) => {
|
|
3941
|
+
const match = s.filename.match(/^(\d+)\./);
|
|
3942
|
+
if (match) {
|
|
3943
|
+
const num = parseInt(match[1], 10);
|
|
3944
|
+
return screenshotNumbers.includes(num);
|
|
3945
|
+
}
|
|
3946
|
+
return false;
|
|
3947
|
+
});
|
|
3948
|
+
results.push(`\u{1F522} Filtering screenshots: ${screenshotNumbers.join(", ")}`);
|
|
3949
|
+
}
|
|
3860
3950
|
if (filteredScreenshots.length === 0) {
|
|
3861
3951
|
const screenshotsDir2 = getScreenshotsDir(appInfo.slug);
|
|
3862
3952
|
return {
|
|
@@ -3928,13 +4018,18 @@ ${screenshotsDir2}/${primaryLocale}/tablet/1.png, 2.png, ...`
|
|
|
3928
4018
|
const translationResult = await translateImagesWithProgress(
|
|
3929
4019
|
tasks,
|
|
3930
4020
|
(progress) => {
|
|
3931
|
-
|
|
4021
|
+
const progressPrefix = `[${progress.current}/${progress.total}]`;
|
|
4022
|
+
if (progress.status === "translating") {
|
|
4023
|
+
console.log(
|
|
4024
|
+
`\u{1F504} ${progressPrefix} Translating ${progress.targetLocale}/${progress.deviceType}/${progress.filename}...`
|
|
4025
|
+
);
|
|
4026
|
+
} else if (progress.status === "completed") {
|
|
3932
4027
|
console.log(
|
|
3933
|
-
`\u2705 ${progress.targetLocale}/${progress.deviceType}/${progress.filename}`
|
|
4028
|
+
`\u2705 ${progressPrefix} ${progress.targetLocale}/${progress.deviceType}/${progress.filename}`
|
|
3934
4029
|
);
|
|
3935
4030
|
} else if (progress.status === "failed") {
|
|
3936
4031
|
console.log(
|
|
3937
|
-
`\u274C ${progress.targetLocale}/${progress.deviceType}/${progress.filename}: ${progress.error}`
|
|
4032
|
+
`\u274C ${progressPrefix} ${progress.targetLocale}/${progress.deviceType}/${progress.filename}: ${progress.error}`
|
|
3938
4033
|
);
|
|
3939
4034
|
}
|
|
3940
4035
|
}
|