lingo.dev 0.74.9 → 0.74.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/build/cli.mjs CHANGED
@@ -265,12 +265,13 @@ function _getConfigFilePath() {
  // src/cli/cmd/init.ts
  import { defaultConfig, resolveLocaleCode, bucketTypes } from "@lingo.dev/_spec";
  import fs3 from "fs";
+ import path3 from "path";
  import { spawn } from "child_process";
  import _2 from "lodash";
  import { confirm } from "@inquirer/prompts";
- var openUrl = (path8) => {
+ var openUrl = (path9) => {
  const settings = getSettings(void 0);
- spawn("open", [`${settings.auth.webUrl}${path8}`]);
+ spawn("open", [`${settings.auth.webUrl}${path9}`]);
  };
  var throwHelpError = (option, value) => {
  if (value === "help") {
@@ -311,20 +312,22 @@ var init_default = new InteractiveCommand().command("init").description("Initial
  return value;
  }).default("json")
  ).addOption(
- new InteractiveOption("-p, --paths <path...>", "List of paths for the bucket").argParser((value) => {
+ new InteractiveOption("-p, --paths [path...]", "List of paths for the bucket").argParser((value) => {
+ if (!value || value.length === 0) return [];
  const values = value.includes(",") ? value.split(",") : value.split(" ");
- for (const path8 of values) {
+ for (const p of values) {
  try {
- const stats = fs3.statSync(path8);
+ const dirPath = path3.dirname(p);
+ const stats = fs3.statSync(dirPath);
  if (!stats.isDirectory()) {
- throw new Error(`${path8} is not a directory`);
+ throw new Error(`${dirPath} is not a directory`);
  }
  } catch (err) {
- throw new Error(`Invalid directory path: ${path8}`);
+ throw new Error(`Invalid path: ${p}`);
  }
  }
  return values;
- }).default(".")
+ }).default([])
  ).action(async (options) => {
  const settings = getSettings(void 0);
  const spinner = Ora2().start("Initializing Lingo.dev project");
@@ -337,7 +340,9 @@ var init_default = new InteractiveCommand().command("init").description("Initial
  newConfig.locale.source = options.source;
  newConfig.locale.targets = options.targets;
  newConfig.buckets = {
- [options.bucket]: options.paths
+ [options.bucket]: {
+ include: options.paths || []
+ }
  };
  await saveConfig(newConfig);
  spinner.succeed("Lingo.dev project initialized");
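
Note on the two init hunks above: -p/--paths is now an optional variadic, each supplied path is validated by its parent directory (so a pattern pointing at a file that does not exist yet still passes), and each bucket is written as an object with an include array rather than a bare list of paths. A minimal sketch of the i18n.json shape the updated command produces; the bucket type, locales, and path pattern below are illustrative assumptions, not values taken from this diff:

    // sketch only: bucket shape written by init after this change (example values assumed)
    const newConfig = {
      locale: { source: "en", targets: ["es"] },
      buckets: {
        json: {
          include: ["locales/[locale].json"], // 0.74.9 stored a plain array of paths here
        },
      },
    };
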
@@ -391,7 +396,7 @@ import { Command as Command5 } from "interactive-commander";
  import { Command as Command2 } from "interactive-commander";
  import _3 from "lodash";
  import fs4 from "fs";
- import path3 from "path";
+ import path4 from "path";
  import { defaultConfig as defaultConfig2 } from "@lingo.dev/_spec";
  var config_default = new Command2().command("config").description("Print out the current configuration").helpOption("-h, --help", "Show help").action(async (options) => {
  const fileConfig = loadReplexicaFileConfig();
@@ -399,7 +404,7 @@ var config_default = new Command2().command("config").description("Print out the
  console.log(JSON.stringify(config, null, 2));
  });
  function loadReplexicaFileConfig() {
- const replexicaConfigPath = path3.resolve(process.cwd(), "i18n.json");
+ const replexicaConfigPath = path4.resolve(process.cwd(), "i18n.json");
  const fileExists = fs4.existsSync(replexicaConfigPath);
  if (!fileExists) {
  return void 0;
@@ -441,7 +446,7 @@ import Ora4 from "ora";
 
  // src/cli/utils/buckets.ts
  import _4 from "lodash";
- import path4 from "path";
+ import path5 from "path";
  import * as glob from "glob";
  import { resolveOverridenLocale } from "@lingo.dev/_spec";
  function getBuckets(i18nConfig) {
@@ -476,9 +481,9 @@ function extractPathPatterns(sourceLocale, include, exclude) {
  return result;
  }
  function expandPlaceholderedGlob(_pathPattern, sourceLocale) {
- const absolutePathPattern = path4.resolve(_pathPattern);
- const pathPattern = path4.relative(process.cwd(), absolutePathPattern);
- if (path4.relative(process.cwd(), pathPattern).startsWith("..")) {
+ const absolutePathPattern = path5.resolve(_pathPattern);
+ const pathPattern = path5.relative(process.cwd(), absolutePathPattern);
+ if (path5.relative(process.cwd(), pathPattern).startsWith("..")) {
  throw new CLIError({
  message: `Invalid path pattern: ${pathPattern}. Path pattern must be within the current working directory.`,
  docUrl: "invalidPathPattern"
@@ -496,19 +501,19 @@ function expandPlaceholderedGlob(_pathPattern, sourceLocale) {
  docUrl: "invalidPathPattern"
  });
  }
- const pathPatternChunks = pathPattern.split(path4.sep);
+ const pathPatternChunks = pathPattern.split(path5.sep);
  const localeSegmentIndex = pathPatternChunks.findIndex((segment) => segment.includes("[locale]"));
  const localePlaceholderIndex = pathPatternChunks[localeSegmentIndex]?.indexOf("[locale]") ?? -1;
  const sourcePathPattern = pathPattern.replace(/\[locale\]/g, sourceLocale);
- const sourcePaths = glob.sync(sourcePathPattern, { follow: true, withFileTypes: true }).filter((file) => file.isFile() || file.isSymbolicLink()).map((file) => file.fullpath()).map((fullpath) => path4.relative(process.cwd(), fullpath));
+ const sourcePaths = glob.sync(sourcePathPattern, { follow: true, withFileTypes: true }).filter((file) => file.isFile() || file.isSymbolicLink()).map((file) => file.fullpath()).map((fullpath) => path5.relative(process.cwd(), fullpath));
  const placeholderedPaths = sourcePaths.map((sourcePath) => {
- const sourcePathChunks = sourcePath.split(path4.sep);
+ const sourcePathChunks = sourcePath.split(path5.sep);
  if (localeSegmentIndex >= 0 && localePlaceholderIndex >= 0) {
  const placeholderedPathChunk = sourcePathChunks[localeSegmentIndex];
  const placeholderedSegment = placeholderedPathChunk.substring(0, localePlaceholderIndex) + "[locale]" + placeholderedPathChunk.substring(localePlaceholderIndex + sourceLocale.length);
  sourcePathChunks[localeSegmentIndex] = placeholderedSegment;
  }
- const placeholderedPath = sourcePathChunks.join(path4.sep);
+ const placeholderedPath = sourcePathChunks.join(path5.sep);
  return placeholderedPath;
  });
  return placeholderedPaths;
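
For context on expandPlaceholderedGlob above (only the path4 to path5 rename changed here): the function resolves a [locale] path pattern against the source locale, globs the matching files, and re-inserts the [locale] placeholder into each match so target locales can be substituted later. A rough sketch of the expected mapping, using a hypothetical pattern and file name:

    // illustration only; pattern, locales, and paths are assumed examples
    const pathPattern = "locales/[locale]/common.json";
    const sourcePathPattern = pathPattern.replace(/\[locale\]/g, "en"); // "locales/en/common.json"
    // a file matched by that glob, e.g. "locales/en/common.json", is mapped back to
    // "locales/[locale]/common.json", so "es", "de", etc. can reuse the same segment
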
@@ -550,8 +555,8 @@ var files_default = new Command4().command("files").description("Print out the l
  } else if (type.target) {
  result.push(...targetPaths);
  }
- result.forEach((path8) => {
- console.log(path8);
+ result.forEach((path9) => {
+ console.log(path9);
  });
  }
  }
@@ -575,7 +580,7 @@ import { bucketTypeSchema, localeCodeSchema, resolveOverridenLocale as resolveOv
  import { ReplexicaEngine } from "@lingo.dev/_sdk";
  import { Command as Command6 } from "interactive-commander";
  import Z4 from "zod";
- import _17 from "lodash";
+ import _18 from "lodash";
  import Ora5 from "ora";
 
  // src/cli/loaders/_utils.ts
@@ -677,30 +682,88 @@ function createJsonLoader() {
 
  // src/cli/loaders/flat.ts
  import { flatten, unflatten } from "flat";
+ import _5 from "lodash";
+ var OBJECT_NUMERIC_KEY_PREFIX = "__lingodotdev__obj__";
  function createFlatLoader() {
+ let denormalizedKeysMap;
  return createLoader({
  pull: async (locale, input) => {
- return flatten(input || {}, {
+ const denormalized = denormalizeObjectKeys(input || {});
+ const flattened = flatten(denormalized, {
  delimiter: "/",
  transformKey(key) {
  return encodeURIComponent(String(key));
  }
  });
+ denormalizedKeysMap = buildDenormalizedKeysMap(flattened);
+ const normalized = normalizeObjectKeys(flattened);
+ return normalized;
  },
  push: async (locale, data) => {
- return unflatten(data || {}, {
+ const denormalized = mapDeormalizedKeys(data, denormalizedKeysMap);
+ const unflattened = unflatten(denormalized || {}, {
  delimiter: "/",
  transformKey(key) {
  return decodeURIComponent(String(key));
  }
  });
+ const normalized = normalizeObjectKeys(unflattened);
+ return normalized;
  }
  });
  }
+ function buildDenormalizedKeysMap(obj) {
+ return Object.keys(obj).reduce(
+ (acc, key) => {
+ const normalizedKey = `${key}`.replace(OBJECT_NUMERIC_KEY_PREFIX, "");
+ acc[normalizedKey] = key;
+ return acc;
+ },
+ {}
+ );
+ }
+ function mapDeormalizedKeys(obj, denormalizedKeysMap) {
+ return Object.keys(obj).reduce(
+ (acc, key) => {
+ const denormalizedKey = denormalizedKeysMap[key];
+ acc[denormalizedKey] = obj[key];
+ return acc;
+ },
+ {}
+ );
+ }
+ function denormalizeObjectKeys(obj) {
+ if (_5.isObject(obj) && !_5.isArray(obj)) {
+ return _5.transform(
+ obj,
+ (result, value, key) => {
+ const newKey = !isNaN(Number(key)) ? `${OBJECT_NUMERIC_KEY_PREFIX}${key}` : key;
+ result[newKey] = _5.isObject(value) ? denormalizeObjectKeys(value) : value;
+ },
+ {}
+ );
+ } else {
+ return obj;
+ }
+ }
+ function normalizeObjectKeys(obj) {
+ if (_5.isObject(obj) && !_5.isArray(obj)) {
+ return _5.transform(
+ obj,
+ (result, value, key) => {
+ const newKey = `${key}`.replace(OBJECT_NUMERIC_KEY_PREFIX, "");
+ result[newKey] = _5.isObject(value) ? normalizeObjectKeys(value) : value;
+ },
+ {}
+ );
+ } else {
+ return obj;
+ }
+ }
 
  // src/cli/loaders/text-file.ts
  import fs5 from "fs/promises";
- import path5 from "path";
+ import path6 from "path";
  function createTextFileLoader(pathPattern) {
  return createLoader({
  async pull(locale) {
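
The flat loader rewrite above is the substantive change in this hunk: numeric object keys are prefixed with __lingodotdev__obj__ before flatten/unflatten and stripped again afterwards, so objects keyed by numbers keep their shape across a pull/push round trip. A rough sketch of the failure mode this guards against, using a made-up payload (by default the flat package turns numeric keys back into array indices when unflattening):

    // illustration only; the "404" key is a hypothetical example
    import { flatten, unflatten } from "flat";
    const input = { errors: { "404": "Not found" } };
    const flat = flatten(input, { delimiter: "/" });   // { "errors/404": "Not found" }
    const naive = unflatten(flat, { delimiter: "/" });  // errors comes back as a sparse array
    // with the prefix, the loader instead flattens { errors: { "__lingodotdev__obj__404": "Not found" } },
    // remembers the key mapping, and strips the prefix on the way out, so the object shape survives
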
@@ -708,10 +771,10 @@ function createTextFileLoader(pathPattern) {
  const trimmedResult = result.trim();
  return trimmedResult;
  },
- async push(locale, data, _19, originalLocale) {
+ async push(locale, data, _20, originalLocale) {
  const draftPath = pathPattern.replace("[locale]", locale);
- const finalPath = path5.resolve(draftPath);
- const dirPath = path5.dirname(finalPath);
+ const finalPath = path6.resolve(draftPath);
+ const dirPath = path6.dirname(finalPath);
  await fs5.mkdir(dirPath, { recursive: true });
  const trimmedPayload = data.trim();
  const trailingNewLine = await getTrailingNewLine(pathPattern, locale, originalLocale);
@@ -725,7 +788,7 @@ function createTextFileLoader(pathPattern) {
  }
  async function readFileForLocale(pathPattern, locale) {
  const draftPath = pathPattern.replace("[locale]", locale);
- const finalPath = path5.resolve(draftPath);
+ const finalPath = path6.resolve(draftPath);
  const exists = await fs5.access(finalPath).then(() => true).catch(() => false);
  if (!exists) {
  return "";
@@ -777,15 +840,15 @@ function createRootKeyLoader(replaceAll = false) {
  }
 
  // src/cli/loaders/flutter.ts
- import _5 from "lodash";
+ import _6 from "lodash";
  function createFlutterLoader() {
  return createLoader({
  async pull(locale, input) {
- const result = _5.pickBy(input, (value, key) => !key.startsWith("@"));
+ const result = _6.pickBy(input, (value, key) => !key.startsWith("@"));
  return result;
  },
  async push(locale, data, originalInput) {
- const result = _5.merge({}, originalInput, { "@@locale": locale }, data);
+ const result = _6.merge({}, originalInput, { "@@locale": locale }, data);
  return result;
  }
  });
@@ -887,7 +950,7 @@ function createAndroidLoader() {
  // src/cli/loaders/csv.ts
  import { parse } from "csv-parse/sync";
  import { stringify } from "csv-stringify/sync";
- import _6 from "lodash";
+ import _7 from "lodash";
  function createCsvLoader() {
  return createLoader({
  async pull(locale, _input) {
@@ -895,7 +958,7 @@ function createCsvLoader() {
  columns: true
  });
  const result = {};
- _6.forEach(input, (row) => {
+ _7.forEach(input, (row) => {
  const key = row.id;
  if (key && row[locale]) {
  result[key] = row[locale];
@@ -1008,9 +1071,9 @@ function createHtmlLoader() {
  const bDepth = b.split("/").length;
  return aDepth - bDepth;
  });
- paths.forEach((path8) => {
- const value = data[path8];
- const [nodePath, attribute] = path8.split("#");
+ paths.forEach((path9) => {
+ const value = data[path9];
+ const [nodePath, attribute] = path9.split("#");
  const [rootTag, ...indices] = nodePath.split("/");
  let parent = rootTag === "head" ? document.head : document.body;
  let current = parent;
@@ -1113,7 +1176,7 @@ function createPropertiesLoader() {
  return result;
  },
  async push(locale, payload) {
- const result = Object.entries(payload).filter(([_19, value]) => value != null).map(([key, value]) => `${key}=${value}`).join("\n");
+ const result = Object.entries(payload).filter(([_20, value]) => value != null).map(([key, value]) => `${key}=${value}`).join("\n");
  return result;
  }
  });
@@ -1199,7 +1262,7 @@ function createXcodeStringsdictLoader() {
  }
 
  // src/cli/loaders/xcode-xcstrings.ts
- import _7 from "lodash";
+ import _8 from "lodash";
  function createXcodeXcstringsLoader() {
  return createLoader({
  async pull(locale, input) {
@@ -1263,7 +1326,7 @@ function createXcodeXcstringsLoader() {
  };
  }
  }
- const result = _7.merge({}, originalInput, langDataToMerge);
+ const result = _8.merge({}, originalInput, langDataToMerge);
  return result;
  }
  });
@@ -1305,17 +1368,17 @@ async function loadPrettierConfig() {
  }
 
  // src/cli/loaders/unlocalizable.ts
- import _8 from "lodash";
+ import _9 from "lodash";
  import _isUrl from "is-url";
  import { isValid, parseISO } from "date-fns";
  function createUnlocalizableLoader() {
  const rules = {
- isEmpty: (v) => _8.isEmpty(v),
- isNumber: (v) => !_8.isNaN(_8.toNumber(v)),
- isBoolean: (v) => _8.isBoolean(v),
- isIsoDate: (v) => _8.isString(v) && _isIsoDate(v),
- isSystemId: (v) => _8.isString(v) && _isSystemId(v),
- isUrl: (v) => _8.isString(v) && _isUrl(v)
+ isEmpty: (v) => _9.isEmpty(v),
+ isNumber: (v) => !_9.isNaN(_9.toNumber(v)),
+ isBoolean: (v) => _9.isBoolean(v),
+ isIsoDate: (v) => _9.isString(v) && _isIsoDate(v),
+ isSystemId: (v) => _9.isString(v) && _isSystemId(v),
+ isUrl: (v) => _9.isString(v) && _isUrl(v)
  };
  return createLoader({
  async pull(locale, input) {
@@ -1326,12 +1389,12 @@ function createUnlocalizableLoader() {
  }
  }
  return false;
- }).map(([key, _19]) => key);
- const result = _8.omitBy(input, (_19, key) => passthroughKeys.includes(key));
+ }).map(([key, _20]) => key);
+ const result = _9.omitBy(input, (_20, key) => passthroughKeys.includes(key));
  return result;
  },
  async push(locale, data, originalInput) {
- const result = _8.merge({}, originalInput, data);
+ const result = _9.merge({}, originalInput, data);
  return result;
  }
  });
@@ -1344,7 +1407,7 @@ function _isIsoDate(v) {
  }
 
  // src/cli/loaders/po/index.ts
- import _9 from "lodash";
+ import _10 from "lodash";
  import gettextParser from "gettext-parser";
  function createPoLoader(params = { multiline: false }) {
  return composeLoaders(createPoDataLoader(params), createPoContentLoader());
@@ -1357,7 +1420,7 @@ function createPoDataLoader(params) {
  const sections = input.split("\n\n").filter(Boolean);
  for (const section of sections) {
  const sectionPo = gettextParser.po.parse(section);
- const contextKey = _9.keys(sectionPo.translations)[0];
+ const contextKey = _10.keys(sectionPo.translations)[0];
  const entries = sectionPo.translations[contextKey];
  Object.entries(entries).forEach(([msgid, entry]) => {
  if (msgid && entry.msgid) {
@@ -1375,12 +1438,12 @@ function createPoDataLoader(params) {
  const sections = originalInput?.split("\n\n").filter(Boolean) || [];
  const result = sections.map((section) => {
  const sectionPo = gettextParser.po.parse(section);
- const contextKey = _9.keys(sectionPo.translations)[0];
+ const contextKey = _10.keys(sectionPo.translations)[0];
  const entries = sectionPo.translations[contextKey];
  const msgid = Object.keys(entries).find((key) => entries[key].msgid);
  if (!msgid) return section;
  if (data[msgid]) {
- const updatedPo = _9.merge({}, sectionPo, {
+ const updatedPo = _10.merge({}, sectionPo, {
  translations: {
  [contextKey]: {
  [msgid]: {
@@ -1400,7 +1463,7 @@ function createPoDataLoader(params) {
  function createPoContentLoader() {
  return createLoader({
  async pull(locale, input) {
- const result = _9.chain(input).entries().filter(([, entry]) => !!entry.msgid).map(([, entry]) => [
+ const result = _10.chain(input).entries().filter(([, entry]) => !!entry.msgid).map(([, entry]) => [
  entry.msgid,
  {
  singular: entry.msgstr[0] || entry.msgid,
@@ -1410,7 +1473,7 @@ function createPoContentLoader() {
  return result;
  },
  async push(locale, data, originalInput) {
- const result = _9.chain(originalInput).entries().map(([, entry]) => [
+ const result = _10.chain(originalInput).entries().map(([, entry]) => [
  entry.msgid,
  {
  ...entry,
@@ -1533,34 +1596,34 @@ var datoSettingsSchema = Z2.object({
  });
 
  // src/cli/loaders/dato/filter.ts
- import _10 from "lodash";
+ import _11 from "lodash";
  function createDatoFilterLoader() {
  return createLoader({
  async pull(locale, input) {
  const result = {};
- for (const [modelId, modelInfo] of _10.entries(input)) {
+ for (const [modelId, modelInfo] of _11.entries(input)) {
  result[modelId] = {};
  for (const record of modelInfo.records) {
- result[modelId][record.id] = _10.chain(modelInfo.fields).mapKeys((field) => field.api_key).mapValues((field) => _10.get(record, [field.api_key, locale])).value();
+ result[modelId][record.id] = _11.chain(modelInfo.fields).mapKeys((field) => field.api_key).mapValues((field) => _11.get(record, [field.api_key, locale])).value();
  }
  }
  return result;
  },
  async push(locale, data, originalInput, originalLocale) {
- const result = _10.cloneDeep(originalInput || {});
- for (const [modelId, modelInfo] of _10.entries(result)) {
+ const result = _11.cloneDeep(originalInput || {});
+ for (const [modelId, modelInfo] of _11.entries(result)) {
  for (const record of modelInfo.records) {
- for (const [fieldId, fieldValue] of _10.entries(record)) {
+ for (const [fieldId, fieldValue] of _11.entries(record)) {
  const fieldInfo = modelInfo.fields.find((field) => field.api_key === fieldId);
  if (fieldInfo) {
- const sourceFieldValue = _10.get(fieldValue, [originalLocale]);
- const targetFieldValue = _10.get(data, [modelId, record.id, fieldId]);
+ const sourceFieldValue = _11.get(fieldValue, [originalLocale]);
+ const targetFieldValue = _11.get(data, [modelId, record.id, fieldId]);
  if (targetFieldValue) {
- _10.set(record, [fieldId, locale], targetFieldValue);
+ _11.set(record, [fieldId, locale], targetFieldValue);
  } else {
- _10.set(record, [fieldId, locale], sourceFieldValue);
+ _11.set(record, [fieldId, locale], sourceFieldValue);
  }
- _10.chain(fieldValue).keys().reject((loc) => loc === locale || loc === originalLocale).filter((loc) => _10.isEmpty(_10.get(fieldValue, [loc]))).forEach((loc) => _10.set(record, [fieldId, loc], sourceFieldValue)).value();
+ _11.chain(fieldValue).keys().reject((loc) => loc === locale || loc === originalLocale).filter((loc) => _11.isEmpty(_11.get(fieldValue, [loc]))).forEach((loc) => _11.set(record, [fieldId, loc], sourceFieldValue)).value();
  }
  }
  }
@@ -1571,10 +1634,10 @@ function createDatoFilterLoader() {
  }
 
  // src/cli/loaders/dato/api.ts
- import _12 from "lodash";
+ import _13 from "lodash";
 
  // src/cli/loaders/dato/_utils.ts
- import _11 from "lodash";
+ import _12 from "lodash";
  import { buildClient } from "@datocms/cma-client-node";
  function createDatoClient(params) {
  if (!params.apiKey) {
@@ -1749,7 +1812,7 @@ function createDatoApiLoader(config, onConfigUpdate) {
  const result = {
  models: {}
  };
- const updatedConfig = _12.cloneDeep(config);
+ const updatedConfig = _13.cloneDeep(config);
  console.log(`Initializing DatoCMS loader...`);
  const project = await dato.findProject();
  const modelChoices = await getModelChoices(dato, config);
@@ -1767,7 +1830,7 @@ function createDatoApiLoader(config, onConfigUpdate) {
  delete updatedConfig.models[modelId];
  }
  }
- for (const modelId of _12.keys(updatedConfig.models)) {
+ for (const modelId of _13.keys(updatedConfig.models)) {
  const { modelName, fields } = await getModelFields(dato, modelId);
  if (fields.length > 0) {
  result.models[modelId] = { fields: [], records: [] };
@@ -1778,7 +1841,7 @@ function createDatoApiLoader(config, onConfigUpdate) {
  const isLocalized = await updateFieldLocalization(dato, fieldInfo, selectedFields.includes(fieldInfo.id));
  if (isLocalized) {
  result.models[modelId].fields.push(fieldInfo);
- updatedConfig.models[modelId].fields = _12.uniq([
+ updatedConfig.models[modelId].fields = _13.uniq([
  ...updatedConfig.models[modelId].fields || [],
  fieldInfo.api_key
  ]);
@@ -1797,7 +1860,7 @@ function createDatoApiLoader(config, onConfigUpdate) {
  },
  async pull(locale, input, initCtx) {
  const result = {};
- for (const modelId of _12.keys(initCtx?.models || {})) {
+ for (const modelId of _13.keys(initCtx?.models || {})) {
  let records = initCtx?.models[modelId].records || [];
  const recordIds = records.map((record) => record.id);
  records = await dato.findRecords(recordIds);
@@ -1812,7 +1875,7 @@ function createDatoApiLoader(config, onConfigUpdate) {
  return result;
  },
  async push(locale, data, originalInput) {
- for (const modelId of _12.keys(data)) {
+ for (const modelId of _13.keys(data)) {
  for (let i = 0; i < data[modelId].records.length; i++) {
  const record = data[modelId].records[i];
  console.log(`Updating record ${i + 1}/${data[modelId].records.length} for model ${modelId}...`);
@@ -1826,7 +1889,7 @@ async function getModelFields(dato, modelId) {
  const modelInfo = await dato.findModel(modelId);
  return {
  modelName: modelInfo.name,
- fields: _12.filter(modelInfo.fields, (field) => field.type === "field")
+ fields: _13.filter(modelInfo.fields, (field) => field.type === "field")
  };
  }
  async function getFieldDetails(dato, fields) {
@@ -1904,17 +1967,17 @@ async function promptModelSelection(choices) {
  }
 
  // src/cli/loaders/dato/extract.ts
- import _13 from "lodash";
+ import _14 from "lodash";
  function createDatoExtractLoader() {
  return createLoader({
  async pull(locale, input) {
  const result = {};
- for (const [modelId, modelInfo] of _13.entries(input)) {
- for (const [recordId, record] of _13.entries(modelInfo)) {
- for (const [fieldName, fieldValue] of _13.entries(record)) {
+ for (const [modelId, modelInfo] of _14.entries(input)) {
+ for (const [recordId, record] of _14.entries(modelInfo)) {
+ for (const [fieldName, fieldValue] of _14.entries(record)) {
  const parsedValue = createParsedDatoValue(fieldValue);
  if (parsedValue) {
- _13.set(result, [modelId, `_${recordId}`, fieldName], parsedValue);
+ _14.set(result, [modelId, `_${recordId}`, fieldName], parsedValue);
  }
  }
  }
@@ -1922,14 +1985,14 @@ function createDatoExtractLoader() {
  return result;
  },
  async push(locale, data, originalInput) {
- const result = _13.cloneDeep(originalInput || {});
- for (const [modelId, modelInfo] of _13.entries(data)) {
- for (const [virtualRecordId, record] of _13.entries(modelInfo)) {
- for (const [fieldName, fieldValue] of _13.entries(record)) {
+ const result = _14.cloneDeep(originalInput || {});
+ for (const [modelId, modelInfo] of _14.entries(data)) {
+ for (const [virtualRecordId, record] of _14.entries(modelInfo)) {
+ for (const [fieldName, fieldValue] of _14.entries(record)) {
  const [, recordId] = virtualRecordId.split("_");
- const originalFieldValue = _13.get(originalInput, [modelId, recordId, fieldName]);
+ const originalFieldValue = _14.get(originalInput, [modelId, recordId, fieldName]);
  const rawValue = createRawDatoValue(fieldValue, originalFieldValue, true);
- _13.set(result, [modelId, recordId, fieldName], rawValue || originalFieldValue);
+ _14.set(result, [modelId, recordId, fieldName], rawValue || originalFieldValue);
  }
  }
  }
@@ -1938,25 +2001,25 @@ function createDatoExtractLoader() {
  });
  }
  function detectDatoFieldType(rawDatoValue) {
- if (_13.has(rawDatoValue, "document") && _13.get(rawDatoValue, "schema") === "dast") {
+ if (_14.has(rawDatoValue, "document") && _14.get(rawDatoValue, "schema") === "dast") {
  return "structured_text";
- } else if (_13.has(rawDatoValue, "no_index") || _13.has(rawDatoValue, "twitter_card")) {
+ } else if (_14.has(rawDatoValue, "no_index") || _14.has(rawDatoValue, "twitter_card")) {
  return "seo";
- } else if (_13.get(rawDatoValue, "type") === "item") {
+ } else if (_14.get(rawDatoValue, "type") === "item") {
  return "single_block";
- } else if (_13.isArray(rawDatoValue) && _13.every(rawDatoValue, (item) => _13.get(item, "type") === "item")) {
+ } else if (_14.isArray(rawDatoValue) && _14.every(rawDatoValue, (item) => _14.get(item, "type") === "item")) {
  return "rich_text";
  } else if (_isFile(rawDatoValue)) {
  return "file";
- } else if (_13.isArray(rawDatoValue) && _13.every(rawDatoValue, (item) => _isFile(item))) {
+ } else if (_14.isArray(rawDatoValue) && _14.every(rawDatoValue, (item) => _isFile(item))) {
  return "gallery";
  } else if (_isJson(rawDatoValue)) {
  return "json";
- } else if (_13.isString(rawDatoValue)) {
+ } else if (_14.isString(rawDatoValue)) {
  return "string";
  } else if (_isVideo(rawDatoValue)) {
  return "video";
- } else if (_13.isArray(rawDatoValue) && _13.every(rawDatoValue, (item) => _13.isString(item))) {
+ } else if (_14.isArray(rawDatoValue) && _14.every(rawDatoValue, (item) => _14.isString(item))) {
  return "ref_list";
  } else {
  return null;
@@ -2014,62 +2077,62 @@ function createRawDatoValue(parsedDatoValue, originalRawDatoValue, isClean = fal
  }
  function serializeStructuredText(rawStructuredText) {
  return serializeStructuredTextNode(rawStructuredText);
- function serializeStructuredTextNode(node, path8 = [], acc = {}) {
+ function serializeStructuredTextNode(node, path9 = [], acc = {}) {
  if ("document" in node) {
- return serializeStructuredTextNode(node.document, [...path8, "document"], acc);
+ return serializeStructuredTextNode(node.document, [...path9, "document"], acc);
  }
- if (!_13.isNil(node.value)) {
- acc[[...path8, "value"].join(".")] = node.value;
- } else if (_13.get(node, "type") === "block") {
- acc[[...path8, "item"].join(".")] = serializeBlock(node.item);
+ if (!_14.isNil(node.value)) {
+ acc[[...path9, "value"].join(".")] = node.value;
+ } else if (_14.get(node, "type") === "block") {
+ acc[[...path9, "item"].join(".")] = serializeBlock(node.item);
  }
  if (node.children) {
  for (let i = 0; i < node.children.length; i++) {
- serializeStructuredTextNode(node.children[i], [...path8, i.toString()], acc);
+ serializeStructuredTextNode(node.children[i], [...path9, i.toString()], acc);
  }
  }
  return acc;
  }
  }
  function serializeSeo(rawSeo) {
- return _13.chain(rawSeo).pick(["title", "description"]).value();
+ return _14.chain(rawSeo).pick(["title", "description"]).value();
  }
  function serializeBlock(rawBlock) {
- if (_13.get(rawBlock, "type") === "item" && _13.has(rawBlock, "id")) {
+ if (_14.get(rawBlock, "type") === "item" && _14.has(rawBlock, "id")) {
  return serializeBlock(rawBlock.attributes);
  }
  const result = {};
- for (const [attributeName, attributeValue] of _13.entries(rawBlock)) {
+ for (const [attributeName, attributeValue] of _14.entries(rawBlock)) {
  result[attributeName] = createParsedDatoValue(attributeValue);
  }
  return result;
  }
  function serializeBlockList(rawBlockList) {
- return _13.chain(rawBlockList).map((block) => serializeBlock(block)).value();
+ return _14.chain(rawBlockList).map((block) => serializeBlock(block)).value();
  }
  function serializeVideo(rawVideo) {
- return _13.chain(rawVideo).pick(["title"]).value();
+ return _14.chain(rawVideo).pick(["title"]).value();
  }
  function serializeFile(rawFile) {
- return _13.chain(rawFile).pick(["alt", "title"]).value();
+ return _14.chain(rawFile).pick(["alt", "title"]).value();
  }
  function serializeGallery(rawGallery) {
- return _13.chain(rawGallery).map((item) => serializeFile(item)).value();
+ return _14.chain(rawGallery).map((item) => serializeFile(item)).value();
  }
  function deserializeFile(parsedFile, originalRawFile) {
- return _13.chain(parsedFile).defaults(originalRawFile).value();
+ return _14.chain(parsedFile).defaults(originalRawFile).value();
  }
  function deserializeGallery(parsedGallery, originalRawGallery) {
- return _13.chain(parsedGallery).map((item, i) => deserializeFile(item, originalRawGallery[i])).value();
+ return _14.chain(parsedGallery).map((item, i) => deserializeFile(item, originalRawGallery[i])).value();
  }
  function deserializeVideo(parsedVideo, originalRawVideo) {
- return _13.chain(parsedVideo).defaults(originalRawVideo).value();
+ return _14.chain(parsedVideo).defaults(originalRawVideo).value();
  }
  function deserializeBlock(payload, rawNode, isClean = false) {
- const result = _13.cloneDeep(rawNode);
- for (const [attributeName, attributeValue] of _13.entries(rawNode.attributes)) {
+ const result = _14.cloneDeep(rawNode);
+ for (const [attributeName, attributeValue] of _14.entries(rawNode.attributes)) {
  const rawValue = createRawDatoValue(payload[attributeName], attributeValue, isClean);
- _13.set(result, ["attributes", attributeName], rawValue);
+ _14.set(result, ["attributes", attributeName], rawValue);
  }
  if (isClean) {
  delete result["id"];
@@ -2077,33 +2140,33 @@ function deserializeBlock(payload, rawNode, isClean = false) {
  return result;
  }
  function deserializeSeo(parsedSeo, originalRawSeo) {
- return _13.chain(parsedSeo).pick(["title", "description"]).defaults(originalRawSeo).value();
+ return _14.chain(parsedSeo).pick(["title", "description"]).defaults(originalRawSeo).value();
  }
  function deserializeBlockList(parsedBlockList, originalRawBlockList, isClean = false) {
- return _13.chain(parsedBlockList).map((block, i) => deserializeBlock(block, originalRawBlockList[i], isClean)).value();
+ return _14.chain(parsedBlockList).map((block, i) => deserializeBlock(block, originalRawBlockList[i], isClean)).value();
  }
  function deserializeStructuredText(parsedStructuredText, originalRawStructuredText) {
- const result = _13.cloneDeep(originalRawStructuredText);
- for (const [path8, value] of _13.entries(parsedStructuredText)) {
- const realPath = _13.chain(path8.split(".")).flatMap((s) => !_13.isNaN(_13.toNumber(s)) ? ["children", s] : s).value();
- const deserializedValue = createRawDatoValue(value, _13.get(originalRawStructuredText, realPath), true);
- _13.set(result, realPath, deserializedValue);
+ const result = _14.cloneDeep(originalRawStructuredText);
+ for (const [path9, value] of _14.entries(parsedStructuredText)) {
+ const realPath = _14.chain(path9.split(".")).flatMap((s) => !_14.isNaN(_14.toNumber(s)) ? ["children", s] : s).value();
+ const deserializedValue = createRawDatoValue(value, _14.get(originalRawStructuredText, realPath), true);
+ _14.set(result, realPath, deserializedValue);
  }
  return result;
  }
  function _isJson(rawDatoValue) {
  try {
- return _13.isString(rawDatoValue) && rawDatoValue.startsWith("{") && rawDatoValue.endsWith("}") && !!JSON.parse(rawDatoValue);
+ return _14.isString(rawDatoValue) && rawDatoValue.startsWith("{") && rawDatoValue.endsWith("}") && !!JSON.parse(rawDatoValue);
  } catch (e) {
  return false;
  }
  }
  function _isFile(rawDatoValue) {
- return _13.isObject(rawDatoValue) && ["alt", "title", "custom_data", "focal_point", "upload_id"].every((key) => _13.has(rawDatoValue, key));
+ return _14.isObject(rawDatoValue) && ["alt", "title", "custom_data", "focal_point", "upload_id"].every((key) => _14.has(rawDatoValue, key));
  }
  function _isVideo(rawDatoValue) {
- return _13.isObject(rawDatoValue) && ["url", "title", "width", "height", "provider", "provider_uid", "thumbnail_url"].every(
- (key) => _13.has(rawDatoValue, key)
+ return _14.isObject(rawDatoValue) && ["url", "title", "width", "height", "provider", "provider_uid", "thumbnail_url"].every(
+ (key) => _14.has(rawDatoValue, key)
  );
  }
 
@@ -2164,7 +2227,7 @@ function createVttLoader() {
  }
 
  // src/cli/loaders/variable/index.ts
- import _14 from "lodash";
+ import _15 from "lodash";
  function createVariableLoader(params) {
  return composeLoaders(variableExtractLoader(params), variableContentLoader());
  }
@@ -2207,11 +2270,11 @@ function variableExtractLoader(params) {
  function variableContentLoader() {
  return createLoader({
  pull: async (locale, input) => {
- const result = _14.mapValues(input, (payload) => payload.value);
+ const result = _15.mapValues(input, (payload) => payload.value);
  return result;
  },
  push: async (locale, data, originalInput) => {
- const result = _14.cloneDeep(originalInput || {});
+ const result = _15.cloneDeep(originalInput || {});
  for (const [key, originalValueObj] of Object.entries(result)) {
  result[key] = {
  ...originalValueObj,
@@ -2234,20 +2297,20 @@ function getFormatSpecifierPattern(type) {
  }
 
  // src/cli/loaders/sync.ts
- import _15 from "lodash";
+ import _16 from "lodash";
  function createSyncLoader() {
  return createLoader({
  async pull(locale, input, originalInput) {
  if (!originalInput) {
  return input;
  }
- return _15.chain(originalInput).mapValues((value, key) => input[key]).value();
+ return _16.chain(originalInput).mapValues((value, key) => input[key]).value();
  },
  async push(locale, data, originalInput) {
  if (!originalInput) {
  return data;
  }
- return _15.chain(originalInput || {}).mapValues((value, key) => data[key]).value();
+ return _16.chain(originalInput || {}).mapValues((value, key) => data[key]).value();
  }
  });
  }
@@ -2470,11 +2533,11 @@ function createBucketLoader(bucketType, bucketPathPattern) {
 
  // src/cli/utils/lockfile.ts
  import fs7 from "fs";
- import path6 from "path";
+ import path7 from "path";
  import Z3 from "zod";
  import YAML3 from "yaml";
  import { MD5 } from "object-hash";
- import _16 from "lodash";
+ import _17 from "lodash";
  function createLockfileHelper() {
  return {
  isLockfileExists: () => {
@@ -2484,23 +2547,23 @@ function createLockfileHelper() {
  registerSourceData: (pathPattern, sourceData) => {
  const lockfile = _loadLockfile();
  const sectionKey = MD5(pathPattern);
- const sectionChecksums = _16.mapValues(sourceData, (value) => MD5(value));
+ const sectionChecksums = _17.mapValues(sourceData, (value) => MD5(value));
  lockfile.checksums[sectionKey] = sectionChecksums;
  _saveLockfile(lockfile);
  },
  registerPartialSourceData: (pathPattern, partialSourceData) => {
  const lockfile = _loadLockfile();
  const sectionKey = MD5(pathPattern);
- const sectionChecksums = _16.mapValues(partialSourceData, (value) => MD5(value));
- lockfile.checksums[sectionKey] = _16.merge({}, lockfile.checksums[sectionKey] ?? {}, sectionChecksums);
+ const sectionChecksums = _17.mapValues(partialSourceData, (value) => MD5(value));
+ lockfile.checksums[sectionKey] = _17.merge({}, lockfile.checksums[sectionKey] ?? {}, sectionChecksums);
  _saveLockfile(lockfile);
  },
  extractUpdatedData: (pathPattern, sourceData) => {
  const lockfile = _loadLockfile();
  const sectionKey = MD5(pathPattern);
- const currentChecksums = _16.mapValues(sourceData, (value) => MD5(value));
+ const currentChecksums = _17.mapValues(sourceData, (value) => MD5(value));
  const savedChecksums = lockfile.checksums[sectionKey] || {};
- const updatedData = _16.pickBy(sourceData, (value, key) => savedChecksums[key] !== currentChecksums[key]);
+ const updatedData = _17.pickBy(sourceData, (value, key) => savedChecksums[key] !== currentChecksums[key]);
  return updatedData;
  }
  };
@@ -2519,7 +2582,7 @@ function createLockfileHelper() {
  fs7.writeFileSync(lockfilePath, content);
  }
  function _getLockfilePath() {
- return path6.join(process.cwd(), "i18n.lock");
+ return path7.join(process.cwd(), "i18n.lock");
  }
  }
  var LockfileSchema = Z3.object({
@@ -2544,7 +2607,7 @@ import inquirer2 from "inquirer";
  import externalEditor from "external-editor";
 
  // src/cli/utils/cache.ts
- import path7 from "path";
+ import path8 from "path";
  import fs8 from "fs";
  var cacheChunk = (targetLocale, sourceChunk, processedChunk) => {
  const rows = Object.entries(sourceChunk).map(([key, source]) => ({
@@ -2594,7 +2657,7 @@ function _appendToCache(rows) {
  fs8.appendFileSync(cacheFilePath, lines);
  }
  function _getCacheFilePath() {
- return path7.join(process.cwd(), "i18n.cache");
+ return path8.join(process.cwd(), "i18n.cache");
  }
  function _buildJSONLines(rows) {
  return rows.map((row) => JSON.stringify(row)).join("\n") + "\n";
@@ -2748,7 +2811,7 @@ var i18n_default = new Command6().command("i18n").description("Run Localization
  targetData
  });
  if (flags.key) {
- processableData = _17.pickBy(processableData, (_19, key) => key === flags.key);
+ processableData = _18.pickBy(processableData, (_20, key) => key === flags.key);
  }
  if (flags.verbose) {
  bucketOra.info(JSON.stringify(processableData, null, 2));
@@ -2784,7 +2847,7 @@ var i18n_default = new Command6().command("i18n").description("Run Localization
  if (flags.verbose) {
  bucketOra.info(JSON.stringify(processedTargetData, null, 2));
  }
- let finalTargetData = _17.merge({}, sourceData, targetData, processedTargetData);
+ let finalTargetData = _18.merge({}, sourceData, targetData, processedTargetData);
  if (flags.interactive) {
  bucketOra.stop();
  const reviewedData = await reviewChanges({
@@ -2798,7 +2861,7 @@ var i18n_default = new Command6().command("i18n").description("Run Localization
  finalTargetData = reviewedData;
  bucketOra.start(`Applying changes to ${bucketConfig} (${targetLocale})`);
  }
- const finalDiffSize = _17.chain(finalTargetData).omitBy((value, key) => value === targetData[key]).size().value();
+ const finalDiffSize = _18.chain(finalTargetData).omitBy((value, key) => value === targetData[key]).size().value();
  if (finalDiffSize > 0 || flags.force) {
  await bucketLoader.push(targetLocale, finalTargetData);
  bucketOra.succeed(`[${sourceLocale} -> ${targetLocale}] Localization completed`);
@@ -2843,9 +2906,9 @@ var i18n_default = new Command6().command("i18n").description("Run Localization
  }
  });
  function calculateDataDelta(args) {
- const newKeys = _17.difference(Object.keys(args.sourceData), Object.keys(args.targetData));
+ const newKeys = _18.difference(Object.keys(args.sourceData), Object.keys(args.targetData));
  const updatedKeys = Object.keys(args.updatedSourceData);
- const result = _17.chain(args.sourceData).pickBy((value, key) => newKeys.includes(key) || updatedKeys.includes(key)).value();
+ const result = _18.chain(args.sourceData).pickBy((value, key) => newKeys.includes(key) || updatedKeys.includes(key)).value();
  return result;
  }
  async function retryWithExponentialBackoff(operation, maxAttempts, baseDelay = 1e3) {
@@ -2986,7 +3049,7 @@ Reviewing changes for ${chalk.blue(args.pathPattern)} (${chalk.yellow(args.targe
  return args.currentData;
  }
  const customData = { ...args.currentData };
- const changes = _17.reduce(
+ const changes = _18.reduce(
  args.proposedData,
  (result, value, key) => {
  if (args.currentData[key] !== value) {
@@ -3067,7 +3130,7 @@ var flagsSchema = Z5.object({
  // src/cli/cmd/cleanup.ts
  import { resolveOverridenLocale as resolveOverridenLocale5 } from "@lingo.dev/_spec";
  import { Command as Command8 } from "interactive-commander";
- import _18 from "lodash";
+ import _19 from "lodash";
  import Ora7 from "ora";
  var cleanup_default = new Command8().command("cleanup").description("Remove keys from target files that do not exist in the source file").helpOption("-h, --help", "Show help").option("--locale <locale>", "Specific locale to cleanup").option("--bucket <bucket>", "Specific bucket to cleanup").option("--dry-run", "Show what would be removed without making changes").option("--verbose", "Show verbose output").action(async function(options) {
  const ora = Ora7();
@@ -3097,7 +3160,7 @@ var cleanup_default = new Command8().command("cleanup").description("Remove keys
  try {
  const targetData = await bucketLoader.pull(targetLocale);
  const targetKeys = Object.keys(targetData);
- const keysToRemove = _18.difference(targetKeys, sourceKeys);
+ const keysToRemove = _19.difference(targetKeys, sourceKeys);
  if (keysToRemove.length === 0) {
  bucketOra.succeed(`[${targetLocale}] No keys to remove`);
  continue;
@@ -3106,7 +3169,7 @@ var cleanup_default = new Command8().command("cleanup").description("Remove keys
  bucketOra.info(`[${targetLocale}] Keys to remove: ${JSON.stringify(keysToRemove, null, 2)}`);
  }
  if (!options.dryRun) {
- const cleanedData = _18.pick(targetData, sourceKeys);
+ const cleanedData = _19.pick(targetData, sourceKeys);
  await bucketLoader.push(targetLocale, cleanedData);
  bucketOra.succeed(`[${targetLocale}] Removed ${keysToRemove.length} keys`);
  } else {
@@ -3158,7 +3221,7 @@ function displaySummary(results) {
  // package.json
  var package_default = {
  name: "lingo.dev",
- version: "0.74.9",
+ version: "0.74.11",
  description: "Lingo.dev CLI",
  private: false,
  publishConfig: {
@@ -3195,6 +3258,7 @@ var package_default = {
  dev: "tsup --watch",
  build: "tsc --noEmit && tsup",
  test: "vitest run",
+ "test:watch": "vitest",
  clean: "rm -rf build"
  },
  keywords: [],