@lingo.dev/compiler 0.1.3 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,9 +1,9 @@
  import { logger } from "../utils/logger.mjs";
  import { DEFAULT_TIMEOUTS, withTimeout } from "../utils/timeout.mjs";
  import { getLingoDir } from "../utils/path-helpers.mjs";
- import fs from "fs/promises";
+ import fsPromises from "fs/promises";
  import path from "path";
- import fs$1 from "fs";
+ import fs from "fs";
  import lockfile from "proper-lockfile";

  //#region src/metadata/manager.ts
@@ -22,7 +22,7 @@ function loadMetadata(path$1) {
  function cleanupExistingMetadata(metadataFilePath) {
  logger.debug(`Attempting to cleanup metadata file: ${metadataFilePath}`);
  try {
- fs$1.unlinkSync(metadataFilePath);
+ fs.unlinkSync(metadataFilePath);
  logger.info(`🧹 Cleaned up build metadata file: ${metadataFilePath}`);
  } catch (error) {
  if (error.code === "ENOENT") logger.debug(`Metadata file already deleted or doesn't exist: ${metadataFilePath}`);
@@ -50,7 +50,7 @@ var MetadataManager = class {
  */
  async loadMetadata() {
  try {
- const content = await withTimeout(fs.readFile(this.filePath, "utf-8"), DEFAULT_TIMEOUTS.METADATA, "Load metadata");
+ const content = await withTimeout(fsPromises.readFile(this.filePath, "utf-8"), DEFAULT_TIMEOUTS.METADATA, "Load metadata");
  return JSON.parse(content);
  } catch (error) {
  if (error.code === "ENOENT") return createEmptyMetadata();
@@ -62,7 +62,7 @@ var MetadataManager = class {
  * Times out after 15 seconds to prevent indefinite hangs
  */
  async saveMetadata(metadata) {
- await withTimeout(fs.mkdir(path.dirname(this.filePath), { recursive: true }), DEFAULT_TIMEOUTS.FILE_IO, "Create metadata directory");
+ await withTimeout(fsPromises.mkdir(path.dirname(this.filePath), { recursive: true }), DEFAULT_TIMEOUTS.FILE_IO, "Create metadata directory");
  metadata.stats = {
  totalEntries: Object.keys(metadata.entries).length,
  lastUpdated: (/* @__PURE__ */ new Date()).toISOString()
@@ -71,17 +71,17 @@ var MetadataManager = class {
  const base = path.basename(this.filePath);
  const tmpPath = path.join(dir, `.${base}.tmp-${process.pid}-${Date.now()}`);
  const json = JSON.stringify(metadata, null, 2);
- await withTimeout(fs.writeFile(tmpPath, json, "utf-8"), DEFAULT_TIMEOUTS.METADATA, "Save metadata (tmp write)");
+ await withTimeout(fsPromises.writeFile(tmpPath, json, "utf-8"), DEFAULT_TIMEOUTS.METADATA, "Save metadata (tmp write)");
  try {
- await withTimeout(fs.rename(tmpPath, this.filePath), DEFAULT_TIMEOUTS.METADATA, "Save metadata (atomic rename)");
+ await withTimeout(fsPromises.rename(tmpPath, this.filePath), DEFAULT_TIMEOUTS.METADATA, "Save metadata (atomic rename)");
  } catch (error) {
  if (error && typeof error === "object" && "code" in error && error.code === "EPERM") {
- await withTimeout(fs.writeFile(this.filePath, json, "utf-8"), DEFAULT_TIMEOUTS.METADATA, "Save metadata (EPERM fallback direct write)");
+ await withTimeout(fsPromises.writeFile(this.filePath, json, "utf-8"), DEFAULT_TIMEOUTS.METADATA, "Save metadata (EPERM fallback direct write)");
  return;
  }
  throw error;
  } finally {
- await fs.unlink(tmpPath).catch(() => {});
+ await fsPromises.unlink(tmpPath).catch(() => {});
  }
  }
  /**
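
Note: the saveMetadata change above only swaps the fs/promises import alias; the underlying write-temp-file-then-rename flow is unchanged. For reference, a minimal standalone sketch of that pattern using only Node's built-in fs/promises and path modules (the function name and parameters here are illustrative, not part of the package):

    import fsPromises from "fs/promises";
    import path from "path";

    // Write JSON atomically: write a sibling temp file, then rename it over the target.
    // rename() within one directory is atomic on POSIX; on Windows it can fail with
    // EPERM while the file is briefly held, so a direct (non-atomic) write is the fallback.
    async function writeJsonAtomic(filePath, data) {
      const dir = path.dirname(filePath);
      const tmpPath = path.join(dir, `.${path.basename(filePath)}.tmp-${process.pid}-${Date.now()}`);
      const json = JSON.stringify(data, null, 2);
      await fsPromises.mkdir(dir, { recursive: true });
      await fsPromises.writeFile(tmpPath, json, "utf-8");
      try {
        await fsPromises.rename(tmpPath, filePath);
      } catch (error) {
        if (error && error.code === "EPERM") {
          await fsPromises.writeFile(filePath, json, "utf-8");
          return;
        }
        throw error;
      } finally {
        // Best-effort cleanup; ENOENT after a successful rename is expected and ignored.
        await fsPromises.unlink(tmpPath).catch(() => {});
      }
    }
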
@@ -93,11 +93,11 @@ var MetadataManager = class {
  */
  async saveMetadataWithEntries(entries) {
  const lockDir = path.dirname(this.filePath);
- await fs.mkdir(lockDir, { recursive: true });
+ await fsPromises.mkdir(lockDir, { recursive: true });
  try {
- await fs.access(this.filePath);
+ await fsPromises.access(this.filePath);
  } catch {
- await fs.writeFile(this.filePath, JSON.stringify(createEmptyMetadata(), null, 2), "utf-8");
+ await fsPromises.writeFile(this.filePath, JSON.stringify(createEmptyMetadata(), null, 2), "utf-8");
  }
  const release = await lockfile.lock(this.filePath, {
  retries: {
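
Note: saveMetadataWithEntries keeps its locked read-modify-write flow; again only the alias changes. A rough sketch of the same proper-lockfile usage, assuming a manager object exposing filePath, loadMetadata, and saveMetadata as in the class above (entry.hash is the key used by the source; the wrapper function is illustrative):

    import lockfile from "proper-lockfile";

    // Serialize concurrent writers: take an advisory lock on the metadata file,
    // re-read the latest state inside the lock, merge the new entries, save, release.
    async function updateWithLock(manager, entries) {
      const release = await lockfile.lock(manager.filePath, {
        retries: { retries: 10, minTimeout: 50, maxTimeout: 1000 },
        stale: 2000, // locks left behind by crashed processes go stale after 2s
      });
      try {
        const metadata = await manager.loadMetadata();
        for (const entry of entries) {
          metadata.entries[entry.hash] = entry;
        }
        await manager.saveMetadata(metadata);
        return metadata;
      } finally {
        await release();
      }
    }
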
@@ -1 +1 @@
- {"version":3,"file":"manager.mjs","names":["path","error: any","filePath: string","fsPromises"],"sources":["../../src/metadata/manager.ts"],"sourcesContent":["import fsPromises from \"fs/promises\";\nimport fs from \"fs\";\nimport path from \"path\";\nimport lockfile from \"proper-lockfile\";\nimport type { MetadataSchema, PathConfig, TranslationEntry } from \"../types\";\nimport { DEFAULT_TIMEOUTS, withTimeout } from \"../utils/timeout\";\nimport { getLingoDir } from \"../utils/path-helpers\";\nimport { logger } from \"../utils/logger\";\n\nexport function createEmptyMetadata(): MetadataSchema {\n return {\n entries: {},\n stats: {\n totalEntries: 0,\n lastUpdated: new Date().toISOString(),\n },\n };\n}\n\nexport function loadMetadata(path: string) {\n return new MetadataManager(path).loadMetadata();\n}\n\nexport function cleanupExistingMetadata(metadataFilePath: string) {\n // General cleanup. Delete metadata and stop the server if any was started.\n logger.debug(`Attempting to cleanup metadata file: ${metadataFilePath}`);\n\n try {\n fs.unlinkSync(metadataFilePath);\n logger.info(`🧹 Cleaned up build metadata file: ${metadataFilePath}`);\n } catch (error: any) {\n // Ignore if file doesn't exist\n if (error.code === \"ENOENT\") {\n logger.debug(\n `Metadata file already deleted or doesn't exist: ${metadataFilePath}`,\n );\n } else {\n logger.warn(`Failed to cleanup metadata file: ${error.message}`);\n }\n }\n}\n\n/**\n * Get the absolute path to the metadata file\n *\n * @param config - Config with sourceRoot, lingoDir, and environment\n * @returns Absolute path to metadata file\n */\nexport function getMetadataPath(config: PathConfig): string {\n const filename =\n // Similar to next keeping dev build separate, let's keep the build metadata clean of any dev mode additions\n config.environment === \"development\"\n ? 
\"metadata-dev.json\"\n : \"metadata-build.json\";\n return path.join(getLingoDir(config), filename);\n}\n\nexport class MetadataManager {\n constructor(private readonly filePath: string) {}\n\n /**\n * Load metadata from disk\n * Creates empty metadata if file doesn't exist\n * Times out after 15 seconds to prevent indefinite hangs\n */\n async loadMetadata(): Promise<MetadataSchema> {\n try {\n const content = await withTimeout(\n fsPromises.readFile(this.filePath, \"utf-8\"),\n DEFAULT_TIMEOUTS.METADATA,\n \"Load metadata\",\n );\n return JSON.parse(content) as MetadataSchema;\n } catch (error: any) {\n if (error.code === \"ENOENT\") {\n // File doesn't exist, create new metadata\n return createEmptyMetadata();\n }\n throw error;\n }\n }\n\n /**\n * Save metadata to disk\n * Times out after 15 seconds to prevent indefinite hangs\n */\n private async saveMetadata(metadata: MetadataSchema): Promise<void> {\n await withTimeout(\n fsPromises.mkdir(path.dirname(this.filePath), { recursive: true }),\n DEFAULT_TIMEOUTS.FILE_IO,\n \"Create metadata directory\",\n );\n\n metadata.stats = {\n totalEntries: Object.keys(metadata.entries).length,\n lastUpdated: new Date().toISOString(),\n };\n\n // Per LLM writing to a file is not an atomic operation while rename is, so nobody should get partial content.\n // Sounds reasonable.\n const dir = path.dirname(this.filePath);\n const base = path.basename(this.filePath);\n\n // Keep temp file in the same directory to maximize chance that rename is atomic\n const tmpPath = path.join(dir, `.${base}.tmp-${process.pid}-${Date.now()}`);\n\n const json = JSON.stringify(metadata, null, 2);\n\n await withTimeout(\n fsPromises.writeFile(tmpPath, json, \"utf-8\"),\n DEFAULT_TIMEOUTS.METADATA,\n \"Save metadata (tmp write)\",\n );\n\n try {\n // TODO (AleksandrSl 14/12/2025): LLM says that we may want to remove older file first for windows, but it seems lo work fine as is.\n await withTimeout(\n fsPromises.rename(tmpPath, this.filePath),\n DEFAULT_TIMEOUTS.METADATA,\n \"Save metadata (atomic rename)\",\n );\n } catch (error) {\n // On Windows, rename() can fail with EPERM if something briefly holds the file.\n // As a fallback, try writing directly to the destination (not atomic).\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === \"EPERM\"\n ) {\n await withTimeout(\n fsPromises.writeFile(this.filePath, json, \"utf-8\"),\n DEFAULT_TIMEOUTS.METADATA,\n \"Save metadata (EPERM fallback direct write)\",\n );\n return;\n }\n throw error;\n } finally {\n // Best-effort cleanup if rename failed for some reason\n await fsPromises.unlink(tmpPath).catch(() => {});\n }\n }\n\n /**\n * Thread-safe save operation that atomically updates metadata with new entries\n * Uses file locking to prevent concurrent write corruption\n *\n * @param entries - Translation entries to add/update\n * @returns The updated metadata schema\n */\n async saveMetadataWithEntries(\n entries: TranslationEntry[],\n ): Promise<MetadataSchema> {\n const lockDir = path.dirname(this.filePath);\n\n await fsPromises.mkdir(lockDir, { recursive: true });\n\n try {\n await fsPromises.access(this.filePath);\n } catch {\n await fsPromises.writeFile(\n this.filePath,\n JSON.stringify(createEmptyMetadata(), null, 2),\n \"utf-8\",\n );\n }\n\n const release = await lockfile.lock(this.filePath, {\n retries: {\n retries: 10,\n minTimeout: 50,\n maxTimeout: 1000,\n },\n stale: 2000,\n });\n\n try {\n // Re-load metadata inside lock to get latest state\n const currentMetadata = 
await this.loadMetadata();\n for (const entry of entries) {\n currentMetadata.entries[entry.hash] = entry;\n }\n await this.saveMetadata(currentMetadata);\n return currentMetadata;\n } finally {\n await release();\n }\n }\n}\n"],"mappings":";;;;;;;;;AASA,SAAgB,sBAAsC;AACpD,QAAO;EACL,SAAS,EAAE;EACX,OAAO;GACL,cAAc;GACd,8BAAa,IAAI,MAAM,EAAC,aAAa;GACtC;EACF;;AAGH,SAAgB,aAAa,QAAc;AACzC,QAAO,IAAI,gBAAgBA,OAAK,CAAC,cAAc;;AAGjD,SAAgB,wBAAwB,kBAA0B;AAEhE,QAAO,MAAM,wCAAwC,mBAAmB;AAExE,KAAI;AACF,OAAG,WAAW,iBAAiB;AAC/B,SAAO,KAAK,sCAAsC,mBAAmB;UAC9DC,OAAY;AAEnB,MAAI,MAAM,SAAS,SACjB,QAAO,MACL,mDAAmD,mBACpD;MAED,QAAO,KAAK,oCAAoC,MAAM,UAAU;;;;;;;;;AAWtE,SAAgB,gBAAgB,QAA4B;CAC1D,MAAM,WAEJ,OAAO,gBAAgB,gBACnB,sBACA;AACN,QAAO,KAAK,KAAK,YAAY,OAAO,EAAE,SAAS;;AAGjD,IAAa,kBAAb,MAA6B;CAC3B,YAAY,AAAiBC,UAAkB;EAAlB;;;;;;;CAO7B,MAAM,eAAwC;AAC5C,MAAI;GACF,MAAM,UAAU,MAAM,YACpBC,GAAW,SAAS,KAAK,UAAU,QAAQ,EAC3C,iBAAiB,UACjB,gBACD;AACD,UAAO,KAAK,MAAM,QAAQ;WACnBF,OAAY;AACnB,OAAI,MAAM,SAAS,SAEjB,QAAO,qBAAqB;AAE9B,SAAM;;;;;;;CAQV,MAAc,aAAa,UAAyC;AAClE,QAAM,YACJE,GAAW,MAAM,KAAK,QAAQ,KAAK,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC,EAClE,iBAAiB,SACjB,4BACD;AAED,WAAS,QAAQ;GACf,cAAc,OAAO,KAAK,SAAS,QAAQ,CAAC;GAC5C,8BAAa,IAAI,MAAM,EAAC,aAAa;GACtC;EAID,MAAM,MAAM,KAAK,QAAQ,KAAK,SAAS;EACvC,MAAM,OAAO,KAAK,SAAS,KAAK,SAAS;EAGzC,MAAM,UAAU,KAAK,KAAK,KAAK,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG,KAAK,KAAK,GAAG;EAE3E,MAAM,OAAO,KAAK,UAAU,UAAU,MAAM,EAAE;AAE9C,QAAM,YACJA,GAAW,UAAU,SAAS,MAAM,QAAQ,EAC5C,iBAAiB,UACjB,4BACD;AAED,MAAI;AAEF,SAAM,YACJA,GAAW,OAAO,SAAS,KAAK,SAAS,EACzC,iBAAiB,UACjB,gCACD;WACM,OAAO;AAGd,OACE,SACA,OAAO,UAAU,YACjB,UAAU,SACV,MAAM,SAAS,SACf;AACA,UAAM,YACJA,GAAW,UAAU,KAAK,UAAU,MAAM,QAAQ,EAClD,iBAAiB,UACjB,8CACD;AACD;;AAEF,SAAM;YACE;AAER,SAAMA,GAAW,OAAO,QAAQ,CAAC,YAAY,GAAG;;;;;;;;;;CAWpD,MAAM,wBACJ,SACyB;EACzB,MAAM,UAAU,KAAK,QAAQ,KAAK,SAAS;AAE3C,QAAMA,GAAW,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAEpD,MAAI;AACF,SAAMA,GAAW,OAAO,KAAK,SAAS;UAChC;AACN,SAAMA,GAAW,UACf,KAAK,UACL,KAAK,UAAU,qBAAqB,EAAE,MAAM,EAAE,EAC9C,QACD;;EAGH,MAAM,UAAU,MAAM,SAAS,KAAK,KAAK,UAAU;GACjD,SAAS;IACP,SAAS;IACT,YAAY;IACZ,YAAY;IACb;GACD,OAAO;GACR,CAAC;AAEF,MAAI;GAEF,MAAM,kBAAkB,MAAM,KAAK,cAAc;AACjD,QAAK,MAAM,SAAS,QAClB,iBAAgB,QAAQ,MAAM,QAAQ;AAExC,SAAM,KAAK,aAAa,gBAAgB;AACxC,UAAO;YACC;AACR,SAAM,SAAS"}
+ {"version":3,"file":"manager.mjs","names":["path","error: any","filePath: string"],"sources":["../../src/metadata/manager.ts"],"sourcesContent":["import fsPromises from \"fs/promises\";\nimport fs from \"fs\";\nimport path from \"path\";\nimport lockfile from \"proper-lockfile\";\nimport type { MetadataSchema, PathConfig, TranslationEntry } from \"../types\";\nimport { DEFAULT_TIMEOUTS, withTimeout } from \"../utils/timeout\";\nimport { getLingoDir } from \"../utils/path-helpers\";\nimport { logger } from \"../utils/logger\";\n\nexport function createEmptyMetadata(): MetadataSchema {\n return {\n entries: {},\n stats: {\n totalEntries: 0,\n lastUpdated: new Date().toISOString(),\n },\n };\n}\n\nexport function loadMetadata(path: string) {\n return new MetadataManager(path).loadMetadata();\n}\n\nexport function cleanupExistingMetadata(metadataFilePath: string) {\n // General cleanup. Delete metadata and stop the server if any was started.\n logger.debug(`Attempting to cleanup metadata file: ${metadataFilePath}`);\n\n try {\n fs.unlinkSync(metadataFilePath);\n logger.info(`🧹 Cleaned up build metadata file: ${metadataFilePath}`);\n } catch (error: any) {\n // Ignore if file doesn't exist\n if (error.code === \"ENOENT\") {\n logger.debug(\n `Metadata file already deleted or doesn't exist: ${metadataFilePath}`,\n );\n } else {\n logger.warn(`Failed to cleanup metadata file: ${error.message}`);\n }\n }\n}\n\n/**\n * Get the absolute path to the metadata file\n *\n * @param config - Config with sourceRoot, lingoDir, and environment\n * @returns Absolute path to metadata file\n */\nexport function getMetadataPath(config: PathConfig): string {\n const filename =\n // Similar to next keeping dev build separate, let's keep the build metadata clean of any dev mode additions\n config.environment === \"development\"\n ? 
\"metadata-dev.json\"\n : \"metadata-build.json\";\n return path.join(getLingoDir(config), filename);\n}\n\nexport class MetadataManager {\n constructor(private readonly filePath: string) {}\n\n /**\n * Load metadata from disk\n * Creates empty metadata if file doesn't exist\n * Times out after 15 seconds to prevent indefinite hangs\n */\n async loadMetadata(): Promise<MetadataSchema> {\n try {\n const content = await withTimeout(\n fsPromises.readFile(this.filePath, \"utf-8\"),\n DEFAULT_TIMEOUTS.METADATA,\n \"Load metadata\",\n );\n return JSON.parse(content) as MetadataSchema;\n } catch (error: any) {\n if (error.code === \"ENOENT\") {\n // File doesn't exist, create new metadata\n return createEmptyMetadata();\n }\n throw error;\n }\n }\n\n /**\n * Save metadata to disk\n * Times out after 15 seconds to prevent indefinite hangs\n */\n private async saveMetadata(metadata: MetadataSchema): Promise<void> {\n await withTimeout(\n fsPromises.mkdir(path.dirname(this.filePath), { recursive: true }),\n DEFAULT_TIMEOUTS.FILE_IO,\n \"Create metadata directory\",\n );\n\n metadata.stats = {\n totalEntries: Object.keys(metadata.entries).length,\n lastUpdated: new Date().toISOString(),\n };\n\n // Per LLM writing to a file is not an atomic operation while rename is, so nobody should get partial content.\n // Sounds reasonable.\n const dir = path.dirname(this.filePath);\n const base = path.basename(this.filePath);\n\n // Keep temp file in the same directory to maximize chance that rename is atomic\n const tmpPath = path.join(dir, `.${base}.tmp-${process.pid}-${Date.now()}`);\n\n const json = JSON.stringify(metadata, null, 2);\n\n await withTimeout(\n fsPromises.writeFile(tmpPath, json, \"utf-8\"),\n DEFAULT_TIMEOUTS.METADATA,\n \"Save metadata (tmp write)\",\n );\n\n try {\n // TODO (AleksandrSl 14/12/2025): LLM says that we may want to remove older file first for windows, but it seems lo work fine as is.\n await withTimeout(\n fsPromises.rename(tmpPath, this.filePath),\n DEFAULT_TIMEOUTS.METADATA,\n \"Save metadata (atomic rename)\",\n );\n } catch (error) {\n // On Windows, rename() can fail with EPERM if something briefly holds the file.\n // As a fallback, try writing directly to the destination (not atomic).\n if (\n error &&\n typeof error === \"object\" &&\n \"code\" in error &&\n error.code === \"EPERM\"\n ) {\n await withTimeout(\n fsPromises.writeFile(this.filePath, json, \"utf-8\"),\n DEFAULT_TIMEOUTS.METADATA,\n \"Save metadata (EPERM fallback direct write)\",\n );\n return;\n }\n throw error;\n } finally {\n // Best-effort cleanup if rename failed for some reason\n await fsPromises.unlink(tmpPath).catch(() => {});\n }\n }\n\n /**\n * Thread-safe save operation that atomically updates metadata with new entries\n * Uses file locking to prevent concurrent write corruption\n *\n * @param entries - Translation entries to add/update\n * @returns The updated metadata schema\n */\n async saveMetadataWithEntries(\n entries: TranslationEntry[],\n ): Promise<MetadataSchema> {\n const lockDir = path.dirname(this.filePath);\n\n await fsPromises.mkdir(lockDir, { recursive: true });\n\n try {\n await fsPromises.access(this.filePath);\n } catch {\n await fsPromises.writeFile(\n this.filePath,\n JSON.stringify(createEmptyMetadata(), null, 2),\n \"utf-8\",\n );\n }\n\n const release = await lockfile.lock(this.filePath, {\n retries: {\n retries: 10,\n minTimeout: 50,\n maxTimeout: 1000,\n },\n stale: 2000,\n });\n\n try {\n // Re-load metadata inside lock to get latest state\n const currentMetadata = 
await this.loadMetadata();\n for (const entry of entries) {\n currentMetadata.entries[entry.hash] = entry;\n }\n await this.saveMetadata(currentMetadata);\n return currentMetadata;\n } finally {\n await release();\n }\n }\n}\n"],"mappings":";;;;;;;;;AASA,SAAgB,sBAAsC;AACpD,QAAO;EACL,SAAS,EAAE;EACX,OAAO;GACL,cAAc;GACd,8BAAa,IAAI,MAAM,EAAC,aAAa;GACtC;EACF;;AAGH,SAAgB,aAAa,QAAc;AACzC,QAAO,IAAI,gBAAgBA,OAAK,CAAC,cAAc;;AAGjD,SAAgB,wBAAwB,kBAA0B;AAEhE,QAAO,MAAM,wCAAwC,mBAAmB;AAExE,KAAI;AACF,KAAG,WAAW,iBAAiB;AAC/B,SAAO,KAAK,sCAAsC,mBAAmB;UAC9DC,OAAY;AAEnB,MAAI,MAAM,SAAS,SACjB,QAAO,MACL,mDAAmD,mBACpD;MAED,QAAO,KAAK,oCAAoC,MAAM,UAAU;;;;;;;;;AAWtE,SAAgB,gBAAgB,QAA4B;CAC1D,MAAM,WAEJ,OAAO,gBAAgB,gBACnB,sBACA;AACN,QAAO,KAAK,KAAK,YAAY,OAAO,EAAE,SAAS;;AAGjD,IAAa,kBAAb,MAA6B;CAC3B,YAAY,AAAiBC,UAAkB;EAAlB;;;;;;;CAO7B,MAAM,eAAwC;AAC5C,MAAI;GACF,MAAM,UAAU,MAAM,YACpB,WAAW,SAAS,KAAK,UAAU,QAAQ,EAC3C,iBAAiB,UACjB,gBACD;AACD,UAAO,KAAK,MAAM,QAAQ;WACnBD,OAAY;AACnB,OAAI,MAAM,SAAS,SAEjB,QAAO,qBAAqB;AAE9B,SAAM;;;;;;;CAQV,MAAc,aAAa,UAAyC;AAClE,QAAM,YACJ,WAAW,MAAM,KAAK,QAAQ,KAAK,SAAS,EAAE,EAAE,WAAW,MAAM,CAAC,EAClE,iBAAiB,SACjB,4BACD;AAED,WAAS,QAAQ;GACf,cAAc,OAAO,KAAK,SAAS,QAAQ,CAAC;GAC5C,8BAAa,IAAI,MAAM,EAAC,aAAa;GACtC;EAID,MAAM,MAAM,KAAK,QAAQ,KAAK,SAAS;EACvC,MAAM,OAAO,KAAK,SAAS,KAAK,SAAS;EAGzC,MAAM,UAAU,KAAK,KAAK,KAAK,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG,KAAK,KAAK,GAAG;EAE3E,MAAM,OAAO,KAAK,UAAU,UAAU,MAAM,EAAE;AAE9C,QAAM,YACJ,WAAW,UAAU,SAAS,MAAM,QAAQ,EAC5C,iBAAiB,UACjB,4BACD;AAED,MAAI;AAEF,SAAM,YACJ,WAAW,OAAO,SAAS,KAAK,SAAS,EACzC,iBAAiB,UACjB,gCACD;WACM,OAAO;AAGd,OACE,SACA,OAAO,UAAU,YACjB,UAAU,SACV,MAAM,SAAS,SACf;AACA,UAAM,YACJ,WAAW,UAAU,KAAK,UAAU,MAAM,QAAQ,EAClD,iBAAiB,UACjB,8CACD;AACD;;AAEF,SAAM;YACE;AAER,SAAM,WAAW,OAAO,QAAQ,CAAC,YAAY,GAAG;;;;;;;;;;CAWpD,MAAM,wBACJ,SACyB;EACzB,MAAM,UAAU,KAAK,QAAQ,KAAK,SAAS;AAE3C,QAAM,WAAW,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;AAEpD,MAAI;AACF,SAAM,WAAW,OAAO,KAAK,SAAS;UAChC;AACN,SAAM,WAAW,UACf,KAAK,UACL,KAAK,UAAU,qBAAqB,EAAE,MAAM,EAAE,EAC9C,QACD;;EAGH,MAAM,UAAU,MAAM,SAAS,KAAK,KAAK,UAAU;GACjD,SAAS;IACP,SAAS;IACT,YAAY;IACZ,YAAY;IACb;GACD,OAAO;GACR,CAAC;AAEF,MAAI;GAEF,MAAM,kBAAkB,MAAM,KAAK,cAAc;AACjD,QAAK,MAAM,SAAS,QAClB,iBAAgB,QAAQ,MAAM,QAAQ;AAExC,SAAM,KAAK,aAAa,gBAAgB;AACxC,UAAO;YACC;AACR,SAAM,SAAS"}
@@ -4,7 +4,7 @@ import { createCache } from "../translators/cache-factory.mjs";
  import { TranslationService } from "../translators/translation-service.mjs";
  import { loadMetadata } from "../metadata/manager.mjs";
  import { startTranslationServer } from "../translation-server/translation-server.mjs";
- import fs from "fs/promises";
+ import fsPromises from "fs/promises";
  import path from "path";

  //#region src/plugin/build-translator.ts
@@ -147,7 +147,7 @@ function buildCacheStats(config, metadata) {
  }
  async function copyStaticFiles(config, publicOutputPath, metadata, cache) {
  logger.info(`📦 Generating static translation files in ${publicOutputPath}`);
- await fs.mkdir(publicOutputPath, { recursive: true });
+ await fsPromises.mkdir(publicOutputPath, { recursive: true });
  const usedHashes = new Set(Object.keys(metadata.entries));
  logger.info(`📊 Filtering translations to ${usedHashes.size} used hash(es)`);
  const allLocales = config.pluralization?.enabled !== false ? [config.sourceLocale, ...config.targetLocales] : config.targetLocales;
@@ -156,7 +156,7 @@ async function copyStaticFiles(config, publicOutputPath, metadata, cache) {
  try {
  const entries = await cache.get(locale, Array.from(usedHashes));
  const outputData = dictionaryFrom(locale, entries);
- await fs.writeFile(publicFilePath, JSON.stringify(outputData, null, 2), "utf-8");
+ await fsPromises.writeFile(publicFilePath, JSON.stringify(outputData, null, 2), "utf-8");
  logger.info(`✓ Generated ${locale}.json (${Object.keys(entries).length} translations)`);
  } catch (error) {
  logger.error(`❌ Failed to generate ${locale}.json:`, error);
@@ -1 +1 @@
- {"version":3,"file":"build-translator.mjs","names":["translationServer: TranslationServer | undefined","stats: BuildTranslationResult[\"stats\"]","errors: Array<{ locale: LocaleCode; error: string }>","missingLocales: string[]","incompleteLocales: Array<{\n locale: LocaleCode;\n missing: number;\n total: number;\n }>"],"sources":["../../src/plugin/build-translator.ts"],"sourcesContent":["/**\n * Build-time translation processor\n *\n * Handles translation generation and validation at build time\n * Supports two modes:\n * - \"translate\": Generate all translations, fail if translation fails\n * - \"cache-only\": Validate cache completeness, fail if incomplete\n */\n// TODO (AleksandrSl 08/12/2025): Add ICU validation for messages? The problem is that we don't know which will be rendered as a simple text\nimport fs from \"fs/promises\";\nimport path from \"path\";\nimport type { LingoConfig, MetadataSchema } from \"../types\";\nimport { logger } from \"../utils/logger\";\nimport { startTranslationServer, type TranslationServer, } from \"../translation-server\";\nimport { loadMetadata } from \"../metadata/manager\";\nimport { createCache, type TranslationCache, TranslationService, } from \"../translators\";\nimport { dictionaryFrom } from \"../translators/api\";\nimport type { LocaleCode } from \"lingo.dev/spec\";\n\nexport interface BuildTranslationOptions {\n config: LingoConfig;\n publicOutputPath: string;\n metadataFilePath: string;\n}\n\nexport interface BuildTranslationResult {\n /**\n * Whether the build succeeded\n */\n success: boolean;\n\n /**\n * Error message if build failed\n */\n error?: string;\n\n /**\n * Translation statistics per locale\n */\n stats: Record<\n string,\n {\n total: number;\n translated: number;\n failed: number;\n }\n >;\n}\n\n/**\n * Process translations at build time\n *\n * @throws Error if validation or translation fails (causes build to fail)\n */\nexport async function processBuildTranslations(\n options: BuildTranslationOptions,\n): Promise<BuildTranslationResult> {\n const { config, publicOutputPath, metadataFilePath } = options;\n\n // Determine build mode (env var > options > config)\n const buildMode =\n (process.env.LINGO_BUILD_MODE as \"translate\" | \"cache-only\") ||\n config.buildMode;\n\n logger.info(`🌍 Build mode: ${buildMode}`);\n\n const metadata = await loadMetadata(metadataFilePath);\n\n if (!metadata || Object.keys(metadata.entries).length === 0) {\n logger.info(\"No translations to process (metadata is empty)\");\n return {\n success: true,\n stats: {},\n };\n }\n\n const totalEntries = Object.keys(metadata.entries).length;\n logger.info(`📊 Found ${totalEntries} translatable entries`);\n\n const cache = createCache(config);\n\n // Handle cache-only mode\n if (buildMode === \"cache-only\") {\n logger.info(\"🔍 Validating translation cache...\");\n await validateCache(config, metadata, cache);\n logger.info(\"✅ Cache validation passed\");\n\n if (publicOutputPath) {\n await copyStaticFiles(config, publicOutputPath, metadata, cache);\n }\n\n return {\n success: true,\n stats: buildCacheStats(config, metadata),\n };\n }\n\n // Handle translate mode\n logger.info(\"🔄 Generating translations...\");\n let translationServer: TranslationServer | undefined;\n\n try {\n translationServer = await startTranslationServer({\n translationService: new TranslationService(config, logger),\n onError: (err) => {\n logger.error(\"Translation server error:\", err);\n },\n config,\n });\n\n // When pluralization is enabled, we need to generate the source 
locale file too\n // because pluralization modifies the sourceText\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n logger.info(\n `Processing translations for ${allLocales.length} locale(s)${needsSourceLocale ? \" (including source locale for pluralization)\" : \"\"}...`,\n );\n\n const stats: BuildTranslationResult[\"stats\"] = {};\n const errors: Array<{ locale: LocaleCode; error: string }> = [];\n\n // Translate all locales in parallel\n const localePromises = allLocales.map(async (locale) => {\n logger.info(`Translating to ${locale}...`);\n\n const result = await translationServer!.translateAll(locale);\n\n stats[locale] = {\n total: totalEntries,\n translated: Object.keys(result.translations).length,\n failed: result.errors.length,\n };\n\n if (result.errors.length > 0) {\n logger.warn(\n `⚠️ ${result.errors.length} translation error(s) for ${locale}`,\n );\n errors.push({\n locale,\n error: `${result.errors.length} translation(s) failed`,\n });\n } else {\n logger.info(`✅ ${locale} completed successfully`);\n }\n });\n\n await Promise.all(localePromises);\n\n // Fail build if any translations failed in translate mode\n if (errors.length > 0) {\n const errorMsg = formatTranslationErrors(errors);\n logger.error(errorMsg);\n process.exit(1);\n }\n\n // Copy cache to public directory if requested\n if (publicOutputPath) {\n await copyStaticFiles(config, publicOutputPath, metadata, cache);\n }\n\n logger.info(\"✅ Translation generation completed successfully\");\n\n return {\n success: true,\n stats,\n };\n } catch (error) {\n logger.error(\n \"❌ Translation generation failed:\\n\",\n error instanceof Error ? error.message : error,\n );\n process.exit(1);\n } finally {\n if (translationServer) {\n await translationServer.stop();\n logger.info(\"✅ Translation server stopped\");\n }\n }\n}\n\n/**\n * Validate that all required translations exist in cache\n * @throws Error if cache is incomplete or missing\n */\nasync function validateCache(\n config: LingoConfig,\n metadata: MetadataSchema,\n cache: TranslationCache,\n): Promise<void> {\n const allHashes = Object.keys(metadata.entries);\n const missingLocales: string[] = [];\n const incompleteLocales: Array<{\n locale: LocaleCode;\n missing: number;\n total: number;\n }> = [];\n\n // Include source locale if pluralization is enabled\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n for (const locale of allLocales) {\n try {\n const entries = await cache.get(locale);\n\n if (Object.keys(entries).length === 0) {\n missingLocales.push(locale);\n logger.debug(`Cache file not found or empty for ${locale}`);\n continue;\n }\n\n const missingHashes = allHashes.filter((hash) => !entries[hash]);\n\n if (missingHashes.length > 0) {\n incompleteLocales.push({\n locale,\n missing: missingHashes.length,\n total: allHashes.length,\n });\n\n // Log first few missing hashes for debugging\n logger.debug(\n `Missing hashes in ${locale}: ${missingHashes.slice(0, 5).join(\", \")}${\n missingHashes.length > 5 ? 
\"...\" : \"\"\n }`,\n );\n }\n } catch (error) {\n missingLocales.push(locale);\n logger.debug(`Failed to read cache for ${locale}:`, error);\n }\n }\n\n if (missingLocales.length > 0 || incompleteLocales.length > 0) {\n const errorMsg = formatCacheValidationError(\n missingLocales,\n incompleteLocales,\n );\n logger.error(errorMsg);\n process.exit(1);\n }\n}\n\nfunction buildCacheStats(\n config: LingoConfig,\n metadata: MetadataSchema,\n): BuildTranslationResult[\"stats\"] {\n const totalEntries = Object.keys(metadata.entries).length;\n const stats: BuildTranslationResult[\"stats\"] = {};\n\n // Include source locale if pluralization is enabled\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n for (const locale of allLocales) {\n stats[locale] = {\n total: totalEntries,\n translated: totalEntries, // Assumed complete if validation passed\n failed: 0,\n };\n }\n\n return stats;\n}\n\nasync function copyStaticFiles(\n config: LingoConfig,\n publicOutputPath: string,\n metadata: MetadataSchema,\n cache: TranslationCache,\n): Promise<void> {\n logger.info(`📦 Generating static translation files in ${publicOutputPath}`);\n\n await fs.mkdir(publicOutputPath, { recursive: true });\n\n const usedHashes = new Set(Object.keys(metadata.entries));\n logger.info(`📊 Filtering translations to ${usedHashes.size} used hash(es)`);\n\n // Include source locale if pluralization is enabled\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n for (const locale of allLocales) {\n const publicFilePath = path.join(publicOutputPath, `${locale}.json`);\n\n try {\n const entries = await cache.get(locale, Array.from(usedHashes));\n const outputData = dictionaryFrom(locale, entries);\n\n await fs.writeFile(\n publicFilePath,\n JSON.stringify(outputData, null, 2),\n \"utf-8\",\n );\n\n logger.info(\n `✓ Generated ${locale}.json (${Object.keys(entries).length} translations)`,\n );\n } catch (error) {\n logger.error(`❌ Failed to generate ${locale}.json:`, error);\n process.exit(1);\n }\n }\n}\n\nfunction formatCacheValidationError(\n missingLocales: string[],\n incompleteLocales: Array<{\n locale: LocaleCode;\n missing: number;\n total: number;\n }>,\n): string {\n let msg = \"❌ Cache validation failed in cache-only mode:\\n\\n\";\n\n if (missingLocales.length > 0) {\n msg += ` 📁 Missing cache files:\\n`;\n msg += missingLocales.map((locale) => ` - ${locale}.json`).join(\"\\n\");\n msg += \"\\n\\n\";\n }\n\n if (incompleteLocales.length > 0) {\n msg += ` 📊 Incomplete cache:\\n`;\n msg += incompleteLocales\n .map(\n (item) =>\n ` - ${item.locale}: ${item.missing}/${item.total} translations missing`,\n )\n .join(\"\\n\");\n msg += \"\\n\\n\";\n }\n\n msg += ` 💡 To fix:\\n`;\n msg += ` 1. Set LINGO_BUILD_MODE=translate to generate translations\\n`;\n msg += ` 2. Commit the generated .lingo/cache/*.json files\\n`;\n msg += ` 3. 
Ensure translation API keys are available if generating translations`;\n\n return msg;\n}\n\nfunction formatTranslationErrors(\n errors: Array<{ locale: LocaleCode; error: string }>,\n): string {\n let msg = \"❌ Translation generation failed:\\n\\n\";\n\n msg += errors.map((err) => ` - ${err.locale}: ${err.error}`).join(\"\\n\");\n\n msg += \"\\n\\n\";\n msg += ` 💡 Translation errors must be resolved in \"translate\" mode.\\n`;\n msg += ` Check translation server logs for details.`;\n\n return msg;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAsDA,eAAsB,yBACpB,SACiC;CACjC,MAAM,EAAE,QAAQ,kBAAkB,qBAAqB;CAGvD,MAAM,YACH,QAAQ,IAAI,oBACb,OAAO;AAET,QAAO,KAAK,kBAAkB,YAAY;CAE1C,MAAM,WAAW,MAAM,aAAa,iBAAiB;AAErD,KAAI,CAAC,YAAY,OAAO,KAAK,SAAS,QAAQ,CAAC,WAAW,GAAG;AAC3D,SAAO,KAAK,iDAAiD;AAC7D,SAAO;GACL,SAAS;GACT,OAAO,EAAE;GACV;;CAGH,MAAM,eAAe,OAAO,KAAK,SAAS,QAAQ,CAAC;AACnD,QAAO,KAAK,YAAY,aAAa,uBAAuB;CAE5D,MAAM,QAAQ,YAAY,OAAO;AAGjC,KAAI,cAAc,cAAc;AAC9B,SAAO,KAAK,qCAAqC;AACjD,QAAM,cAAc,QAAQ,UAAU,MAAM;AAC5C,SAAO,KAAK,4BAA4B;AAExC,MAAI,iBACF,OAAM,gBAAgB,QAAQ,kBAAkB,UAAU,MAAM;AAGlE,SAAO;GACL,SAAS;GACT,OAAO,gBAAgB,QAAQ,SAAS;GACzC;;AAIH,QAAO,KAAK,gCAAgC;CAC5C,IAAIA;AAEJ,KAAI;AACF,sBAAoB,MAAM,uBAAuB;GAC/C,oBAAoB,IAAI,mBAAmB,QAAQ,OAAO;GAC1D,UAAU,QAAQ;AAChB,WAAO,MAAM,6BAA6B,IAAI;;GAEhD;GACD,CAAC;EAIF,MAAM,oBAAoB,OAAO,eAAe,YAAY;EAC5D,MAAM,aAAa,oBACf,CAAC,OAAO,cAAc,GAAG,OAAO,cAAc,GAC9C,OAAO;AAEX,SAAO,KACL,+BAA+B,WAAW,OAAO,YAAY,oBAAoB,iDAAiD,GAAG,KACtI;EAED,MAAMC,QAAyC,EAAE;EACjD,MAAMC,SAAuD,EAAE;EAG/D,MAAM,iBAAiB,WAAW,IAAI,OAAO,WAAW;AACtD,UAAO,KAAK,kBAAkB,OAAO,KAAK;GAE1C,MAAM,SAAS,MAAM,kBAAmB,aAAa,OAAO;AAE5D,SAAM,UAAU;IACd,OAAO;IACP,YAAY,OAAO,KAAK,OAAO,aAAa,CAAC;IAC7C,QAAQ,OAAO,OAAO;IACvB;AAED,OAAI,OAAO,OAAO,SAAS,GAAG;AAC5B,WAAO,KACL,OAAO,OAAO,OAAO,OAAO,4BAA4B,SACzD;AACD,WAAO,KAAK;KACV;KACA,OAAO,GAAG,OAAO,OAAO,OAAO;KAChC,CAAC;SAEF,QAAO,KAAK,KAAK,OAAO,yBAAyB;IAEnD;AAEF,QAAM,QAAQ,IAAI,eAAe;AAGjC,MAAI,OAAO,SAAS,GAAG;GACrB,MAAM,WAAW,wBAAwB,OAAO;AAChD,UAAO,MAAM,SAAS;AACtB,WAAQ,KAAK,EAAE;;AAIjB,MAAI,iBACF,OAAM,gBAAgB,QAAQ,kBAAkB,UAAU,MAAM;AAGlE,SAAO,KAAK,kDAAkD;AAE9D,SAAO;GACL,SAAS;GACT;GACD;UACM,OAAO;AACd,SAAO,MACL,sCACA,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;AACD,UAAQ,KAAK,EAAE;WACP;AACR,MAAI,mBAAmB;AACrB,SAAM,kBAAkB,MAAM;AAC9B,UAAO,KAAK,+BAA+B;;;;;;;;AASjD,eAAe,cACb,QACA,UACA,OACe;CACf,MAAM,YAAY,OAAO,KAAK,SAAS,QAAQ;CAC/C,MAAMC,iBAA2B,EAAE;CACnC,MAAMC,oBAID,EAAE;CAIP,MAAM,aADoB,OAAO,eAAe,YAAY,QAExD,CAAC,OAAO,cAAc,GAAG,OAAO,cAAc,GAC9C,OAAO;AAEX,MAAK,MAAM,UAAU,WACnB,KAAI;EACF,MAAM,UAAU,MAAM,MAAM,IAAI,OAAO;AAEvC,MAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,GAAG;AACrC,kBAAe,KAAK,OAAO;AAC3B,UAAO,MAAM,qCAAqC,SAAS;AAC3D;;EAGF,MAAM,gBAAgB,UAAU,QAAQ,SAAS,CAAC,QAAQ,MAAM;AAEhE,MAAI,cAAc,SAAS,GAAG;AAC5B,qBAAkB,KAAK;IACrB;IACA,SAAS,cAAc;IACvB,OAAO,UAAU;IAClB,CAAC;AAGF,UAAO,MACL,qBAAqB,OAAO,IAAI,cAAc,MAAM,GAAG,EAAE,CAAC,KAAK,KAAK,GAClE,cAAc,SAAS,IAAI,QAAQ,KAEtC;;UAEI,OAAO;AACd,iBAAe,KAAK,OAAO;AAC3B,SAAO,MAAM,4BAA4B,OAAO,IAAI,MAAM;;AAI9D,KAAI,eAAe,SAAS,KAAK,kBAAkB,SAAS,GAAG;EAC7D,MAAM,WAAW,2BACf,gBACA,kBACD;AACD,SAAO,MAAM,SAAS;AACtB,UAAQ,KAAK,EAAE;;;AAInB,SAAS,gBACP,QACA,UACiC;CACjC,MAAM,eAAe,OAAO,KAAK,SAAS,QAAQ,CAAC;CACnD,MAAMH,QAAyC,EAAE;CAIjD,MAAM,aADoB,OAAO,eAAe,YAAY,QAExD,CAAC,OAAO,cAAc,GAAG,OAAO,cAAc,GAC9C,OAAO;AAEX,MAAK,MAAM,UAAU,WACnB,OAAM,UAAU;EACd,OAAO;EACP,YAAY;EACZ,QAAQ;EACT;AAGH,QAAO;;AAGT,eAAe,gBACb,QACA,kBACA,UACA,OACe;AACf,QAAO,KAAK,6CAA6C,mBAAmB;AAE5E,OAAM,GAAG,MAAM,kBAAkB,EAAE,WAAW,MAAM,CAAC;CAErD,MAAM,aAAa,IAAI,IAAI,OAAO,KAAK,SAAS,QAAQ,CAAC;AACzD,QAAO,KAAK,gCAAgC,WAAW,KAAK,gBAAgB;CAI5E,MAAM,aADoB,OAAO,eAAe,YAAY,QAExD,CAAC,OAAO,cAAc,GAAG,OAAO,c
AAc,GAC9C,OAAO;AAEX,MAAK,MAAM,UAAU,YAAY;EAC/B,MAAM,iBAAiB,KAAK,KAAK,kBAAkB,GAAG,OAAO,OAAO;AAEpE,MAAI;GACF,MAAM,UAAU,MAAM,MAAM,IAAI,QAAQ,MAAM,KAAK,WAAW,CAAC;GAC/D,MAAM,aAAa,eAAe,QAAQ,QAAQ;AAElD,SAAM,GAAG,UACP,gBACA,KAAK,UAAU,YAAY,MAAM,EAAE,EACnC,QACD;AAED,UAAO,KACL,eAAe,OAAO,SAAS,OAAO,KAAK,QAAQ,CAAC,OAAO,gBAC5D;WACM,OAAO;AACd,UAAO,MAAM,wBAAwB,OAAO,SAAS,MAAM;AAC3D,WAAQ,KAAK,EAAE;;;;AAKrB,SAAS,2BACP,gBACA,mBAKQ;CACR,IAAI,MAAM;AAEV,KAAI,eAAe,SAAS,GAAG;AAC7B,SAAO;AACP,SAAO,eAAe,KAAK,WAAW,SAAS,OAAO,OAAO,CAAC,KAAK,KAAK;AACxE,SAAO;;AAGT,KAAI,kBAAkB,SAAS,GAAG;AAChC,SAAO;AACP,SAAO,kBACJ,KACE,SACC,SAAS,KAAK,OAAO,IAAI,KAAK,QAAQ,GAAG,KAAK,MAAM,uBACvD,CACA,KAAK,KAAK;AACb,SAAO;;AAGT,QAAO;AACP,QAAO;AACP,QAAO;AACP,QAAO;AAEP,QAAO;;AAGT,SAAS,wBACP,QACQ;CACR,IAAI,MAAM;AAEV,QAAO,OAAO,KAAK,QAAQ,OAAO,IAAI,OAAO,IAAI,IAAI,QAAQ,CAAC,KAAK,KAAK;AAExE,QAAO;AACP,QAAO;AACP,QAAO;AAEP,QAAO"}
+ {"version":3,"file":"build-translator.mjs","names":["translationServer: TranslationServer | undefined","stats: BuildTranslationResult[\"stats\"]","errors: Array<{ locale: LocaleCode; error: string }>","missingLocales: string[]","incompleteLocales: Array<{\n locale: LocaleCode;\n missing: number;\n total: number;\n }>","fs"],"sources":["../../src/plugin/build-translator.ts"],"sourcesContent":["/**\n * Build-time translation processor\n *\n * Handles translation generation and validation at build time\n * Supports two modes:\n * - \"translate\": Generate all translations, fail if translation fails\n * - \"cache-only\": Validate cache completeness, fail if incomplete\n */\n// TODO (AleksandrSl 08/12/2025): Add ICU validation for messages? The problem is that we don't know which will be rendered as a simple text\nimport fs from \"fs/promises\";\nimport path from \"path\";\nimport type { LingoConfig, MetadataSchema } from \"../types\";\nimport { logger } from \"../utils/logger\";\nimport { startTranslationServer, type TranslationServer, } from \"../translation-server\";\nimport { loadMetadata } from \"../metadata/manager\";\nimport { createCache, type TranslationCache, TranslationService, } from \"../translators\";\nimport { dictionaryFrom } from \"../translators/api\";\nimport type { LocaleCode } from \"lingo.dev/spec\";\n\nexport interface BuildTranslationOptions {\n config: LingoConfig;\n publicOutputPath: string;\n metadataFilePath: string;\n}\n\nexport interface BuildTranslationResult {\n /**\n * Whether the build succeeded\n */\n success: boolean;\n\n /**\n * Error message if build failed\n */\n error?: string;\n\n /**\n * Translation statistics per locale\n */\n stats: Record<\n string,\n {\n total: number;\n translated: number;\n failed: number;\n }\n >;\n}\n\n/**\n * Process translations at build time\n *\n * @throws Error if validation or translation fails (causes build to fail)\n */\nexport async function processBuildTranslations(\n options: BuildTranslationOptions,\n): Promise<BuildTranslationResult> {\n const { config, publicOutputPath, metadataFilePath } = options;\n\n // Determine build mode (env var > options > config)\n const buildMode =\n (process.env.LINGO_BUILD_MODE as \"translate\" | \"cache-only\") ||\n config.buildMode;\n\n logger.info(`🌍 Build mode: ${buildMode}`);\n\n const metadata = await loadMetadata(metadataFilePath);\n\n if (!metadata || Object.keys(metadata.entries).length === 0) {\n logger.info(\"No translations to process (metadata is empty)\");\n return {\n success: true,\n stats: {},\n };\n }\n\n const totalEntries = Object.keys(metadata.entries).length;\n logger.info(`📊 Found ${totalEntries} translatable entries`);\n\n const cache = createCache(config);\n\n // Handle cache-only mode\n if (buildMode === \"cache-only\") {\n logger.info(\"🔍 Validating translation cache...\");\n await validateCache(config, metadata, cache);\n logger.info(\"✅ Cache validation passed\");\n\n if (publicOutputPath) {\n await copyStaticFiles(config, publicOutputPath, metadata, cache);\n }\n\n return {\n success: true,\n stats: buildCacheStats(config, metadata),\n };\n }\n\n // Handle translate mode\n logger.info(\"🔄 Generating translations...\");\n let translationServer: TranslationServer | undefined;\n\n try {\n translationServer = await startTranslationServer({\n translationService: new TranslationService(config, logger),\n onError: (err) => {\n logger.error(\"Translation server error:\", err);\n },\n config,\n });\n\n // When pluralization is enabled, we need to generate the 
source locale file too\n // because pluralization modifies the sourceText\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n logger.info(\n `Processing translations for ${allLocales.length} locale(s)${needsSourceLocale ? \" (including source locale for pluralization)\" : \"\"}...`,\n );\n\n const stats: BuildTranslationResult[\"stats\"] = {};\n const errors: Array<{ locale: LocaleCode; error: string }> = [];\n\n // Translate all locales in parallel\n const localePromises = allLocales.map(async (locale) => {\n logger.info(`Translating to ${locale}...`);\n\n const result = await translationServer!.translateAll(locale);\n\n stats[locale] = {\n total: totalEntries,\n translated: Object.keys(result.translations).length,\n failed: result.errors.length,\n };\n\n if (result.errors.length > 0) {\n logger.warn(\n `⚠️ ${result.errors.length} translation error(s) for ${locale}`,\n );\n errors.push({\n locale,\n error: `${result.errors.length} translation(s) failed`,\n });\n } else {\n logger.info(`✅ ${locale} completed successfully`);\n }\n });\n\n await Promise.all(localePromises);\n\n // Fail build if any translations failed in translate mode\n if (errors.length > 0) {\n const errorMsg = formatTranslationErrors(errors);\n logger.error(errorMsg);\n process.exit(1);\n }\n\n // Copy cache to public directory if requested\n if (publicOutputPath) {\n await copyStaticFiles(config, publicOutputPath, metadata, cache);\n }\n\n logger.info(\"✅ Translation generation completed successfully\");\n\n return {\n success: true,\n stats,\n };\n } catch (error) {\n logger.error(\n \"❌ Translation generation failed:\\n\",\n error instanceof Error ? error.message : error,\n );\n process.exit(1);\n } finally {\n if (translationServer) {\n await translationServer.stop();\n logger.info(\"✅ Translation server stopped\");\n }\n }\n}\n\n/**\n * Validate that all required translations exist in cache\n * @throws Error if cache is incomplete or missing\n */\nasync function validateCache(\n config: LingoConfig,\n metadata: MetadataSchema,\n cache: TranslationCache,\n): Promise<void> {\n const allHashes = Object.keys(metadata.entries);\n const missingLocales: string[] = [];\n const incompleteLocales: Array<{\n locale: LocaleCode;\n missing: number;\n total: number;\n }> = [];\n\n // Include source locale if pluralization is enabled\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n for (const locale of allLocales) {\n try {\n const entries = await cache.get(locale);\n\n if (Object.keys(entries).length === 0) {\n missingLocales.push(locale);\n logger.debug(`Cache file not found or empty for ${locale}`);\n continue;\n }\n\n const missingHashes = allHashes.filter((hash) => !entries[hash]);\n\n if (missingHashes.length > 0) {\n incompleteLocales.push({\n locale,\n missing: missingHashes.length,\n total: allHashes.length,\n });\n\n // Log first few missing hashes for debugging\n logger.debug(\n `Missing hashes in ${locale}: ${missingHashes.slice(0, 5).join(\", \")}${\n missingHashes.length > 5 ? 
\"...\" : \"\"\n }`,\n );\n }\n } catch (error) {\n missingLocales.push(locale);\n logger.debug(`Failed to read cache for ${locale}:`, error);\n }\n }\n\n if (missingLocales.length > 0 || incompleteLocales.length > 0) {\n const errorMsg = formatCacheValidationError(\n missingLocales,\n incompleteLocales,\n );\n logger.error(errorMsg);\n process.exit(1);\n }\n}\n\nfunction buildCacheStats(\n config: LingoConfig,\n metadata: MetadataSchema,\n): BuildTranslationResult[\"stats\"] {\n const totalEntries = Object.keys(metadata.entries).length;\n const stats: BuildTranslationResult[\"stats\"] = {};\n\n // Include source locale if pluralization is enabled\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n for (const locale of allLocales) {\n stats[locale] = {\n total: totalEntries,\n translated: totalEntries, // Assumed complete if validation passed\n failed: 0,\n };\n }\n\n return stats;\n}\n\nasync function copyStaticFiles(\n config: LingoConfig,\n publicOutputPath: string,\n metadata: MetadataSchema,\n cache: TranslationCache,\n): Promise<void> {\n logger.info(`📦 Generating static translation files in ${publicOutputPath}`);\n\n await fs.mkdir(publicOutputPath, { recursive: true });\n\n const usedHashes = new Set(Object.keys(metadata.entries));\n logger.info(`📊 Filtering translations to ${usedHashes.size} used hash(es)`);\n\n // Include source locale if pluralization is enabled\n const needsSourceLocale = config.pluralization?.enabled !== false;\n const allLocales = needsSourceLocale\n ? [config.sourceLocale, ...config.targetLocales]\n : config.targetLocales;\n\n for (const locale of allLocales) {\n const publicFilePath = path.join(publicOutputPath, `${locale}.json`);\n\n try {\n const entries = await cache.get(locale, Array.from(usedHashes));\n const outputData = dictionaryFrom(locale, entries);\n\n await fs.writeFile(\n publicFilePath,\n JSON.stringify(outputData, null, 2),\n \"utf-8\",\n );\n\n logger.info(\n `✓ Generated ${locale}.json (${Object.keys(entries).length} translations)`,\n );\n } catch (error) {\n logger.error(`❌ Failed to generate ${locale}.json:`, error);\n process.exit(1);\n }\n }\n}\n\nfunction formatCacheValidationError(\n missingLocales: string[],\n incompleteLocales: Array<{\n locale: LocaleCode;\n missing: number;\n total: number;\n }>,\n): string {\n let msg = \"❌ Cache validation failed in cache-only mode:\\n\\n\";\n\n if (missingLocales.length > 0) {\n msg += ` 📁 Missing cache files:\\n`;\n msg += missingLocales.map((locale) => ` - ${locale}.json`).join(\"\\n\");\n msg += \"\\n\\n\";\n }\n\n if (incompleteLocales.length > 0) {\n msg += ` 📊 Incomplete cache:\\n`;\n msg += incompleteLocales\n .map(\n (item) =>\n ` - ${item.locale}: ${item.missing}/${item.total} translations missing`,\n )\n .join(\"\\n\");\n msg += \"\\n\\n\";\n }\n\n msg += ` 💡 To fix:\\n`;\n msg += ` 1. Set LINGO_BUILD_MODE=translate to generate translations\\n`;\n msg += ` 2. Commit the generated .lingo/cache/*.json files\\n`;\n msg += ` 3. 
Ensure translation API keys are available if generating translations`;\n\n return msg;\n}\n\nfunction formatTranslationErrors(\n errors: Array<{ locale: LocaleCode; error: string }>,\n): string {\n let msg = \"❌ Translation generation failed:\\n\\n\";\n\n msg += errors.map((err) => ` - ${err.locale}: ${err.error}`).join(\"\\n\");\n\n msg += \"\\n\\n\";\n msg += ` 💡 Translation errors must be resolved in \"translate\" mode.\\n`;\n msg += ` Check translation server logs for details.`;\n\n return msg;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAsDA,eAAsB,yBACpB,SACiC;CACjC,MAAM,EAAE,QAAQ,kBAAkB,qBAAqB;CAGvD,MAAM,YACH,QAAQ,IAAI,oBACb,OAAO;AAET,QAAO,KAAK,kBAAkB,YAAY;CAE1C,MAAM,WAAW,MAAM,aAAa,iBAAiB;AAErD,KAAI,CAAC,YAAY,OAAO,KAAK,SAAS,QAAQ,CAAC,WAAW,GAAG;AAC3D,SAAO,KAAK,iDAAiD;AAC7D,SAAO;GACL,SAAS;GACT,OAAO,EAAE;GACV;;CAGH,MAAM,eAAe,OAAO,KAAK,SAAS,QAAQ,CAAC;AACnD,QAAO,KAAK,YAAY,aAAa,uBAAuB;CAE5D,MAAM,QAAQ,YAAY,OAAO;AAGjC,KAAI,cAAc,cAAc;AAC9B,SAAO,KAAK,qCAAqC;AACjD,QAAM,cAAc,QAAQ,UAAU,MAAM;AAC5C,SAAO,KAAK,4BAA4B;AAExC,MAAI,iBACF,OAAM,gBAAgB,QAAQ,kBAAkB,UAAU,MAAM;AAGlE,SAAO;GACL,SAAS;GACT,OAAO,gBAAgB,QAAQ,SAAS;GACzC;;AAIH,QAAO,KAAK,gCAAgC;CAC5C,IAAIA;AAEJ,KAAI;AACF,sBAAoB,MAAM,uBAAuB;GAC/C,oBAAoB,IAAI,mBAAmB,QAAQ,OAAO;GAC1D,UAAU,QAAQ;AAChB,WAAO,MAAM,6BAA6B,IAAI;;GAEhD;GACD,CAAC;EAIF,MAAM,oBAAoB,OAAO,eAAe,YAAY;EAC5D,MAAM,aAAa,oBACf,CAAC,OAAO,cAAc,GAAG,OAAO,cAAc,GAC9C,OAAO;AAEX,SAAO,KACL,+BAA+B,WAAW,OAAO,YAAY,oBAAoB,iDAAiD,GAAG,KACtI;EAED,MAAMC,QAAyC,EAAE;EACjD,MAAMC,SAAuD,EAAE;EAG/D,MAAM,iBAAiB,WAAW,IAAI,OAAO,WAAW;AACtD,UAAO,KAAK,kBAAkB,OAAO,KAAK;GAE1C,MAAM,SAAS,MAAM,kBAAmB,aAAa,OAAO;AAE5D,SAAM,UAAU;IACd,OAAO;IACP,YAAY,OAAO,KAAK,OAAO,aAAa,CAAC;IAC7C,QAAQ,OAAO,OAAO;IACvB;AAED,OAAI,OAAO,OAAO,SAAS,GAAG;AAC5B,WAAO,KACL,OAAO,OAAO,OAAO,OAAO,4BAA4B,SACzD;AACD,WAAO,KAAK;KACV;KACA,OAAO,GAAG,OAAO,OAAO,OAAO;KAChC,CAAC;SAEF,QAAO,KAAK,KAAK,OAAO,yBAAyB;IAEnD;AAEF,QAAM,QAAQ,IAAI,eAAe;AAGjC,MAAI,OAAO,SAAS,GAAG;GACrB,MAAM,WAAW,wBAAwB,OAAO;AAChD,UAAO,MAAM,SAAS;AACtB,WAAQ,KAAK,EAAE;;AAIjB,MAAI,iBACF,OAAM,gBAAgB,QAAQ,kBAAkB,UAAU,MAAM;AAGlE,SAAO,KAAK,kDAAkD;AAE9D,SAAO;GACL,SAAS;GACT;GACD;UACM,OAAO;AACd,SAAO,MACL,sCACA,iBAAiB,QAAQ,MAAM,UAAU,MAC1C;AACD,UAAQ,KAAK,EAAE;WACP;AACR,MAAI,mBAAmB;AACrB,SAAM,kBAAkB,MAAM;AAC9B,UAAO,KAAK,+BAA+B;;;;;;;;AASjD,eAAe,cACb,QACA,UACA,OACe;CACf,MAAM,YAAY,OAAO,KAAK,SAAS,QAAQ;CAC/C,MAAMC,iBAA2B,EAAE;CACnC,MAAMC,oBAID,EAAE;CAIP,MAAM,aADoB,OAAO,eAAe,YAAY,QAExD,CAAC,OAAO,cAAc,GAAG,OAAO,cAAc,GAC9C,OAAO;AAEX,MAAK,MAAM,UAAU,WACnB,KAAI;EACF,MAAM,UAAU,MAAM,MAAM,IAAI,OAAO;AAEvC,MAAI,OAAO,KAAK,QAAQ,CAAC,WAAW,GAAG;AACrC,kBAAe,KAAK,OAAO;AAC3B,UAAO,MAAM,qCAAqC,SAAS;AAC3D;;EAGF,MAAM,gBAAgB,UAAU,QAAQ,SAAS,CAAC,QAAQ,MAAM;AAEhE,MAAI,cAAc,SAAS,GAAG;AAC5B,qBAAkB,KAAK;IACrB;IACA,SAAS,cAAc;IACvB,OAAO,UAAU;IAClB,CAAC;AAGF,UAAO,MACL,qBAAqB,OAAO,IAAI,cAAc,MAAM,GAAG,EAAE,CAAC,KAAK,KAAK,GAClE,cAAc,SAAS,IAAI,QAAQ,KAEtC;;UAEI,OAAO;AACd,iBAAe,KAAK,OAAO;AAC3B,SAAO,MAAM,4BAA4B,OAAO,IAAI,MAAM;;AAI9D,KAAI,eAAe,SAAS,KAAK,kBAAkB,SAAS,GAAG;EAC7D,MAAM,WAAW,2BACf,gBACA,kBACD;AACD,SAAO,MAAM,SAAS;AACtB,UAAQ,KAAK,EAAE;;;AAInB,SAAS,gBACP,QACA,UACiC;CACjC,MAAM,eAAe,OAAO,KAAK,SAAS,QAAQ,CAAC;CACnD,MAAMH,QAAyC,EAAE;CAIjD,MAAM,aADoB,OAAO,eAAe,YAAY,QAExD,CAAC,OAAO,cAAc,GAAG,OAAO,cAAc,GAC9C,OAAO;AAEX,MAAK,MAAM,UAAU,WACnB,OAAM,UAAU;EACd,OAAO;EACP,YAAY;EACZ,QAAQ;EACT;AAGH,QAAO;;AAGT,eAAe,gBACb,QACA,kBACA,UACA,OACe;AACf,QAAO,KAAK,6CAA6C,mBAAmB;AAE5E,OAAMI,WAAG,MAAM,kBAAkB,EAAE,WAAW,MAAM,CAAC;CAErD,MAAM,aAAa,IAAI,IAAI,OAAO,KAAK,SAAS,QAAQ,CAAC;AACzD,QAAO,KAAK,gCAAgC,WAAW,KAAK,gBAAgB;CAI5E,MAAM,aADoB,OAAO,eAAe,YAAY,QAExD,CAAC,OAAO,cAAc,GAAG,OAAO,
cAAc,GAC9C,OAAO;AAEX,MAAK,MAAM,UAAU,YAAY;EAC/B,MAAM,iBAAiB,KAAK,KAAK,kBAAkB,GAAG,OAAO,OAAO;AAEpE,MAAI;GACF,MAAM,UAAU,MAAM,MAAM,IAAI,QAAQ,MAAM,KAAK,WAAW,CAAC;GAC/D,MAAM,aAAa,eAAe,QAAQ,QAAQ;AAElD,SAAMA,WAAG,UACP,gBACA,KAAK,UAAU,YAAY,MAAM,EAAE,EACnC,QACD;AAED,UAAO,KACL,eAAe,OAAO,SAAS,OAAO,KAAK,QAAQ,CAAC,OAAO,gBAC5D;WACM,OAAO;AACd,UAAO,MAAM,wBAAwB,OAAO,SAAS,MAAM;AAC3D,WAAQ,KAAK,EAAE;;;;AAKrB,SAAS,2BACP,gBACA,mBAKQ;CACR,IAAI,MAAM;AAEV,KAAI,eAAe,SAAS,GAAG;AAC7B,SAAO;AACP,SAAO,eAAe,KAAK,WAAW,SAAS,OAAO,OAAO,CAAC,KAAK,KAAK;AACxE,SAAO;;AAGT,KAAI,kBAAkB,SAAS,GAAG;AAChC,SAAO;AACP,SAAO,kBACJ,KACE,SACC,SAAS,KAAK,OAAO,IAAI,KAAK,QAAQ,GAAG,KAAK,MAAM,uBACvD,CACA,KAAK,KAAK;AACb,SAAO;;AAGT,QAAO;AACP,QAAO;AACP,QAAO;AACP,QAAO;AAEP,QAAO;;AAGT,SAAS,wBACP,QACQ;CACR,IAAI,MAAM;AAEV,QAAO,OAAO,KAAK,QAAQ,OAAO,IAAI,OAAO,IAAI,IAAI,QAAQ,CAAC,KAAK,KAAK;AAExE,QAAO;AACP,QAAO;AACP,QAAO;AAEP,QAAO"}
@@ -9,6 +9,8 @@ const require_cleanup = require('./cleanup.cjs');
  const require_use_i18n = require('./transform/use-i18n.cjs');
  const require_index = require('./transform/index.cjs');
  const require_code_generator = require('../virtual/code-generator.cjs');
+ const require_tracking_events = require('../utils/tracking-events.cjs');
+ const require_observability = require('../utils/observability.cjs');
  let path = require("path");
  path = require_rolldown_runtime.__toESM(path);
  let fs = require("fs");
@@ -18,6 +20,12 @@ let unplugin = require("unplugin");
  //#region src/plugin/unplugin.ts
  let translationServer;
  const PLUGIN_NAME = "lingo-compiler";
+ let alreadySentBuildStartEvent = false;
+ let buildStartTime = null;
+ let filesTransformedCount = 0;
+ let totalEntriesCount = 0;
+ let hasTransformErrors = false;
+ let currentFramework = null;
  function tryLocalOrReturnVirtual(config, fileName, virtualName) {
  const customPath = path.default.join(config.sourceRoot, config.lingoDir, fileName);
  if (fs.default.existsSync(customPath)) return customPath;
@@ -49,6 +57,18 @@ const virtualModules = {
  const virtualModulesResolvers = Object.fromEntries(Object.entries(virtualModules).map(([importPath, module$1]) => [importPath, (config) => module$1.customFileCheck ? tryLocalOrReturnVirtual(config, module$1.customFileCheck, module$1.virtualId) : module$1.virtualId]));
  const virtualModulesLoaders = Object.fromEntries(Object.values(virtualModules).map((value) => [value.virtualId, value.loader]));
  /**
+ * Send build start tracking event
+ */
+ function sendBuildStartEvent(framework, config) {
+ if (alreadySentBuildStartEvent) return;
+ alreadySentBuildStartEvent = true;
+ require_observability.default(require_tracking_events.TRACKING_EVENTS.BUILD_START, {
+ framework,
+ configuration: require_tracking_events.sanitizeConfigForTracking(config),
+ environment: config.environment
+ });
+ }
+ /**
  * Universal plugin for Lingo.dev compiler
  * Supports Vite, Webpack
  */
@@ -89,6 +109,12 @@ const lingoUnplugin = (0, unplugin.createUnplugin)((options) => {
  },
  async buildStart() {
  const metadataFilePath = getMetadataPath$1();
+ currentFramework = "vite";
+ sendBuildStartEvent("vite", config);
+ buildStartTime = Date.now();
+ filesTransformedCount = 0;
+ totalEntriesCount = 0;
+ hasTransformErrors = false;
  require_manager.cleanupExistingMetadata(metadataFilePath);
  require_cleanup.registerCleanupOnCurrentProcess({ cleanup: () => require_manager.cleanupExistingMetadata(metadataFilePath) });
  if (isDev && !translationServer) translationServer = await startServer();
@@ -101,9 +127,27 @@ const lingoUnplugin = (0, unplugin.createUnplugin)((options) => {
  publicOutputPath: "public/translations",
  metadataFilePath
  });
+ if (buildStartTime && !hasTransformErrors) require_observability.default(require_tracking_events.TRACKING_EVENTS.BUILD_SUCCESS, {
+ framework: "vite",
+ stats: {
+ totalEntries: totalEntriesCount,
+ filesTransformed: filesTransformedCount,
+ buildDuration: Date.now() - buildStartTime
+ },
+ environment: config.environment
+ });
  } catch (error) {
  require_logger.logger.error("Build-time translation processing failed:", error);
  }
+ else if (buildStartTime && !hasTransformErrors) require_observability.default(require_tracking_events.TRACKING_EVENTS.BUILD_SUCCESS, {
+ framework: "vite",
+ stats: {
+ totalEntries: totalEntriesCount,
+ filesTransformed: filesTransformedCount,
+ buildDuration: Date.now() - buildStartTime
+ },
+ environment: config.environment
+ });
  }
  },
  webpack(compiler) {
@@ -111,6 +155,12 @@ const lingoUnplugin = (0, unplugin.createUnplugin)((options) => {
  const metadataFilePath = getMetadataPath$1();
  config.environment = webpackMode;
  compiler.hooks.initialize.tap(PLUGIN_NAME, () => {
+ currentFramework = "webpack";
+ sendBuildStartEvent("webpack", config);
+ buildStartTime = Date.now();
+ filesTransformedCount = 0;
+ totalEntriesCount = 0;
+ hasTransformErrors = false;
  require_manager.cleanupExistingMetadata(metadataFilePath);
  require_cleanup.registerCleanupOnCurrentProcess({ cleanup: () => require_manager.cleanupExistingMetadata(metadataFilePath) });
  });
@@ -124,10 +174,28 @@ const lingoUnplugin = (0, unplugin.createUnplugin)((options) => {
  publicOutputPath: "public/translations",
  metadataFilePath
  });
+ if (buildStartTime && !hasTransformErrors) require_observability.default(require_tracking_events.TRACKING_EVENTS.BUILD_SUCCESS, {
+ framework: "webpack",
+ stats: {
+ totalEntries: totalEntriesCount,
+ filesTransformed: filesTransformedCount,
+ buildDuration: Date.now() - buildStartTime
+ },
+ environment: config.environment
+ });
  } catch (error) {
  require_logger.logger.error("Build-time translation processing failed:", error);
  throw error;
  }
+ else if (buildStartTime && !hasTransformErrors) require_observability.default(require_tracking_events.TRACKING_EVENTS.BUILD_SUCCESS, {
+ framework: "webpack",
+ stats: {
+ totalEntries: totalEntriesCount,
+ filesTransformed: filesTransformedCount,
+ buildDuration: Date.now() - buildStartTime
+ },
+ environment: config.environment
+ });
  });
  compiler.hooks.shutdown.tapPromise(PLUGIN_NAME, async () => {
  require_manager.cleanupExistingMetadata(metadataFilePath);
@@ -169,6 +237,8 @@ const lingoUnplugin = (0, unplugin.createUnplugin)((options) => {
  const metadataManager = new require_manager.MetadataManager(getMetadataPath$1());
  if (result.newEntries && result.newEntries.length > 0) {
  await metadataManager.saveMetadataWithEntries(result.newEntries);
+ totalEntriesCount += result.newEntries.length;
+ filesTransformedCount++;
  require_logger.logger.debug(`Found ${result.newEntries.length} translatable text(s) in ${id}`);
  }
  require_logger.logger.debug(`Returning transformed code for ${id}`);
@@ -177,6 +247,14 @@ const lingoUnplugin = (0, unplugin.createUnplugin)((options) => {
177
247
  map: result.map
178
248
  };
179
249
  } catch (error) {
250
+ hasTransformErrors = true;
251
+ if (currentFramework) require_observability.default(require_tracking_events.TRACKING_EVENTS.BUILD_ERROR, {
252
+ framework: currentFramework,
253
+ errorType: "transform",
254
+ errorMessage: error instanceof Error ? error.message : "Unknown transform error",
255
+ filePath: id,
256
+ environment: config.environment
257
+ });
180
258
  require_logger.logger.error(`Transform error in ${id}:`, error);
181
259
  return null;
182
260
  }
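
The transform catch block above marks hasTransformErrors and, when a framework has already been registered, emits a BUILD_ERROR event. The payload it builds, sketched as a type (the interface name is illustrative; the fields come from the call above):

    interface BuildErrorPayload {
      framework: "vite" | "webpack" | "next"; // currentFramework at the time of the error
      errorType: "transform";
      errorMessage: string; // error.message, or "Unknown transform error" for non-Error throws
      filePath: string;     // the module id passed to the transform handler
      environment: string | undefined;
    }
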
@@ -1 +1 @@
1
- {"version":3,"file":"unplugin.d.cts","names":[],"sources":["../../src/plugin/unplugin.ts"],"sourcesContent":[],"mappings":";;;;KA8BY,kBAAA,GAAqB"}
1
+ {"version":3,"file":"unplugin.d.cts","names":[],"sources":["../../src/plugin/unplugin.ts"],"sourcesContent":[],"mappings":";;;;KAmCY,kBAAA,GAAqB"}
@@ -1 +1 @@
1
- {"version":3,"file":"unplugin.d.mts","names":[],"sources":["../../src/plugin/unplugin.ts"],"sourcesContent":[],"mappings":";;;;KA8BY,kBAAA,GAAqB"}
1
+ {"version":3,"file":"unplugin.d.mts","names":[],"sources":["../../src/plugin/unplugin.ts"],"sourcesContent":[],"mappings":";;;;KAmCY,kBAAA,GAAqB"}
@@ -8,6 +8,8 @@ import { registerCleanupOnCurrentProcess } from "./cleanup.mjs";
8
8
  import { useI18nRegex } from "./transform/use-i18n.mjs";
9
9
  import { transformComponent } from "./transform/index.mjs";
10
10
  import { generateClientLocaleModule, generateConfigModule, generateServerLocaleModule } from "../virtual/code-generator.mjs";
11
+ import { TRACKING_EVENTS, sanitizeConfigForTracking } from "../utils/tracking-events.mjs";
12
+ import trackEvent from "../utils/observability.mjs";
11
13
  import path from "path";
12
14
  import fs from "fs";
13
15
  import { createUnplugin } from "unplugin";
@@ -15,6 +17,12 @@ import { createUnplugin } from "unplugin";
15
17
  //#region src/plugin/unplugin.ts
16
18
  let translationServer;
17
19
  const PLUGIN_NAME = "lingo-compiler";
20
+ let alreadySentBuildStartEvent = false;
21
+ let buildStartTime = null;
22
+ let filesTransformedCount = 0;
23
+ let totalEntriesCount = 0;
24
+ let hasTransformErrors = false;
25
+ let currentFramework = null;
18
26
  function tryLocalOrReturnVirtual(config, fileName, virtualName) {
19
27
  const customPath = path.join(config.sourceRoot, config.lingoDir, fileName);
20
28
  if (fs.existsSync(customPath)) return customPath;
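
The module-level counters introduced above are reset at the start of every build (Vite's buildStart and webpack's initialize hook repeat the same six statements) and are read again when BUILD_SUCCESS or BUILD_ERROR is emitted. A compact sketch of that per-build reset; the helper name is hypothetical, the statements are exactly the ones the hooks inline:

    // Hypothetical helper mirroring the inline per-build reset done in the hooks.
    function resetBuildTracking(framework: "vite" | "webpack") {
      currentFramework = framework;
      sendBuildStartEvent(framework, config);
      buildStartTime = Date.now();
      filesTransformedCount = 0;
      totalEntriesCount = 0;
      hasTransformErrors = false;
    }
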
@@ -46,6 +54,18 @@ const virtualModules = {
46
54
  const virtualModulesResolvers = Object.fromEntries(Object.entries(virtualModules).map(([importPath, module]) => [importPath, (config) => module.customFileCheck ? tryLocalOrReturnVirtual(config, module.customFileCheck, module.virtualId) : module.virtualId]));
47
55
  const virtualModulesLoaders = Object.fromEntries(Object.values(virtualModules).map((value) => [value.virtualId, value.loader]));
48
56
  /**
57
+ * Send build start tracking event
58
+ */
59
+ function sendBuildStartEvent(framework, config) {
60
+ if (alreadySentBuildStartEvent) return;
61
+ alreadySentBuildStartEvent = true;
62
+ trackEvent(TRACKING_EVENTS.BUILD_START, {
63
+ framework,
64
+ configuration: sanitizeConfigForTracking(config),
65
+ environment: config.environment
66
+ });
67
+ }
68
+ /**
49
69
  * Universal plugin for Lingo.dev compiler
50
70
  * Supports Vite, Webpack
51
71
  */
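
sendBuildStartEvent is guarded by the module-level alreadySentBuildStartEvent flag, so only the first framework hook to run in a process emits BUILD_START; later calls in the same process are no-ops. A usage sketch:

    sendBuildStartEvent("vite", config); // emits BUILD_START with the sanitized configuration
    sendBuildStartEvent("vite", config); // no-op: alreadySentBuildStartEvent is already true
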
@@ -86,6 +106,12 @@ const lingoUnplugin = createUnplugin((options) => {
86
106
  },
87
107
  async buildStart() {
88
108
  const metadataFilePath = getMetadataPath$1();
109
+ currentFramework = "vite";
110
+ sendBuildStartEvent("vite", config);
111
+ buildStartTime = Date.now();
112
+ filesTransformedCount = 0;
113
+ totalEntriesCount = 0;
114
+ hasTransformErrors = false;
89
115
  cleanupExistingMetadata(metadataFilePath);
90
116
  registerCleanupOnCurrentProcess({ cleanup: () => cleanupExistingMetadata(metadataFilePath) });
91
117
  if (isDev && !translationServer) translationServer = await startServer();
@@ -98,9 +124,27 @@ const lingoUnplugin = createUnplugin((options) => {
98
124
  publicOutputPath: "public/translations",
99
125
  metadataFilePath
100
126
  });
127
+ if (buildStartTime && !hasTransformErrors) trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {
128
+ framework: "vite",
129
+ stats: {
130
+ totalEntries: totalEntriesCount,
131
+ filesTransformed: filesTransformedCount,
132
+ buildDuration: Date.now() - buildStartTime
133
+ },
134
+ environment: config.environment
135
+ });
101
136
  } catch (error) {
102
137
  logger.error("Build-time translation processing failed:", error);
103
138
  }
139
+ else if (buildStartTime && !hasTransformErrors) trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {
140
+ framework: "vite",
141
+ stats: {
142
+ totalEntries: totalEntriesCount,
143
+ filesTransformed: filesTransformedCount,
144
+ buildDuration: Date.now() - buildStartTime
145
+ },
146
+ environment: config.environment
147
+ });
104
148
  }
105
149
  },
106
150
  webpack(compiler) {
@@ -108,6 +152,12 @@ const lingoUnplugin = createUnplugin((options) => {
108
152
  const metadataFilePath = getMetadataPath$1();
109
153
  config.environment = webpackMode;
110
154
  compiler.hooks.initialize.tap(PLUGIN_NAME, () => {
155
+ currentFramework = "webpack";
156
+ sendBuildStartEvent("webpack", config);
157
+ buildStartTime = Date.now();
158
+ filesTransformedCount = 0;
159
+ totalEntriesCount = 0;
160
+ hasTransformErrors = false;
111
161
  cleanupExistingMetadata(metadataFilePath);
112
162
  registerCleanupOnCurrentProcess({ cleanup: () => cleanupExistingMetadata(metadataFilePath) });
113
163
  });
@@ -121,10 +171,28 @@ const lingoUnplugin = createUnplugin((options) => {
121
171
  publicOutputPath: "public/translations",
122
172
  metadataFilePath
123
173
  });
174
+ if (buildStartTime && !hasTransformErrors) trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {
175
+ framework: "webpack",
176
+ stats: {
177
+ totalEntries: totalEntriesCount,
178
+ filesTransformed: filesTransformedCount,
179
+ buildDuration: Date.now() - buildStartTime
180
+ },
181
+ environment: config.environment
182
+ });
124
183
  } catch (error) {
125
184
  logger.error("Build-time translation processing failed:", error);
126
185
  throw error;
127
186
  }
187
+ else if (buildStartTime && !hasTransformErrors) trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {
188
+ framework: "webpack",
189
+ stats: {
190
+ totalEntries: totalEntriesCount,
191
+ filesTransformed: filesTransformedCount,
192
+ buildDuration: Date.now() - buildStartTime
193
+ },
194
+ environment: config.environment
195
+ });
128
196
  });
129
197
  compiler.hooks.shutdown.tapPromise(PLUGIN_NAME, async () => {
130
198
  cleanupExistingMetadata(metadataFilePath);
@@ -166,6 +234,8 @@ const lingoUnplugin = createUnplugin((options) => {
166
234
  const metadataManager = new MetadataManager(getMetadataPath$1());
167
235
  if (result.newEntries && result.newEntries.length > 0) {
168
236
  await metadataManager.saveMetadataWithEntries(result.newEntries);
237
+ totalEntriesCount += result.newEntries.length;
238
+ filesTransformedCount++;
169
239
  logger.debug(`Found ${result.newEntries.length} translatable text(s) in ${id}`);
170
240
  }
171
241
  logger.debug(`Returning transformed code for ${id}`);
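
The two counters incremented above feed the stats block of the later BUILD_SUCCESS event: filesTransformedCount counts files whose transform produced at least one new entry, and totalEntriesCount sums those entries. A worked example, assuming three transformed files:

    // Files yielding 4, 0 and 2 new entries respectively: the branch above runs
    // only for the first and third file, so afterwards
    //   filesTransformedCount === 2
    //   totalEntriesCount     === 6
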
@@ -174,6 +244,14 @@ const lingoUnplugin = createUnplugin((options) => {
174
244
  map: result.map
175
245
  };
176
246
  } catch (error) {
247
+ hasTransformErrors = true;
248
+ if (currentFramework) trackEvent(TRACKING_EVENTS.BUILD_ERROR, {
249
+ framework: currentFramework,
250
+ errorType: "transform",
251
+ errorMessage: error instanceof Error ? error.message : "Unknown transform error",
252
+ filePath: id,
253
+ environment: config.environment
254
+ });
177
255
  logger.error(`Transform error in ${id}:`, error);
178
256
  return null;
179
257
  }
@@ -1 +1 @@
1
- {"version":3,"file":"unplugin.mjs","names":["translationServer: TranslationServer","webpackMode: \"development\" | \"production\" | undefined","getMetadataPath","rawGetMetadataPath"],"sources":["../../src/plugin/unplugin.ts"],"sourcesContent":["import { createUnplugin } from \"unplugin\";\nimport { transformComponent } from \"./transform\";\nimport type {\n LingoConfig,\n LingoInternalFields,\n PartialLingoConfig,\n} from \"../types\";\nimport {\n startTranslationServer,\n type TranslationServer,\n} from \"../translation-server\";\nimport {\n cleanupExistingMetadata,\n getMetadataPath as rawGetMetadataPath,\n MetadataManager,\n} from \"../metadata/manager\";\nimport { createLingoConfig } from \"../utils/config-factory\";\nimport { logger } from \"../utils/logger\";\nimport { useI18nRegex } from \"./transform/use-i18n\";\nimport {\n generateClientLocaleModule,\n generateConfigModule,\n generateServerLocaleModule,\n} from \"../virtual/code-generator\";\nimport { processBuildTranslations } from \"./build-translator\";\nimport { registerCleanupOnCurrentProcess } from \"./cleanup\";\nimport path from \"path\";\nimport fs from \"fs\";\nimport { TranslationService } from \"../translators\";\n\nexport type LingoPluginOptions = PartialLingoConfig;\n\nlet translationServer: TranslationServer;\n\nconst PLUGIN_NAME = \"lingo-compiler\";\n\nfunction tryLocalOrReturnVirtual(\n config: LingoConfig,\n fileName: string,\n virtualName: string,\n) {\n const customPath = path.join(config.sourceRoot, config.lingoDir, fileName);\n if (fs.existsSync(customPath)) {\n return customPath;\n }\n return virtualName;\n}\n\n/**\n * Single source of truth for virtual modules\n * Each entry defines both resolver (import path → virtual ID) and loader (virtual ID → code)\n *\n * If customFileCheck is defined, the specified file will be first searched for, and if not found virtual module will be used.\n */\nconst virtualModules = {\n \"@lingo.dev/compiler/virtual/config\": {\n virtualId: \"\\0virtual:lingo-config\",\n loader: (config: LingoConfig) => generateConfigModule(config),\n customFileCheck: undefined,\n },\n \"@lingo.dev/compiler/virtual/locale/server\": {\n virtualId: \"\\0virtual:locale-resolver.server\" as const,\n loader: (config: LingoConfig) => generateServerLocaleModule(config),\n customFileCheck: \"locale-resolver.server.ts\" as const,\n },\n \"@lingo.dev/compiler/virtual/locale/client\": {\n virtualId: \"\\0virtual:locale-resolver.client\" as const,\n loader: (config: LingoConfig) => generateClientLocaleModule(config),\n customFileCheck: \"locale-resolver.client.ts\" as const,\n },\n} as const;\n\n// Derive resolver and loader maps from the single source\nconst virtualModulesResolvers = Object.fromEntries(\n Object.entries(virtualModules).map(([importPath, module]) => [\n importPath,\n (config: LingoConfig) =>\n module.customFileCheck\n ? tryLocalOrReturnVirtual(\n config,\n module.customFileCheck,\n module.virtualId,\n )\n : module.virtualId,\n ]),\n);\n\nconst virtualModulesLoaders = Object.fromEntries(\n Object.values(virtualModules).map((value) => [value.virtualId, value.loader]),\n);\n\n/**\n * Universal plugin for Lingo.dev compiler\n * Supports Vite, Webpack\n */\nexport const lingoUnplugin = createUnplugin<\n LingoPluginOptions & Partial<Pick<LingoConfig, LingoInternalFields>>\n>((options) => {\n const config = createLingoConfig(options);\n\n // Won't work for webpack most likely. 
Use mode there to set correct environment in configs.\n const isDev = config.environment === \"development\";\n const startPort = config.dev.translationServerStartPort;\n\n // For webpack: store the actual mode and use it to compute the correct metadata path\n let webpackMode: \"development\" | \"production\" | undefined;\n // Should be dynamic, because webpack only tells us the mode inside the plugin, not inside the config.\n const getMetadataPath = () => {\n return rawGetMetadataPath(\n webpackMode ? { ...config, environment: webpackMode } : config,\n );\n };\n\n async function startServer() {\n const server = await startTranslationServer({\n translationService: new TranslationService(config, logger),\n onError: (err) => {\n logger.error(\"Translation server error:\", err);\n },\n onReady: (port) => {\n logger.info(`Translation server started successfully on port: ${port}`);\n },\n config,\n });\n // I don't like this quite a lot. But starting server inside the loader seems lame.\n config.dev.translationServerUrl = server.getUrl();\n registerCleanupOnCurrentProcess({\n asyncCleanup: async () => {\n await translationServer.stop();\n },\n });\n return server;\n }\n\n return {\n name: PLUGIN_NAME,\n enforce: \"pre\", // Run before other plugins (especially before React plugin)\n\n vite: {\n // Vite handles deep merge\n config() {\n // Required for custom virtual like modules to be resolved; otherwise they are bundled with raw source code.\n return {\n optimizeDeps: {\n exclude: [\"@lingo.dev/compiler\"],\n },\n };\n },\n async buildStart() {\n const metadataFilePath = getMetadataPath();\n\n cleanupExistingMetadata(metadataFilePath);\n registerCleanupOnCurrentProcess({\n cleanup: () => cleanupExistingMetadata(metadataFilePath),\n });\n\n if (isDev && !translationServer) {\n translationServer = await startServer();\n }\n },\n\n async buildEnd() {\n const metadataFilePath = getMetadataPath();\n if (!isDev) {\n try {\n await processBuildTranslations({\n config,\n publicOutputPath: \"public/translations\",\n metadataFilePath,\n });\n } catch (error) {\n logger.error(\"Build-time translation processing failed:\", error);\n }\n }\n },\n },\n\n webpack(compiler) {\n webpackMode =\n compiler.options.mode === \"development\" ? 
\"development\" : \"production\";\n const metadataFilePath = getMetadataPath();\n // Yes, this is dirty play, but webpack runs only for this plugin, and this way we save people from using wrong config\n config.environment = webpackMode;\n\n compiler.hooks.initialize.tap(PLUGIN_NAME, () => {\n cleanupExistingMetadata(metadataFilePath);\n registerCleanupOnCurrentProcess({\n cleanup: () => cleanupExistingMetadata(metadataFilePath),\n });\n });\n\n compiler.hooks.watchRun.tapPromise(PLUGIN_NAME, async () => {\n if (webpackMode === \"development\" && !translationServer) {\n translationServer = await startServer();\n }\n });\n\n compiler.hooks.additionalPass.tapPromise(PLUGIN_NAME, async () => {\n if (webpackMode === \"production\") {\n try {\n await processBuildTranslations({\n config,\n publicOutputPath: \"public/translations\",\n metadataFilePath,\n });\n } catch (error) {\n logger.error(\"Build-time translation processing failed:\", error);\n throw error;\n }\n }\n });\n\n // Duplicates the cleanup process handlers does, but won't hurt since cleanup is idempotent.\n compiler.hooks.shutdown.tapPromise(PLUGIN_NAME, async () => {\n cleanupExistingMetadata(metadataFilePath);\n await translationServer?.stop();\n });\n },\n\n resolveId(id) {\n const handler = virtualModulesResolvers[id];\n if (handler) {\n return handler(config);\n }\n return null;\n },\n\n load: {\n filter: {\n // Without the filter webpack goes mad\n id: /virtual:/,\n },\n handler(id: string) {\n const handler = virtualModulesLoaders[id];\n if (handler) {\n return handler(config);\n }\n return null;\n },\n },\n\n transform: {\n filter: {\n id: {\n include: [/\\.[tj]sx$/],\n exclude: /node_modules/,\n },\n // If useDirective is enabled, only process files with \"use i18n\"\n // This is more efficient than checking in the handler\n code: config.useDirective ? 
useI18nRegex : undefined,\n },\n async handler(code, id) {\n try {\n // Transform the component\n const result = transformComponent({\n code,\n filePath: id,\n config,\n });\n\n // If no transformation occurred, return original code\n if (!result.transformed) {\n logger.debug(`No transformation needed for ${id}`);\n return null;\n }\n const metadataManager = new MetadataManager(getMetadataPath());\n\n // Update metadata with new entries (thread-safe)\n if (result.newEntries && result.newEntries.length > 0) {\n await metadataManager.saveMetadataWithEntries(result.newEntries);\n\n logger.debug(\n `Found ${result.newEntries.length} translatable text(s) in ${id}`,\n );\n }\n\n logger.debug(`Returning transformed code for ${id}`);\n return {\n code: result.code,\n map: result.map,\n };\n } catch (error) {\n logger.error(`Transform error in ${id}:`, error);\n return null;\n }\n },\n },\n };\n});\n"],"mappings":";;;;;;;;;;;;;;;AAgCA,IAAIA;AAEJ,MAAM,cAAc;AAEpB,SAAS,wBACP,QACA,UACA,aACA;CACA,MAAM,aAAa,KAAK,KAAK,OAAO,YAAY,OAAO,UAAU,SAAS;AAC1E,KAAI,GAAG,WAAW,WAAW,CAC3B,QAAO;AAET,QAAO;;;;;;;;AAST,MAAM,iBAAiB;CACrB,sCAAsC;EACpC,WAAW;EACX,SAAS,WAAwB,qBAAqB,OAAO;EAC7D,iBAAiB;EAClB;CACD,6CAA6C;EAC3C,WAAW;EACX,SAAS,WAAwB,2BAA2B,OAAO;EACnE,iBAAiB;EAClB;CACD,6CAA6C;EAC3C,WAAW;EACX,SAAS,WAAwB,2BAA2B,OAAO;EACnE,iBAAiB;EAClB;CACF;AAGD,MAAM,0BAA0B,OAAO,YACrC,OAAO,QAAQ,eAAe,CAAC,KAAK,CAAC,YAAY,YAAY,CAC3D,aACC,WACC,OAAO,kBACH,wBACE,QACA,OAAO,iBACP,OAAO,UACR,GACD,OAAO,UACd,CAAC,CACH;AAED,MAAM,wBAAwB,OAAO,YACnC,OAAO,OAAO,eAAe,CAAC,KAAK,UAAU,CAAC,MAAM,WAAW,MAAM,OAAO,CAAC,CAC9E;;;;;AAMD,MAAa,gBAAgB,gBAE1B,YAAY;CACb,MAAM,SAAS,kBAAkB,QAAQ;CAGzC,MAAM,QAAQ,OAAO,gBAAgB;AACnB,QAAO,IAAI;CAG7B,IAAIC;CAEJ,MAAMC,0BAAwB;AAC5B,SAAOC,gBACL,cAAc;GAAE,GAAG;GAAQ,aAAa;GAAa,GAAG,OACzD;;CAGH,eAAe,cAAc;EAC3B,MAAM,SAAS,MAAM,uBAAuB;GAC1C,oBAAoB,IAAI,mBAAmB,QAAQ,OAAO;GAC1D,UAAU,QAAQ;AAChB,WAAO,MAAM,6BAA6B,IAAI;;GAEhD,UAAU,SAAS;AACjB,WAAO,KAAK,oDAAoD,OAAO;;GAEzE;GACD,CAAC;AAEF,SAAO,IAAI,uBAAuB,OAAO,QAAQ;AACjD,kCAAgC,EAC9B,cAAc,YAAY;AACxB,SAAM,kBAAkB,MAAM;KAEjC,CAAC;AACF,SAAO;;AAGT,QAAO;EACL,MAAM;EACN,SAAS;EAET,MAAM;GAEJ,SAAS;AAEP,WAAO,EACL,cAAc,EACZ,SAAS,CAAC,sBAAsB,EACjC,EACF;;GAEH,MAAM,aAAa;IACjB,MAAM,mBAAmBD,mBAAiB;AAE1C,4BAAwB,iBAAiB;AACzC,oCAAgC,EAC9B,eAAe,wBAAwB,iBAAiB,EACzD,CAAC;AAEF,QAAI,SAAS,CAAC,kBACZ,qBAAoB,MAAM,aAAa;;GAI3C,MAAM,WAAW;IACf,MAAM,mBAAmBA,mBAAiB;AAC1C,QAAI,CAAC,MACH,KAAI;AACF,WAAM,yBAAyB;MAC7B;MACA,kBAAkB;MAClB;MACD,CAAC;aACK,OAAO;AACd,YAAO,MAAM,6CAA6C,MAAM;;;GAIvE;EAED,QAAQ,UAAU;AAChB,iBACE,SAAS,QAAQ,SAAS,gBAAgB,gBAAgB;GAC5D,MAAM,mBAAmBA,mBAAiB;AAE1C,UAAO,cAAc;AAErB,YAAS,MAAM,WAAW,IAAI,mBAAmB;AAC/C,4BAAwB,iBAAiB;AACzC,oCAAgC,EAC9B,eAAe,wBAAwB,iBAAiB,EACzD,CAAC;KACF;AAEF,YAAS,MAAM,SAAS,WAAW,aAAa,YAAY;AAC1D,QAAI,gBAAgB,iBAAiB,CAAC,kBACpC,qBAAoB,MAAM,aAAa;KAEzC;AAEF,YAAS,MAAM,eAAe,WAAW,aAAa,YAAY;AAChE,QAAI,gBAAgB,aAClB,KAAI;AACF,WAAM,yBAAyB;MAC7B;MACA,kBAAkB;MAClB;MACD,CAAC;aACK,OAAO;AACd,YAAO,MAAM,6CAA6C,MAAM;AAChE,WAAM;;KAGV;AAGF,YAAS,MAAM,SAAS,WAAW,aAAa,YAAY;AAC1D,4BAAwB,iBAAiB;AACzC,UAAM,mBAAmB,MAAM;KAC/B;;EAGJ,UAAU,IAAI;GACZ,MAAM,UAAU,wBAAwB;AACxC,OAAI,QACF,QAAO,QAAQ,OAAO;AAExB,UAAO;;EAGT,MAAM;GACJ,QAAQ,EAEN,IAAI,YACL;GACD,QAAQ,IAAY;IAClB,MAAM,UAAU,sBAAsB;AACtC,QAAI,QACF,QAAO,QAAQ,OAAO;AAExB,WAAO;;GAEV;EAED,WAAW;GACT,QAAQ;IACN,IAAI;KACF,SAAS,CAAC,YAAY;KACtB,SAAS;KACV;IAGD,MAAM,OAAO,eAAe,eAAe;IAC5C;GACD,MAAM,QAAQ,MAAM,IAAI;AACtB,QAAI;KAEF,MAAM,SAAS,mBAAmB;MAChC;MACA,UAAU;MACV;MACD,CAAC;AAGF,SAAI,CAAC,OAAO,aAAa;AACvB,aAAO,MAAM,gCAAgC,KAAK;AAClD,aAAO;;KAET,MAAM,kBAAkB,IAAI,gBAAgBA,mBAAiB,CAAC;AAG9D,SAAI,OAAO,cAAc,OAAO,
WAAW,SAAS,GAAG;AACrD,YAAM,gBAAgB,wBAAwB,OAAO,WAAW;AAEhE,aAAO,MACL,SAAS,OAAO,WAAW,OAAO,2BAA2B,KAC9D;;AAGH,YAAO,MAAM,kCAAkC,KAAK;AACpD,YAAO;MACL,MAAM,OAAO;MACb,KAAK,OAAO;MACb;aACM,OAAO;AACd,YAAO,MAAM,sBAAsB,GAAG,IAAI,MAAM;AAChD,YAAO;;;GAGZ;EACF;EACD"}
1
+ {"version":3,"file":"unplugin.mjs","names":["translationServer: TranslationServer","buildStartTime: number | null","currentFramework: \"vite\" | \"webpack\" | \"next\" | null","webpackMode: \"development\" | \"production\" | undefined","getMetadataPath","rawGetMetadataPath"],"sources":["../../src/plugin/unplugin.ts"],"sourcesContent":["import { createUnplugin } from \"unplugin\";\nimport { transformComponent } from \"./transform\";\nimport type {\n LingoConfig,\n LingoInternalFields,\n PartialLingoConfig,\n} from \"../types\";\nimport {\n startTranslationServer,\n type TranslationServer,\n} from \"../translation-server\";\nimport {\n cleanupExistingMetadata,\n getMetadataPath as rawGetMetadataPath,\n MetadataManager,\n} from \"../metadata/manager\";\nimport { createLingoConfig } from \"../utils/config-factory\";\nimport { logger } from \"../utils/logger\";\nimport { useI18nRegex } from \"./transform/use-i18n\";\nimport {\n generateClientLocaleModule,\n generateConfigModule,\n generateServerLocaleModule,\n} from \"../virtual/code-generator\";\nimport { processBuildTranslations } from \"./build-translator\";\nimport { registerCleanupOnCurrentProcess } from \"./cleanup\";\nimport path from \"path\";\nimport fs from \"fs\";\nimport { TranslationService } from \"../translators\";\nimport trackEvent from \"../utils/observability\";\nimport {\n TRACKING_EVENTS,\n sanitizeConfigForTracking,\n} from \"../utils/tracking-events\";\n\nexport type LingoPluginOptions = PartialLingoConfig;\n\nlet translationServer: TranslationServer;\n\nconst PLUGIN_NAME = \"lingo-compiler\";\n\n// Tracking state\nlet alreadySentBuildStartEvent = false;\nlet buildStartTime: number | null = null;\nlet filesTransformedCount = 0;\nlet totalEntriesCount = 0;\nlet hasTransformErrors = false;\nlet currentFramework: \"vite\" | \"webpack\" | \"next\" | null = null;\n\nfunction tryLocalOrReturnVirtual(\n config: LingoConfig,\n fileName: string,\n virtualName: string,\n) {\n const customPath = path.join(config.sourceRoot, config.lingoDir, fileName);\n if (fs.existsSync(customPath)) {\n return customPath;\n }\n return virtualName;\n}\n\n/**\n * Single source of truth for virtual modules\n * Each entry defines both resolver (import path → virtual ID) and loader (virtual ID → code)\n *\n * If customFileCheck is defined, the specified file will be first searched for, and if not found virtual module will be used.\n */\nconst virtualModules = {\n \"@lingo.dev/compiler/virtual/config\": {\n virtualId: \"\\0virtual:lingo-config\",\n loader: (config: LingoConfig) => generateConfigModule(config),\n customFileCheck: undefined,\n },\n \"@lingo.dev/compiler/virtual/locale/server\": {\n virtualId: \"\\0virtual:locale-resolver.server\" as const,\n loader: (config: LingoConfig) => generateServerLocaleModule(config),\n customFileCheck: \"locale-resolver.server.ts\" as const,\n },\n \"@lingo.dev/compiler/virtual/locale/client\": {\n virtualId: \"\\0virtual:locale-resolver.client\" as const,\n loader: (config: LingoConfig) => generateClientLocaleModule(config),\n customFileCheck: \"locale-resolver.client.ts\" as const,\n },\n} as const;\n\n// Derive resolver and loader maps from the single source\nconst virtualModulesResolvers = Object.fromEntries(\n Object.entries(virtualModules).map(([importPath, module]) => [\n importPath,\n (config: LingoConfig) =>\n module.customFileCheck\n ? 
tryLocalOrReturnVirtual(\n config,\n module.customFileCheck,\n module.virtualId,\n )\n : module.virtualId,\n ]),\n);\n\nconst virtualModulesLoaders = Object.fromEntries(\n Object.values(virtualModules).map((value) => [value.virtualId, value.loader]),\n);\n\n/**\n * Send build start tracking event\n */\nfunction sendBuildStartEvent(\n framework: \"vite\" | \"webpack\" | \"next\",\n config: LingoConfig,\n) {\n if (alreadySentBuildStartEvent) return;\n alreadySentBuildStartEvent = true;\n\n trackEvent(TRACKING_EVENTS.BUILD_START, {\n framework,\n configuration: sanitizeConfigForTracking(config),\n environment: config.environment,\n });\n}\n\n/**\n * Universal plugin for Lingo.dev compiler\n * Supports Vite, Webpack\n */\nexport const lingoUnplugin = createUnplugin<\n LingoPluginOptions & Partial<Pick<LingoConfig, LingoInternalFields>>\n>((options) => {\n const config = createLingoConfig(options);\n\n // Won't work for webpack most likely. Use mode there to set correct environment in configs.\n const isDev = config.environment === \"development\";\n const startPort = config.dev.translationServerStartPort;\n\n // For webpack: store the actual mode and use it to compute the correct metadata path\n let webpackMode: \"development\" | \"production\" | undefined;\n // Should be dynamic, because webpack only tells us the mode inside the plugin, not inside the config.\n const getMetadataPath = () => {\n return rawGetMetadataPath(\n webpackMode ? { ...config, environment: webpackMode } : config,\n );\n };\n\n async function startServer() {\n const server = await startTranslationServer({\n translationService: new TranslationService(config, logger),\n onError: (err) => {\n logger.error(\"Translation server error:\", err);\n },\n onReady: (port) => {\n logger.info(`Translation server started successfully on port: ${port}`);\n },\n config,\n });\n // I don't like this quite a lot. 
But starting server inside the loader seems lame.\n config.dev.translationServerUrl = server.getUrl();\n registerCleanupOnCurrentProcess({\n asyncCleanup: async () => {\n await translationServer.stop();\n },\n });\n return server;\n }\n\n return {\n name: PLUGIN_NAME,\n enforce: \"pre\", // Run before other plugins (especially before React plugin)\n\n vite: {\n // Vite handles deep merge\n config() {\n // Required for custom virtual like modules to be resolved; otherwise they are bundled with raw source code.\n return {\n optimizeDeps: {\n exclude: [\"@lingo.dev/compiler\"],\n },\n };\n },\n async buildStart() {\n const metadataFilePath = getMetadataPath();\n\n // Track build start\n currentFramework = \"vite\";\n sendBuildStartEvent(\"vite\", config);\n buildStartTime = Date.now();\n filesTransformedCount = 0;\n totalEntriesCount = 0;\n hasTransformErrors = false;\n\n cleanupExistingMetadata(metadataFilePath);\n registerCleanupOnCurrentProcess({\n cleanup: () => cleanupExistingMetadata(metadataFilePath),\n });\n\n if (isDev && !translationServer) {\n translationServer = await startServer();\n }\n },\n\n async buildEnd() {\n const metadataFilePath = getMetadataPath();\n if (!isDev) {\n try {\n await processBuildTranslations({\n config,\n publicOutputPath: \"public/translations\",\n metadataFilePath,\n });\n\n if (buildStartTime && !hasTransformErrors) {\n trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {\n framework: \"vite\",\n stats: {\n totalEntries: totalEntriesCount,\n filesTransformed: filesTransformedCount,\n buildDuration: Date.now() - buildStartTime,\n },\n environment: config.environment,\n });\n }\n } catch (error) {\n logger.error(\"Build-time translation processing failed:\", error);\n }\n } else if (buildStartTime && !hasTransformErrors) {\n trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {\n framework: \"vite\",\n stats: {\n totalEntries: totalEntriesCount,\n filesTransformed: filesTransformedCount,\n buildDuration: Date.now() - buildStartTime,\n },\n environment: config.environment,\n });\n }\n },\n },\n\n webpack(compiler) {\n webpackMode =\n compiler.options.mode === \"development\" ? 
\"development\" : \"production\";\n const metadataFilePath = getMetadataPath();\n // Yes, this is dirty play, but webpack runs only for this plugin, and this way we save people from using wrong config\n config.environment = webpackMode;\n\n compiler.hooks.initialize.tap(PLUGIN_NAME, () => {\n // Track build start\n currentFramework = \"webpack\";\n sendBuildStartEvent(\"webpack\", config);\n buildStartTime = Date.now();\n filesTransformedCount = 0;\n totalEntriesCount = 0;\n hasTransformErrors = false;\n\n cleanupExistingMetadata(metadataFilePath);\n registerCleanupOnCurrentProcess({\n cleanup: () => cleanupExistingMetadata(metadataFilePath),\n });\n });\n\n compiler.hooks.watchRun.tapPromise(PLUGIN_NAME, async () => {\n if (webpackMode === \"development\" && !translationServer) {\n translationServer = await startServer();\n }\n });\n\n compiler.hooks.additionalPass.tapPromise(PLUGIN_NAME, async () => {\n if (webpackMode === \"production\") {\n try {\n await processBuildTranslations({\n config,\n publicOutputPath: \"public/translations\",\n metadataFilePath,\n });\n\n if (buildStartTime && !hasTransformErrors) {\n trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {\n framework: \"webpack\",\n stats: {\n totalEntries: totalEntriesCount,\n filesTransformed: filesTransformedCount,\n buildDuration: Date.now() - buildStartTime,\n },\n environment: config.environment,\n });\n }\n } catch (error) {\n logger.error(\"Build-time translation processing failed:\", error);\n throw error;\n }\n } else if (buildStartTime && !hasTransformErrors) {\n trackEvent(TRACKING_EVENTS.BUILD_SUCCESS, {\n framework: \"webpack\",\n stats: {\n totalEntries: totalEntriesCount,\n filesTransformed: filesTransformedCount,\n buildDuration: Date.now() - buildStartTime,\n },\n environment: config.environment,\n });\n }\n });\n\n // Duplicates the cleanup process handlers does, but won't hurt since cleanup is idempotent.\n compiler.hooks.shutdown.tapPromise(PLUGIN_NAME, async () => {\n cleanupExistingMetadata(metadataFilePath);\n await translationServer?.stop();\n });\n },\n\n resolveId(id) {\n const handler = virtualModulesResolvers[id];\n if (handler) {\n return handler(config);\n }\n return null;\n },\n\n load: {\n filter: {\n // Without the filter webpack goes mad\n id: /virtual:/,\n },\n handler(id: string) {\n const handler = virtualModulesLoaders[id];\n if (handler) {\n return handler(config);\n }\n return null;\n },\n },\n\n transform: {\n filter: {\n id: {\n include: [/\\.[tj]sx$/],\n exclude: /node_modules/,\n },\n // If useDirective is enabled, only process files with \"use i18n\"\n // This is more efficient than checking in the handler\n code: config.useDirective ? 
useI18nRegex : undefined,\n },\n async handler(code, id) {\n try {\n // Transform the component\n const result = transformComponent({\n code,\n filePath: id,\n config,\n });\n\n // If no transformation occurred, return original code\n if (!result.transformed) {\n logger.debug(`No transformation needed for ${id}`);\n return null;\n }\n const metadataManager = new MetadataManager(getMetadataPath());\n\n // Update metadata with new entries (thread-safe)\n if (result.newEntries && result.newEntries.length > 0) {\n await metadataManager.saveMetadataWithEntries(result.newEntries);\n\n // Track stats for observability\n totalEntriesCount += result.newEntries.length;\n filesTransformedCount++;\n\n logger.debug(\n `Found ${result.newEntries.length} translatable text(s) in ${id}`,\n );\n }\n\n logger.debug(`Returning transformed code for ${id}`);\n return {\n code: result.code,\n map: result.map,\n };\n } catch (error) {\n hasTransformErrors = true;\n\n // Track error event\n if (currentFramework) {\n trackEvent(TRACKING_EVENTS.BUILD_ERROR, {\n framework: currentFramework,\n errorType: \"transform\",\n errorMessage: error instanceof Error ? error.message : \"Unknown transform error\",\n filePath: id,\n environment: config.environment,\n });\n }\n\n logger.error(`Transform error in ${id}:`, error);\n return null;\n }\n },\n },\n };\n});\n"],"mappings":";;;;;;;;;;;;;;;;;AAqCA,IAAIA;AAEJ,MAAM,cAAc;AAGpB,IAAI,6BAA6B;AACjC,IAAIC,iBAAgC;AACpC,IAAI,wBAAwB;AAC5B,IAAI,oBAAoB;AACxB,IAAI,qBAAqB;AACzB,IAAIC,mBAAuD;AAE3D,SAAS,wBACP,QACA,UACA,aACA;CACA,MAAM,aAAa,KAAK,KAAK,OAAO,YAAY,OAAO,UAAU,SAAS;AAC1E,KAAI,GAAG,WAAW,WAAW,CAC3B,QAAO;AAET,QAAO;;;;;;;;AAST,MAAM,iBAAiB;CACrB,sCAAsC;EACpC,WAAW;EACX,SAAS,WAAwB,qBAAqB,OAAO;EAC7D,iBAAiB;EAClB;CACD,6CAA6C;EAC3C,WAAW;EACX,SAAS,WAAwB,2BAA2B,OAAO;EACnE,iBAAiB;EAClB;CACD,6CAA6C;EAC3C,WAAW;EACX,SAAS,WAAwB,2BAA2B,OAAO;EACnE,iBAAiB;EAClB;CACF;AAGD,MAAM,0BAA0B,OAAO,YACrC,OAAO,QAAQ,eAAe,CAAC,KAAK,CAAC,YAAY,YAAY,CAC3D,aACC,WACC,OAAO,kBACH,wBACE,QACA,OAAO,iBACP,OAAO,UACR,GACD,OAAO,UACd,CAAC,CACH;AAED,MAAM,wBAAwB,OAAO,YACnC,OAAO,OAAO,eAAe,CAAC,KAAK,UAAU,CAAC,MAAM,WAAW,MAAM,OAAO,CAAC,CAC9E;;;;AAKD,SAAS,oBACP,WACA,QACA;AACA,KAAI,2BAA4B;AAChC,8BAA6B;AAE7B,YAAW,gBAAgB,aAAa;EACtC;EACA,eAAe,0BAA0B,OAAO;EAChD,aAAa,OAAO;EACrB,CAAC;;;;;;AAOJ,MAAa,gBAAgB,gBAE1B,YAAY;CACb,MAAM,SAAS,kBAAkB,QAAQ;CAGzC,MAAM,QAAQ,OAAO,gBAAgB;AACnB,QAAO,IAAI;CAG7B,IAAIC;CAEJ,MAAMC,0BAAwB;AAC5B,SAAOC,gBACL,cAAc;GAAE,GAAG;GAAQ,aAAa;GAAa,GAAG,OACzD;;CAGH,eAAe,cAAc;EAC3B,MAAM,SAAS,MAAM,uBAAuB;GAC1C,oBAAoB,IAAI,mBAAmB,QAAQ,OAAO;GAC1D,UAAU,QAAQ;AAChB,WAAO,MAAM,6BAA6B,IAAI;;GAEhD,UAAU,SAAS;AACjB,WAAO,KAAK,oDAAoD,OAAO;;GAEzE;GACD,CAAC;AAEF,SAAO,IAAI,uBAAuB,OAAO,QAAQ;AACjD,kCAAgC,EAC9B,cAAc,YAAY;AACxB,SAAM,kBAAkB,MAAM;KAEjC,CAAC;AACF,SAAO;;AAGT,QAAO;EACL,MAAM;EACN,SAAS;EAET,MAAM;GAEJ,SAAS;AAEP,WAAO,EACL,cAAc,EACZ,SAAS,CAAC,sBAAsB,EACjC,EACF;;GAEH,MAAM,aAAa;IACjB,MAAM,mBAAmBD,mBAAiB;AAG1C,uBAAmB;AACnB,wBAAoB,QAAQ,OAAO;AACnC,qBAAiB,KAAK,KAAK;AAC3B,4BAAwB;AACxB,wBAAoB;AACpB,yBAAqB;AAErB,4BAAwB,iBAAiB;AACzC,oCAAgC,EAC9B,eAAe,wBAAwB,iBAAiB,EACzD,CAAC;AAEF,QAAI,SAAS,CAAC,kBACZ,qBAAoB,MAAM,aAAa;;GAI3C,MAAM,WAAW;IACf,MAAM,mBAAmBA,mBAAiB;AAC1C,QAAI,CAAC,MACH,KAAI;AACF,WAAM,yBAAyB;MAC7B;MACA,kBAAkB;MAClB;MACD,CAAC;AAEF,SAAI,kBAAkB,CAAC,mBACrB,YAAW,gBAAgB,eAAe;MACxC,WAAW;MACX,OAAO;OACL,cAAc;OACd,kBAAkB;OAClB,eAAe,KAAK,KAAK,GAAG;OAC7B;MACD,aAAa,OAAO;MACrB,CAAC;aAEG,OAAO;AACd,YAAO,MAAM,6CAA6C,MAAM;;aAEzD,kBAAkB,CAAC,mBAC5B,YAAW,gBAAgB,eAAe;KACxC,WAAW;KACX,OAAO;MACL,cAAc;MACd,kBAAkB;MAClB,eAAe,KAAK,KAAK,GAAG;MAC7B;KACD,aAAa,OAAO;KACrB,CAA
C;;GAGP;EAED,QAAQ,UAAU;AAChB,iBACE,SAAS,QAAQ,SAAS,gBAAgB,gBAAgB;GAC5D,MAAM,mBAAmBA,mBAAiB;AAE1C,UAAO,cAAc;AAErB,YAAS,MAAM,WAAW,IAAI,mBAAmB;AAE/C,uBAAmB;AACnB,wBAAoB,WAAW,OAAO;AACtC,qBAAiB,KAAK,KAAK;AAC3B,4BAAwB;AACxB,wBAAoB;AACpB,yBAAqB;AAErB,4BAAwB,iBAAiB;AACzC,oCAAgC,EAC9B,eAAe,wBAAwB,iBAAiB,EACzD,CAAC;KACF;AAEF,YAAS,MAAM,SAAS,WAAW,aAAa,YAAY;AAC1D,QAAI,gBAAgB,iBAAiB,CAAC,kBACpC,qBAAoB,MAAM,aAAa;KAEzC;AAEF,YAAS,MAAM,eAAe,WAAW,aAAa,YAAY;AAChE,QAAI,gBAAgB,aAClB,KAAI;AACF,WAAM,yBAAyB;MAC7B;MACA,kBAAkB;MAClB;MACD,CAAC;AAEF,SAAI,kBAAkB,CAAC,mBACrB,YAAW,gBAAgB,eAAe;MACxC,WAAW;MACX,OAAO;OACL,cAAc;OACd,kBAAkB;OAClB,eAAe,KAAK,KAAK,GAAG;OAC7B;MACD,aAAa,OAAO;MACrB,CAAC;aAEG,OAAO;AACd,YAAO,MAAM,6CAA6C,MAAM;AAChE,WAAM;;aAEC,kBAAkB,CAAC,mBAC5B,YAAW,gBAAgB,eAAe;KACxC,WAAW;KACX,OAAO;MACL,cAAc;MACd,kBAAkB;MAClB,eAAe,KAAK,KAAK,GAAG;MAC7B;KACD,aAAa,OAAO;KACrB,CAAC;KAEJ;AAGF,YAAS,MAAM,SAAS,WAAW,aAAa,YAAY;AAC1D,4BAAwB,iBAAiB;AACzC,UAAM,mBAAmB,MAAM;KAC/B;;EAGJ,UAAU,IAAI;GACZ,MAAM,UAAU,wBAAwB;AACxC,OAAI,QACF,QAAO,QAAQ,OAAO;AAExB,UAAO;;EAGT,MAAM;GACJ,QAAQ,EAEN,IAAI,YACL;GACD,QAAQ,IAAY;IAClB,MAAM,UAAU,sBAAsB;AACtC,QAAI,QACF,QAAO,QAAQ,OAAO;AAExB,WAAO;;GAEV;EAED,WAAW;GACT,QAAQ;IACN,IAAI;KACF,SAAS,CAAC,YAAY;KACtB,SAAS;KACV;IAGD,MAAM,OAAO,eAAe,eAAe;IAC5C;GACD,MAAM,QAAQ,MAAM,IAAI;AACtB,QAAI;KAEF,MAAM,SAAS,mBAAmB;MAChC;MACA,UAAU;MACV;MACD,CAAC;AAGF,SAAI,CAAC,OAAO,aAAa;AACvB,aAAO,MAAM,gCAAgC,KAAK;AAClD,aAAO;;KAET,MAAM,kBAAkB,IAAI,gBAAgBA,mBAAiB,CAAC;AAG9D,SAAI,OAAO,cAAc,OAAO,WAAW,SAAS,GAAG;AACrD,YAAM,gBAAgB,wBAAwB,OAAO,WAAW;AAGhE,2BAAqB,OAAO,WAAW;AACvC;AAEA,aAAO,MACL,SAAS,OAAO,WAAW,OAAO,2BAA2B,KAC9D;;AAGH,YAAO,MAAM,kCAAkC,KAAK;AACpD,YAAO;MACL,MAAM,OAAO;MACb,KAAK,OAAO;MACb;aACM,OAAO;AACd,0BAAqB;AAGrB,SAAI,iBACF,YAAW,gBAAgB,aAAa;MACtC,WAAW;MACX,WAAW;MACX,cAAc,iBAAiB,QAAQ,MAAM,UAAU;MACvD,UAAU;MACV,aAAa,OAAO;MACrB,CAAC;AAGJ,YAAO,MAAM,sBAAsB,GAAG,IAAI,MAAM;AAChD,YAAO;;;GAGZ;EACF;EACD"}
@@ -1,12 +1,12 @@
1
1
  import { LingoProviderProps } from "../shared/LingoProvider.cjs";
2
- import * as react_jsx_runtime2 from "react/jsx-runtime";
2
+ import * as react_jsx_runtime1 from "react/jsx-runtime";
3
3
 
4
4
  //#region src/react/server/ServerLingoProvider.d.ts
5
5
  declare function LingoProvider({
6
6
  initialLocale,
7
7
  initialTranslations,
8
8
  ...rest
9
- }: LingoProviderProps): Promise<react_jsx_runtime2.JSX.Element>;
9
+ }: LingoProviderProps): Promise<react_jsx_runtime1.JSX.Element>;
10
10
  //#endregion
11
11
  export { LingoProvider };
12
12
  //# sourceMappingURL=ServerLingoProvider.d.cts.map
@@ -1,5 +1,5 @@
1
1
  import { LocaleCode } from "lingo.dev/spec";
2
- import * as react_jsx_runtime1 from "react/jsx-runtime";
2
+ import * as react_jsx_runtime3 from "react/jsx-runtime";
3
3
  import { PropsWithChildren } from "react";
4
4
 
5
5
  //#region src/react/shared/LingoProvider.d.ts
@@ -70,7 +70,7 @@ declare function LingoProvider__Dev({
70
70
  router,
71
71
  devWidget,
72
72
  children
73
- }: LingoProviderProps): react_jsx_runtime1.JSX.Element;
73
+ }: LingoProviderProps): react_jsx_runtime3.JSX.Element;
74
74
  //#endregion
75
75
  export { LingoProvider, LingoProviderProps };
76
76
  //# sourceMappingURL=LingoProvider.d.cts.map
@@ -1,5 +1,5 @@
1
1
  import { LocaleCode } from "lingo.dev/spec";
2
- import * as react_jsx_runtime3 from "react/jsx-runtime";
2
+ import * as react_jsx_runtime2 from "react/jsx-runtime";
3
3
  import { CSSProperties } from "react";
4
4
 
5
5
  //#region src/react/shared/LocaleSwitcher.d.ts
@@ -65,7 +65,7 @@ declare function LocaleSwitcher({
65
65
  style,
66
66
  className,
67
67
  showLoadingState
68
- }: LocaleSwitcherProps): react_jsx_runtime3.JSX.Element;
68
+ }: LocaleSwitcherProps): react_jsx_runtime2.JSX.Element;
69
69
  //#endregion
70
70
  export { LocaleSwitcher };
71
71
  //# sourceMappingURL=LocaleSwitcher.d.cts.map
@@ -1,6 +1,6 @@
1
1
  import { dictionaryFrom } from "./api.mjs";
2
2
  import { DEFAULT_TIMEOUTS, withTimeout } from "../utils/timeout.mjs";
3
- import * as fs$1 from "fs/promises";
3
+ import * as fs from "fs/promises";
4
4
  import * as path$1 from "path";
5
5
 
6
6
  //#region src/translators/local-cache.ts
@@ -27,7 +27,7 @@ var LocalTranslationCache = class {
27
27
  async getDictionary(locale) {
28
28
  try {
29
29
  const cachePath = this.getCachePath(locale);
30
- const content = await withTimeout(fs$1.readFile(cachePath, "utf-8"), DEFAULT_TIMEOUTS.FILE_IO, `Read cache for ${locale}`);
30
+ const content = await withTimeout(fs.readFile(cachePath, "utf-8"), DEFAULT_TIMEOUTS.FILE_IO, `Read cache for ${locale}`);
31
31
  return JSON.parse(content);
32
32
  } catch {
33
33
  return null;
@@ -41,8 +41,8 @@ var LocalTranslationCache = class {
41
41
  try {
42
42
  const cachePath = this.getCachePath(locale);
43
43
  const cacheDir = path$1.dirname(cachePath);
44
- await withTimeout(fs$1.mkdir(cacheDir, { recursive: true }), DEFAULT_TIMEOUTS.FILE_IO, `Create cache directory for ${locale}`);
45
- await withTimeout(fs$1.writeFile(cachePath, JSON.stringify(dictionary, null, 2), "utf-8"), DEFAULT_TIMEOUTS.FILE_IO, `Write cache for ${locale}`);
44
+ await withTimeout(fs.mkdir(cacheDir, { recursive: true }), DEFAULT_TIMEOUTS.FILE_IO, `Create cache directory for ${locale}`);
45
+ await withTimeout(fs.writeFile(cachePath, JSON.stringify(dictionary, null, 2), "utf-8"), DEFAULT_TIMEOUTS.FILE_IO, `Write cache for ${locale}`);
46
46
  } catch (error) {
47
47
  this.logger.error(`Failed to write cache for locale ${locale}:`, error);
48
48
  throw error;
@@ -82,7 +82,7 @@ var LocalTranslationCache = class {
82
82
  async has(locale) {
83
83
  try {
84
84
  const cachePath = this.getCachePath(locale);
85
- await fs$1.access(cachePath);
85
+ await fs.access(cachePath);
86
86
  return true;
87
87
  } catch {
88
88
  return false;
@@ -94,7 +94,7 @@ var LocalTranslationCache = class {
94
94
  async clear(locale) {
95
95
  try {
96
96
  const cachePath = this.getCachePath(locale);
97
- await fs$1.unlink(cachePath);
97
+ await fs.unlink(cachePath);
98
98
  } catch {}
99
99
  }
100
100
  /**
@@ -102,8 +102,8 @@ var LocalTranslationCache = class {
102
102
  */
103
103
  async clearAll() {
104
104
  try {
105
- const files = await fs$1.readdir(this.config.cacheDir);
106
- await Promise.all(files.filter((file) => file.endsWith(".json")).map((file) => fs$1.unlink(path$1.join(this.config.cacheDir, file))));
105
+ const files = await fs.readdir(this.config.cacheDir);
106
+ await Promise.all(files.filter((file) => file.endsWith(".json")).map((file) => fs.unlink(path$1.join(this.config.cacheDir, file))));
107
107
  } catch {}
108
108
  }
109
109
  };
@@ -1 +1 @@
1
- {"version":3,"file":"local-cache.mjs","names":["logger: Logger","path","fs"],"sources":["../../src/translators/local-cache.ts"],"sourcesContent":["/**\n * Local disk-based translation cache implementation\n */\n\nimport * as fs from \"fs/promises\";\nimport * as path from \"path\";\nimport type { LocalCacheConfig, TranslationCache } from \"./cache\";\nimport { dictionaryFrom, type DictionarySchema } from \"./api\";\nimport { DEFAULT_TIMEOUTS, withTimeout } from \"../utils/timeout\";\nimport type { Logger } from \"../utils/logger\";\nimport type { LocaleCode } from \"lingo.dev/spec\";\n\n/**\n * Local file system cache for translations\n * Stores translations as JSON files in .lingo/cache/\n */\nexport class LocalTranslationCache implements TranslationCache {\n private config: LocalCacheConfig;\n\n constructor(\n config: LocalCacheConfig,\n private logger: Logger,\n ) {\n this.config = config;\n }\n\n private getCachePath(locale: LocaleCode): string {\n return path.join(this.config.cacheDir, `${locale}.json`);\n }\n\n /**\n * Read dictionary file from disk\n * Times out after 10 seconds to prevent indefinite hangs\n */\n async getDictionary(locale: LocaleCode): Promise<DictionarySchema | null> {\n try {\n const cachePath = this.getCachePath(locale);\n const content = await withTimeout(\n fs.readFile(cachePath, \"utf-8\"),\n DEFAULT_TIMEOUTS.FILE_IO,\n `Read cache for ${locale}`,\n );\n return JSON.parse(content);\n } catch {\n return null;\n }\n }\n\n /**\n * Write dictionary file to disk\n * Times out after 10 seconds to prevent indefinite hangs\n */\n private async setDictionary(\n locale: LocaleCode,\n dictionary: DictionarySchema,\n ): Promise<void> {\n try {\n const cachePath = this.getCachePath(locale);\n const cacheDir = path.dirname(cachePath);\n\n // Ensure cache directory exists\n await withTimeout(\n fs.mkdir(cacheDir, { recursive: true }),\n DEFAULT_TIMEOUTS.FILE_IO,\n `Create cache directory for ${locale}`,\n );\n\n // Write cache file\n await withTimeout(\n fs.writeFile(cachePath, JSON.stringify(dictionary, null, 2), \"utf-8\"),\n DEFAULT_TIMEOUTS.FILE_IO,\n `Write cache for ${locale}`,\n );\n } catch (error) {\n this.logger.error(`Failed to write cache for locale ${locale}:`, error);\n throw error;\n }\n }\n\n /**\n * Get cached translations for a locale\n */\n async get(\n locale: LocaleCode,\n hashes?: string[],\n ): Promise<Record<string, string>> {\n const dictionary = await this.getDictionary(locale);\n if (!dictionary) {\n return {};\n }\n if (hashes) {\n return hashes.reduce(\n (acc, hash) => ({ ...acc, [hash]: dictionary.entries[hash] }),\n {},\n );\n }\n return dictionary.entries || {};\n }\n\n /**\n * Update cache with new translations (merge)\n */\n async update(\n locale: LocaleCode,\n translations: Record<string, string>,\n ): Promise<void> {\n const existing = await this.get(locale);\n\n const merged = { ...existing, ...translations };\n\n await this.set(locale, merged);\n }\n\n /**\n * Replace entire cache for a locale\n */\n async set(\n locale: LocaleCode,\n translations: Record<string, string>,\n ): Promise<void> {\n await this.setDictionary(locale, dictionaryFrom(locale, translations));\n }\n\n /**\n * Check if cache exists for a locale\n */\n async has(locale: LocaleCode): Promise<boolean> {\n try {\n const cachePath = this.getCachePath(locale);\n await fs.access(cachePath);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Clear cache for a specific locale\n */\n async clear(locale: LocaleCode): Promise<void> {\n try {\n const cachePath = 
this.getCachePath(locale);\n await fs.unlink(cachePath);\n } catch {\n // Ignore errors if file doesn't exist\n }\n }\n\n /**\n * Clear all cached translations\n */\n async clearAll(): Promise<void> {\n try {\n const files = await fs.readdir(this.config.cacheDir);\n\n await Promise.all(\n files\n .filter((file) => file.endsWith(\".json\"))\n .map((file) => fs.unlink(path.join(this.config.cacheDir, file))),\n );\n } catch {\n // Ignore errors if directory doesn't exist\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;AAgBA,IAAa,wBAAb,MAA+D;CAC7D,AAAQ;CAER,YACE,QACA,AAAQA,QACR;EADQ;AAER,OAAK,SAAS;;CAGhB,AAAQ,aAAa,QAA4B;AAC/C,SAAOC,OAAK,KAAK,KAAK,OAAO,UAAU,GAAG,OAAO,OAAO;;;;;;CAO1D,MAAM,cAAc,QAAsD;AACxE,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;GAC3C,MAAM,UAAU,MAAM,YACpBC,KAAG,SAAS,WAAW,QAAQ,EAC/B,iBAAiB,SACjB,kBAAkB,SACnB;AACD,UAAO,KAAK,MAAM,QAAQ;UACpB;AACN,UAAO;;;;;;;CAQX,MAAc,cACZ,QACA,YACe;AACf,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;GAC3C,MAAM,WAAWD,OAAK,QAAQ,UAAU;AAGxC,SAAM,YACJC,KAAG,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC,EACvC,iBAAiB,SACjB,8BAA8B,SAC/B;AAGD,SAAM,YACJA,KAAG,UAAU,WAAW,KAAK,UAAU,YAAY,MAAM,EAAE,EAAE,QAAQ,EACrE,iBAAiB,SACjB,mBAAmB,SACpB;WACM,OAAO;AACd,QAAK,OAAO,MAAM,oCAAoC,OAAO,IAAI,MAAM;AACvE,SAAM;;;;;;CAOV,MAAM,IACJ,QACA,QACiC;EACjC,MAAM,aAAa,MAAM,KAAK,cAAc,OAAO;AACnD,MAAI,CAAC,WACH,QAAO,EAAE;AAEX,MAAI,OACF,QAAO,OAAO,QACX,KAAK,UAAU;GAAE,GAAG;IAAM,OAAO,WAAW,QAAQ;GAAO,GAC5D,EAAE,CACH;AAEH,SAAO,WAAW,WAAW,EAAE;;;;;CAMjC,MAAM,OACJ,QACA,cACe;EAGf,MAAM,SAAS;GAAE,GAFA,MAAM,KAAK,IAAI,OAAO;GAET,GAAG;GAAc;AAE/C,QAAM,KAAK,IAAI,QAAQ,OAAO;;;;;CAMhC,MAAM,IACJ,QACA,cACe;AACf,QAAM,KAAK,cAAc,QAAQ,eAAe,QAAQ,aAAa,CAAC;;;;;CAMxE,MAAM,IAAI,QAAsC;AAC9C,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,SAAMA,KAAG,OAAO,UAAU;AAC1B,UAAO;UACD;AACN,UAAO;;;;;;CAOX,MAAM,MAAM,QAAmC;AAC7C,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,SAAMA,KAAG,OAAO,UAAU;UACpB;;;;;CAQV,MAAM,WAA0B;AAC9B,MAAI;GACF,MAAM,QAAQ,MAAMA,KAAG,QAAQ,KAAK,OAAO,SAAS;AAEpD,SAAM,QAAQ,IACZ,MACG,QAAQ,SAAS,KAAK,SAAS,QAAQ,CAAC,CACxC,KAAK,SAASA,KAAG,OAAOD,OAAK,KAAK,KAAK,OAAO,UAAU,KAAK,CAAC,CAAC,CACnE;UACK"}
1
+ {"version":3,"file":"local-cache.mjs","names":["logger: Logger","path"],"sources":["../../src/translators/local-cache.ts"],"sourcesContent":["/**\n * Local disk-based translation cache implementation\n */\n\nimport * as fs from \"fs/promises\";\nimport * as path from \"path\";\nimport type { LocalCacheConfig, TranslationCache } from \"./cache\";\nimport { dictionaryFrom, type DictionarySchema } from \"./api\";\nimport { DEFAULT_TIMEOUTS, withTimeout } from \"../utils/timeout\";\nimport type { Logger } from \"../utils/logger\";\nimport type { LocaleCode } from \"lingo.dev/spec\";\n\n/**\n * Local file system cache for translations\n * Stores translations as JSON files in .lingo/cache/\n */\nexport class LocalTranslationCache implements TranslationCache {\n private config: LocalCacheConfig;\n\n constructor(\n config: LocalCacheConfig,\n private logger: Logger,\n ) {\n this.config = config;\n }\n\n private getCachePath(locale: LocaleCode): string {\n return path.join(this.config.cacheDir, `${locale}.json`);\n }\n\n /**\n * Read dictionary file from disk\n * Times out after 10 seconds to prevent indefinite hangs\n */\n async getDictionary(locale: LocaleCode): Promise<DictionarySchema | null> {\n try {\n const cachePath = this.getCachePath(locale);\n const content = await withTimeout(\n fs.readFile(cachePath, \"utf-8\"),\n DEFAULT_TIMEOUTS.FILE_IO,\n `Read cache for ${locale}`,\n );\n return JSON.parse(content);\n } catch {\n return null;\n }\n }\n\n /**\n * Write dictionary file to disk\n * Times out after 10 seconds to prevent indefinite hangs\n */\n private async setDictionary(\n locale: LocaleCode,\n dictionary: DictionarySchema,\n ): Promise<void> {\n try {\n const cachePath = this.getCachePath(locale);\n const cacheDir = path.dirname(cachePath);\n\n // Ensure cache directory exists\n await withTimeout(\n fs.mkdir(cacheDir, { recursive: true }),\n DEFAULT_TIMEOUTS.FILE_IO,\n `Create cache directory for ${locale}`,\n );\n\n // Write cache file\n await withTimeout(\n fs.writeFile(cachePath, JSON.stringify(dictionary, null, 2), \"utf-8\"),\n DEFAULT_TIMEOUTS.FILE_IO,\n `Write cache for ${locale}`,\n );\n } catch (error) {\n this.logger.error(`Failed to write cache for locale ${locale}:`, error);\n throw error;\n }\n }\n\n /**\n * Get cached translations for a locale\n */\n async get(\n locale: LocaleCode,\n hashes?: string[],\n ): Promise<Record<string, string>> {\n const dictionary = await this.getDictionary(locale);\n if (!dictionary) {\n return {};\n }\n if (hashes) {\n return hashes.reduce(\n (acc, hash) => ({ ...acc, [hash]: dictionary.entries[hash] }),\n {},\n );\n }\n return dictionary.entries || {};\n }\n\n /**\n * Update cache with new translations (merge)\n */\n async update(\n locale: LocaleCode,\n translations: Record<string, string>,\n ): Promise<void> {\n const existing = await this.get(locale);\n\n const merged = { ...existing, ...translations };\n\n await this.set(locale, merged);\n }\n\n /**\n * Replace entire cache for a locale\n */\n async set(\n locale: LocaleCode,\n translations: Record<string, string>,\n ): Promise<void> {\n await this.setDictionary(locale, dictionaryFrom(locale, translations));\n }\n\n /**\n * Check if cache exists for a locale\n */\n async has(locale: LocaleCode): Promise<boolean> {\n try {\n const cachePath = this.getCachePath(locale);\n await fs.access(cachePath);\n return true;\n } catch {\n return false;\n }\n }\n\n /**\n * Clear cache for a specific locale\n */\n async clear(locale: LocaleCode): Promise<void> {\n try {\n const cachePath = 
this.getCachePath(locale);\n await fs.unlink(cachePath);\n } catch {\n // Ignore errors if file doesn't exist\n }\n }\n\n /**\n * Clear all cached translations\n */\n async clearAll(): Promise<void> {\n try {\n const files = await fs.readdir(this.config.cacheDir);\n\n await Promise.all(\n files\n .filter((file) => file.endsWith(\".json\"))\n .map((file) => fs.unlink(path.join(this.config.cacheDir, file))),\n );\n } catch {\n // Ignore errors if directory doesn't exist\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;;AAgBA,IAAa,wBAAb,MAA+D;CAC7D,AAAQ;CAER,YACE,QACA,AAAQA,QACR;EADQ;AAER,OAAK,SAAS;;CAGhB,AAAQ,aAAa,QAA4B;AAC/C,SAAOC,OAAK,KAAK,KAAK,OAAO,UAAU,GAAG,OAAO,OAAO;;;;;;CAO1D,MAAM,cAAc,QAAsD;AACxE,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;GAC3C,MAAM,UAAU,MAAM,YACpB,GAAG,SAAS,WAAW,QAAQ,EAC/B,iBAAiB,SACjB,kBAAkB,SACnB;AACD,UAAO,KAAK,MAAM,QAAQ;UACpB;AACN,UAAO;;;;;;;CAQX,MAAc,cACZ,QACA,YACe;AACf,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;GAC3C,MAAM,WAAWA,OAAK,QAAQ,UAAU;AAGxC,SAAM,YACJ,GAAG,MAAM,UAAU,EAAE,WAAW,MAAM,CAAC,EACvC,iBAAiB,SACjB,8BAA8B,SAC/B;AAGD,SAAM,YACJ,GAAG,UAAU,WAAW,KAAK,UAAU,YAAY,MAAM,EAAE,EAAE,QAAQ,EACrE,iBAAiB,SACjB,mBAAmB,SACpB;WACM,OAAO;AACd,QAAK,OAAO,MAAM,oCAAoC,OAAO,IAAI,MAAM;AACvE,SAAM;;;;;;CAOV,MAAM,IACJ,QACA,QACiC;EACjC,MAAM,aAAa,MAAM,KAAK,cAAc,OAAO;AACnD,MAAI,CAAC,WACH,QAAO,EAAE;AAEX,MAAI,OACF,QAAO,OAAO,QACX,KAAK,UAAU;GAAE,GAAG;IAAM,OAAO,WAAW,QAAQ;GAAO,GAC5D,EAAE,CACH;AAEH,SAAO,WAAW,WAAW,EAAE;;;;;CAMjC,MAAM,OACJ,QACA,cACe;EAGf,MAAM,SAAS;GAAE,GAFA,MAAM,KAAK,IAAI,OAAO;GAET,GAAG;GAAc;AAE/C,QAAM,KAAK,IAAI,QAAQ,OAAO;;;;;CAMhC,MAAM,IACJ,QACA,cACe;AACf,QAAM,KAAK,cAAc,QAAQ,eAAe,QAAQ,aAAa,CAAC;;;;;CAMxE,MAAM,IAAI,QAAsC;AAC9C,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,SAAM,GAAG,OAAO,UAAU;AAC1B,UAAO;UACD;AACN,UAAO;;;;;;CAOX,MAAM,MAAM,QAAmC;AAC7C,MAAI;GACF,MAAM,YAAY,KAAK,aAAa,OAAO;AAC3C,SAAM,GAAG,OAAO,UAAU;UACpB;;;;;CAQV,MAAM,WAA0B;AAC9B,MAAI;GACF,MAAM,QAAQ,MAAM,GAAG,QAAQ,KAAK,OAAO,SAAS;AAEpD,SAAM,QAAQ,IACZ,MACG,QAAQ,SAAS,KAAK,SAAS,QAAQ,CAAC,CACxC,KAAK,SAAS,GAAG,OAAOA,OAAK,KAAK,KAAK,OAAO,UAAU,KAAK,CAAC,CAAC,CACnE;UACK"}
@@ -0,0 +1,84 @@
1
+ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
2
+ const require_rc = require('./rc.cjs');
3
+ const require_repository_id = require('./repository-id.cjs');
4
+ const require_tracking_events = require('./tracking-events.cjs');
5
+ let node_machine_id = require("node-machine-id");
6
+ node_machine_id = require_rolldown_runtime.__toESM(node_machine_id);
7
+
8
+ //#region src/utils/observability.ts
9
+ async function trackEvent(event, properties) {
10
+ if (process.env.DO_NOT_TRACK === "1") return;
11
+ try {
12
+ const identityInfo = await getDistinctId();
13
+ if (process.env.DEBUG === "true") console.log(`[Tracking] Event: ${event}, ID: ${identityInfo.distinct_id}, Source: ${identityInfo.distinct_id_source}`);
14
+ const { PostHog } = await import("posthog-node");
15
+ const posthog = new PostHog("phc_eR0iSoQufBxNY36k0f0T15UvHJdTfHlh8rJcxsfhfXk", {
16
+ host: "https://eu.i.posthog.com",
17
+ flushAt: 1,
18
+ flushInterval: 0
19
+ });
20
+ await posthog.capture({
21
+ distinctId: identityInfo.distinct_id,
22
+ event,
23
+ properties: {
24
+ ...properties,
25
+ isByokMode: properties?.models !== "lingo.dev",
26
+ tracking_version: require_tracking_events.TRACKING_VERSION,
27
+ compiler_package: require_tracking_events.COMPILER_PACKAGE,
28
+ distinct_id_source: identityInfo.distinct_id_source,
29
+ project_id: identityInfo.project_id,
30
+ meta: {
31
+ version: process.env.npm_package_version,
32
+ isCi: process.env.CI === "true"
33
+ }
34
+ }
35
+ });
36
+ await posthog.shutdown();
37
+ } catch (error) {
38
+ if (process.env.DEBUG === "true") console.error("[Tracking] Error:", error);
39
+ }
40
+ }
41
+ async function getDistinctId() {
42
+ const email = await tryGetEmail();
43
+ if (email) return {
44
+ distinct_id: email,
45
+ distinct_id_source: "email",
46
+ project_id: require_repository_id.getRepositoryId()
47
+ };
48
+ const repoId = require_repository_id.getRepositoryId();
49
+ if (repoId) return {
50
+ distinct_id: repoId,
51
+ distinct_id_source: "git_repo",
52
+ project_id: repoId
53
+ };
54
+ const deviceId = `device-${await node_machine_id.machineId()}`;
55
+ if (process.env.DEBUG === "true") console.warn("[Tracking] Using device ID fallback. Consider using git repository for consistent tracking.");
56
+ return {
57
+ distinct_id: deviceId,
58
+ distinct_id_source: "device",
59
+ project_id: null
60
+ };
61
+ }
62
+ async function tryGetEmail() {
63
+ const rc = require_rc.getRc();
64
+ const apiKey = process.env.LINGODOTDEV_API_KEY || rc?.auth?.apiKey;
65
+ const apiUrl = process.env.LINGODOTDEV_API_URL || rc?.auth?.apiUrl || "https://engine.lingo.dev";
66
+ if (!apiKey) return null;
67
+ try {
68
+ const res = await fetch(`${apiUrl}/whoami`, {
69
+ method: "POST",
70
+ headers: {
71
+ Authorization: `Bearer ${apiKey}`,
72
+ ContentType: "application/json"
73
+ }
74
+ });
75
+ if (res.ok) {
76
+ const payload = await res.json();
77
+ if (payload?.email) return payload.email;
78
+ }
79
+ } catch (err) {}
80
+ return null;
81
+ }
82
+
83
+ //#endregion
84
+ exports.default = trackEvent;
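
The new observability module creates a PostHog client with flushAt: 1 and flushInterval: 0, so each event is captured, flushed immediately, and the client is shut down; setting DO_NOT_TRACK=1 makes trackEvent return before any client is created. A usage sketch against the ESM build of this module (import paths assume a caller in the same utils directory; the property values are illustrative):

    // Opt out of tracking for a single run:
    //   DO_NOT_TRACK=1 npm run build
    import trackEvent from "./observability.mjs";
    import { TRACKING_EVENTS } from "./tracking-events.mjs";

    await trackEvent(TRACKING_EVENTS.BUILD_START, {
      framework: "vite",         // illustrative values
      environment: "production",
    });
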
@@ -0,0 +1,83 @@
1
+ import { getRc } from "./rc.mjs";
2
+ import { getRepositoryId } from "./repository-id.mjs";
3
+ import { COMPILER_PACKAGE, TRACKING_VERSION } from "./tracking-events.mjs";
4
+ import * as machineIdLib from "node-machine-id";
5
+
6
+ //#region src/utils/observability.ts
7
+ async function trackEvent(event, properties) {
8
+ if (process.env.DO_NOT_TRACK === "1") return;
9
+ try {
10
+ const identityInfo = await getDistinctId();
11
+ if (process.env.DEBUG === "true") console.log(`[Tracking] Event: ${event}, ID: ${identityInfo.distinct_id}, Source: ${identityInfo.distinct_id_source}`);
12
+ const { PostHog } = await import("posthog-node");
13
+ const posthog = new PostHog("phc_eR0iSoQufBxNY36k0f0T15UvHJdTfHlh8rJcxsfhfXk", {
14
+ host: "https://eu.i.posthog.com",
15
+ flushAt: 1,
16
+ flushInterval: 0
17
+ });
18
+ await posthog.capture({
19
+ distinctId: identityInfo.distinct_id,
20
+ event,
21
+ properties: {
22
+ ...properties,
23
+ isByokMode: properties?.models !== "lingo.dev",
24
+ tracking_version: TRACKING_VERSION,
25
+ compiler_package: COMPILER_PACKAGE,
26
+ distinct_id_source: identityInfo.distinct_id_source,
27
+ project_id: identityInfo.project_id,
28
+ meta: {
29
+ version: process.env.npm_package_version,
30
+ isCi: process.env.CI === "true"
31
+ }
32
+ }
33
+ });
34
+ await posthog.shutdown();
35
+ } catch (error) {
36
+ if (process.env.DEBUG === "true") console.error("[Tracking] Error:", error);
37
+ }
38
+ }
39
+ async function getDistinctId() {
40
+ const email = await tryGetEmail();
41
+ if (email) return {
42
+ distinct_id: email,
43
+ distinct_id_source: "email",
44
+ project_id: getRepositoryId()
45
+ };
46
+ const repoId = getRepositoryId();
47
+ if (repoId) return {
48
+ distinct_id: repoId,
49
+ distinct_id_source: "git_repo",
50
+ project_id: repoId
51
+ };
52
+ const deviceId = `device-${await machineIdLib.machineId()}`;
53
+ if (process.env.DEBUG === "true") console.warn("[Tracking] Using device ID fallback. Consider using git repository for consistent tracking.");
54
+ return {
55
+ distinct_id: deviceId,
56
+ distinct_id_source: "device",
57
+ project_id: null
58
+ };
59
+ }
60
+ async function tryGetEmail() {
61
+ const rc = getRc();
62
+ const apiKey = process.env.LINGODOTDEV_API_KEY || rc?.auth?.apiKey;
63
+ const apiUrl = process.env.LINGODOTDEV_API_URL || rc?.auth?.apiUrl || "https://engine.lingo.dev";
64
+ if (!apiKey) return null;
65
+ try {
66
+ const res = await fetch(`${apiUrl}/whoami`, {
67
+ method: "POST",
68
+ headers: {
69
+ Authorization: `Bearer ${apiKey}`,
70
+ ContentType: "application/json"
71
+ }
72
+ });
73
+ if (res.ok) {
74
+ const payload = await res.json();
75
+ if (payload?.email) return payload.email;
76
+ }
77
+ } catch (err) {}
78
+ return null;
79
+ }
80
+
81
+ //#endregion
82
+ export { trackEvent as default };
83
+ //# sourceMappingURL=observability.mjs.map
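
getDistinctId resolves the PostHog identity in three steps: an account email (when an API key is available and the /whoami call succeeds), otherwise a git repository id, otherwise a machine-derived device id. Note that the /whoami request's second header key is written as ContentType rather than the standard Content-Type. The three possible return shapes:

    // Resolution order and shapes returned by getDistinctId():
    // 1. { distinct_id: "user@example.com",   distinct_id_source: "email",    project_id: getRepositoryId() /* may be null */ }
    // 2. { distinct_id: "<repository id>",    distinct_id_source: "git_repo", project_id: "<repository id>" }
    // 3. { distinct_id: "device-<machineId>", distinct_id_source: "device",   project_id: null }
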
@@ -0,0 +1 @@
1
+ {"version":3,"file":"observability.mjs","names":[],"sources":["../../src/utils/observability.ts"],"sourcesContent":["import * as machineIdLib from \"node-machine-id\";\nimport { getRc } from \"./rc\";\nimport { getRepositoryId } from \"./repository-id\";\nimport { TRACKING_VERSION, COMPILER_PACKAGE } from \"./tracking-events\";\n\nexport default async function trackEvent(\n event: string,\n properties?: Record<string, any>,\n) {\n if (process.env.DO_NOT_TRACK === \"1\") {\n return;\n }\n\n try {\n const identityInfo = await getDistinctId();\n\n if (process.env.DEBUG === \"true\") {\n console.log(\n `[Tracking] Event: ${event}, ID: ${identityInfo.distinct_id}, Source: ${identityInfo.distinct_id_source}`,\n );\n }\n\n const { PostHog } = await import(\"posthog-node\");\n const posthog = new PostHog(\n \"phc_eR0iSoQufBxNY36k0f0T15UvHJdTfHlh8rJcxsfhfXk\",\n {\n host: \"https://eu.i.posthog.com\",\n flushAt: 1,\n flushInterval: 0,\n },\n );\n\n await posthog.capture({\n distinctId: identityInfo.distinct_id,\n event,\n properties: {\n ...properties,\n isByokMode: properties?.models !== \"lingo.dev\",\n tracking_version: TRACKING_VERSION,\n compiler_package: COMPILER_PACKAGE,\n distinct_id_source: identityInfo.distinct_id_source,\n project_id: identityInfo.project_id,\n meta: {\n version: process.env.npm_package_version,\n isCi: process.env.CI === \"true\",\n },\n },\n });\n\n await posthog.shutdown();\n } catch (error) {\n if (process.env.DEBUG === \"true\") {\n console.error(\"[Tracking] Error:\", error);\n }\n }\n}\n\nasync function getDistinctId(): Promise<{\n distinct_id: string;\n distinct_id_source: string;\n project_id: string | null;\n}> {\n const email = await tryGetEmail();\n if (email) {\n const projectId = getRepositoryId();\n return {\n distinct_id: email,\n distinct_id_source: \"email\",\n project_id: projectId,\n };\n }\n\n const repoId = getRepositoryId();\n if (repoId) {\n return {\n distinct_id: repoId,\n distinct_id_source: \"git_repo\",\n project_id: repoId,\n };\n }\n\n const deviceId = `device-${await machineIdLib.machineId()}`;\n if (process.env.DEBUG === \"true\") {\n console.warn(\n \"[Tracking] Using device ID fallback. 
Consider using git repository for consistent tracking.\",\n );\n }\n return {\n distinct_id: deviceId,\n distinct_id_source: \"device\",\n project_id: null,\n };\n}\n\nasync function tryGetEmail(): Promise<string | null> {\n const rc = getRc();\n const apiKey = process.env.LINGODOTDEV_API_KEY || rc?.auth?.apiKey;\n const apiUrl =\n process.env.LINGODOTDEV_API_URL ||\n rc?.auth?.apiUrl ||\n \"https://engine.lingo.dev\";\n\n if (!apiKey) {\n return null;\n }\n\n try {\n const res = await fetch(`${apiUrl}/whoami`, {\n method: \"POST\",\n headers: {\n Authorization: `Bearer ${apiKey}`,\n ContentType: \"application/json\",\n },\n });\n if (res.ok) {\n const payload = await res.json();\n if (payload?.email) {\n return payload.email;\n }\n }\n } catch (err) {\n // ignore\n }\n\n return null;\n}\n"],"mappings":";;;;;;AAKA,eAA8B,WAC5B,OACA,YACA;AACA,KAAI,QAAQ,IAAI,iBAAiB,IAC/B;AAGF,KAAI;EACF,MAAM,eAAe,MAAM,eAAe;AAE1C,MAAI,QAAQ,IAAI,UAAU,OACxB,SAAQ,IACN,qBAAqB,MAAM,QAAQ,aAAa,YAAY,YAAY,aAAa,qBACtF;EAGH,MAAM,EAAE,YAAY,MAAM,OAAO;EACjC,MAAM,UAAU,IAAI,QAClB,mDACA;GACE,MAAM;GACN,SAAS;GACT,eAAe;GAChB,CACF;AAED,QAAM,QAAQ,QAAQ;GACpB,YAAY,aAAa;GACzB;GACA,YAAY;IACV,GAAG;IACH,YAAY,YAAY,WAAW;IACnC,kBAAkB;IAClB,kBAAkB;IAClB,oBAAoB,aAAa;IACjC,YAAY,aAAa;IACzB,MAAM;KACJ,SAAS,QAAQ,IAAI;KACrB,MAAM,QAAQ,IAAI,OAAO;KAC1B;IACF;GACF,CAAC;AAEF,QAAM,QAAQ,UAAU;UACjB,OAAO;AACd,MAAI,QAAQ,IAAI,UAAU,OACxB,SAAQ,MAAM,qBAAqB,MAAM;;;AAK/C,eAAe,gBAIZ;CACD,MAAM,QAAQ,MAAM,aAAa;AACjC,KAAI,MAEF,QAAO;EACL,aAAa;EACb,oBAAoB;EACpB,YAJgB,iBAAiB;EAKlC;CAGH,MAAM,SAAS,iBAAiB;AAChC,KAAI,OACF,QAAO;EACL,aAAa;EACb,oBAAoB;EACpB,YAAY;EACb;CAGH,MAAM,WAAW,UAAU,MAAM,aAAa,WAAW;AACzD,KAAI,QAAQ,IAAI,UAAU,OACxB,SAAQ,KACN,8FACD;AAEH,QAAO;EACL,aAAa;EACb,oBAAoB;EACpB,YAAY;EACb;;AAGH,eAAe,cAAsC;CACnD,MAAM,KAAK,OAAO;CAClB,MAAM,SAAS,QAAQ,IAAI,uBAAuB,IAAI,MAAM;CAC5D,MAAM,SACJ,QAAQ,IAAI,uBACZ,IAAI,MAAM,UACV;AAEF,KAAI,CAAC,OACH,QAAO;AAGT,KAAI;EACF,MAAM,MAAM,MAAM,MAAM,GAAG,OAAO,UAAU;GAC1C,QAAQ;GACR,SAAS;IACP,eAAe,UAAU;IACzB,aAAa;IACd;GACF,CAAC;AACF,MAAI,IAAI,IAAI;GACV,MAAM,UAAU,MAAM,IAAI,MAAM;AAChC,OAAI,SAAS,MACX,QAAO,QAAQ;;UAGZ,KAAK;AAId,QAAO"}
@@ -0,0 +1,21 @@
1
+ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
2
+ let path = require("path");
3
+ path = require_rolldown_runtime.__toESM(path);
4
+ let fs = require("fs");
5
+ fs = require_rolldown_runtime.__toESM(fs);
6
+ let os = require("os");
7
+ os = require_rolldown_runtime.__toESM(os);
8
+ let ini = require("ini");
9
+ ini = require_rolldown_runtime.__toESM(ini);
10
+
11
+ //#region src/utils/rc.ts
12
+ function getRc() {
13
+ const settingsFile = ".lingodotdevrc";
14
+ const homedir = os.default.homedir();
15
+ const settingsFilePath = path.default.join(homedir, settingsFile);
16
+ const content = fs.default.existsSync(settingsFilePath) ? fs.default.readFileSync(settingsFilePath, "utf-8") : "";
17
+ return ini.default.parse(content);
18
+ }
19
+
20
+ //#endregion
21
+ exports.getRc = getRc;
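
In this CommonJS build the bare require calls are wrapped with the bundler's __toESM helper, which is why the code reads os.default.homedir() here rather than os.homedir() as in the ESM build below. Conceptually the helper behaves roughly like the following simplified sketch; the real implementation lives in ../_virtual/rolldown_runtime.cjs and is not shown in this diff:

// Simplified, illustrative interop shim; not the actual rolldown_runtime source.
function toESM(mod: any): any {
  // Transpiled ES modules pass through unchanged; plain CJS namespaces are
  // wrapped so the whole namespace is reachable as `.default`.
  return mod && mod.__esModule ? mod : { ...mod, default: mod };
}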
@@ -0,0 +1,17 @@
1
+ import path from "path";
2
+ import fs from "fs";
3
+ import os from "os";
4
+ import Ini from "ini";
5
+
6
+ //#region src/utils/rc.ts
7
+ function getRc() {
8
+ const settingsFile = ".lingodotdevrc";
9
+ const homedir = os.homedir();
10
+ const settingsFilePath = path.join(homedir, settingsFile);
11
+ const content = fs.existsSync(settingsFilePath) ? fs.readFileSync(settingsFilePath, "utf-8") : "";
12
+ return Ini.parse(content);
13
+ }
14
+
15
+ //#endregion
16
+ export { getRc };
17
+ //# sourceMappingURL=rc.mjs.map
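
For reference, getRc reads an ini file named .lingodotdevrc from the user's home directory and returns the parsed object (an empty object when the file is absent). The sketch below shows the shape that the rc?.auth?.apiKey and rc?.auth?.apiUrl lookups in observability.mjs expect; the file contents and key values are hypothetical:

import Ini from "ini";

// Hypothetical ~/.lingodotdevrc contents; only the [auth] section is read by tryGetEmail.
const sample = [
  "[auth]",
  "apiKey = api_xxxxxxxx",
  "apiUrl = https://engine.lingo.dev",
].join("\n");

// Ini.parse turns sections into nested objects, so getRc() would return
// something like { auth: { apiKey: "api_xxxxxxxx", apiUrl: "https://engine.lingo.dev" } }.
const rc = Ini.parse(sample);
console.log(rc.auth?.apiKey, rc.auth?.apiUrl);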
@@ -0,0 +1 @@
1
+ {"version":3,"file":"rc.mjs","names":[],"sources":["../../src/utils/rc.ts"],"sourcesContent":["import os from \"os\";\nimport path from \"path\";\nimport fs from \"fs\";\nimport Ini from \"ini\";\n\nexport function getRc() {\n const settingsFile = \".lingodotdevrc\";\n const homedir = os.homedir();\n const settingsFilePath = path.join(homedir, settingsFile);\n const content = fs.existsSync(settingsFilePath)\n ? fs.readFileSync(settingsFilePath, \"utf-8\")\n : \"\";\n const data = Ini.parse(content);\n return data;\n}\n"],"mappings":";;;;;;AAKA,SAAgB,QAAQ;CACtB,MAAM,eAAe;CACrB,MAAM,UAAU,GAAG,SAAS;CAC5B,MAAM,mBAAmB,KAAK,KAAK,SAAS,aAAa;CACzD,MAAM,UAAU,GAAG,WAAW,iBAAiB,GAC3C,GAAG,aAAa,kBAAkB,QAAQ,GAC1C;AAEJ,QADa,IAAI,MAAM,QAAQ"}
@@ -0,0 +1,64 @@
1
+ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
2
+ let crypto = require("crypto");
3
+ let child_process = require("child_process");
4
+
5
+ //#region src/utils/repository-id.ts
6
+ let cachedGitRepoId = void 0;
7
+ function hashProjectName(fullPath) {
8
+ const parts = fullPath.split("/");
9
+ if (parts.length !== 2) return (0, crypto.createHash)("sha256").update(fullPath).digest("hex").slice(0, 8);
10
+ const [org, project] = parts;
11
+ return `${org}/${(0, crypto.createHash)("sha256").update(project).digest("hex").slice(0, 8)}`;
12
+ }
13
+ function getRepositoryId() {
14
+ const ciRepoId = getCIRepositoryId();
15
+ if (ciRepoId) return ciRepoId;
16
+ const gitRepoId = getGitRepositoryId();
17
+ if (gitRepoId) return gitRepoId;
18
+ return null;
19
+ }
20
+ function getCIRepositoryId() {
21
+ if (process.env.GITHUB_REPOSITORY) return `github:${hashProjectName(process.env.GITHUB_REPOSITORY)}`;
22
+ if (process.env.CI_PROJECT_PATH) return `gitlab:${hashProjectName(process.env.CI_PROJECT_PATH)}`;
23
+ if (process.env.BITBUCKET_REPO_FULL_NAME) return `bitbucket:${hashProjectName(process.env.BITBUCKET_REPO_FULL_NAME)}`;
24
+ return null;
25
+ }
26
+ function getGitRepositoryId() {
27
+ if (cachedGitRepoId !== void 0) return cachedGitRepoId;
28
+ try {
29
+ const remoteUrl = (0, child_process.execSync)("git config --get remote.origin.url", {
30
+ encoding: "utf8",
31
+ stdio: [
32
+ "pipe",
33
+ "pipe",
34
+ "ignore"
35
+ ]
36
+ }).trim();
37
+ if (!remoteUrl) {
38
+ cachedGitRepoId = null;
39
+ return null;
40
+ }
41
+ cachedGitRepoId = parseGitUrl(remoteUrl);
42
+ return cachedGitRepoId;
43
+ } catch {
44
+ cachedGitRepoId = null;
45
+ return null;
46
+ }
47
+ }
48
+ function parseGitUrl(url) {
49
+ const cleanUrl = url.replace(/\.git$/, "");
50
+ let platform = null;
51
+ if (cleanUrl.includes("github.com")) platform = "github";
52
+ else if (cleanUrl.includes("gitlab.com")) platform = "gitlab";
53
+ else if (cleanUrl.includes("bitbucket.org")) platform = "bitbucket";
54
+ const sshMatch = cleanUrl.match(/[@:]([^:/@]+\/[^:/@]+)$/);
55
+ const httpsMatch = cleanUrl.match(/\/([^/]+\/[^/]+)$/);
56
+ const repoPath = sshMatch?.[1] || httpsMatch?.[1];
57
+ if (!repoPath) return null;
58
+ const hashedPath = hashProjectName(repoPath);
59
+ if (platform) return `${platform}:${hashedPath}`;
60
+ return `git:${hashedPath}`;
61
+ }
62
+
63
+ //#endregion
64
+ exports.getRepositoryId = getRepositoryId;
@@ -0,0 +1,64 @@
1
+ import { createHash } from "crypto";
2
+ import { execSync } from "child_process";
3
+
4
+ //#region src/utils/repository-id.ts
5
+ let cachedGitRepoId = void 0;
6
+ function hashProjectName(fullPath) {
7
+ const parts = fullPath.split("/");
8
+ if (parts.length !== 2) return createHash("sha256").update(fullPath).digest("hex").slice(0, 8);
9
+ const [org, project] = parts;
10
+ return `${org}/${createHash("sha256").update(project).digest("hex").slice(0, 8)}`;
11
+ }
12
+ function getRepositoryId() {
13
+ const ciRepoId = getCIRepositoryId();
14
+ if (ciRepoId) return ciRepoId;
15
+ const gitRepoId = getGitRepositoryId();
16
+ if (gitRepoId) return gitRepoId;
17
+ return null;
18
+ }
19
+ function getCIRepositoryId() {
20
+ if (process.env.GITHUB_REPOSITORY) return `github:${hashProjectName(process.env.GITHUB_REPOSITORY)}`;
21
+ if (process.env.CI_PROJECT_PATH) return `gitlab:${hashProjectName(process.env.CI_PROJECT_PATH)}`;
22
+ if (process.env.BITBUCKET_REPO_FULL_NAME) return `bitbucket:${hashProjectName(process.env.BITBUCKET_REPO_FULL_NAME)}`;
23
+ return null;
24
+ }
25
+ function getGitRepositoryId() {
26
+ if (cachedGitRepoId !== void 0) return cachedGitRepoId;
27
+ try {
28
+ const remoteUrl = execSync("git config --get remote.origin.url", {
29
+ encoding: "utf8",
30
+ stdio: [
31
+ "pipe",
32
+ "pipe",
33
+ "ignore"
34
+ ]
35
+ }).trim();
36
+ if (!remoteUrl) {
37
+ cachedGitRepoId = null;
38
+ return null;
39
+ }
40
+ cachedGitRepoId = parseGitUrl(remoteUrl);
41
+ return cachedGitRepoId;
42
+ } catch {
43
+ cachedGitRepoId = null;
44
+ return null;
45
+ }
46
+ }
47
+ function parseGitUrl(url) {
48
+ const cleanUrl = url.replace(/\.git$/, "");
49
+ let platform = null;
50
+ if (cleanUrl.includes("github.com")) platform = "github";
51
+ else if (cleanUrl.includes("gitlab.com")) platform = "gitlab";
52
+ else if (cleanUrl.includes("bitbucket.org")) platform = "bitbucket";
53
+ const sshMatch = cleanUrl.match(/[@:]([^:/@]+\/[^:/@]+)$/);
54
+ const httpsMatch = cleanUrl.match(/\/([^/]+\/[^/]+)$/);
55
+ const repoPath = sshMatch?.[1] || httpsMatch?.[1];
56
+ if (!repoPath) return null;
57
+ const hashedPath = hashProjectName(repoPath);
58
+ if (platform) return `${platform}:${hashedPath}`;
59
+ return `git:${hashedPath}`;
60
+ }
61
+
62
+ //#endregion
63
+ export { getRepositoryId };
64
+ //# sourceMappingURL=repository-id.mjs.map
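
To make the identifier format concrete: repository paths are never reported verbatim; the project segment is replaced by the first eight hex characters of its SHA-256 digest and prefixed with the detected platform. The following standalone sketch restates the same scheme with an invented repository name; the resulting hash is whatever SHA-256 produces, not a value taken from this diff:

import { createHash } from "crypto";

// Same scheme as hashProjectName above: keep the org, hash only the project name.
function hashProjectName(fullPath: string): string {
  const parts = fullPath.split("/");
  if (parts.length !== 2) {
    return createHash("sha256").update(fullPath).digest("hex").slice(0, 8);
  }
  const [org, project] = parts;
  return `${org}/${createHash("sha256").update(project).digest("hex").slice(0, 8)}`;
}

// In GitHub Actions, GITHUB_REPOSITORY="acme/website" (hypothetical) would yield
// an identifier of the form "github:acme/<8-char hash>".
console.log(`github:${hashProjectName("acme/website")}`);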
@@ -0,0 +1 @@
1
+ {"version":3,"file":"repository-id.mjs","names":["cachedGitRepoId: string | null | undefined","platform: string | null"],"sources":["../../src/utils/repository-id.ts"],"sourcesContent":["import { execSync } from \"child_process\";\nimport { createHash } from \"crypto\";\n\nlet cachedGitRepoId: string | null | undefined = undefined;\n\nfunction hashProjectName(fullPath: string): string {\n const parts = fullPath.split(\"/\");\n if (parts.length !== 2) {\n return createHash(\"sha256\").update(fullPath).digest(\"hex\").slice(0, 8);\n }\n\n const [org, project] = parts;\n const hashedProject = createHash(\"sha256\")\n .update(project)\n .digest(\"hex\")\n .slice(0, 8);\n\n return `${org}/${hashedProject}`;\n}\n\nexport function getRepositoryId(): string | null {\n const ciRepoId = getCIRepositoryId();\n if (ciRepoId) return ciRepoId;\n\n const gitRepoId = getGitRepositoryId();\n if (gitRepoId) return gitRepoId;\n\n return null;\n}\n\nfunction getCIRepositoryId(): string | null {\n if (process.env.GITHUB_REPOSITORY) {\n const hashed = hashProjectName(process.env.GITHUB_REPOSITORY);\n return `github:${hashed}`;\n }\n\n if (process.env.CI_PROJECT_PATH) {\n const hashed = hashProjectName(process.env.CI_PROJECT_PATH);\n return `gitlab:${hashed}`;\n }\n\n if (process.env.BITBUCKET_REPO_FULL_NAME) {\n const hashed = hashProjectName(process.env.BITBUCKET_REPO_FULL_NAME);\n return `bitbucket:${hashed}`;\n }\n\n return null;\n}\n\nfunction getGitRepositoryId(): string | null {\n if (cachedGitRepoId !== undefined) {\n return cachedGitRepoId;\n }\n\n try {\n const remoteUrl = execSync(\"git config --get remote.origin.url\", {\n encoding: \"utf8\",\n stdio: [\"pipe\", \"pipe\", \"ignore\"],\n }).trim();\n\n if (!remoteUrl) {\n cachedGitRepoId = null;\n return null;\n }\n\n cachedGitRepoId = parseGitUrl(remoteUrl);\n return cachedGitRepoId;\n } catch {\n cachedGitRepoId = null;\n return null;\n }\n}\n\nfunction parseGitUrl(url: string): string | null {\n const cleanUrl = url.replace(/\\.git$/, \"\");\n\n let platform: string | null = null;\n if (cleanUrl.includes(\"github.com\")) {\n platform = \"github\";\n } else if (cleanUrl.includes(\"gitlab.com\")) {\n platform = \"gitlab\";\n } else if (cleanUrl.includes(\"bitbucket.org\")) {\n platform = \"bitbucket\";\n }\n\n const sshMatch = cleanUrl.match(/[@:]([^:/@]+\\/[^:/@]+)$/);\n const httpsMatch = cleanUrl.match(/\\/([^/]+\\/[^/]+)$/);\n\n const repoPath = sshMatch?.[1] || httpsMatch?.[1];\n\n if (!repoPath) return null;\n\n const hashedPath = hashProjectName(repoPath);\n\n if (platform) {\n return `${platform}:${hashedPath}`;\n }\n\n return 
`git:${hashedPath}`;\n}\n"],"mappings":";;;;AAGA,IAAIA,kBAA6C;AAEjD,SAAS,gBAAgB,UAA0B;CACjD,MAAM,QAAQ,SAAS,MAAM,IAAI;AACjC,KAAI,MAAM,WAAW,EACnB,QAAO,WAAW,SAAS,CAAC,OAAO,SAAS,CAAC,OAAO,MAAM,CAAC,MAAM,GAAG,EAAE;CAGxE,MAAM,CAAC,KAAK,WAAW;AAMvB,QAAO,GAAG,IAAI,GALQ,WAAW,SAAS,CACvC,OAAO,QAAQ,CACf,OAAO,MAAM,CACb,MAAM,GAAG,EAAE;;AAKhB,SAAgB,kBAAiC;CAC/C,MAAM,WAAW,mBAAmB;AACpC,KAAI,SAAU,QAAO;CAErB,MAAM,YAAY,oBAAoB;AACtC,KAAI,UAAW,QAAO;AAEtB,QAAO;;AAGT,SAAS,oBAAmC;AAC1C,KAAI,QAAQ,IAAI,kBAEd,QAAO,UADQ,gBAAgB,QAAQ,IAAI,kBAAkB;AAI/D,KAAI,QAAQ,IAAI,gBAEd,QAAO,UADQ,gBAAgB,QAAQ,IAAI,gBAAgB;AAI7D,KAAI,QAAQ,IAAI,yBAEd,QAAO,aADQ,gBAAgB,QAAQ,IAAI,yBAAyB;AAItE,QAAO;;AAGT,SAAS,qBAAoC;AAC3C,KAAI,oBAAoB,OACtB,QAAO;AAGT,KAAI;EACF,MAAM,YAAY,SAAS,sCAAsC;GAC/D,UAAU;GACV,OAAO;IAAC;IAAQ;IAAQ;IAAS;GAClC,CAAC,CAAC,MAAM;AAET,MAAI,CAAC,WAAW;AACd,qBAAkB;AAClB,UAAO;;AAGT,oBAAkB,YAAY,UAAU;AACxC,SAAO;SACD;AACN,oBAAkB;AAClB,SAAO;;;AAIX,SAAS,YAAY,KAA4B;CAC/C,MAAM,WAAW,IAAI,QAAQ,UAAU,GAAG;CAE1C,IAAIC,WAA0B;AAC9B,KAAI,SAAS,SAAS,aAAa,CACjC,YAAW;UACF,SAAS,SAAS,aAAa,CACxC,YAAW;UACF,SAAS,SAAS,gBAAgB,CAC3C,YAAW;CAGb,MAAM,WAAW,SAAS,MAAM,0BAA0B;CAC1D,MAAM,aAAa,SAAS,MAAM,oBAAoB;CAEtD,MAAM,WAAW,WAAW,MAAM,aAAa;AAE/C,KAAI,CAAC,SAAU,QAAO;CAEtB,MAAM,aAAa,gBAAgB,SAAS;AAE5C,KAAI,SACF,QAAO,GAAG,SAAS,GAAG;AAGxB,QAAO,OAAO"}
@@ -0,0 +1,28 @@
1
+
2
+ //#region src/utils/tracking-events.ts
3
+ const TRACKING_EVENTS = {
4
+ BUILD_START: "compiler.build.start",
5
+ BUILD_SUCCESS: "compiler.build.success",
6
+ BUILD_ERROR: "compiler.build.error"
7
+ };
8
+ const TRACKING_VERSION = "3.0";
9
+ const COMPILER_PACKAGE = "@lingo.dev/compiler";
10
+ function sanitizeConfigForTracking(config) {
11
+ return {
12
+ sourceLocale: config.sourceLocale,
13
+ targetLocalesCount: config.targetLocales.length,
14
+ hasCustomModels: config.models !== "lingo.dev",
15
+ isByokMode: config.models !== "lingo.dev",
16
+ useDirective: config.useDirective,
17
+ buildMode: config.buildMode,
18
+ hasPluralisation: config.pluralization.enabled,
19
+ hasCustomPrompt: !!config.prompt,
20
+ hasCustomLocaleResolver: false
21
+ };
22
+ }
23
+
24
+ //#endregion
25
+ exports.COMPILER_PACKAGE = COMPILER_PACKAGE;
26
+ exports.TRACKING_EVENTS = TRACKING_EVENTS;
27
+ exports.TRACKING_VERSION = TRACKING_VERSION;
28
+ exports.sanitizeConfigForTracking = sanitizeConfigForTracking;
@@ -0,0 +1,25 @@
1
+ //#region src/utils/tracking-events.ts
2
+ const TRACKING_EVENTS = {
3
+ BUILD_START: "compiler.build.start",
4
+ BUILD_SUCCESS: "compiler.build.success",
5
+ BUILD_ERROR: "compiler.build.error"
6
+ };
7
+ const TRACKING_VERSION = "3.0";
8
+ const COMPILER_PACKAGE = "@lingo.dev/compiler";
9
+ function sanitizeConfigForTracking(config) {
10
+ return {
11
+ sourceLocale: config.sourceLocale,
12
+ targetLocalesCount: config.targetLocales.length,
13
+ hasCustomModels: config.models !== "lingo.dev",
14
+ isByokMode: config.models !== "lingo.dev",
15
+ useDirective: config.useDirective,
16
+ buildMode: config.buildMode,
17
+ hasPluralisation: config.pluralization.enabled,
18
+ hasCustomPrompt: !!config.prompt,
19
+ hasCustomLocaleResolver: false
20
+ };
21
+ }
22
+
23
+ //#endregion
24
+ export { COMPILER_PACKAGE, TRACKING_EVENTS, TRACKING_VERSION, sanitizeConfigForTracking };
25
+ //# sourceMappingURL=tracking-events.mjs.map
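
Lastly, sanitizeConfigForTracking reduces a compiler configuration to counts and booleans (plus the source locale) before it is attached to a tracking event, so target locales, model settings, and prompts are reported only as flags and counts. A sketch with an invented config object follows; the import path and field values are assumptions, and only the fields the function actually reads are shown:

import { sanitizeConfigForTracking, TRACKING_EVENTS } from "./utils/tracking-events.mjs"; // path is an assumption

// Hypothetical config; typed as `any` because LingoConfig's full shape is not part of this diff.
const config: any = {
  sourceLocale: "en",
  targetLocales: ["de", "fr", "es"],
  models: "lingo.dev",
  useDirective: false,
  buildMode: "build",
  pluralization: { enabled: true },
  prompt: undefined,
};

// Produces flags and counts only, e.g.
// { sourceLocale: "en", targetLocalesCount: 3, hasCustomModels: false, isByokMode: false,
//   useDirective: false, buildMode: "build", hasPluralisation: true,
//   hasCustomPrompt: false, hasCustomLocaleResolver: false }
console.log(TRACKING_EVENTS.BUILD_START, sanitizeConfigForTracking(config));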
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tracking-events.mjs","names":[],"sources":["../../src/utils/tracking-events.ts"],"sourcesContent":["import type { LingoConfig } from \"../types\";\n\nexport const TRACKING_EVENTS = {\n BUILD_START: \"compiler.build.start\",\n BUILD_SUCCESS: \"compiler.build.success\",\n BUILD_ERROR: \"compiler.build.error\",\n} as const;\n\nexport const TRACKING_VERSION = \"3.0\";\n\nexport const COMPILER_PACKAGE = \"@lingo.dev/compiler\";\n\nexport function sanitizeConfigForTracking(config: LingoConfig) {\n return {\n sourceLocale: config.sourceLocale,\n targetLocalesCount: config.targetLocales.length,\n hasCustomModels: config.models !== \"lingo.dev\",\n isByokMode: config.models !== \"lingo.dev\",\n useDirective: config.useDirective,\n buildMode: config.buildMode,\n hasPluralisation: config.pluralization.enabled,\n hasCustomPrompt: !!config.prompt,\n hasCustomLocaleResolver: false,\n };\n}\n"],"mappings":";AAEA,MAAa,kBAAkB;CAC7B,aAAa;CACb,eAAe;CACf,aAAa;CACd;AAED,MAAa,mBAAmB;AAEhC,MAAa,mBAAmB;AAEhC,SAAgB,0BAA0B,QAAqB;AAC7D,QAAO;EACL,cAAc,OAAO;EACrB,oBAAoB,OAAO,cAAc;EACzC,iBAAiB,OAAO,WAAW;EACnC,YAAY,OAAO,WAAW;EAC9B,cAAc,OAAO;EACrB,WAAW,OAAO;EAClB,kBAAkB,OAAO,cAAc;EACvC,iBAAiB,CAAC,CAAC,OAAO;EAC1B,yBAAyB;EAC1B"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@lingo.dev/compiler",
3
- "version": "0.1.3",
3
+ "version": "0.1.4",
4
4
  "description": "Lingo.dev Compiler",
5
5
  "private": false,
6
6
  "repository": {
@@ -124,6 +124,7 @@
124
124
  "@types/babel__core": "^7.20.5",
125
125
  "@types/babel__generator": "^7.27.0",
126
126
  "@types/babel__traverse": "^7.28.0",
127
+ "@types/ini": "4.1.1",
127
128
  "@types/node": "^25.0.3",
128
129
  "@types/proper-lockfile": "^4.1.4",
129
130
  "@types/react": "^19.2.7",
@@ -153,11 +154,14 @@
153
154
  "ai-sdk-ollama": "^3.0.0",
154
155
  "dotenv": "^17.2.3",
155
156
  "fast-xml-parser": "^5.3.3",
157
+ "ini": "5.0.0",
156
158
  "intl-messageformat": "^11.0.6",
157
- "lingo.dev": "^0.117.23",
158
159
  "lodash": "^4.17.21",
160
+ "node-machine-id": "1.1.12",
161
+ "posthog-node": "5.14.0",
159
162
  "proper-lockfile": "^4.1.2",
160
- "ws": "^8.18.3"
163
+ "ws": "^8.18.3",
164
+ "lingo.dev": "^0.117.25"
161
165
  },
162
166
  "peerDependencies": {
163
167
  "next": "^15.0.0 || ^16.0.4",