@wp-blocks/make-pot 1.6.3 → 1.6.5
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as published in their respective public registries.
- package/biome.json +1 -1
- package/lib/cli/parseCli.js +1 -1
- package/lib/cli/parseCli.js.map +2 -2
- package/lib/const.js +1 -1
- package/lib/const.js.map +1 -1
- package/lib/extractors/auditStrings.js +4 -3
- package/lib/extractors/auditStrings.js.map +2 -2
- package/lib/extractors/headers.js +7 -6
- package/lib/extractors/headers.js.map +3 -3
- package/lib/extractors/json.js +1 -1
- package/lib/extractors/json.js.map +2 -2
- package/lib/extractors/schema.js +3 -4
- package/lib/extractors/schema.js.map +2 -2
- package/lib/fs/fs.js +2 -2
- package/lib/fs/fs.js.map +2 -2
- package/lib/fs/glob.js +1 -1
- package/lib/fs/glob.js.map +2 -2
- package/lib/parser/exec.js +3 -3
- package/lib/parser/exec.js.map +2 -2
- package/lib/parser/makeJson.js +1 -1
- package/lib/parser/makeJson.js.map +2 -2
- package/lib/parser/makePot.js.map +1 -1
- package/lib/parser/process.js +1 -1
- package/lib/parser/process.js.map +2 -2
- package/lib/parser/progress.js +1 -1
- package/lib/parser/progress.js.map +2 -2
- package/lib/parser/taskRunner.js +2 -2
- package/lib/parser/taskRunner.js.map +2 -2
- package/lib/parser/tree.js +2 -1
- package/lib/parser/tree.js.map +2 -2
- package/lib/types.js.map +1 -1
- package/lib/utils/common.js +2 -2
- package/lib/utils/common.js.map +2 -2
- package/lib/utils/extractors.js +1 -1
- package/lib/utils/extractors.js.map +2 -2
- package/lib/utils/output.js +1 -1
- package/lib/utils/output.js.map +2 -2
- package/package.json +16 -12
- package/tests/extract-headers.test.js +69 -1
- package/tests/generate-header.test.js +27 -0
- package/tests/tree.test.js +74 -1
package/lib/utils/common.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/utils/common.ts"],
-
"sourcesContent": ["import fs from \"node:fs\";\r\nimport { cpus, totalmem } from \"node:os\";\r\nimport path from \"node:path\";\r\nimport { modulePath } from \"../const.js\";\r\nimport type { Patterns } from \"../types.js\";\r\n\r\n/**\r\n * A function that removes comment markup from a given string.\r\n *\r\n * @param {string} input - The input string with comment markup.\r\n * @return {string} - The input string without comment markup.\r\n */\r\nexport function getCommentBlock(input: string): string {\r\n\tconst commentBlock = input.match(/\\/\\*\\*?[\\s\\S]*?\\*\\//);\r\n\treturn commentBlock !== null ? commentBlock[0] : input;\r\n}\r\n\r\n/**\r\n * A function that starts to capture the text after the first letter.\r\n *\r\n * @param {string} input - The input string with comment markup.\r\n * @return {string} - The input string without comment markup.\r\n */\r\nexport function removeCommentMarkup(input: string): string[] | null {\r\n\treturn input.match(/[a-zA-Z].*/gm);\r\n}\r\n\r\n/**\r\n * Removes the markup from a comment string.\r\n *\r\n * @param {string} comment - The comment string to remove markup from.\r\n * @return {string} The comment text without the markers.\r\n */\r\nexport function stripTranslationMarkup(comment: string): string {\r\n\tconst commentPattern =\r\n\t\t/\\/\\*\\*?\\s*(?:translators:)\\s*([\\s\\S]*?)\\s*\\*\\/|\\/\\/\\s*(?:translators:)\\s*(.*)$/i;\r\n\tconst matches = comment.match(commentPattern);\r\n\treturn matches ? matches[1] : comment;\r\n}\r\n\r\n/**\r\n * Splits a string into an array of strings based on the presence of a comma.\r\n *\r\n * @param {string} string - The string to be split.\r\n * @return {string[]} An array of strings after splitting the input string.\r\n */\r\nexport function stringstring(string: string | string[] | undefined): string[] {\r\n\tif (typeof string === \"string\") {\r\n\t\tif (string.includes(\",\")) {\r\n\t\t\treturn string.split(\",\");\r\n\t\t}\r\n\t\treturn [string];\r\n\t}\r\n\treturn [];\r\n}\r\n\r\n/**\r\n * Determines if a pattern represents a file, a directory, or a glob pattern.\r\n * @param pattern - The pattern string to evaluate.\r\n * @returns 'file', 'directory', or 'glob'.\r\n */\r\nexport function detectPatternType(\r\n\tpattern: string,\r\n): \"file\" | \"directory\" | \"glob\" {\r\n\tconst containsFileExtension = pattern.includes(\".\");\r\n\tconst containsDirectorySeparator = pattern.includes(
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAe;AACf,qBAA+B;AAC/B,uBAAiB;AACjB,mBAA2B;AASpB,SAAS,gBAAgB,OAAuB;AACtD,QAAM,eAAe,MAAM,MAAM,qBAAqB;AACtD,SAAO,iBAAiB,OAAO,aAAa,CAAC,IAAI;AAClD;AAQO,SAAS,oBAAoB,OAAgC;AACnE,SAAO,MAAM,MAAM,cAAc;AAClC;AAQO,SAAS,uBAAuB,SAAyB;AAC/D,QAAM,iBACL;AACD,QAAM,UAAU,QAAQ,MAAM,cAAc;AAC5C,SAAO,UAAU,QAAQ,CAAC,IAAI;AAC/B;AAQO,SAAS,aAAa,QAAiD;AAC7E,MAAI,OAAO,WAAW,UAAU;AAC/B,QAAI,OAAO,SAAS,GAAG,GAAG;AACzB,aAAO,OAAO,MAAM,GAAG;AAAA,IACxB;AACA,WAAO,CAAC,MAAM;AAAA,EACf;AACA,SAAO,CAAC;AACT;AAOO,SAAS,kBACf,SACgC;AAChC,QAAM,wBAAwB,QAAQ,SAAS,GAAG;AAClD,QAAM,6BAA6B,QAAQ,SAAS,
+
"sourcesContent": ["import fs from \"node:fs\";\r\nimport { cpus, totalmem } from \"node:os\";\r\nimport path from \"node:path\";\r\nimport { modulePath } from \"../const.js\";\r\nimport type { Patterns } from \"../types.js\";\r\n\r\n/**\r\n * A function that removes comment markup from a given string.\r\n *\r\n * @param {string} input - The input string with comment markup.\r\n * @return {string} - The input string without comment markup.\r\n */\r\nexport function getCommentBlock(input: string): string {\r\n\tconst commentBlock = input.match(/\\/\\*\\*?[\\s\\S]*?\\*\\//);\r\n\treturn commentBlock !== null ? commentBlock[0] : input;\r\n}\r\n\r\n/**\r\n * A function that starts to capture the text after the first letter.\r\n *\r\n * @param {string} input - The input string with comment markup.\r\n * @return {string} - The input string without comment markup.\r\n */\r\nexport function removeCommentMarkup(input: string): string[] | null {\r\n\treturn input.match(/[a-zA-Z].*/gm);\r\n}\r\n\r\n/**\r\n * Removes the markup from a comment string.\r\n *\r\n * @param {string} comment - The comment string to remove markup from.\r\n * @return {string} The comment text without the markers.\r\n */\r\nexport function stripTranslationMarkup(comment: string): string {\r\n\tconst commentPattern =\r\n\t\t/\\/\\*\\*?\\s*(?:translators:)\\s*([\\s\\S]*?)\\s*\\*\\/|\\/\\/\\s*(?:translators:)\\s*(.*)$/i;\r\n\tconst matches = comment.match(commentPattern);\r\n\treturn matches ? matches[1] : comment;\r\n}\r\n\r\n/**\r\n * Splits a string into an array of strings based on the presence of a comma.\r\n *\r\n * @param {string} string - The string to be split.\r\n * @return {string[]} An array of strings after splitting the input string.\r\n */\r\nexport function stringstring(string: string | string[] | undefined): string[] {\r\n\tif (typeof string === \"string\") {\r\n\t\tif (string.includes(\",\")) {\r\n\t\t\treturn string.split(\",\");\r\n\t\t}\r\n\t\treturn [string];\r\n\t}\r\n\treturn [];\r\n}\r\n\r\n/**\r\n * Determines if a pattern represents a file, a directory, or a glob pattern.\r\n * @param pattern - The pattern string to evaluate.\r\n * @returns 'file', 'directory', or 'glob'.\r\n */\r\nexport function detectPatternType(\r\n\tpattern: string,\r\n): \"file\" | \"directory\" | \"glob\" {\r\n\tconst containsFileExtension = pattern.includes(\".\");\r\n\tconst containsDirectorySeparator = pattern.includes(\"/\");\r\n\r\n\tif (pattern.includes(\"*\")) {\r\n\t\treturn \"glob\";\r\n\t}\r\n\tif (!containsFileExtension && !containsDirectorySeparator) {\r\n\t\treturn \"directory\";\r\n\t}\r\n\tif (containsFileExtension && !containsDirectorySeparator) {\r\n\t\treturn \"file\";\r\n\t}\r\n\treturn \"glob\";\r\n}\r\n\r\n/**\r\n * Gets the file extension from a filename.\r\n * @param filename - The name of the file to extract the extension from.\r\n * @returns The file extension, or 'blade.php' for Blade templates.\r\n */\r\nexport function getFileExtension(filename: string): string {\r\n\tif (filename.endsWith(\".blade.php\")) {\r\n\t\treturn \"blade.php\";\r\n\t}\r\n\treturn filename.split(\".\").pop() || \"\";\r\n}\r\n\r\n/**\r\n * Generates a copyright comment for the specified slug and license.\r\n *\r\n * @param slug - The slug to include in the copyright comment\r\n * @param [license='GPL v2 or later'] - The license to use in the copyright comment\r\n * @return The generated copyright comment\r\n */\r\nexport function getCopyright(\r\n\tslug: string,\r\n\tlicense = \"GPL v2 or later\",\r\n): string {\r\n\treturn 
(\r\n\t\t`# Copyright (C) ${new Date().getFullYear()} ${slug}\\n` +\r\n\t\t`# This file is distributed under the ${license} license.`\r\n\t);\r\n}\r\n\r\n/**\r\n * Reverse slashes in a path, and replace backward slashes with forward slashes\r\n *\r\n * @param filePath - The path to be reversed.\r\n * @return {string} The reversed path.\r\n */\r\nexport function reverseSlashes(filePath: string): string {\r\n\t// Replace backward slashes with forward slashes\r\n\treturn filePath.replace(/\\\\/g, \"/\");\r\n}\r\n\r\n/**\r\n * The makepot package.json file data\r\n * @arguments {string[]} fields - The fields to extract\r\n * @return {Record<string, unknown>} - The package.json data\r\n */\r\nexport function getPkgJsonData(\r\n\tlocation?: string,\r\n\t...fields: string[]\r\n): Record<string, unknown> {\r\n\tconst requested: Record<string, unknown> = {};\r\n\t// read the package.json file the is in the root directory\r\n\tconst pkgJsonPath = path.join(location || process.cwd(), \"package.json\");\r\n\t// read the package.json file or return an empty object\r\n\tconst pkgJson: Record<string, unknown> = fs.existsSync(pkgJsonPath)\r\n\t\t? require(pkgJsonPath)\r\n\t\t: {};\r\n\t// extract the requested fields from the package.json\r\n\tfor (const field of fields) {\r\n\t\tif (pkgJson[field]) {\r\n\t\t\trequested[field] = pkgJson[field];\r\n\t\t}\r\n\t}\r\n\treturn requested;\r\n}\r\n\r\n/**\r\n * Print the module header with the current version and name\r\n */\r\nexport function printModuleInfo() {\r\n\tconst { version, name } = getPkgJsonData(modulePath, \"name\", \"version\");\r\n\t/* print the version */\r\n\tconsole.log(`${name} version: ${version}`);\r\n}\r\n\r\n/**\r\n * Output to the console the time elapsed in milliseconds between two dates\r\n * @param scriptName the name of the script\r\n * @param timeStart the start time\r\n * @param timeEnd the end time\r\n */\r\nexport function printTimeElapsed(\r\n\tscriptName: \"Make-Pot\" | \"Make-Json\",\r\n\ttimeStart: Date,\r\n\ttimeEnd: Date = new Date(),\r\n) {\r\n\tconsole.log(\r\n\t\t`\\n\uD83D\uDE80 ${scriptName}: Task completed! ${scriptName.split(\"-\")[1]} file created in ${timeEnd.getTime() - timeStart.getTime()\r\n\t\t}ms`,\r\n\t);\r\n}\r\n\r\n/**\r\n/**\r\n* Prints the memory usage and cpu usage of the system\r\n */\r\nexport function printStats() {\r\n\tconsole.log(\r\n\t\t\"Memory usage:\",\r\n\t\t(process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2),\r\n\t\t\"MB (Free:\",\r\n\t\t(totalmem() / 1024 / 1024 / 1024).toFixed(2),\r\n\t\t\"GB)\\nCpu User:\",\r\n\t\t(process.cpuUsage().user / 1000000).toFixed(2),\r\n\t\t\"ms Cpu System:\",\r\n\t\t(process.cpuUsage().system / 1000000).toFixed(2),\r\n\t\t\"ms of\",\r\n\t\tcpus().length,\r\n\t\t\"cores\",\r\n\t);\r\n}\r\n\r\n/**\r\n * Returns the output path recap\r\n *\r\n * @param {string} cwd - The current working directory\r\n * @param {Patterns} patterns - The patterns to be used for the extraction process\r\n * @return {string} - The output path recap\r\n */\r\nexport function outputPathRecap(cwd: string, patterns: Patterns): string {\r\n\treturn `\\nScript Path: ${cwd}\\nfor ${patterns.include.join()}\\nignoring patterns: ${patterns.exclude.join()}\\n`;\r\n}\r\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAe;AACf,qBAA+B;AAC/B,uBAAiB;AACjB,mBAA2B;AASpB,SAAS,gBAAgB,OAAuB;AACtD,QAAM,eAAe,MAAM,MAAM,qBAAqB;AACtD,SAAO,iBAAiB,OAAO,aAAa,CAAC,IAAI;AAClD;AAQO,SAAS,oBAAoB,OAAgC;AACnE,SAAO,MAAM,MAAM,cAAc;AAClC;AAQO,SAAS,uBAAuB,SAAyB;AAC/D,QAAM,iBACL;AACD,QAAM,UAAU,QAAQ,MAAM,cAAc;AAC5C,SAAO,UAAU,QAAQ,CAAC,IAAI;AAC/B;AAQO,SAAS,aAAa,QAAiD;AAC7E,MAAI,OAAO,WAAW,UAAU;AAC/B,QAAI,OAAO,SAAS,GAAG,GAAG;AACzB,aAAO,OAAO,MAAM,GAAG;AAAA,IACxB;AACA,WAAO,CAAC,MAAM;AAAA,EACf;AACA,SAAO,CAAC;AACT;AAOO,SAAS,kBACf,SACgC;AAChC,QAAM,wBAAwB,QAAQ,SAAS,GAAG;AAClD,QAAM,6BAA6B,QAAQ,SAAS,GAAG;AAEvD,MAAI,QAAQ,SAAS,GAAG,GAAG;AAC1B,WAAO;AAAA,EACR;AACA,MAAI,CAAC,yBAAyB,CAAC,4BAA4B;AAC1D,WAAO;AAAA,EACR;AACA,MAAI,yBAAyB,CAAC,4BAA4B;AACzD,WAAO;AAAA,EACR;AACA,SAAO;AACR;AAOO,SAAS,iBAAiB,UAA0B;AAC1D,MAAI,SAAS,SAAS,YAAY,GAAG;AACpC,WAAO;AAAA,EACR;AACA,SAAO,SAAS,MAAM,GAAG,EAAE,IAAI,KAAK;AACrC;AASO,SAAS,aACf,MACA,UAAU,mBACD;AACT,SACC,oBAAmB,oBAAI,KAAK,GAAE,YAAY,CAAC,IAAI,IAAI;AAAA,uCACX,OAAO;AAEjD;AAQO,SAAS,eAAe,UAA0B;AAExD,SAAO,SAAS,QAAQ,OAAO,GAAG;AACnC;AAOO,SAAS,eACf,aACG,QACuB;AAC1B,QAAM,YAAqC,CAAC;AAE5C,QAAM,cAAc,iBAAAA,QAAK,KAAK,YAAY,QAAQ,IAAI,GAAG,cAAc;AAEvE,QAAM,UAAmC,eAAAC,QAAG,WAAW,WAAW,IAC/D,QAAQ,WAAW,IACnB,CAAC;AAEJ,aAAW,SAAS,QAAQ;AAC3B,QAAI,QAAQ,KAAK,GAAG;AACnB,gBAAU,KAAK,IAAI,QAAQ,KAAK;AAAA,IACjC;AAAA,EACD;AACA,SAAO;AACR;AAKO,SAAS,kBAAkB;AACjC,QAAM,EAAE,SAAS,KAAK,IAAI,eAAe,yBAAY,QAAQ,SAAS;AAEtE,UAAQ,IAAI,GAAG,IAAI,aAAa,OAAO,EAAE;AAC1C;AAQO,SAAS,iBACf,YACA,WACA,UAAgB,oBAAI,KAAK,GACxB;AACD,UAAQ;AAAA,IACP;AAAA,YAAQ,UAAU,qBAAqB,WAAW,MAAM,GAAG,EAAE,CAAC,CAAC,oBAAoB,QAAQ,QAAQ,IAAI,UAAU,QAAQ,CACzH;AAAA,EACD;AACD;AAMO,SAAS,aAAa;AAC5B,UAAQ;AAAA,IACP;AAAA,KACC,QAAQ,YAAY,EAAE,WAAW,OAAO,MAAM,QAAQ,CAAC;AAAA,IACxD;AAAA,SACC,yBAAS,IAAI,OAAO,OAAO,MAAM,QAAQ,CAAC;AAAA,IAC3C;AAAA,KACC,QAAQ,SAAS,EAAE,OAAO,KAAS,QAAQ,CAAC;AAAA,IAC7C;AAAA,KACC,QAAQ,SAAS,EAAE,SAAS,KAAS,QAAQ,CAAC;AAAA,IAC/C;AAAA,QACA,qBAAK,EAAE;AAAA,IACP;AAAA,EACD;AACD;AASO,SAAS,gBAAgB,KAAa,UAA4B;AACxE,SAAO;AAAA,eAAkB,GAAG;AAAA,MAAS,SAAS,QAAQ,KAAK,CAAC;AAAA,qBAAwB,SAAS,QAAQ,KAAK,CAAC;AAAA;AAC5G;",
 "names": ["path", "fs"]
 }

package/lib/utils/extractors.js
CHANGED
@@ -1 +1 @@
-"use strict";var c=Object.defineProperty;var
+
"use strict";var c=Object.defineProperty;var f=Object.getOwnPropertyDescriptor;var m=Object.getOwnPropertyNames;var g=Object.prototype.hasOwnProperty;var u=(n,t)=>{for(var s in t)c(n,s,{get:t[s],enumerable:!0})},l=(n,t,s,e)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of m(t))!g.call(n,o)&&o!==s&&c(n,o,{get:()=>t[o],enumerable:!(e=f(t,o))||e.enumerable});return n};var k=n=>l(c({},"__esModule",{value:!0}),n);var x={};u(x,{buildBlock:()=>d,getKeyByValue:()=>B,yieldParsedData:()=>a});module.exports=k(x);var r=require("gettext-merger");function B(n,t){return Object.keys(n).find(s=>n[s]===t)??void 0}const d=(n,t,s=void 0)=>{const e=new r.Block([]);return e.msgctxt=void 0,e.msgid=t,e.msgid_plural="",e.msgstr=[],e.comments={},n&&(e.comments.extracted=[n]),s?.length&&(e.comments.reference=s),e};function a(n,t,s){const e=new r.SetOfBlocks([],s);if(!n)return e;e.path=s;for(const o of n){const i=d(o.msgid,o.msgctxt,o.comments?.reference);i&&e.blocks.push(i)}return e}0&&(module.exports={buildBlock,getKeyByValue,yieldParsedData});

package/lib/utils/extractors.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/utils/extractors.ts"],
-
"sourcesContent": ["import { Block, SetOfBlocks } from \"gettext-merger\";\n\n/**\n * Returns the key of an object based on its value\n *\n * @param object the object that contains the key\n * @param value the key that we want to get\n * @return {Record<string, string>} the filtered keys\n */\nexport function getKeyByValue(\n\tobject: Record<string, unknown>,\n\tvalue: string,\n): string | undefined {\n\treturn Object.keys(object).find((key) => object[key] === value) ?? undefined;\n}\n\n/**\n * Returns a gettext translation object\n *\n * @param label the label of the translation\n * @param string the string of the translation\n * @param filePath the file path of the translation\n */\nexport const buildBlock = (\n\tlabel: string,\n\tstring: string,\n\tfilePath: string[] | undefined = undefined,\n): Block => {\n\tconst block = new Block([]);\n\tblock.msgctxt = undefined;\n\tblock.msgid = string;\n\tblock.msgid_plural = \"\";\n\tblock.msgstr = [];\n\tblock.comments = {};\n\tif (label) {\n\t\tblock.comments.extracted = [label];\n\t}\n\tif (filePath?.length) {\n\t\tblock.comments.reference = filePath;\n\t}\n\treturn block;\n};\n\n/**\n * Extracts strings from parsed JSON data.\n *\n * @param {Record<string, any> | Parser.SyntaxNode} parsed - The parsed JSON data or syntax node.\n * @param {string | Parser}
-
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAAmC;AAS5B,SAAS,cACf,QACA,OACqB;AACrB,SAAO,OAAO,KAAK,MAAM,EAAE,KAAK,CAAC,QAAQ,OAAO,GAAG,MAAM,KAAK,KAAK;AACpE;AASO,MAAM,aAAa,CACzB,OACA,QACA,WAAiC,WACtB;AACX,QAAM,QAAQ,IAAI,4BAAM,CAAC,CAAC;AAC1B,QAAM,UAAU;AAChB,QAAM,QAAQ;AACd,QAAM,eAAe;AACrB,QAAM,SAAS,CAAC;AAChB,QAAM,WAAW,CAAC;AAClB,MAAI,OAAO;AACV,UAAM,SAAS,YAAY,CAAC,KAAK;AAAA,EAClC;AACA,MAAI,UAAU,QAAQ;AACrB,UAAM,SAAS,YAAY;AAAA,EAC5B;AACA,SAAO;AACR;AAUO,SAAS,gBACf,QACA,
+
"sourcesContent": ["import { Block, SetOfBlocks } from \"gettext-merger\";\n\n/**\n * Returns the key of an object based on its value\n *\n * @param object the object that contains the key\n * @param value the key that we want to get\n * @return {Record<string, string>} the filtered keys\n */\nexport function getKeyByValue(\n\tobject: Record<string, unknown>,\n\tvalue: string,\n): string | undefined {\n\treturn Object.keys(object).find((key) => object[key] === value) ?? undefined;\n}\n\n/**\n * Returns a gettext translation object\n *\n * @param label the label of the translation\n * @param string the string of the translation\n * @param filePath the file path of the translation\n */\nexport const buildBlock = (\n\tlabel: string,\n\tstring: string,\n\tfilePath: string[] | undefined = undefined,\n): Block => {\n\tconst block = new Block([]);\n\tblock.msgctxt = undefined;\n\tblock.msgid = string;\n\tblock.msgid_plural = \"\";\n\tblock.msgstr = [];\n\tblock.comments = {};\n\tif (label) {\n\t\tblock.comments.extracted = [label];\n\t}\n\tif (filePath?.length) {\n\t\tblock.comments.reference = filePath;\n\t}\n\treturn block;\n};\n\n/**\n * Extracts strings from parsed JSON data.\n *\n * @param {Record<string, any> | Parser.SyntaxNode} parsed - The parsed JSON data or syntax node.\n * @param {string | Parser} _filename - The filename or parser.\n * @param filepath - the path to the file being parsed\n * @return {SetOfBlocks} An array of translation strings.\n */\nexport function yieldParsedData(\n\tparsed: Block[] | undefined,\n\t_filename: \"block.json\" | \"theme.json\",\n\tfilepath: string,\n): SetOfBlocks {\n\tconst gettextTranslations: SetOfBlocks = new SetOfBlocks([], filepath);\n\n\tif (!parsed) {\n\t\treturn gettextTranslations;\n\t}\n\n\t// set the path of the translation\n\tgettextTranslations.path = filepath;\n\n\tfor (const item of parsed) {\n\t\tconst block = buildBlock(\n\t\t\titem.msgid,\n\t\t\titem.msgctxt as string,\n\t\t\titem.comments?.reference,\n\t\t);\n\n\t\tif (block) {\n\t\t\tgettextTranslations.blocks.push(block);\n\t\t}\n\t}\n\n\treturn gettextTranslations;\n}\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAAmC;AAS5B,SAAS,cACf,QACA,OACqB;AACrB,SAAO,OAAO,KAAK,MAAM,EAAE,KAAK,CAAC,QAAQ,OAAO,GAAG,MAAM,KAAK,KAAK;AACpE;AASO,MAAM,aAAa,CACzB,OACA,QACA,WAAiC,WACtB;AACX,QAAM,QAAQ,IAAI,4BAAM,CAAC,CAAC;AAC1B,QAAM,UAAU;AAChB,QAAM,QAAQ;AACd,QAAM,eAAe;AACrB,QAAM,SAAS,CAAC;AAChB,QAAM,WAAW,CAAC;AAClB,MAAI,OAAO;AACV,UAAM,SAAS,YAAY,CAAC,KAAK;AAAA,EAClC;AACA,MAAI,UAAU,QAAQ;AACrB,UAAM,SAAS,YAAY;AAAA,EAC5B;AACA,SAAO;AACR;AAUO,SAAS,gBACf,QACA,WACA,UACc;AACd,QAAM,sBAAmC,IAAI,kCAAY,CAAC,GAAG,QAAQ;AAErE,MAAI,CAAC,QAAQ;AACZ,WAAO;AAAA,EACR;AAGA,sBAAoB,OAAO;AAE3B,aAAW,QAAQ,QAAQ;AAC1B,UAAM,QAAQ;AAAA,MACb,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,UAAU;AAAA,IAChB;AAEA,QAAI,OAAO;AACV,0BAAoB,OAAO,KAAK,KAAK;AAAA,IACtC;AAAA,EACD;AAEA,SAAO;AACR;",
 "names": []
 }

package/lib/utils/output.js
CHANGED
@@ -1 +1 @@
-"use strict";var p=Object.create;var
+
"use strict";var p=Object.create;var s=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var m=Object.getOwnPropertyNames;var f=Object.getPrototypeOf,l=Object.prototype.hasOwnProperty;var c=(t,r)=>{for(var e in r)s(t,e,{get:r[e],enumerable:!0})},i=(t,r,e,o)=>{if(r&&typeof r=="object"||typeof r=="function")for(let n of m(r))!l.call(t,n)&&n!==e&&s(t,n,{get:()=>r[n],enumerable:!(o=a(r,n))||o.enumerable});return t};var u=(t,r,e)=>(e=t!=null?p(f(t)):{},i(r||!t||!t.__esModule?s(e,"default",{value:t,enumerable:!0}):e,t)),y=t=>i(s({},"__esModule",{value:!0}),t);var T={};c(T,{outputJson:()=>x});module.exports=y(T);var g=u(require("tannin"));function x(t,r,e){const o={[t.slug]:{"":r??{},...e.toJson()}};return new g.default(o).toString()}0&&(module.exports={outputJson});

package/lib/utils/output.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../../src/utils/output.ts"],
-
"sourcesContent": ["import type { SetOfBlocks } from \"gettext-merger\";\r\nimport Tannin from \"tannin\";\r\nimport type { Args } from \"../types.js\";\r\n\r\n/**\r\n * Outputs the pot file in json format based on the command line arguments --json option\r\n *\r\n * @param {Args} args - The command line arguments\r\n * @param {Record<string, string>} potHeader - The pot file header\r\n * @param {SetOfBlocks} translationsUnion - The translations union\r\n * @return {string} - The output pot file\r\n */\r\nexport function outputJson(\r\n\targs: Args,\r\n\tpotHeader: Record<string, string> | null,\r\n\ttranslationsUnion: SetOfBlocks,\r\n) {\r\n\tconst jedData
-
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;
+
"sourcesContent": ["import type { SetOfBlocks } from \"gettext-merger\";\r\nimport Tannin from \"tannin\";\r\nimport type { Args } from \"../types.js\";\r\nimport type { GetTextTranslation } from 'gettext-parser'\r\n\r\n/**\r\n * Outputs the pot file in json format based on the command line arguments --json option\r\n *\r\n * @param {Args} args - The command line arguments\r\n * @param {Record<string, string>} potHeader - The pot file header\r\n * @param {SetOfBlocks} translationsUnion - The translations union\r\n * @return {string} - The output pot file\r\n */\r\nexport function outputJson(\r\n\targs: Args,\r\n\tpotHeader: Record<string, string> | null,\r\n\ttranslationsUnion: SetOfBlocks,\r\n): string {\r\n\tconst jedData = {\r\n\t\t[args.slug]: {\r\n\t\t\t\"\": potHeader ?? {},\r\n\t\t\t...(translationsUnion.toJson() as{\r\n\t\t\t\t[key: string]: {\r\n\t\t\t\t\t[key: string]: GetTextTranslation;\r\n\t\t\t\t};\r\n\t\t\t}),\r\n\t\t},\r\n\t};\r\n\tconst i18n = new Tannin(jedData);\r\n\r\n\treturn i18n.toString();\r\n}\r\n"],
+
"mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,oBAAmB;AAYZ,SAAS,WACf,MACA,WACA,mBACS;AACT,QAAM,UAAU;AAAA,IACf,CAAC,KAAK,IAAI,GAAG;AAAA,MACZ,IAAI,aAAa,CAAC;AAAA,MAClB,GAAI,kBAAkB,OAAO;AAAA,IAK9B;AAAA,EACD;AACA,QAAM,OAAO,IAAI,cAAAA,QAAO,OAAO;AAE/B,SAAO,KAAK,SAAS;AACtB;",
 "names": ["Tannin"]
 }

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@wp-blocks/make-pot",
-"version": "1.6.
+"version": "1.6.5",
 "license": "GPL-3.0-or-later",
 "homepage": "https://wp-blocks.github.io/make-pot/",
 "description": "A Node.js script for generating a POT file from source code",
@@ -22,7 +22,7 @@
 },
 "repository": {
 "type": "git",
-"url": "https://github.com/wp-blocks/makePot.git"
+"url": "git+https://github.com/wp-blocks/makePot.git"
 },
 "bugs": {
 "url": "https://github.com/wp-blocks/makePot/issues"
@@ -50,8 +50,11 @@
 "build": "npx esbuild ./src/**/* ./src/*.ts --format=cjs --minify --outdir=lib --platform=node",
 "watch": "tsc --watch",
 "lint": "npx @biomejs/biome check --write src",
+"type-check": "npx tsc --noEmit",
+"update-pkg": "npm upgrade -S",
+"publish-check": "npm run build && npx publint",
 "rm": "rmdir /s /q lib",
-"test:build": "npx esbuild ./src/**/* --format=cjs --sourcemap --outdir=lib --platform=node",
+"test:build": "npx esbuild ./src/**/* ./src/*.ts --format=cjs --sourcemap --outdir=lib --platform=node",
 "test": "npm run test:build && node --test",
 "build:build-ci": "npx esbuild ./src/index.ts --format=cjs --outdir=lib --bundle --external:tree-sitter --external:tree-sitter-typescript --external:tree-sitter-php --external:tree-sitter-javascript --external:@babel/preset-typescript --platform=node",
 "test:ci": "npm run build:build-ci && npm run test",
@@ -59,26 +62,27 @@
 "test:coverage": "node --test --experimental-test-coverage"
 },
 "dependencies": {
-"@babel/core": "^7.
-"@babel/preset-env": "^7.
+"@babel/core": "^7.29.0",
+"@babel/preset-env": "^7.29.0",
 "cli-progress": "^3.12.0",
 "gettext-merger": "^1.2.1",
-"gettext-parser": "^4.0
-"glob": "^11.0
+"gettext-parser": "^4.2.0",
+"glob": "^11.1.0",
 "tannin": "^1.2.0",
 "tree-sitter": "^0.21.1",
 "tree-sitter-javascript": "^0.23.1",
 "tree-sitter-php": "^0.23.12",
 "tree-sitter-typescript": "^0.23.2",
-"yargs": "^17.7.
+"yargs": "^17.7.2"
 },
 "devDependencies": {
-"@biomejs/biome": "2.
+"@biomejs/biome": "2.3.14",
+"@types/babel__core": "^7.20.5",
 "@types/cli-progress": "^3.11.6",
 "@types/gettext-parser": "^4.0.4",
-"@types/node": "^22.
-"@types/yargs": "^17.0.
+"@types/node": "^22.19.11",
+"@types/yargs": "^17.0.35",
 "esbuild": "0.25.9",
-"typescript": "^5.9.
+"typescript": "^5.9.3"
 }
 }

package/tests/extract-headers.test.js
CHANGED
@@ -1,7 +1,7 @@
 const { describe, it } = require("node:test");
 const { join } = require("node:path");
 const assert = require("node:assert");
-const { extractMainFileData } = require("../lib");
+const { extractMainFileData, getAuthorFromPackage } = require("../lib");
 
 describe("should parse plugin main file", () => {
 describe("should parse plugin.php", () => {
@@ -52,3 +52,71 @@ describe("should parse theme main file", () => {
 });
 });
 });
+
+describe("getAuthorFromPackage", () => {
+it("extracts author from string with name and email", () => {
+const pkgJson = {
+author: "My Name <myname@example.com>",
+};
+const author = getAuthorFromPackage(pkgJson);
+assert.deepStrictEqual(author, {
+name: "My Name",
+email: "myname@example.com",
+website: undefined,
+});
+});
+
+it("extracts author from string with specific user format", () => {
+const pkgJson = {
+author: "my name <myname@asdasdasdasd.it>",
+};
+const author = getAuthorFromPackage(pkgJson);
+assert.deepStrictEqual(author, {
+name: "my name",
+email: "myname@asdasdasdasd.it",
+website: undefined,
+});
+});
+
+it("extracts author from string with name, email and url", () => {
+const pkgJson = {
+author: "My Name <myname@example.com> (https://example.com)",
+};
+const author = getAuthorFromPackage(pkgJson);
+assert.deepStrictEqual(author, {
+name: "My Name",
+email: "myname@example.com",
+website: "https://example.com",
+});
+});
+
+it("extracts author from object", () => {
+const pkgJson = {
+author: {
+name: "Object Author",
+email: "obj@example.com",
+website: "https://obj.example.com"
+}
+};
+const author = getAuthorFromPackage(pkgJson);
+assert.deepStrictEqual(author, {
+name: "Object Author",
+email: "obj@example.com",
+website: "https://obj.example.com",
+});
+});
+
+it("extracts author from array of strings", () => {
+const pkgJson = {
+authors: [
+"Array Author <array@example.com>"
+]
+};
+const author = getAuthorFromPackage(pkgJson);
+assert.deepStrictEqual(author, {
+name: "Array Author",
+email: "array@example.com",
+website: undefined
+});
+});
+});

package/tests/generate-header.test.js
ADDED
@@ -0,0 +1,27 @@
+const { describe, it } = require("node:test");
+const assert = require("node:assert");
+const { generateHeader } = require("../lib/extractors/headers");
+const process = require("node:process");
+
+describe("generateHeader", () => {
+it("should return default headers when silent is true and fields are missing", async () => {
+const args = {
+slug: "test-slug",
+debug: false,
+domain: "plugin",
+paths: { cwd: process.cwd(), out: "languages" },
+options: { silent: true },
+headers: {
+version: "0.0.1",
+author: "AUTHOR",
+email: "AUTHOR EMAIL"
+},
+};
+
+const headers = await generateHeader(args);
+
+assert.ok(headers, "Headers should be generated");
+assert.strictEqual(headers["Project-Id-Version"], "test-slug 0.0.1");
+assert.strictEqual(headers["Last-Translator"], "AUTHOR <AUTHOR EMAIL>");
+});
+});

package/tests/tree.test.js
CHANGED
@@ -35,7 +35,7 @@ describe("doTree php", () => {
 translator: undefined,
 },
 msgctxt: undefined,
-msgid: "You
+msgid: "You're a silly monkey",
 msgid_plural: undefined,
 msgstr: [""],
 });
@@ -214,3 +214,76 @@ describe("doTree php _n, _nx", async () => {
 assert.strictEqual(r.filter((block) => block.msgctxt === 'context').length, 1);
 });
 });
+
+describe("doTree php escape sequences", async () => {
+it("should correctly unescape newlines, tabs, and quotes in double-quoted strings", () => {
+const content = `<?php
+// Double quotes with escape sequences
+__("Line 1\\nLine 2", "text-domain");
+
+// Double quotes with escaped quotes
+_e("Hello \\"World\\"", "text-domain");
+
+// Double quotes with tabs
+__("Col1\\tCol2", "text-domain");
+`;
+
+const filename = "escapes.php";
+const r = doTree(content, filename).blocks;
+
+// 1. Verify newline
+const newlineBlock = r.find(b => b.msgid.includes('Line 1'));
+// The msgid should contain an actual newline character, not the literal characters '\' and 'n'
+assert.strictEqual(newlineBlock?.msgid, "Line 1\nLine 2");
+
+// 2. Verify escaped quotes
+const quoteBlock = r.find(b => b.msgid.includes('Hello'));
+assert.strictEqual(quoteBlock?.msgid, 'Hello "World"');
+
+// 3. Verify tabs
+const tabBlock = r.find(b => b.msgid.includes('Col1'));
+assert.strictEqual(tabBlock?.msgid, "Col1\tCol2");
+});
+});
+
+describe("doTree php single quotes", async () => {
+it("should treat escape sequences as literals in single-quoted strings", () => {
+const content = `<?php
+// Single quotes should NOT interpret \\n as newline
+__('Line 1\\nLine 2', 'text-domain');
+
+// Single quotes SHOULD handle escaped single quotes
+__('It\\'s a sunny day', 'text-domain');
+`;
+
+const filename = "single_quotes.php";
+const r = doTree(content, filename).blocks;
+
+// 1. Verify literal \\n
+const literalBlock = r.find(b => b.msgid.includes('Line 1'));
+// In single quotes, \\n is two characters: backslash and n
+assert.strictEqual(literalBlock?.msgid, "Line 1\\nLine 2");
+
+// 2. Verify escaped single quote
+const quoteBlock = r.find(b => b.msgid.includes('sunny'));
+assert.strictEqual(quoteBlock?.msgid, "It's a sunny day");
+});
+});
+
+describe("doTree php variables in strings", async () => {
+it("should preserve PHP variables inside double-quoted strings", () => {
+const content = `<?php
+$name = 'John';
+// Variable interpolation
+__("Hello $name, how are you?", "text-domain");
+`;
+
+const filename = "variables.php";
+const r = doTree(content, filename).blocks;
+
+const varBlock = r.find(b => b.msgid.startsWith('Hello'));
+
+// We expect the variable name to be preserved in the msgid
+assert.strictEqual(varBlock?.msgid, "Hello $name, how are you?");
+});
+});