@wp-blocks/make-pot 1.5.0 → 1.5.1

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (69)
  1. package/README.md +3 -3
  2. package/lib/assets/block-i18n.js.map +7 -0
  3. package/lib/assets/package-i18n.js.map +7 -0
  4. package/lib/assets/theme-i18n.js.map +7 -0
  5. package/lib/assets/wp-plugin-i18n.js.map +7 -0
  6. package/lib/assets/wp-theme-i18n.js.map +7 -0
  7. package/lib/cli/getArgs.js.map +7 -0
  8. package/lib/cli/getJsonArgs.js +1 -1
  9. package/lib/cli/getJsonArgs.js.map +7 -0
  10. package/lib/cli/parseCli.js.map +7 -0
  11. package/lib/cli.js +1 -1
  12. package/lib/cli.js.map +7 -0
  13. package/lib/const.js +1 -1
  14. package/lib/const.js.map +7 -0
  15. package/lib/extractors/css.js +1 -1
  16. package/lib/extractors/css.js.map +7 -0
  17. package/lib/extractors/headers.js +3 -3
  18. package/lib/extractors/headers.js.map +7 -0
  19. package/lib/extractors/json.js +1 -1
  20. package/lib/extractors/json.js.map +7 -0
  21. package/lib/extractors/packageJson.js +1 -0
  22. package/lib/extractors/packageJson.js.map +7 -0
  23. package/lib/extractors/php.js +2 -2
  24. package/lib/extractors/php.js.map +7 -0
  25. package/lib/extractors/schema.js +4 -4
  26. package/lib/extractors/schema.js.map +7 -0
  27. package/lib/extractors/text.js +1 -1
  28. package/lib/extractors/text.js.map +7 -0
  29. package/lib/fs/fs.js +2 -1
  30. package/lib/fs/fs.js.map +7 -0
  31. package/lib/fs/glob.js.map +7 -0
  32. package/lib/index.js +1 -1
  33. package/lib/index.js.map +7 -0
  34. package/lib/jsonCommand.js +1 -1
  35. package/lib/jsonCommand.js.map +7 -0
  36. package/lib/makeJson.js +1 -1
  37. package/lib/makeJson.js.map +7 -0
  38. package/lib/makePot.js +1 -1
  39. package/lib/makePot.js.map +7 -0
  40. package/lib/parser/exec.js +3 -3
  41. package/lib/parser/exec.js.map +7 -0
  42. package/lib/parser/makeJson.js +1 -1
  43. package/lib/parser/makeJson.js.map +7 -0
  44. package/lib/parser/makePot.js +1 -1
  45. package/lib/parser/makePot.js.map +7 -0
  46. package/lib/parser/patterns.js.map +7 -0
  47. package/lib/parser/process.js.map +7 -0
  48. package/lib/parser/progress.js +1 -1
  49. package/lib/parser/progress.js.map +7 -0
  50. package/lib/parser/taskRunner.js +2 -1
  51. package/lib/parser/taskRunner.js.map +7 -0
  52. package/lib/parser/tree.js +1 -1
  53. package/lib/parser/tree.js.map +7 -0
  54. package/lib/potCommand.js +1 -1
  55. package/lib/potCommand.js.map +7 -0
  56. package/lib/types.js.map +7 -0
  57. package/lib/utils/common.js +3 -2
  58. package/lib/utils/common.js.map +7 -0
  59. package/lib/utils/extractors.js +1 -0
  60. package/lib/utils/extractors.js.map +7 -0
  61. package/package.json +13 -12
  62. package/tests/extract-headers.test.js +54 -0
  63. package/tests/extract.test.js +1 -1
  64. package/tests/jsonParse.test.js +167 -0
  65. package/tests/parse-headers.test.js +74 -0
  66. package/lib/extractors/utils.js +0 -1
  67. package/tests/getFiles.test.no.js +0 -85
  68. package/tests/jsonParse.no.js +0 -49
  69. package/tests/wpcliCompare.no.js +0 -32
package/lib/jsonCommand.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/jsonCommand.ts"],
+ "sourcesContent": ["import MakeJsonCommand from \"./parser/makeJson.js\";\nimport type { MakeJsonArgs } from \"./types.js\";\nimport { printMakePotModuleInfo, printTimeElapsed } from \"./utils/common.js\";\n\nexport default function makeJsonCommand(args: MakeJsonArgs) {\n\tconst makeJsonCommand = new MakeJsonCommand(args);\n\n\tif (Object.keys(args).length > 0) {\n\t\tprintMakePotModuleInfo();\n\t\t/* capture the start time */\n\t\tconst timeStart = new Date();\n\t\tmakeJsonCommand\n\t\t\t.exec()\n\t\t\t.then((result) => {\n\t\t\t\tif (args.debug) {\n\t\t\t\t\tconsole.log(result);\n\t\t\t\t}\n\t\t\t\t/* output the end time */\n\t\t\t\tprintTimeElapsed(\"Make-Json\", timeStart);\n\t\t\t})\n\t\t\t.catch((error) => {\n\t\t\t\tconsole.error(`\uD83E\uDEE4 make-json - Error: ${error}`);\n\t\t\t});\n\t}\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sBAA4B;AAE5B,oBAAyD;AAE1C,SAAR,gBAAiC,MAAoB;AAC3D,QAAMA,mBAAkB,IAAI,gBAAAC,QAAgB,IAAI;AAEhD,MAAI,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AACjC,8CAAuB;AAEvB,UAAM,YAAY,oBAAI,KAAK;AAC3B,IAAAD,iBACE,KAAK,EACL,KAAK,CAAC,WAAW;AACjB,UAAI,KAAK,OAAO;AACf,gBAAQ,IAAI,MAAM;AAAA,MACnB;AAEA,0CAAiB,aAAa,SAAS;AAAA,IACxC,CAAC,EACA,MAAM,CAAC,UAAU;AACjB,cAAQ,MAAM,gCAAyB,KAAK,EAAE;AAAA,IAC/C,CAAC;AAAA,EACH;AACD;",
+ "names": ["makeJsonCommand", "MakeJsonCommand"]
+ }
package/lib/makeJson.js CHANGED
@@ -1,2 +1,2 @@
  #!/usr/bin/env node
- "use strict";var i=Object.create;var e=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var p=Object.getPrototypeOf,J=Object.prototype.hasOwnProperty;var b=(o,r,n,m)=>{if(r&&typeof r=="object"||typeof r=="function")for(let s of f(r))!J.call(o,s)&&s!==n&&e(o,s,{get:()=>r[s],enumerable:!(m=a(r,s))||m.enumerable});return o};var c=(o,r,n)=>(n=o!=null?i(p(o)):{},b(r||!o||!o.__esModule?e(n,"default",{value:o,enumerable:!0}):n,o));var t=require("./cli/getJsonArgs"),g=c(require("./jsonCommand"));const d=(0,t.getJsonArgs)();(0,g.default)(d);
+ "use strict";var i=Object.create;var e=Object.defineProperty;var a=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var p=Object.getPrototypeOf,J=Object.prototype.hasOwnProperty;var b=(o,r,n,m)=>{if(r&&typeof r=="object"||typeof r=="function")for(let s of f(r))!J.call(o,s)&&s!==n&&e(o,s,{get:()=>r[s],enumerable:!(m=a(r,s))||m.enumerable});return o};var c=(o,r,n)=>(n=o!=null?i(p(o)):{},b(r||!o||!o.__esModule?e(n,"default",{value:o,enumerable:!0}):n,o));var t=require("./cli/getJsonArgs.js"),g=c(require("./jsonCommand.js"));const d=(0,t.getJsonArgs)();(0,g.default)(d);
package/lib/makeJson.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/makeJson.ts"],
+ "sourcesContent": ["#!/usr/bin/env node\n\nimport { getJsonArgs } from \"./cli/getJsonArgs.js\";\nimport makeJson from \"./jsonCommand.js\";\n\nconst args = getJsonArgs();\n\nmakeJson(args);\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAEA,yBAA4B;AAC5B,yBAAqB;AAErB,MAAM,WAAO,gCAAY;AAAA,IAEzB,mBAAAA,SAAS,IAAI;",
+ "names": ["makeJson"]
+ }
package/lib/makePot.js CHANGED
@@ -1,2 +1,2 @@
  #!/usr/bin/env node
- "use strict";var i=Object.create;var p=Object.defineProperty;var n=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var f=Object.getPrototypeOf,A=Object.prototype.hasOwnProperty;var d=(r,m,o,e)=>{if(m&&typeof m=="object"||typeof m=="function")for(let t of a(m))!A.call(r,t)&&t!==o&&p(r,t,{get:()=>m[t],enumerable:!(e=n(m,t))||e.enumerable});return r};var y=(r,m,o)=>(o=r!=null?i(f(r)):{},d(m||!r||!r.__esModule?p(o,"default",{value:r,enumerable:!0}):o,r));var s=require("./cli/getArgs.js"),g=y(require("./potCommand"));(0,g.default)((0,s.getArgs)());
+ "use strict";var i=Object.create;var e=Object.defineProperty;var n=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var f=Object.getPrototypeOf,A=Object.prototype.hasOwnProperty;var d=(r,m,o,t)=>{if(m&&typeof m=="object"||typeof m=="function")for(let s of a(m))!A.call(r,s)&&s!==o&&e(r,s,{get:()=>m[s],enumerable:!(t=n(m,s))||t.enumerable});return r};var y=(r,m,o)=>(o=r!=null?i(f(r)):{},d(m||!r||!r.__esModule?e(o,"default",{value:r,enumerable:!0}):o,r));var p=require("./cli/getArgs.js"),g=y(require("./potCommand.js"));(0,g.default)((0,p.getArgs)());
package/lib/makePot.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../src/makePot.ts"],
+ "sourcesContent": ["#!/usr/bin/env node\n\nimport { getArgs } from \"./cli/getArgs.js\";\nimport makepotCommand from \"./potCommand.js\";\nimport type { Args } from \"./types.js\";\n\n/** Main execution */\nmakepotCommand(getArgs() as Args);\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;AAEA,qBAAwB;AACxB,wBAA2B;AAAA,IAI3B,kBAAAA,aAAe,wBAAQ,CAAS;",
+ "names": ["makepotCommand"]
+ }
package/lib/parser/exec.js CHANGED
@@ -1,7 +1,7 @@
- "use strict";var C=Object.create;var s=Object.defineProperty;var j=Object.getOwnPropertyDescriptor;var w=Object.getOwnPropertyNames;var $=Object.getPrototypeOf,k=Object.prototype.hasOwnProperty;var A=(t,n)=>{for(var o in n)s(t,o,{get:n[o],enumerable:!0})},l=(t,n,o,e)=>{if(n&&typeof n=="object"||typeof n=="function")for(let i of w(n))!k.call(t,i)&&i!==o&&s(t,i,{get:()=>n[i],enumerable:!(e=j(n,i))||e.enumerable});return t};var m=(t,n,o)=>(o=t!=null?C($(t)):{},l(n||!t||!t.__esModule?s(o,"default",{value:t,enumerable:!0}):o,t)),B=t=>l(s({},"__esModule",{value:!0}),t);var H={};A(H,{exec:()=>G});module.exports=B(H);var c=m(require("node:path")),g=require("gettext-parser"),f=m(require("tannin")),r=require("../extractors/headers.js"),p=require("../fs/fs"),a=require("../utils/common.js"),d=require("./patterns.js"),u=require("./process.js"),h=require("./progress.js"),P=require("./taskRunner.js");function D(t,n){console.log(`
+ "use strict";var w=Object.create;var s=Object.defineProperty;var x=Object.getOwnPropertyDescriptor;var C=Object.getOwnPropertyNames;var j=Object.getPrototypeOf,v=Object.prototype.hasOwnProperty;var k=(t,n)=>{for(var o in n)s(t,o,{get:n[o],enumerable:!0})},c=(t,n,o,i)=>{if(n&&typeof n=="object"||typeof n=="function")for(let e of C(n))!v.call(t,e)&&e!==o&&s(t,e,{get:()=>n[e],enumerable:!(i=x(n,e))||i.enumerable});return t};var g=(t,n,o)=>(o=t!=null?w(j(t)):{},c(n||!t||!t.__esModule?s(o,"default",{value:t,enumerable:!0}):o,t)),A=t=>c(s({},"__esModule",{value:!0}),t);var G={};k(G,{exec:()=>D});module.exports=A(G);var l=g(require("node:path")),f=require("gettext-parser"),u=g(require("tannin")),r=require("../extractors/headers.js"),p=require("../fs/fs"),a=require("../utils/common.js"),h=require("./patterns.js"),d=require("./process.js"),P=require("./progress.js"),T=require("./taskRunner.js");function B(t,n){return`
  Script Path: ${t}
  for ${n.include.join()}
  ignoring patterns: ${n.exclude.join()}
- `)}async function G(t){t.options?.silent||(console.log("\u{1F4DD} Starting makePot for",t?.slug),(0,a.printStats)()),t.options?.skip.audit&&(console.log(`
- Audit strings...`),console.log("TODO"),console.log("\u2705 Done"));const n=await(0,r.generateHeader)(t);let o=(0,r.translationsHeaders)(t);const e=(0,d.getPatterns)(t);t.options?.silent||D(c.default.resolve(t.paths.cwd),e);const i=(0,h.initProgress)(t,0)??void 0,T=await(0,u.processFiles)(e,t,i);if(o=await(0,P.taskRunner)(T,o,t,i),t.options?.json){const x={[t.slug]:{"":n,...o.toJson()}};return new f.default(x).toString()}const y={charset:(0,p.getEncodingCharset)(t.options?.charset),headers:n,translations:o.toJson()},S=g.po.compile(y).toString((0,p.getCharset)(t.options?.charset));return`${t.options?.fileComment||(0,a.getCopyright)(t.slug,t.headers?.license??"GPL v2 or later")}
+ `}async function D(t){t.options?.silent||(console.log("\u{1F4DD} Starting makePot for",t?.slug),(0,a.printStats)()),t.options?.skip.audit&&(console.log(`
+ Audit strings...`),console.log("TODO"),console.log("\u2705 Done"));const n=await(0,r.generateHeader)(t);let o=(0,r.translationsHeaders)(t);t.options?.silent||B(l.default.resolve(t.paths.cwd),t.patterns);const i=(0,P.initProgress)(t,0);i.start(3,1,{filename:`Resolving files in ${l.default.resolve(t.paths.cwd)}`});const e=(0,h.getPatterns)(t),m=await(0,d.processFiles)(e,t);if(i.update(2,{filename:`Found ${m.length} files`}),o=await(0,T.taskRunner)(m,o,t,i),t.options?.json){const $={[t.slug]:{"":n,...o.toJson()}};return new u.default($).toString()}const y={charset:(0,p.getEncodingCharset)(t.options?.charset),headers:n,translations:o.toJson()},S=f.po.compile(y).toString((0,p.getCharset)(t.options?.charset));return`${t.options?.fileComment||(0,a.getCopyright)(t.slug,t.headers?.license??"GPL v2 or later")}
  ${S}`}0&&(module.exports={exec});
package/lib/parser/exec.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/exec.ts"],
+ "sourcesContent": ["import path from \"node:path\";\nimport type { SingleBar } from \"cli-progress\";\nimport { type GetTextTranslations, po } from \"gettext-parser\";\nimport Tannin from \"tannin\";\nimport { generateHeader, translationsHeaders } from \"../extractors/headers.js\";\nimport { getCharset, getEncodingCharset } from \"../fs/fs\";\nimport type { Args, Patterns } from \"../types.js\";\nimport { getCopyright, printStats } from \"../utils/common.js\";\nimport { getPatterns } from \"./patterns.js\";\nimport { processFiles } from \"./process.js\";\nimport { initProgress } from \"./progress.js\";\nimport { taskRunner } from \"./taskRunner.js\";\n\n/**\n * Returns the output path recap\n *\n * @param {string} cwd - The current working directory\n * @param {Patterns} patterns - The patterns to be used for the extraction process\n * @return {string} - The output path recap\n */\nfunction outputPathRecap(cwd: string, patterns: Patterns): string {\n\treturn `\\nScript Path: ${cwd}\\nfor ${patterns.include.join()}\\nignoring patterns: ${patterns.exclude.join()}\\n`;\n}\n\n/**\n * Runs the parser and generates the pot file or the json file based on the command line arguments\n *\n * @param {Args} args - The command line arguments\n * @return {Promise<string>} - A promise that resolves with the generated pot file\n */\nexport async function exec(args: Args): Promise<string> {\n\tif (!args.options?.silent) {\n\t\tconsole.log(\"\uD83D\uDCDD Starting makePot for\", args?.slug);\n\t\tprintStats();\n\t}\n\n\t// audit\n\tif (args.options?.skip.audit) {\n\t\tconsole.log(\"\\nAudit strings...\");\n\t\tconsole.log(\"TODO\");\n\t\t/**\n\t\t * TODO audit strings\n\t\t *\n\t\t * Skips string audit where it tries to find possible mistakes in translatable strings. 
Useful when running in an automated environment.\n\t\t *\n\t\t **/\n\t\tconsole.log(\"\u2705 Done\");\n\t}\n\n\t/** The pot file header contains the data about the plugin or theme */\n\tconst potHeader = await generateHeader(args);\n\n\t/** We need to find the main file data so that the definitions are extracted from the plugin or theme files */\n\tlet translationsUnion = translationsHeaders(args);\n\n\tif (!args.options?.silent)\n\t\toutputPathRecap(path.resolve(args.paths.cwd), args.patterns);\n\n\t/**\n\t * The progress bar that is used to show the progress of the extraction process.\n\t */\n\tconst progressBar: SingleBar = initProgress(args, 0);\n\n\tprogressBar.start(3, 1, {\n\t\tfilename: `Resolving files in ${path.resolve(args.paths.cwd)}`,\n\t});\n\n\t/**\n\t * Extract the strings from the files\n\t */\n\tconst patterns = getPatterns(args);\n\tconst files = await processFiles(patterns, args);\n\n\tprogressBar.update(2, {\n\t\tfilename: `Found ${files.length} files`,\n\t});\n\n\ttranslationsUnion = await taskRunner(\n\t\tfiles,\n\t\ttranslationsUnion,\n\t\targs,\n\t\tprogressBar,\n\t);\n\n\tif (args.options?.json) {\n\t\t// generate the json file\n\t\tconst jedData: {\n\t\t\t[p: string]: { [p: string]: [string, string] };\n\t\t} = {\n\t\t\t[args.slug]: {\n\t\t\t\t\"\": potHeader,\n\t\t\t\t...(translationsUnion.toJson() as { [p: string]: [string, string] }),\n\t\t\t},\n\t\t};\n\t\tconst i18n = new Tannin(jedData);\n\n\t\treturn i18n.toString();\n\t}\n\n\t// generate the pot file json\n\tconst getTextTranslations: GetTextTranslations = {\n\t\tcharset: getEncodingCharset(args.options?.charset),\n\t\theaders: potHeader as { [headerName: string]: string },\n\t\ttranslations: translationsUnion.toJson(),\n\t};\n\n\t// And then compile the pot file to a string\n\tconst pluginTranslations = po\n\t\t.compile(getTextTranslations)\n\t\t.toString(getCharset(args.options?.charset));\n\n\t// return the pot file as a string, prefixed with the header\n\tconst copyrightComment =\n\t\targs.options?.fileComment ||\n\t\tgetCopyright(\n\t\t\targs.slug,\n\t\t\t(args.headers?.license as string) ?? \"GPL v2 or later\",\n\t\t);\n\treturn `${copyrightComment}\\n${pluginTranslations}`;\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAiB;AAEjB,4BAA6C;AAC7C,oBAAmB;AACnB,qBAAoD;AACpD,gBAA+C;AAE/C,oBAAyC;AACzC,sBAA4B;AAC5B,qBAA6B;AAC7B,sBAA6B;AAC7B,wBAA2B;AAS3B,SAAS,gBAAgB,KAAa,UAA4B;AACjE,SAAO;AAAA,eAAkB,GAAG;AAAA,MAAS,SAAS,QAAQ,KAAK,CAAC;AAAA,qBAAwB,SAAS,QAAQ,KAAK,CAAC;AAAA;AAC5G;AAQA,eAAsB,KAAK,MAA6B;AACvD,MAAI,CAAC,KAAK,SAAS,QAAQ;AAC1B,YAAQ,IAAI,kCAA2B,MAAM,IAAI;AACjD,kCAAW;AAAA,EACZ;AAGA,MAAI,KAAK,SAAS,KAAK,OAAO;AAC7B,YAAQ,IAAI,oBAAoB;AAChC,YAAQ,IAAI,MAAM;AAOlB,YAAQ,IAAI,aAAQ;AAAA,EACrB;AAGA,QAAM,YAAY,UAAM,+BAAe,IAAI;AAG3C,MAAI,wBAAoB,oCAAoB,IAAI;AAEhD,MAAI,CAAC,KAAK,SAAS;AAClB,oBAAgB,iBAAAA,QAAK,QAAQ,KAAK,MAAM,GAAG,GAAG,KAAK,QAAQ;AAK5D,QAAM,kBAAyB,8BAAa,MAAM,CAAC;AAEnD,cAAY,MAAM,GAAG,GAAG;AAAA,IACvB,UAAU,sBAAsB,iBAAAA,QAAK,QAAQ,KAAK,MAAM,GAAG,CAAC;AAAA,EAC7D,CAAC;AAKD,QAAM,eAAW,6BAAY,IAAI;AACjC,QAAM,QAAQ,UAAM,6BAAa,UAAU,IAAI;AAE/C,cAAY,OAAO,GAAG;AAAA,IACrB,UAAU,SAAS,MAAM,MAAM;AAAA,EAChC,CAAC;AAED,sBAAoB,UAAM;AAAA,IACzB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD;AAEA,MAAI,KAAK,SAAS,MAAM;AAEvB,UAAM,UAEF;AAAA,MACH,CAAC,KAAK,IAAI,GAAG;AAAA,QACZ,IAAI;AAAA,QACJ,GAAI,kBAAkB,OAAO;AAAA,MAC9B;AAAA,IACD;AACA,UAAM,OAAO,IAAI,cAAAC,QAAO,OAAO;AAE/B,WAAO,KAAK,SAAS;AAAA,EACtB;AAGA,QAAM,sBAA2C;AAAA,IAChD,aAAS,8BAAmB,KAAK,SAAS,OAAO;AAAA,IACjD,SAAS;AAAA,IACT,cAAc,kBAAkB,OAAO;AAAA,EACxC;AAGA,QAAM,qBAAqB,yBACzB,QAAQ,mBAAmB,EAC3B,aAAS,sBAAW,KAAK,SAAS,OAAO,CAAC;AAG5C,QAAM,mBACL,KAAK,SAAS,mBACd;AAAA,IACC,KAAK;AAAA,IACJ,KAAK,SAAS,WAAsB;AAAA,EACtC;AACD,SAAO,GAAG,gBAAgB;AAAA,EAAK,kBAAkB;AAClD;",
+ "names": ["path", "Tannin"]
+ }
package/lib/parser/makeJson.js CHANGED
@@ -1 +1 @@
- "use strict";var b=Object.create;var p=Object.defineProperty;var x=Object.getOwnPropertyDescriptor;var J=Object.getOwnPropertyNames;var w=Object.getPrototypeOf,N=Object.prototype.hasOwnProperty;var P=(n,t)=>{for(var e in t)p(n,e,{get:t[e],enumerable:!0})},h=(n,t,e,s)=>{if(t&&typeof t=="object"||typeof t=="function")for(let i of J(t))!N.call(n,i)&&i!==e&&p(n,i,{get:()=>t[i],enumerable:!(s=x(t,i))||s.enumerable});return n};var m=(n,t,e)=>(e=n!=null?b(w(n)):{},h(t||!n||!n.__esModule?p(e,"default",{value:n,enumerable:!0}):e,n)),S=n=>h(p({},"__esModule",{value:!0}),n);var T={};P(T,{MakeJsonCommand:()=>v,default:()=>D});module.exports=S(T);var f=m(require("node:crypto")),c=m(require("node:fs")),o=m(require("node:path")),y=require("gettext-parser"),g=require("glob"),u=require("../const");class v{source;destination;allowedFormats;purge;prettyPrint;debug;scriptName;paths;sourceDir;constructor(t){if(this.sourceDir=o.default.relative(t.paths.cwd,t.source??""),!c.existsSync(this.sourceDir))throw console.error("Source directory not found",t),new Error(`Source directory ${this.sourceDir} not found`);this.scriptName=t.scriptName,this.source=t.source,this.destination=t.destination,this.allowedFormats=t.allowedFormats??[".ts",".tsx",".js",".jsx",".mjs",".cjs"],this.purge=t.purge,this.prettyPrint=t.prettyPrint,this.debug=t.debug,this.paths=t.paths}async invoke(){const t=await(0,g.glob)("**/*.po",{cwd:this.destination,nodir:!0});console.log("Found po files",t,"in",this.destination,"folder");const e={};for(const s of t)if(this.scriptName||(this.scriptName=await(0,g.glob)("*.js",{cwd:this.source,nodir:!0}),console.log("Found script:",this.scriptName,"in",this.source,"folder")),typeof this.scriptName=="string"){const i=this.addPot(s,this.scriptName);e[i.filename]=i.data}else if(Array.isArray(this.scriptName))for(const i of this.scriptName){const r=this.addPot(s,i);e[r.filename]=r.data}for(const[s,i]of Object.entries(e)){let r;if(this.purge)c.existsSync(o.default.join(this.destination,s))&&(console.log(`Removing ${o.default.join(this.destination,s)} as the purge option is enabled`),c.unlinkSync(o.default.join(this.destination,s))),r=JSON.stringify(i,null,this?.prettyPrint?2:0);else{const l=c.readFileSync(o.default.join(this.source,s),"utf8");r=JSON.stringify({...i,...JSON.parse(l)},null,this?.prettyPrint?2:0)}const a=o.default.join(this.destination,s);c.writeFileSync(a,r),console.log(`JSON file written to ${a} with ${s}`)}return e}processFile(t,e="utf8"){const s=c.readFileSync(t,e),i=this.extractIsoCode(t),r=this.parsePoFile(s);return this.convertToJed(r.headers,r.translations,i)}parsePoFile(t){return y.po.parse(t)}convertToJed(t,e,s){const i="messages",r={[i]:{"":{domain:i,lang:s||t.Language||"en",plural_forms:t["Plural-Forms"]||"nplurals=2; plural=(n != 1);"}}};for(const a of Object.keys(e)){const l=e[a];for(const d of Object.keys(l)){const F=l[d];if(d==="")continue;const j=a&&a!==""?`${a}${d}`:d;r[i][j]=F.msgstr}}return r}extractIsoCode(t){const e=t.match(u.IsoCodeRegex);return e?e[1]:void 0}getPluralForms(t){const e=t.match(/Plural-Forms:\s*(.*?)\n/);return e?e[1]:"nplurals=2; plural=(n != 1);"}getLanguage(t){const e=t.match(/Language:\s*(.*?)\n/);return e?e[1]:u.defaultLocale}isCompatibleFile(t){return this.allowedFormats?t.some(e=>this.allowedFormats.some(s=>e.endsWith(s))):!0}md5(t){return f.default.createHash("md5").update(t).digest("hex")}addPot(t,e){const s=this.md5(e);return{filename:t.replace(".po",`-${s}.json`),data:this.processFile(o.default.join(this.destination,t))}}}var 
D=v;0&&(module.exports={MakeJsonCommand});
+ "use strict";var N=Object.create;var g=Object.defineProperty;var S=Object.getOwnPropertyDescriptor;var F=Object.getOwnPropertyNames;var w=Object.getPrototypeOf,D=Object.prototype.hasOwnProperty;var T=(i,t)=>{for(var e in t)g(i,e,{get:t[e],enumerable:!0})},v=(i,t,e,n)=>{if(t&&typeof t=="object"||typeof t=="function")for(let s of F(t))!D.call(i,s)&&s!==e&&g(i,s,{get:()=>t[s],enumerable:!(n=S(t,s))||n.enumerable});return i};var m=(i,t,e)=>(e=i!=null?N(w(i)):{},v(t||!i||!i.__esModule?g(e,"default",{value:i,enumerable:!0}):e,i)),M=i=>v(g({},"__esModule",{value:!0}),i);var O={};T(O,{MakeJsonCommand:()=>x,default:()=>$});module.exports=M(O);var J=m(require("node:crypto")),d=m(require("node:fs")),r=m(require("node:path")),j=require("gettext-parser"),h=require("glob"),u=require("../const.js"),k=require("../utils/common.js");class x{source;destination;allowedFormats;purge;prettyPrint;debug;scriptName;paths;sourceDir;constructor(t){if(this.sourceDir=r.default.relative(t.paths.cwd,t.source??""),!d.existsSync(this.sourceDir))throw console.error("Source directory not found",t),new Error(`Source directory ${this.sourceDir} not found`);this.scriptName=t.scriptName,this.source=t.source,this.destination=t.destination,this.allowedFormats=t.allowedFormats??[".ts",".tsx",".js",".jsx",".mjs",".cjs"],this.purge=t.purge,this.prettyPrint=t.prettyPrint,this.debug=t.debug,this.paths=t.paths}async exec(){const t=await(0,h.glob)("**/*.po",{cwd:this.destination,nodir:!0});console.log("Found po files",t,"in",this.destination,"folder");const e={};for(const n of t)if(this.scriptName||(this.scriptName=await(0,h.glob)("*.js",{cwd:this.source,nodir:!0}),console.log(`Found script: ${this.scriptName} in ${this.source} folder`)),typeof this.scriptName=="string"){const s=this.addPot(n,this.scriptName);e[s.filename]=s.data}else if(Array.isArray(this.scriptName))for(const s of this.scriptName){const o=this.addPot(n,s);e[o.filename]=o.data}for(const[n,s]of Object.entries(e)){let o;if(this.purge)d.existsSync(r.default.join(this.destination,n))&&(console.log(`Removing ${r.default.join(this.destination,n)} as the purge option is enabled`),d.unlinkSync(r.default.join(this.destination,n))),o=JSON.stringify(s,null,this?.prettyPrint?2:0);else{const c=d.readFileSync(r.default.join(this.source,n),"utf8");o=JSON.stringify({...s,...JSON.parse(c)},null,this?.prettyPrint?2:0)}const a=r.default.join(this.destination,n);d.writeFileSync(a,o),console.log(`JSON file written to ${a}`)}return e}processFile(t,e,n="utf8"){const s=r.default.join(this.destination,t),o=d.readFileSync(s,n),a=this.extractIsoCode(s),c=this.parsePoFile(o);return this.convertToJed(c.headers,c.translations,e,a)}parsePoFile(t){return j.po.parse(t)}convertToJed(t,e,n,s){const o=(0,k.getPkgJsonData)(u.modulePath,"name","version"),a="messages",c=`${o.name}/${o.version}`,f={[a]:{"":{domain:a,lang:s||t.Language||"en",plural_forms:t["Plural-Forms"]||"nplurals=2; plural=(n != 1);"}}};for(const l of Object.keys(e)){const y=e[l];for(const p of Object.keys(y)){const P=y[p];if(p==="")continue;const b=l&&l!==""?`${l}${p}`:p;f[a][b]=P.msgstr}}return{"translation-revision-date":new Date().toISOString(),generator:c,source:r.default.join(this.sourceDir,n).replace(/\\/g,"/"),domain:a,locale_data:f}}extractIsoCode(t){const e=t.match(u.IsoCodeRegex);return e?e[1]:void 0}md5(t){return J.default.createHash("md5").update(t).digest("hex")}generateFilename(t,e){const n=this.md5(t);return 
e.replace(".po",`-${n}.json`)}addPot(t,e){return{filename:this.generateFilename(r.default.join(this.source,e).replace(/\\/g,"/"),t),data:this.processFile(t,e)}}}var $=x;0&&(module.exports={MakeJsonCommand});
package/lib/parser/makeJson.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/makeJson.ts"],
+ "sourcesContent": ["import crypto from \"node:crypto\";\nimport * as fs from \"node:fs\";\nimport path from \"node:path\";\nimport {\n\ttype GetTextTranslation,\n\ttype GetTextTranslations,\n\tpo,\n} from \"gettext-parser\";\nimport { glob } from \"glob\";\nimport { IsoCodeRegex, modulePath } from \"../const.js\";\nimport type { JedData, MakeJson, MakeJsonArgs } from \"../types.js\";\nimport { getPkgJsonData } from \"../utils/common.js\";\n\nexport class MakeJsonCommand {\n\t/**\n\t * The source file path.\n\t * Should be the \"build\" directory containing .js files\n\t * @private\n\t */\n\tprivate readonly source: string;\n\t/**\n\t * The destination file path.\n\t * Should be the \"languages\" directory containing .po files\n\t * @private\n\t */\n\tprivate readonly destination: string;\n\t/**\n\t * The allowed file extensions.\n\t * @private\n\t */\n\tprivate readonly allowedFormats: string[];\n\t/**\n\t * Remove old POT files.\n\t * @private\n\t */\n\tprivate readonly purge: boolean;\n\t/**\n\t * Pretty print JSON.\n\t * @private\n\t */\n\tprivate readonly prettyPrint: boolean;\n\t/**\n\t * Enable debug mode.\n\t * @private\n\t */\n\tprivate debug: boolean;\n\t/**\n\t * The script to be translated.\n\t * @private\n\t */\n\tprivate scriptName: string | string[] | undefined;\n\t/**\n\t * The paths to be translated.\n\t * @private\n\t */\n\tprivate paths: object | undefined;\n\t/**\n\t * The source directory.\n\t * @private\n\t */\n\tprivate readonly sourceDir: string;\n\n\t/**\n\t * The constructor.\n\t * @param args - The arguments to the command.\n\t */\n\tpublic constructor(args: MakeJsonArgs) {\n\t\tthis.sourceDir = path.relative(args.paths.cwd, args.source ?? \"\");\n\t\tif (!fs.existsSync(this.sourceDir)) {\n\t\t\tconsole.error(\"Source directory not found\", args);\n\t\t\tthrow new Error(`Source directory ${this.sourceDir} not found`);\n\t\t}\n\n\t\tthis.scriptName = args.scriptName;\n\t\tthis.source = args.source;\n\t\tthis.destination = args.destination;\n\t\tthis.allowedFormats = args.allowedFormats ?? [\n\t\t\t\".ts\",\n\t\t\t\".tsx\",\n\t\t\t\".js\",\n\t\t\t\".jsx\",\n\t\t\t\".mjs\",\n\t\t\t\".cjs\",\n\t\t];\n\t\tthis.purge = args.purge;\n\t\tthis.prettyPrint = args.prettyPrint;\n\t\tthis.debug = args.debug;\n\t\tthis.paths = args.paths;\n\t}\n\n\t/**\n\t * The main function. 
Parses the PO files and generates the JSON files.\n\t */\n\tpublic async exec(): Promise<Record<string, MakeJson>> {\n\t\t// get all the files in the source directory\n\t\tconst files = await glob(\"**/*.po\", { cwd: this.destination, nodir: true });\n\n\t\tconsole.log(\"Found po files\", files, \"in\", this.destination, \"folder\");\n\n\t\t// get all the po files\n\t\tconst output: Record<string, MakeJson> = {};\n\t\tfor (const file of files) {\n\t\t\tif (!this.scriptName) {\n\t\t\t\tthis.scriptName = await glob(\"*.js\", {\n\t\t\t\t\tcwd: this.source,\n\t\t\t\t\tnodir: true,\n\t\t\t\t});\n\t\t\t\tconsole.log(\n\t\t\t\t\t`Found script: ${this.scriptName} in ${this.source} folder`,\n\t\t\t\t);\n\t\t\t}\n\n\t\t\t// TODO: tree the script to get the translations used in there, then use reduce to filter the translations\n\n\t\t\tif (typeof this.scriptName === \"string\") {\n\t\t\t\tconst pot = this.addPot(file, this.scriptName);\n\t\t\t\toutput[pot.filename] = pot.data;\n\t\t\t} else if (Array.isArray(this.scriptName)) {\n\t\t\t\tfor (const script of this.scriptName) {\n\t\t\t\t\tconst pot = this.addPot(file, script);\n\t\t\t\t\toutput[pot.filename] = pot.data;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t// write the json files\n\t\tfor (const [filename, content] of Object.entries(output)) {\n\t\t\tlet contentString: string;\n\t\t\tif (this.purge) {\n\t\t\t\tif (fs.existsSync(path.join(this.destination, filename))) {\n\t\t\t\t\tconsole.log(\n\t\t\t\t\t\t`Removing ${path.join(this.destination, filename)} as the purge option is enabled`,\n\t\t\t\t\t);\n\t\t\t\t\tfs.unlinkSync(path.join(this.destination, filename));\n\t\t\t\t}\n\t\t\t\tcontentString = JSON.stringify(\n\t\t\t\t\tcontent,\n\t\t\t\t\tnull,\n\t\t\t\t\tthis?.prettyPrint ? 2 : 0,\n\t\t\t\t);\n\t\t\t} else {\n\t\t\t\tconst oldJedContent = fs.readFileSync(\n\t\t\t\t\tpath.join(this.source, filename),\n\t\t\t\t\t\"utf8\",\n\t\t\t\t);\n\n\t\t\t\tcontentString = JSON.stringify(\n\t\t\t\t\t{ ...content, ...JSON.parse(oldJedContent) },\n\t\t\t\t\tnull,\n\t\t\t\t\tthis?.prettyPrint ? 
2 : 0,\n\t\t\t\t);\n\t\t\t}\n\n\t\t\tconst destinationPath = path.join(this.destination, filename);\n\t\t\tfs.writeFileSync(destinationPath, contentString);\n\t\t\tconsole.log(`JSON file written to ${destinationPath}`);\n\t\t}\n\n\t\t// return the output\n\t\treturn output;\n\t}\n\n\t/**\n\t * Process a PO file and return the JSON data.\n\t * @param file - The path to the PO file.\n\t * @param script - The script to be translated.\n\t * @param encoding - The encoding of the PO file.\n\t */\n\tpublic processFile(\n\t\tfile: string,\n\t\tscript: string,\n\t\tencoding: BufferEncoding = \"utf8\",\n\t): MakeJson {\n\t\t// Get the file path\n\t\tconst filePath = path.join(this.destination, file);\n\n\t\t// Read the source file\n\t\tconst content = fs.readFileSync(filePath, encoding) as string;\n\n\t\t// Extract the ISO code\n\t\tconst languageIsoCode = this.extractIsoCode(filePath);\n\n\t\t// Parse the source file\n\t\tconst poContent = this.parsePoFile(content);\n\n\t\t// Convert to Jed json dataset\n\t\treturn this.convertToJed(\n\t\t\tpoContent.headers,\n\t\t\tpoContent.translations,\n\t\t\tscript,\n\t\t\tlanguageIsoCode,\n\t\t);\n\t}\n\n\t/**\n\t * Takes a PO file and returns the header and translations.\n\t * @param content - The content of the PO file.\n\t * @private\n\t *\n\t * @returns An object containing the header and translations.\n\t */\n\tprivate parsePoFile(content: string): GetTextTranslations {\n\t\treturn po.parse(content);\n\t}\n\n\t/**\n\t * Converts PO data to Jed data.\n\t * @param header - The header of the PO file.\n\t * @param translations - The translations of the PO file.\n\t * @param source - The source of the PO file.\n\t * @param languageIsoCode - The ISO code of the language.\n\t * @private\n\t *\n\t * @return An object containing the Jed data.\n\t */\n\tprivate convertToJed(\n\t\theader: Record<string, string>,\n\t\ttranslations: {\n\t\t\t[msgctxt: string]: { [msgId: string]: GetTextTranslation };\n\t\t},\n\t\tsource: string,\n\t\tlanguageIsoCode?: string,\n\t): MakeJson {\n\t\tconst packageJson = getPkgJsonData(modulePath, \"name\", \"version\") as {\n\t\t\tname: string;\n\t\t\tversion: string;\n\t\t};\n\n\t\t// Domain name to use for the Jed format\n\t\tconst domain = \"messages\";\n\n\t\tconst generator = `${packageJson.name}/${packageJson.version}`;\n\n\t\t// Initialize the Jed-compatible structure\n\t\tconst jedData: JedData = {\n\t\t\t[domain]: {\n\t\t\t\t\"\": {\n\t\t\t\t\tdomain: domain,\n\t\t\t\t\tlang: languageIsoCode || header.Language || \"en\",\n\t\t\t\t\tplural_forms:\n\t\t\t\t\t\theader[\"Plural-Forms\"] || \"nplurals=2; plural=(n != 1);\",\n\t\t\t\t},\n\t\t\t},\n\t\t};\n\n\t\t// Process all translations\n\t\tfor (const msgctxt of Object.keys(translations)) {\n\t\t\tconst contextTranslations = translations[msgctxt];\n\n\t\t\tfor (const msgid of Object.keys(contextTranslations)) {\n\t\t\t\tconst translation = contextTranslations[msgid];\n\n\t\t\t\t// Skip empty msgid (header) as we've already handled it\n\t\t\t\tif (msgid === \"\") continue;\n\n\t\t\t\t// Construct the key using context if available\n\t\t\t\tconst key =\n\t\t\t\t\tmsgctxt && msgctxt !== \"\" ? 
`${msgctxt}\\u0004${msgid}` : msgid;\n\n\t\t\t\t// Add the translation to the Jed data structure\n\t\t\t\tjedData[domain][key] = translation.msgstr;\n\t\t\t}\n\t\t}\n\n\t\tconst makeJson: {\n\t\t\tdomain: string;\n\t\t\tgenerator: string;\n\t\t\t\"translation-revision-date\": string;\n\t\t\tsource: string;\n\t\t\tlocale_data: JedData;\n\t\t} = {\n\t\t\t\"translation-revision-date\": new Date().toISOString(),\n\t\t\tgenerator: generator,\n\t\t\tsource: path.join(this.sourceDir, source).replace(/\\\\/g, \"/\"),\n\t\t\tdomain,\n\t\t\tlocale_data: jedData,\n\t\t};\n\n\t\treturn makeJson as MakeJson;\n\t}\n\n\t/**\n\t * Gets the ISO code from the filename.\n\t * @param filename The filename to extract the ISO code from.\n\t * @private\n\t *\n\t * @returns The ISO code if found, otherwise null.\n\t */\n\tprivate extractIsoCode(filename: string): string | undefined {\n\t\tconst match = filename.match(IsoCodeRegex);\n\t\treturn match ? match[1] : undefined;\n\t}\n\n\tprivate md5(text: string): string {\n\t\treturn crypto.createHash(\"md5\").update(text).digest(\"hex\");\n\t}\n\n\tprivate generateFilename(script: string, file: string): string {\n\t\tconst scriptName = this.md5(script);\n\t\t//build the filename for the json file using the po files\n\t\treturn file.replace(\".po\", `-${scriptName}.json`);\n\t}\n\n\t/**\n\t * Adds a script to the output object.\n\t * @private\n\t *\n\t * @param potFile - The pot file to parse.\n\t * @param script - The script to add.\n\t * @return {Record<string, JedData>} - The output object.\n\t * */\n\tprivate addPot(\n\t\tpotFile: string,\n\t\tscript: string,\n\t): { filename: string; data: MakeJson } {\n\t\tconst filename = this.generateFilename(\n\t\t\tpath.join(this.source, script).replace(/\\\\/g, \"/\"),\n\t\t\tpotFile,\n\t\t);\n\t\t// build the output object\n\t\treturn {\n\t\t\tfilename,\n\t\t\tdata: this.processFile(potFile, script),\n\t\t};\n\t}\n}\n\nexport default MakeJsonCommand;\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAmB;AACnB,SAAoB;AACpB,uBAAiB;AACjB,4BAIO;AACP,kBAAqB;AACrB,mBAAyC;AAEzC,oBAA+B;AAExB,MAAM,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMX;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKT;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKS;AAAA;AAAA;AAAA;AAAA;AAAA,EAMV,YAAY,MAAoB;AACtC,SAAK,YAAY,iBAAAA,QAAK,SAAS,KAAK,MAAM,KAAK,KAAK,UAAU,EAAE;AAChE,QAAI,CAAC,GAAG,WAAW,KAAK,SAAS,GAAG;AACnC,cAAQ,MAAM,8BAA8B,IAAI;AAChD,YAAM,IAAI,MAAM,oBAAoB,KAAK,SAAS,YAAY;AAAA,IAC/D;AAEA,SAAK,aAAa,KAAK;AACvB,SAAK,SAAS,KAAK;AACnB,SAAK,cAAc,KAAK;AACxB,SAAK,iBAAiB,KAAK,kBAAkB;AAAA,MAC5C;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACD;AACA,SAAK,QAAQ,KAAK;AAClB,SAAK,cAAc,KAAK;AACxB,SAAK,QAAQ,KAAK;AAClB,SAAK,QAAQ,KAAK;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA,EAKA,MAAa,OAA0C;AAEtD,UAAM,QAAQ,UAAM,kBAAK,WAAW,EAAE,KAAK,KAAK,aAAa,OAAO,KAAK,CAAC;AAE1E,YAAQ,IAAI,kBAAkB,OAAO,MAAM,KAAK,aAAa,QAAQ;AAGrE,UAAM,SAAmC,CAAC;AAC1C,eAAW,QAAQ,OAAO;AACzB,UAAI,CAAC,KAAK,YAAY;AACrB,aAAK,aAAa,UAAM,kBAAK,QAAQ;AAAA,UACpC,KAAK,KAAK;AAAA,UACV,OAAO;AAAA,QACR,CAAC;AACD,gBAAQ;AAAA,UACP,iBAAiB,KAAK,UAAU,OAAO,KAAK,MAAM;AAAA,QACnD;AAAA,MACD;AAIA,UAAI,OAAO,KAAK,eAAe,UAAU;AACxC,cAAM,MAAM,KAAK,OAAO,MAAM,KAAK,UAAU;AAC7C,eAAO,IAAI,QAAQ,IAAI,IAAI;AAAA,MAC5B,WAAW,MAAM,QAAQ,KAAK,UAAU,GAAG;AAC1C,mBAAW,UAAU,KAAK,YAAY;AACrC,gBAAM,MAAM,KAAK,OAAO,MAAM,MAAM;AACpC,iBAAO,IAAI,QAAQ,IAAI,IAAI;AAAA,QAC5B;AAAA,MACD;AAAA,IACD;AAGA,eAAW,CAAC,UAAU,OAAO,KAAK,OAAO,QAAQ,MAAM,GAAG;AACzD,UAAI;AACJ,UAAI,KAAK,OAAO;AACf,YAAI,GAAG,WAAW,iBAAAA,QAAK,KAAK,KAAK,aAAa,QAAQ,CAAC,GAAG;AACzD,kBAAQ;AAAA,YACP,YAAY,iBAAAA,QAAK,KAAK,KAAK,aAAa,QAAQ,CAAC;AAAA,UAClD;AACA,aAAG,WAAW,iBAAAA,QAAK,KAAK,KAAK,aAAa,QAAQ,CAAC;AAAA,QACpD;AACA,wBAAgB,KAAK;AAAA,UACpB;AAAA,UACA;AAAA,UACA,MAAM,cAAc,IAAI;AAAA,QACzB;AAAA,MACD,OAAO;AACN,cAAM,gBAAgB,GAAG;AAAA,UACxB,iBAAAA,QAAK,KAAK,KAAK,QAAQ,QAAQ;AAAA,UAC/B;AAAA,QACD;AAEA,wBAAgB,KAAK;AAAA,UACpB,EAAE,GAAG,SAAS,GAAG,KAAK,MAAM,aAAa,EAAE;AAAA,UAC3C;AAAA,UACA,MAAM,cAAc,IAAI;AAAA,QACzB;AAAA,MACD;AAEA,YAAM,kBAAkB,iBAAAA,QAAK,KAAK,KAAK,aAAa,QAAQ;AAC5D,SAAG,cAAc,iBAAiB,aAAa;AAC/C,cAAQ,IAAI,wBAAwB,eAAe,EAAE;AAAA,IACtD;AAGA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQO,YACN,MACA,QACA,WAA2B,QAChB;AAEX,UAAM,WAAW,iBAAAA,QAAK,KAAK,KAAK,aAAa,IAAI;AAGjD,UAAM,UAAU,GAAG,aAAa,UAAU,QAAQ;AAGlD,UAAM,kBAAkB,KAAK,eAAe,QAAQ;AAGpD,UAAM,YAAY,KAAK,YAAY,OAAO;AAG1C,WAAO,KAAK;AAAA,MACX,UAAU;AAAA,MACV,UAAU;AAAA,MACV;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,YAAY,SAAsC;AACzD,WAAO,yBAAG,MAAM,OAAO;AAAA,EACxB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAYQ,aACP,QACA,cAGA,QACA,iBACW;AACX,UAAM,kBAAc,8BAAe,yBAAY,QAAQ,SAAS;AAMhE,UAAM,SAAS;AAEf,UAAM,YAAY,GAAG,YAAY,IAAI,IAAI,YAAY,OAAO;AAG5D,UAAM,UAAmB;AAAA,MACxB,CAAC,MAAM,GAAG;AAAA,QACT,IAAI;AAAA,UACH;AAAA,UACA,MAAM,mBAAmB,OAAO,YAAY;AAAA,UAC5C,cACC,OAAO,cAAc,KAAK;AAAA,QAC5B;AAAA,MACD;AAAA,IACD;AAGA,eAAW,WAAW,OAAO,KAAK,YAAY,GAAG;AAChD,YAAM,sBAAsB,aAAa,OAAO;AAEhD,iBAAW,SAAS,OAAO,KAAK,mBAAmB,GAAG;AACrD,cAAM,cAAc,oBAAoB,KAAK;AAG7C,YAAI,UAAU,GAAI;AAGlB,cAAM,MACL,WAAW,YAAY,KAAK,GAAG,OAAO,IAAS,KAAK,KAAK;AAG1D,gBAAQ,MAAM,EAAE,GAAG,IAAI,YAAY;AAAA,MACpC;AAAA,IACD;AAEA,UAAM,WAMF;AAAA,MACH,8BAA6B,oBAAI,KAAK,GAAE,YAAY;AAAA,MACpD;AAAA,MACA,QAAQ,iBAAAA,QAAK,KAAK,KAAK,WAAW,MAAM,EAAE,QAAQ,OAAO,GAAG;AAAA,MAC5D;AAAA,MACA,aAAa;AAAA,IACd;AAEA,WAAO;AAAA,EACR;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASQ,eAAe,U
AAsC;AAC5D,UAAM,QAAQ,SAAS,MAAM,yBAAY;AACzC,WAAO,QAAQ,MAAM,CAAC,IAAI;AAAA,EAC3B;AAAA,EAEQ,IAAI,MAAsB;AACjC,WAAO,mBAAAC,QAAO,WAAW,KAAK,EAAE,OAAO,IAAI,EAAE,OAAO,KAAK;AAAA,EAC1D;AAAA,EAEQ,iBAAiB,QAAgB,MAAsB;AAC9D,UAAM,aAAa,KAAK,IAAI,MAAM;AAElC,WAAO,KAAK,QAAQ,OAAO,IAAI,UAAU,OAAO;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUQ,OACP,SACA,QACuC;AACvC,UAAM,WAAW,KAAK;AAAA,MACrB,iBAAAD,QAAK,KAAK,KAAK,QAAQ,MAAM,EAAE,QAAQ,OAAO,GAAG;AAAA,MACjD;AAAA,IACD;AAEA,WAAO;AAAA,MACN;AAAA,MACA,MAAM,KAAK,YAAY,SAAS,MAAM;AAAA,IACvC;AAAA,EACD;AACD;AAEA,IAAO,mBAAQ;",
+ "names": ["path", "crypto"]
+ }
package/lib/parser/makePot.js CHANGED
@@ -1 +1 @@
- "use strict";var a=Object.defineProperty;var s=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var h=Object.prototype.hasOwnProperty;var d=(e,t)=>{for(var o in t)a(e,o,{get:t[o],enumerable:!0})},x=(e,t,o,r)=>{if(t&&typeof t=="object"||typeof t=="function")for(let m of f(t))!h.call(e,m)&&m!==o&&a(e,m,{get:()=>t[m],enumerable:!(r=s(t,m))||r.enumerable});return e};var k=e=>x(a({},"__esModule",{value:!0}),e);var y={};d(y,{makePot:()=>u});module.exports=k(y);var c=require("../extractors/headers.js"),i=require("../extractors/json.js"),n=require("../fs/fs.js"),p=require("./exec.js");async function u(e){const t=(0,i.extractPackageJson)(e),o=(0,c.extractMainFileData)(e);e.headers={...e.headers,...t,...o},(0,p.exec)(e).then(r=>((0,n.writeFile)(r,e),r)).catch(r=>(console.error(r),""))}0&&(module.exports={makePot});
+ "use strict";var m=Object.defineProperty;var s=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var h=Object.prototype.hasOwnProperty;var d=(t,e)=>{for(var o in e)m(t,o,{get:e[o],enumerable:!0})},u=(t,e,o,r)=>{if(e&&typeof e=="object"||typeof e=="function")for(let a of f(e))!h.call(t,a)&&a!==o&&m(t,a,{get:()=>e[a],enumerable:!(r=s(e,a))||r.enumerable});return t};var x=t=>u(m({},"__esModule",{value:!0}),t);var y={};d(y,{makePot:()=>k});module.exports=x(y);var i=require("../extractors/headers.js"),c=require("../extractors/packageJson.js"),n=require("../fs/fs.js"),p=require("./exec.js");async function k(t){const e=(0,c.extractPackageJson)(t),o=(0,i.extractMainFileData)(t);return t.headers={...t.headers,...e,...o},await(0,p.exec)(t).then(r=>((0,n.writeFile)(r,t),r)).catch(r=>(console.error(r),""))}0&&(module.exports={makePot});
package/lib/parser/makePot.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/makePot.ts"],
+ "sourcesContent": ["import { extractMainFileData } from \"../extractors/headers.js\";\nimport { extractPackageJson } from \"../extractors/packageJson.js\";\nimport { writeFile } from \"../fs/fs.js\";\nimport type { Args } from \"../types.js\";\nimport { exec } from \"./exec.js\";\n\n/**\n * Generates a pot file for localization.\n *\n * @param args - the command line arguments\n * @return {string} - a promise that resolves when the pot file is generated\n */\nexport async function makePot(args: Args): Promise<string> {\n\t/** Collect metadata from the get package json */\n\tconst pkgData = extractPackageJson(args);\n\n\t/** Get metadata from the main file (theme and plugin) */\n\tconst metadata = extractMainFileData(args);\n\n\t/** Merge the metadata to get a single object with all the headers */\n\targs.headers = {\n\t\t...args.headers,\n\t\t...pkgData,\n\t\t...metadata,\n\t} as Args[\"headers\"];\n\n\t/** Generate the pot file */\n\treturn await exec(args)\n\t\t.then((jsonTranslations) => {\n\t\t\twriteFile(jsonTranslations, args);\n\n\t\t\treturn jsonTranslations;\n\t\t})\n\t\t.catch((error) => {\n\t\t\tconsole.error(error);\n\n\t\t\treturn \"\";\n\t\t});\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAoC;AACpC,yBAAmC;AACnC,gBAA0B;AAE1B,kBAAqB;AAQrB,eAAsB,QAAQ,MAA6B;AAE1D,QAAM,cAAU,uCAAmB,IAAI;AAGvC,QAAM,eAAW,oCAAoB,IAAI;AAGzC,OAAK,UAAU;AAAA,IACd,GAAG,KAAK;AAAA,IACR,GAAG;AAAA,IACH,GAAG;AAAA,EACJ;AAGA,SAAO,UAAM,kBAAK,IAAI,EACpB,KAAK,CAAC,qBAAqB;AAC3B,6BAAU,kBAAkB,IAAI;AAEhC,WAAO;AAAA,EACR,CAAC,EACA,MAAM,CAAC,UAAU;AACjB,YAAQ,MAAM,KAAK;AAEnB,WAAO;AAAA,EACR,CAAC;AACH;",
+ "names": []
+ }
package/lib/parser/patterns.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/patterns.ts"],
+ "sourcesContent": ["import type { Args, Patterns } from \"../types.js\";\n\n/**\n * Returns the patterns based on the given arguments.\n *\n * @param args - The arguments for the extract process.\n */\nexport function getPatterns(args: Args) {\n\tconst pattern = {\n\t\tinclude: args.patterns.include || [],\n\t\texclude: args.patterns.exclude || [],\n\t\tmergePaths: args.patterns.mergePaths,\n\t\tsubtractPaths: args.patterns.subtractPaths,\n\t\tsubtractAndMerge: args.patterns.subtractAndMerge,\n\t} as Patterns;\n\n\t// Additional logic to handle different file types and formats\n\tif (args.options) {\n\t\t// js typescript mjs cjs etc\n\t\tif (args.options.skip.blade) {\n\t\t\tpattern.exclude.push(\"**/blade.php\");\n\t\t} else if (args.options.skip.php) {\n\t\t\tpattern.exclude.push(\"**/*.php\", \"**/*.blade.php\");\n\t\t}\n\n\t\t// js typescript mjs cjs etc\n\t\tif (args.options.skip.js) {\n\t\t\tpattern.exclude.push(\"**/*.{js,jsx,ts,tsx,mjs,cjs}\");\n\t\t}\n\n\t\tif (args.options.skip.blockJson) {\n\t\t\tpattern.exclude.push(\"block.json\");\n\t\t}\n\n\t\tif (args.options.skip.themeJson) {\n\t\t\tpattern.exclude.push(\"theme.json\");\n\t\t}\n\t}\n\n\treturn pattern;\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAOO,SAAS,YAAY,MAAY;AACvC,QAAM,UAAU;AAAA,IACf,SAAS,KAAK,SAAS,WAAW,CAAC;AAAA,IACnC,SAAS,KAAK,SAAS,WAAW,CAAC;AAAA,IACnC,YAAY,KAAK,SAAS;AAAA,IAC1B,eAAe,KAAK,SAAS;AAAA,IAC7B,kBAAkB,KAAK,SAAS;AAAA,EACjC;AAGA,MAAI,KAAK,SAAS;AAEjB,QAAI,KAAK,QAAQ,KAAK,OAAO;AAC5B,cAAQ,QAAQ,KAAK,cAAc;AAAA,IACpC,WAAW,KAAK,QAAQ,KAAK,KAAK;AACjC,cAAQ,QAAQ,KAAK,YAAY,gBAAgB;AAAA,IAClD;AAGA,QAAI,KAAK,QAAQ,KAAK,IAAI;AACzB,cAAQ,QAAQ,KAAK,8BAA8B;AAAA,IACpD;AAEA,QAAI,KAAK,QAAQ,KAAK,WAAW;AAChC,cAAQ,QAAQ,KAAK,YAAY;AAAA,IAClC;AAEA,QAAI,KAAK,QAAQ,KAAK,WAAW;AAChC,cAAQ,QAAQ,KAAK,YAAY;AAAA,IAClC;AAAA,EACD;AAEA,SAAO;AACR;",
+ "names": []
+ }
package/lib/parser/process.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/process.ts"],
+ "sourcesContent": ["import path from \"node:path\";\nimport type { SingleBar } from \"cli-progress\";\nimport type { SetOfBlocks } from \"gettext-merger\";\nimport { allowedFormats } from \"../const.js\";\nimport { parseJsonCallback } from \"../extractors/json.js\";\nimport { readFileAsync } from \"../fs/fs.js\";\nimport { getFiles } from \"../fs/glob.js\";\nimport type { Args, Patterns } from \"../types.js\";\nimport { doTree } from \"./tree.js\";\n\n/**\n * Processes the given files and returns an array of promises that resolve to TranslationStrings.\n *\n * @param patterns\n * @param {Args} args - The arguments for processing the files.\n * @param progressBar - The progress bar element.\n * @return {Promise<SetOfBlocks[]>} - An array of promises that resolve to TranslationStrings.\n */\nexport async function processFiles(\n\tpatterns: Patterns,\n\targs: Args,\n\tprogressBar?: SingleBar,\n): Promise<Promise<SetOfBlocks>[]> {\n\tconst tasks: Promise<SetOfBlocks>[] = [];\n\tlet filesCount = 0;\n\n\tconst files = getFiles(args, patterns);\n\n\t// loop through the files and parse them\n\tfor await (const file of files) {\n\t\tfilesCount++;\n\t\tconst filename = path.basename(file);\n\t\tconst ext = path.extname(file).replace(/^./, \"\");\n\t\tconst fileRealPath = path.resolve(args.paths.cwd, file);\n\n\t\tif (filename === \"theme.json\" || filename === \"block.json\") {\n\t\t\ttasks.push(\n\t\t\t\treadFileAsync(fileRealPath).then((sourceCode) =>\n\t\t\t\t\tparseJsonCallback(sourceCode, args.paths.cwd, filename),\n\t\t\t\t),\n\t\t\t);\n\t\t} else if (allowedFormats.includes(ext)) {\n\t\t\ttasks.push(\n\t\t\t\treadFileAsync(fileRealPath).then((content) => doTree(content, file)),\n\t\t\t);\n\t\t}\n\n\t\tif (progressBar) {\n\t\t\tprogressBar.update(filesCount, { filename: filename });\n\t\t\tprogressBar.setTotal(Object.values(files).length);\n\t\t\tprogressBar.render();\n\t\t}\n\t}\n\n\treturn tasks;\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uBAAiB;AAGjB,mBAA+B;AAC/B,kBAAkC;AAClC,gBAA8B;AAC9B,kBAAyB;AAEzB,kBAAuB;AAUvB,eAAsB,aACrB,UACA,MACA,aACkC;AAClC,QAAM,QAAgC,CAAC;AACvC,MAAI,aAAa;AAEjB,QAAM,YAAQ,sBAAS,MAAM,QAAQ;AAGrC,mBAAiB,QAAQ,OAAO;AAC/B;AACA,UAAM,WAAW,iBAAAA,QAAK,SAAS,IAAI;AACnC,UAAM,MAAM,iBAAAA,QAAK,QAAQ,IAAI,EAAE,QAAQ,MAAM,EAAE;AAC/C,UAAM,eAAe,iBAAAA,QAAK,QAAQ,KAAK,MAAM,KAAK,IAAI;AAEtD,QAAI,aAAa,gBAAgB,aAAa,cAAc;AAC3D,YAAM;AAAA,YACL,yBAAc,YAAY,EAAE;AAAA,UAAK,CAAC,mBACjC,+BAAkB,YAAY,KAAK,MAAM,KAAK,QAAQ;AAAA,QACvD;AAAA,MACD;AAAA,IACD,WAAW,4BAAe,SAAS,GAAG,GAAG;AACxC,YAAM;AAAA,YACL,yBAAc,YAAY,EAAE,KAAK,CAAC,gBAAY,oBAAO,SAAS,IAAI,CAAC;AAAA,MACpE;AAAA,IACD;AAEA,QAAI,aAAa;AAChB,kBAAY,OAAO,YAAY,EAAE,SAAmB,CAAC;AACrD,kBAAY,SAAS,OAAO,OAAO,KAAK,EAAE,MAAM;AAChD,kBAAY,OAAO;AAAA,IACpB;AAAA,EACD;AAEA,SAAO;AACR;",
+ "names": ["path"]
+ }
package/lib/parser/progress.js CHANGED
@@ -1 +1 @@
- "use strict";var l=Object.create;var n=Object.defineProperty;var u=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var p=Object.getPrototypeOf,g=Object.prototype.hasOwnProperty;var m=(e,r)=>{for(var t in r)n(e,t,{get:r[t],enumerable:!0})},o=(e,r,t,a)=>{if(r&&typeof r=="object"||typeof r=="function")for(let s of f(r))!g.call(e,s)&&s!==t&&n(e,s,{get:()=>r[s],enumerable:!(a=u(r,s))||a.enumerable});return e};var c=(e,r,t)=>(t=e!=null?l(p(e)):{},o(r||!e||!e.__esModule?n(t,"default",{value:e,enumerable:!0}):t,e)),d=e=>o(n({},"__esModule",{value:!0}),e);var y={};m(y,{initProgress:()=>B});module.exports=d(y);var i=c(require("cli-progress"));function B(e,r){if(e.options?.silent)return;const t=new i.default.SingleBar({clearOnComplete:!0,etaBuffer:1e3,hideCursor:!0,format:" {bar} {percentage}% | ETA: {eta}s | {filename} | {value}/{total}"},i.default.Presets.shades_classic);return t.start(r,0),t}0&&(module.exports={initProgress});
+ "use strict";var l=Object.create;var a=Object.defineProperty;var g=Object.getOwnPropertyDescriptor;var u=Object.getOwnPropertyNames;var f=Object.getPrototypeOf,m=Object.prototype.hasOwnProperty;var p=(e,r)=>{for(var t in r)a(e,t,{get:r[t],enumerable:!0})},o=(e,r,t,n)=>{if(r&&typeof r=="object"||typeof r=="function")for(let s of u(r))!m.call(e,s)&&s!==t&&a(e,s,{get:()=>r[s],enumerable:!(n=g(r,s))||n.enumerable});return e};var c=(e,r,t)=>(t=e!=null?l(f(e)):{},o(r||!e||!e.__esModule?a(t,"default",{value:e,enumerable:!0}):t,e)),B=e=>o(a({},"__esModule",{value:!0}),e);var A={};p(A,{initProgress:()=>y});module.exports=B(A);var i=c(require("cli-progress"));function y(e,r){return new i.default.SingleBar({clearOnComplete:!0,etaBuffer:1e3,hideCursor:!0,format:" {bar} {percentage}% | ETA: {eta}s | {filename} | {value}/{total}"},i.default.Presets.shades_classic)}0&&(module.exports={initProgress});
package/lib/parser/progress.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/progress.ts"],
+ "sourcesContent": ["import cliProgress, { type SingleBar } from \"cli-progress\";\nimport type { Args } from \"../types.js\";\n\n/**\n * Initializes a progress bar and returns the progress bar element.\n *\n * @param {Args} args - The argument object containing the source directory and other options.\n * @param {number} filesCount - An array of file names.\n * @return {cliProgress.SingleBar} The progress bar element.\n */\nexport function initProgress(args: Args, filesCount: number): SingleBar {\n\t// Set up the progress bar\n\treturn new cliProgress.SingleBar(\n\t\t{\n\t\t\tclearOnComplete: true,\n\t\t\tetaBuffer: 1000,\n\t\t\thideCursor: true,\n\t\t\tformat:\n\t\t\t\t\" {bar} {percentage}% | ETA: {eta}s | {filename} | {value}/{total}\",\n\t\t},\n\t\tcliProgress.Presets.shades_classic,\n\t);\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAA4C;AAUrC,SAAS,aAAa,MAAY,YAA+B;AAEvE,SAAO,IAAI,oBAAAA,QAAY;AAAA,IACtB;AAAA,MACC,iBAAiB;AAAA,MACjB,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,QACC;AAAA,IACF;AAAA,IACA,oBAAAA,QAAY,QAAQ;AAAA,EACrB;AACD;",
+ "names": ["cliProgress"]
+ }
package/lib/parser/taskRunner.js CHANGED
@@ -1 +1,2 @@
- "use strict";var n=Object.defineProperty;var c=Object.getOwnPropertyDescriptor;var a=Object.getOwnPropertyNames;var p=Object.prototype.hasOwnProperty;var f=(t,o)=>{for(var s in o)n(t,s,{get:o[s],enumerable:!0})},g=(t,o,s,r)=>{if(o&&typeof o=="object"||typeof o=="function")for(let e of a(o))!p.call(t,e)&&e!==s&&n(t,e,{get:()=>o[e],enumerable:!(r=c(o,e))||r.enumerable});return t};var m=t=>g(n({},"__esModule",{value:!0}),t);var h={};f(h,{taskRunner:()=>u});module.exports=m(h);async function u(t,o,s,r){return await Promise.allSettled(t).then(e=>e.map(l=>l.status==="fulfilled"&&l.value).filter(Boolean)).then(e=>{if(r?.stop(),s.options?.silent!==!0)for(const l of e)l.blocks.length>0?(o.addArray(l.blocks),console.log(`\u2705 ${l.path} [${l.blocks.map(i=>i.msgid).join(", ")}]`)):console.log("\u274C ",`${l.path} has no strings`)}).catch(e=>{console.log("\u274C Failed!",e),process.exit(1)}),s.options?.silent||(console.log("\u{1F389} Done!"),console.log(`\u{1F4DD} Found ${Object.values(o.blocks).length} translation strings in ${s.paths.cwd}`)),o}0&&(module.exports={taskRunner});
+ "use strict";var m=Object.create;var a=Object.defineProperty;var g=Object.getOwnPropertyDescriptor;var u=Object.getOwnPropertyNames;var h=Object.getPrototypeOf,k=Object.prototype.hasOwnProperty;var B=(o,e)=>{for(var s in e)a(o,s,{get:e[s],enumerable:!0})},i=(o,e,s,n)=>{if(e&&typeof e=="object"||typeof e=="function")for(let t of u(e))!k.call(o,t)&&t!==s&&a(o,t,{get:()=>e[t],enumerable:!(n=g(e,t))||n.enumerable});return o};var p=(o,e,s)=>(s=o!=null?m(h(o)):{},i(e||!o||!o.__esModule?a(s,"default",{value:o,enumerable:!0}):s,o)),S=o=>i(a({},"__esModule",{value:!0}),o);var d={};B(d,{taskRunner:()=>O});module.exports=S(d);var y=p(require("node:os")),c=p(require("node:path"));async function O(o,e,s,n){const t=[];return await Promise.allSettled(o).then(l=>l.map(r=>r.status==="fulfilled"&&r.value).filter(Boolean)).then(l=>{if(s.options?.silent!==!0)for(const r of l)r.blocks.length>0?(e.addArray(r.blocks),t.push(`\u2705 ${r.path} [${r.blocks.map(f=>f.msgid).join(", ")}]`)):t.push(`\u274C ${r.path} has no strings`)}).catch(l=>new Error(l)),n.stop(),console.log(`
+ \u{1F389} Done!`),console.log(`\u{1F4DD} Found ${Object.values(e.blocks).length} translation strings in ${c.default.resolve(s.paths.cwd)}.`),console.log(t.join(y.EOL)),e}0&&(module.exports={taskRunner});
package/lib/parser/taskRunner.js.map ADDED
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../src/parser/taskRunner.ts"],
+ "sourcesContent": ["import * as os from \"node:os\";\nimport path from \"node:path\";\nimport type { SingleBar } from \"cli-progress\";\nimport type { SetOfBlocks } from \"gettext-merger\";\nimport type { Args } from \"../types.js\";\n\n/**\n * Task runner for the extraction process.\n *\n * @param tasks - The tasks to run\n * @param destination - The destination\n * @param args - The command line arguments\n * @param progressBar\n */\nexport async function taskRunner(\n\ttasks: Promise<SetOfBlocks>[],\n\tdestination: SetOfBlocks,\n\targs: Args,\n\tprogressBar: SingleBar,\n) {\n\tconst messages = [];\n\tawait Promise.allSettled(tasks)\n\t\t.then((strings) => {\n\t\t\t/**\n\t\t\t * Return the strings that are not rejected (they are fulfilled)\n\t\t\t */\n\t\t\treturn strings\n\t\t\t\t.map((block) => block.status === \"fulfilled\" && block.value)\n\t\t\t\t.filter(Boolean) as SetOfBlocks[]; // remove nullish\n\t\t})\n\t\t.then((consolidated) => {\n\t\t\t/** Log the results */\n\t\t\tif (args.options?.silent !== true) {\n\t\t\t\tfor (const result of consolidated) {\n\t\t\t\t\tif (result.blocks.length > 0) {\n\t\t\t\t\t\t/**\n\t\t\t\t\t\t * Add the strings to the destination set\n\t\t\t\t\t\t */\n\t\t\t\t\t\tdestination.addArray(result.blocks);\n\t\t\t\t\t\t/* Log the results */\n\t\t\t\t\t\tmessages.push(\n\t\t\t\t\t\t\t`\u2705 ${result.path} [${result.blocks.map((b) => b.msgid).join(\", \")}]`,\n\t\t\t\t\t\t);\n\t\t\t\t\t} else messages.push(`\u274C ${result.path} has no strings`);\n\t\t\t\t}\n\t\t\t}\n\t\t})\n\t\t.catch((err) => {\n\t\t\treturn new Error(err);\n\t\t});\n\n\tprogressBar.stop();\n\n\tconsole.log(\"\\n\uD83C\uDF89 Done!\");\n\tconsole.log(\n\t\t`\uD83D\uDCDD Found ${Object.values(destination.blocks).length} translation strings in ${path.resolve(args.paths.cwd)}.`,\n\t);\n\n\tconsole.log(messages.join(os.EOL));\n\n\treturn destination;\n}\n"],
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAAoB;AACpB,uBAAiB;AAajB,eAAsB,WACrB,OACA,aACA,MACA,aACC;AACD,QAAM,WAAW,CAAC;AAClB,QAAM,QAAQ,WAAW,KAAK,EAC5B,KAAK,CAAC,YAAY;AAIlB,WAAO,QACL,IAAI,CAAC,UAAU,MAAM,WAAW,eAAe,MAAM,KAAK,EAC1D,OAAO,OAAO;AAAA,EACjB,CAAC,EACA,KAAK,CAAC,iBAAiB;AAEvB,QAAI,KAAK,SAAS,WAAW,MAAM;AAClC,iBAAW,UAAU,cAAc;AAClC,YAAI,OAAO,OAAO,SAAS,GAAG;AAI7B,sBAAY,SAAS,OAAO,MAAM;AAElC,mBAAS;AAAA,YACR,UAAK,OAAO,IAAI,KAAK,OAAO,OAAO,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,KAAK,IAAI,CAAC;AAAA,UAClE;AAAA,QACD,MAAO,UAAS,KAAK,UAAK,OAAO,IAAI,iBAAiB;AAAA,MACvD;AAAA,IACD;AAAA,EACD,CAAC,EACA,MAAM,CAAC,QAAQ;AACf,WAAO,IAAI,MAAM,GAAG;AAAA,EACrB,CAAC;AAEF,cAAY,KAAK;AAEjB,UAAQ,IAAI,mBAAY;AACxB,UAAQ;AAAA,IACP,mBAAY,OAAO,OAAO,YAAY,MAAM,EAAE,MAAM,2BAA2B,iBAAAA,QAAK,QAAQ,KAAK,MAAM,GAAG,CAAC;AAAA,EAC5G;AAEA,UAAQ,IAAI,SAAS,KAAK,GAAG,GAAG,CAAC;AAEjC,SAAO;AACR;",
+ "names": ["path"]
+ }
package/lib/parser/tree.js CHANGED
@@ -1 +1 @@
- "use strict";var O=Object.create;var g=Object.defineProperty;var P=Object.getOwnPropertyDescriptor;var K=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,F=Object.prototype.hasOwnProperty;var M=(e,t)=>{for(var n in t)g(e,n,{get:t[n],enumerable:!0})},b=(e,t,n,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let s of K(t))!F.call(e,s)&&s!==n&&g(e,s,{get:()=>t[s],enumerable:!(o=P(t,s))||o.enumerable});return e};var j=(e,t,n)=>(n=e!=null?O(L(e)):{},b(t||!e||!e.__esModule?g(n,"default",{value:e,enumerable:!0}):n,e)),E=e=>b(g({},"__esModule",{value:!0}),e);var V={};M(V,{doTree:()=>U});module.exports=E(V);var k=j(require("tree-sitter")),y=require("../const.js"),a=require("gettext-merger"),w=require("../fs/glob.js"),m=require("../utils/common.js");function I(e){let t=e,n=0;for(;t&&n<6;){if(t?.previousSibling?.type==="comment"&&t?.previousSibling?.text.toLowerCase().includes("translators"))return t?.previousSibling?.text?(0,m.stripTranslationMarkup)(t.previousSibling.text):void 0;n++,t=t.parent}}function U(e,t){const n=new k.default,o=(0,w.getParser)(t);if(!o)return new a.SetOfBlocks([],t);n.setLanguage(o);const s=n.parse(e),x=new a.SetOfBlocks([],t),v=t.split(".").pop()?.toLowerCase()!=="php"?"call_expression":"function_call_expression",N=["string","string_value","variable_name","binary_expression","member_expression","subscript_expression","function_call_expression","encapsed_string"];function _(r){if(r?.children.length)for(const c of r.children)_(c);if(r?.type===v){const c=r.firstChild?.text??null;if(c===null||!Object.keys(y.i18nFunctions).includes(c))return;const p=r.lastChild;if(p===null||p.childCount===0||p.type!=="arguments")return;const[q,C]=r.children,l={},h=y.i18nFunctions[c],$=C.children.slice(1,-1);let f=0;for(const u of $){let i=u,d=i.text;if(u.type==="argument"){if(u.children.length===0)continue;i=u.children[0]}if(i?.type===",")continue;if(i?.type&&N.includes(i.type))d=d.slice(1,-1);else{console.warn(`Unexpected node type: ${i?.type} is ${h[f]} for ${d} in ${t}`);continue}const T=h[f];l[T]=d,f+=1}const S=I(p),B=new a.Block({msgctxt:l.msgctxt,msgid:l.msgid??"",msgid_plural:l.msgid_plural,msgstr:l.msgid_plural?["",""]:[""],comments:{translator:S?[S]:void 0,reference:[`${(0,m.reverseSlashes)(t)}:${r.startPosition.row+1}`]}});x.add(B)}}return _(s.rootNode),x}0&&(module.exports={doTree});
1
+ "use strict";var O=Object.create;var m=Object.defineProperty;var P=Object.getOwnPropertyDescriptor;var K=Object.getOwnPropertyNames;var L=Object.getPrototypeOf,F=Object.prototype.hasOwnProperty;var M=(t,e)=>{for(var n in e)m(t,n,{get:e[n],enumerable:!0})},b=(t,e,n,o)=>{if(e&&typeof e=="object"||typeof e=="function")for(let s of K(e))!F.call(t,s)&&s!==n&&m(t,s,{get:()=>e[s],enumerable:!(o=P(e,s))||o.enumerable});return t};var j=(t,e,n)=>(n=t!=null?O(L(t)):{},b(e||!t||!t.__esModule?m(n,"default",{value:t,enumerable:!0}):n,t)),E=t=>b(m({},"__esModule",{value:!0}),t);var V={};M(V,{doTree:()=>U});module.exports=E(V);var k=j(require("tree-sitter")),y=require("../const.js"),a=require("gettext-merger"),w=require("../fs/glob.js"),g=require("../utils/common.js");function I(t){let e=t,n=0;for(;e&&n<6;){if(e?.previousSibling?.type==="comment"&&e?.previousSibling?.text.toLowerCase().includes("translators"))return e?.previousSibling?.text?(0,g.stripTranslationMarkup)(e.previousSibling.text):void 0;n++,e=e.parent}}function U(t,e){const n=new k.default,o=(0,w.getParser)(e);if(!o)return new a.SetOfBlocks([],e);n.setLanguage(o);const s=n.parse(t),x=new a.SetOfBlocks([],e),v=e.split(".").pop()?.toLowerCase()!=="php"?"call_expression":"function_call_expression",N=["name","string","string_value","variable_name","binary_expression","member_expression","subscript_expression","shell_command_expression","function_call_expression","encapsed_string"];function _(r){if(r?.children.length)for(const l of r.children)_(l);if(r?.type===v){const l=r.firstChild?.text??null;if(l===null||!Object.keys(y.i18nFunctions).includes(l))return;const p=r.lastChild;if(p===null||p.childCount===0||p.type!=="arguments")return;const[q,C]=r.children,c={},h=y.i18nFunctions[l],$=C.children.slice(1,-1);let f=0;for(const u of $){let i=u,d=i.text;if(u.type==="argument"){if(u.children.length===0)continue;i=u.children[0]}if(i?.type===",")continue;if(i?.type&&N.includes(i.type))d=d.slice(1,-1);else{console.warn(`Unexpected node type: ${i?.type} is ${h[f]} for ${d} in ${e}`);continue}const T=h[f];c[T]=d,f+=1}const S=I(p),B=new a.Block({msgctxt:c.msgctxt,msgid:c.msgid??"",msgid_plural:c.msgid_plural,msgstr:c.msgid_plural?["",""]:[""],comments:{translator:S?[S]:void 0,reference:[`${(0,g.reverseSlashes)(e)}:${r.startPosition.row+1}`]}});x.add(B)}}return _(s.rootNode),x}0&&(module.exports={doTree});
package/lib/parser/tree.js.map ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../src/parser/tree.ts"],
4
+ "sourcesContent": ["import Parser, { type SyntaxNode } from \"tree-sitter\";\nimport { i18nFunctions } from \"../const.js\";\n\nimport { Block, SetOfBlocks } from \"gettext-merger\";\nimport { getParser } from \"../fs/glob.js\";\nimport { reverseSlashes, stripTranslationMarkup } from \"../utils/common.js\";\n\n/**\n * Collect comments from the AST node and its preceding siblings.\n *\n * @param {SyntaxNode} node - The AST node.\n * @return {string[]} An array of collected comments.\n */\nfunction collectComments(node: SyntaxNode): string | undefined {\n\tlet currentNode = node;\n\tlet depth = 0;\n\n\t// Check the node's preceding siblings for comments\n\twhile (currentNode && depth < 6) {\n\t\tif (\n\t\t\tcurrentNode?.previousSibling?.type === \"comment\" &&\n\t\t\tcurrentNode?.previousSibling?.text.toLowerCase().includes(\"translators\")\n\t\t) {\n\t\t\treturn currentNode?.previousSibling?.text\n\t\t\t\t? stripTranslationMarkup(currentNode.previousSibling.text)\n\t\t\t\t: undefined;\n\t\t}\n\t\tdepth++;\n\t\tcurrentNode = currentNode.parent as SyntaxNode;\n\t}\n}\n\n/**\n * Parses the source code using the specified language parser and extracts the strings from the file.\n *\n * @param {string} sourceCode - The source code to be parsed.\n * @param {string} filepath - The path to the file being parsed.\n * @return {SetOfBlocks} An array of translation strings.\n */\nexport function doTree(sourceCode: string, filepath: string): SetOfBlocks {\n\t// set up the parser\n\tconst parser = new Parser();\n\tconst parserExt = getParser(filepath);\n\t// if no parser is found return empty\n\tif (!parserExt) return new SetOfBlocks([], filepath);\n\t// set the parser language\n\tparser.setLanguage(parserExt);\n\n\t// parse the file\n\tconst tree = parser.parse(sourceCode);\n\n\t// set up the translation object\n\tconst gettextTranslations: SetOfBlocks = new SetOfBlocks([], filepath);\n\n\tconst typeToMatch =\n\t\tfilepath.split(\".\").pop()?.toLowerCase() !== \"php\"\n\t\t\t? \"call_expression\"\n\t\t\t: \"function_call_expression\";\n\n\tconst stringType = [\n\t\t\"name\",\n\t\t\"string\",\n\t\t\"string_value\",\n\t\t\"variable_name\",\n\t\t\"binary_expression\",\n\t\t\"member_expression\",\n\t\t\"subscript_expression\",\n\t\t\"shell_command_expression\",\n\t\t\"function_call_expression\",\n\t\t\"encapsed_string\",\n\t];\n\n\t/**\n\t * Traverse the tree \uD83C\uDF33\n\t *\n\t * @param {SyntaxNode} node The node to traverse through\n\t */\n\tfunction traverse(node: SyntaxNode): void {\n\t\t// Walk the tree\n\t\tif (node?.children.length)\n\t\t\tfor (const child of node.children) {\n\t\t\t\ttraverse(child);\n\t\t\t}\n\n\t\t// Check if the node matches\n\t\tif (node?.type === typeToMatch) {\n\t\t\t// The function name is the first child\n\t\t\tconst functionName = node.firstChild?.text ?? 
null;\n\t\t\tif (\n\t\t\t\tfunctionName === null ||\n\t\t\t\t!Object.keys(i18nFunctions).includes(functionName)\n\t\t\t) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// The arguments are the last child\n\t\t\tconst argsNode = node.lastChild;\n\t\t\tif (\n\t\t\t\targsNode === null ||\n\t\t\t\targsNode.childCount === 0 ||\n\t\t\t\targsNode.type !== \"arguments\"\n\t\t\t) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// Get the whole gettext translation string\n\t\t\t// eslint-disable-next-line @typescript-eslint/no-unused-vars\n\t\t\tconst [_fn, raw] = node.children;\n\t\t\tconst translation: Partial<{\n\t\t\t\tmsgctxt: string;\n\t\t\t\tmsgid: string;\n\t\t\t\tmsgid_plural: string;\n\t\t\t\tmsgstr: string;\n\t\t\t}> = {};\n\n\t\t\tconst translationKeys =\n\t\t\t\ti18nFunctions[functionName as keyof typeof i18nFunctions];\n\n\t\t\tconst children = raw.children.slice(1, -1);\n\t\t\tlet translationKeyIndex = 0;\n\n\t\t\t// Get the translation from the arguments (the quoted strings)\n\t\t\tfor (const child of children) {\n\t\t\t\tlet node = child;\n\t\t\t\tlet nodeValue: string | string[] = node.text;\n\n\t\t\t\t// unwrap the argument node, which is used in PHP.\n\t\t\t\tif (child.type === \"argument\") {\n\t\t\t\t\tif (child.children.length === 0) continue;\n\t\t\t\t\tnode = child.children[0];\n\t\t\t\t}\n\n\t\t\t\tif (node?.type === \",\") {\n\t\t\t\t\t// skip the comma between arguments\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\tif (node?.type && stringType.includes(node.type)) {\n\t\t\t\t\t// unquote the strings\n\t\t\t\t\tnodeValue = nodeValue.slice(1, -1);\n\t\t\t\t} else {\n\t\t\t\t\t// unexpected node type this string is not translatable and should be skipped\n\t\t\t\t\tconsole.warn(\n\t\t\t\t\t\t`Unexpected node type: ${node?.type} is ${translationKeys[translationKeyIndex]} for ${nodeValue} in ${filepath}`,\n\t\t\t\t\t);\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\n\t\t\t\t// the translation key (eg. msgid)\n\t\t\t\tconst currentKey = translationKeys[\n\t\t\t\t\ttranslationKeyIndex\n\t\t\t\t] as keyof typeof translation;\n\n\t\t\t\t// the value of that key\n\t\t\t\ttranslation[currentKey] = nodeValue;\n\n\t\t\t\t// increment the index of the translation key\n\t\t\t\ttranslationKeyIndex += 1;\n\t\t\t}\n\n\t\t\t// TODO: Alert about wrong translation domain?\n\t\t\tconst comments = collectComments(argsNode);\n\n\t\t\t// Get the translation data\n\t\t\tconst block = new Block({\n\t\t\t\tmsgctxt: translation.msgctxt,\n\t\t\t\tmsgid: translation.msgid ?? \"\",\n\t\t\t\tmsgid_plural: translation.msgid_plural,\n\t\t\t\tmsgstr: translation.msgid_plural ? [\"\", \"\"] : [\"\"],\n\t\t\t\tcomments: {\n\t\t\t\t\ttranslator: comments ? [comments] : undefined,\n\t\t\t\t\treference: [\n\t\t\t\t\t\t`${reverseSlashes(filepath)}:${node.startPosition.row + 1}`,\n\t\t\t\t\t],\n\t\t\t\t},\n\t\t\t} as Block);\n\n\t\t\tgettextTranslations.add(block);\n\t\t}\n\t}\n\n\ttraverse(tree.rootNode);\n\n\t// Return both matches and entries\n\treturn gettextTranslations;\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAwC;AACxC,mBAA8B;AAE9B,4BAAmC;AACnC,kBAA0B;AAC1B,oBAAuD;AAQvD,SAAS,gBAAgB,MAAsC;AAC9D,MAAI,cAAc;AAClB,MAAI,QAAQ;AAGZ,SAAO,eAAe,QAAQ,GAAG;AAChC,QACC,aAAa,iBAAiB,SAAS,aACvC,aAAa,iBAAiB,KAAK,YAAY,EAAE,SAAS,aAAa,GACtE;AACD,aAAO,aAAa,iBAAiB,WAClC,sCAAuB,YAAY,gBAAgB,IAAI,IACvD;AAAA,IACJ;AACA;AACA,kBAAc,YAAY;AAAA,EAC3B;AACD;AASO,SAAS,OAAO,YAAoB,UAA+B;AAEzE,QAAM,SAAS,IAAI,mBAAAA,QAAO;AAC1B,QAAM,gBAAY,uBAAU,QAAQ;AAEpC,MAAI,CAAC,UAAW,QAAO,IAAI,kCAAY,CAAC,GAAG,QAAQ;AAEnD,SAAO,YAAY,SAAS;AAG5B,QAAM,OAAO,OAAO,MAAM,UAAU;AAGpC,QAAM,sBAAmC,IAAI,kCAAY,CAAC,GAAG,QAAQ;AAErE,QAAM,cACL,SAAS,MAAM,GAAG,EAAE,IAAI,GAAG,YAAY,MAAM,QAC1C,oBACA;AAEJ,QAAM,aAAa;AAAA,IAClB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACD;AAOA,WAAS,SAAS,MAAwB;AAEzC,QAAI,MAAM,SAAS;AAClB,iBAAW,SAAS,KAAK,UAAU;AAClC,iBAAS,KAAK;AAAA,MACf;AAGD,QAAI,MAAM,SAAS,aAAa;AAE/B,YAAM,eAAe,KAAK,YAAY,QAAQ;AAC9C,UACC,iBAAiB,QACjB,CAAC,OAAO,KAAK,0BAAa,EAAE,SAAS,YAAY,GAChD;AACD;AAAA,MACD;AAGA,YAAM,WAAW,KAAK;AACtB,UACC,aAAa,QACb,SAAS,eAAe,KACxB,SAAS,SAAS,aACjB;AACD;AAAA,MACD;AAIA,YAAM,CAAC,KAAK,GAAG,IAAI,KAAK;AACxB,YAAM,cAKD,CAAC;AAEN,YAAM,kBACL,2BAAc,YAA0C;AAEzD,YAAM,WAAW,IAAI,SAAS,MAAM,GAAG,EAAE;AACzC,UAAI,sBAAsB;AAG1B,iBAAW,SAAS,UAAU;AAC7B,YAAIC,QAAO;AACX,YAAI,YAA+BA,MAAK;AAGxC,YAAI,MAAM,SAAS,YAAY;AAC9B,cAAI,MAAM,SAAS,WAAW,EAAG;AACjC,UAAAA,QAAO,MAAM,SAAS,CAAC;AAAA,QACxB;AAEA,YAAIA,OAAM,SAAS,KAAK;AAEvB;AAAA,QACD;AAEA,YAAIA,OAAM,QAAQ,WAAW,SAASA,MAAK,IAAI,GAAG;AAEjD,sBAAY,UAAU,MAAM,GAAG,EAAE;AAAA,QAClC,OAAO;AAEN,kBAAQ;AAAA,YACP,yBAAyBA,OAAM,IAAI,OAAO,gBAAgB,mBAAmB,CAAC,SAAS,SAAS,OAAO,QAAQ;AAAA,UAChH;AACA;AAAA,QACD;AAGA,cAAM,aAAa,gBAClB,mBACD;AAGA,oBAAY,UAAU,IAAI;AAG1B,+BAAuB;AAAA,MACxB;AAGA,YAAM,WAAW,gBAAgB,QAAQ;AAGzC,YAAM,QAAQ,IAAI,4BAAM;AAAA,QACvB,SAAS,YAAY;AAAA,QACrB,OAAO,YAAY,SAAS;AAAA,QAC5B,cAAc,YAAY;AAAA,QAC1B,QAAQ,YAAY,eAAe,CAAC,IAAI,EAAE,IAAI,CAAC,EAAE;AAAA,QACjD,UAAU;AAAA,UACT,YAAY,WAAW,CAAC,QAAQ,IAAI;AAAA,UACpC,WAAW;AAAA,YACV,OAAG,8BAAe,QAAQ,CAAC,IAAI,KAAK,cAAc,MAAM,CAAC;AAAA,UAC1D;AAAA,QACD;AAAA,MACD,CAAU;AAEV,0BAAoB,IAAI,KAAK;AAAA,IAC9B;AAAA,EACD;AAEA,WAAS,KAAK,QAAQ;AAGtB,SAAO;AACR;",
6
+ "names": ["Parser", "node"]
7
+ }
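The map above restores the readable doTree source. As a rough usage sketch (not taken from the package docs), it can be fed a PHP snippet directly; this assumes the native tree-sitter grammars are built, which the package's postinstall script handles, and the deep require path is illustrative:

  const { doTree } = require("@wp-blocks/make-pot/lib/parser/tree.js");

  const php = [
    "<?php",
    "/* translators: greeting shown on the dashboard. */",
    "__( 'Hello World', 'demo-domain' );",
  ].join("\n");

  // doTree(sourceCode, filepath) returns a gettext-merger SetOfBlocks; each Block carries the
  // msgid, a preceding translators comment (if any) and a "file:line" reference.
  const set = doTree(php, "includes/demo.php");
  for (const block of set.blocks) {
    console.log(block.msgid, block.comments);
  }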
package/lib/potCommand.js CHANGED
@@ -1 +1 @@
1
- "use strict";var n=Object.defineProperty;var f=Object.getOwnPropertyDescriptor;var s=Object.getOwnPropertyNames;var c=Object.prototype.hasOwnProperty;var l=(e,t)=>{for(var o in t)n(e,o,{get:t[o],enumerable:!0})},d=(e,t,o,p)=>{if(t&&typeof t=="object"||typeof t=="function")for(let r of s(t))!c.call(e,r)&&r!==o&&n(e,r,{get:()=>t[r],enumerable:!(p=f(t,r))||p.enumerable});return e};var k=e=>d(n({},"__esModule",{value:!0}),e);var h={};l(h,{default:()=>a});module.exports=k(h);var i=require("./parser/makePot.js"),m=require("./utils/common");function a(e){if(Object.keys(e).length>0){(0,m.printMakePotModuleInfo)();const t=new Date;(0,i.makePot)(e).then(()=>{(0,m.printTimeElapsed)(t)}).catch(o=>{console.error(`\u{1FAE4} Make-pot - ${o}`)})}}
1
+ "use strict";var n=Object.defineProperty;var f=Object.getOwnPropertyDescriptor;var s=Object.getOwnPropertyNames;var c=Object.prototype.hasOwnProperty;var k=(e,t)=>{for(var o in t)n(e,o,{get:t[o],enumerable:!0})},l=(e,t,o,p)=>{if(t&&typeof t=="object"||typeof t=="function")for(let r of s(t))!c.call(e,r)&&r!==o&&n(e,r,{get:()=>t[r],enumerable:!(p=f(t,r))||p.enumerable});return e};var d=e=>l(n({},"__esModule",{value:!0}),e);var M={};k(M,{default:()=>i});module.exports=d(M);var a=require("./parser/makePot.js"),m=require("./utils/common.js");function i(e){if(Object.keys(e).length>0){(0,m.printMakePotModuleInfo)();const t=new Date;(0,a.makePot)(e).then(()=>{(0,m.printTimeElapsed)("Make-Pot",t)}).catch(o=>{console.error(`\u{1FAE4} Make-pot - ${o}`)})}}
package/lib/potCommand.js.map ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/potCommand.ts"],
4
+ "sourcesContent": ["import { makePot } from \"./parser/makePot.js\";\n\nimport type { Args } from \"./types.js\";\nimport { printMakePotModuleInfo, printTimeElapsed } from \"./utils/common.js\";\n\nexport default function potCommand(args: Args) {\n\tif (Object.keys(args).length > 0) {\n\t\tprintMakePotModuleInfo();\n\t\t/* capture the start time */\n\t\tconst timeStart = new Date();\n\t\t/** make the pot file */\n\t\tmakePot(args)\n\t\t\t.then(() => {\n\t\t\t\t/* output the end time */\n\t\t\t\tprintTimeElapsed(\"Make-Pot\", timeStart);\n\t\t\t})\n\t\t\t.catch((error) => {\n\t\t\t\tconsole.error(`\uD83E\uDEE4 Make-pot - ${error}`);\n\t\t\t});\n\t}\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAwB;AAGxB,oBAAyD;AAE1C,SAAR,WAA4B,MAAY;AAC9C,MAAI,OAAO,KAAK,IAAI,EAAE,SAAS,GAAG;AACjC,8CAAuB;AAEvB,UAAM,YAAY,oBAAI,KAAK;AAE3B,gCAAQ,IAAI,EACV,KAAK,MAAM;AAEX,0CAAiB,YAAY,SAAS;AAAA,IACvC,CAAC,EACA,MAAM,CAAC,UAAU;AACjB,cAAQ,MAAM,wBAAiB,KAAK,EAAE;AAAA,IACvC,CAAC;AAAA,EACH;AACD;",
6
+ "names": []
7
+ }
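potCommand simply wraps makePot with the module banner and the elapsed-time report shown above. A sketch of calling the compiled entry directly with a hand-built Args object; normally the CLI in lib/cli.js assembles this object, every value below is illustrative, and the exact meaning of paths.out is an assumption:

  const potCommand = require("@wp-blocks/make-pot/lib/potCommand.js").default;

  potCommand({
    slug: "my-plugin",
    domain: "plugin",
    paths: {
      cwd: "path/to/my-plugin",           // directory to scan
      out: "path/to/my-plugin/languages", // destination (assumed; see the Args docs in types.js.map below)
    },
    patterns: { include: [], exclude: [] },
    options: { silent: true, skip: {} },
  });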
package/lib/types.js.map ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../src/types.ts"],
4
+ "sourcesContent": ["import type { GetTextTranslation } from \"gettext-parser\";\nimport type { pkgJsonHeaders, pluginHeaders, themeHeaders } from \"./const.js\";\n\nexport type ThemeHeadersType = typeof themeHeaders;\nexport type PluginHeadersType = typeof pluginHeaders;\nexport type PkgHeadersType = typeof pkgJsonHeaders;\n\n/**\n * The args headers Object types\n */\nexport type PotHeaders =\n\t| keyof PkgHeadersType\n\t| keyof PluginHeadersType\n\t| keyof ThemeHeadersType\n\t| \"license\"\n\t| \"email\"\n\t| \"language\"\n\t| \"domain\"\n\t| \"bugs\";\n\n// type is the value of the themeHeader Object\nexport type DomainType =\n\t| \"plugin\"\n\t| \"theme\"\n\t| \"block\"\n\t| \"theme-block\"\n\t| \"generic\";\n\n/**\n * The patterns to use when extracting strings from files.\n *\n * @param {string} mergePaths - Comma-separated list of POT files whose contents should be merged with the extracted strings.\n * If left empty, defaults to the destination POT file. POT file headers will be ignored.\n * @param {string} subtractPaths - Comma-separated list of POT files whose contents should act as some sort of denylist\n * for string extraction. Any string which is found on that denylist will not be extracted. This can be useful when\n * you want to create multiple POT files from the same source directory with slightly different content and no duplicate\n * strings between them.\n * @param {boolean} subtractAndMerge - Whether source code references and comments from the generated POT file should be\n * instead added to the POT file used for subtraction. Warning: this modifies the files passed to `subtractPaths`!\n * @param {string} include - Comma-separated list of files and paths that should be used for string extraction.\n * If provided, only these files and folders will be taken into account for string extraction.\n * For example, `--include=\"src,my-file.php` will ignore anything besides `my-file.php` and files in the `src`\n * directory. Simple glob patterns can be used, i.e. `--include=foo-*.php` includes any PHP file with the `foo-`\n * prefix. Leading and trailing slashes are ignored, i.e. `/my/directory/` is the same as `my/directory`.\n * @param {string} exclude - Comma-separated list of files and paths that should be skipped for string extraction.\n * For example, `--exclude=.github,myfile.php` would ignore any strings found within `myfile.php` or the `.github`\n * folder. Simple glob patterns can be used, i.e. `--exclude=foo-*.php` excludes any PHP file with the `foo-`\n * prefix. Leading and trailing slashes are ignored, i.e. `/my/directory/` is the same as `my/directory`.\n * The following files and folders are always excluded: node_modules, .git, .svn, .CVS, .hg, vendor, *.min.js.\n */\nexport interface Patterns {\n\tmergePaths?: string[];\n\tsubtractPaths?: string[];\n\tsubtractAndMerge?: boolean;\n\tinclude: string[];\n\texclude: string[];\n}\n\n/**\n * Create a POT file for a WordPress project.\n *\n * Scans PHP, Blade-PHP, and JavaScript files for translatable strings, as well as theme stylesheets and plugin files\n * if the source directory is detected as either a plugin or theme.\n *\n * @param {string} sourceDirectory - Directory to scan for string extraction.\n * @param {string} destination - Name of the resulting POT file.\n * @param {string | undefined} slug - Plugin or theme slug. 
Defaults to the source directory's basename.\n * @param {'plugin' | 'theme' | 'block' | 'theme-block' | 'generic'} domain - Text domain to look for in the source code,\n * unless the `ignoreDomain` option is used. By default, the \"Text Domain\" header of the plugin or theme is used.\n * If none is provided, it falls back to the project slug.\n * @param {boolean} ignoreDomain - Ignore the text domain completely and extract strings with any text domain.\n * @param {{}} headers - Array in JSON format of custom headers which will be added to the POT file. Defaults to empty array.\n * @param {boolean} location - Whether to write `#: filename:line` lines. Defaults to true, use `--no-location`\n * to skip the removal. Note that disabling this option makes it harder for technically skilled translators\n * to understand each message\u2019s context.\n * @param {boolean} skipJs - Skips JavaScript string extraction. Useful when this is done in another build step, e.g. through Babel.\n * @param {boolean} skipPhp - Skips PHP string extraction.\n * @param {boolean} skipBlade - Skips Blade-PHP string extraction.\n * @param {boolean} skipBlockJson - Skips string extraction from block.json files.\n * @param {boolean} skipThemeJson - Skips string extraction from theme.json files.\n * @param {boolean} skipAudit - Skips string audit where it tries to find possible mistakes in translatable strings.\n * Useful when running in an automated environment.\n * @param {string} fileComment - String that should be added as a comment to the top of the resulting POT file.\n * By default, a copyright comment is added for WordPress plugins and themes.\n * @param {string} packageName - Name to use for the package name in the resulting POT file's `Project-Id-Version` header.\n * Overrides the plugin or theme name, if applicable.\n * @param {boolean} silent - Whether to hide progress information.\n */\nexport interface Args {\n\tslug: string;\n\tdomain: DomainType;\n\tpaths: {\n\t\tcwd: string;\n\t\tout: string;\n\t\troot?: string;\n\t};\n\toptions?: {\n\t\tignoreDomain?: boolean;\n\t\tsilent?: boolean;\n\t\tjson?: boolean;\n\t\tlocation?: boolean;\n\t\tpackageName?: string;\n\t\toutput?: boolean;\n\t\tfileComment?: string;\n\t\tcharset?: string;\n\t\tskip: {\n\t\t\tjs?: boolean;\n\t\t\tphp?: boolean;\n\t\t\tblade?: boolean;\n\t\t\tblockJson?: boolean;\n\t\t\tthemeJson?: boolean;\n\t\t\taudit?: boolean;\n\t\t};\n\t};\n\theaders?: { [key in PotHeaders]: string };\n\tpatterns: Patterns;\n}\n\n/**\n * The arguments for the `makeJson` command.\n * \t@param {string} source the source directory\n * \t@param {string | null} destination the destination directory (defaults to source)\n * \t@param {string[] | null} allowedFormats the allowed files\n * \t@param {boolean} purge remove old json files (otherwise the content will be merged)\n * \t@param {boolean} prettyPrint?: pretty print json\n * \t@param {boolean} debug: enable debug mode\n */\nexport interface MakeJsonArgs {\n\ttimeStart: number;\n\tprettyPrint: boolean;\n\tdebug: boolean;\n\tdestination: string;\n\tscriptName?: string;\n\tpurge: boolean;\n\tsource: string;\n\tslug: string;\n\tallowedFormats?: string[];\n\tpaths: {\n\t\tcwd: string;\n\t\tout?: string;\n\t\troot?: string;\n\t};\n}\n\nexport interface I18nSchema {\n\t[key: string]: string | string[] | I18nSchema | I18nSchema[];\n}\n\n/**\n * Translation string metadata.\n * Gettext format: https://www.gnu.org/savannah-checkouts/gnu/gettext/FAQ.html\n *\n * @property {string} msgctxt - context for this translation, if not 
present the default context applies\n * @property {string} msgid - string to be translated\n * @property {string} msgid_plural the plural form of the original string (might not be present)\n * @property {string[]} msgstr an array of translations\n * @property {{}} comments an object with the following properties: translator, reference, extracted, flag, previous.\n */\nexport interface TranslationStrings {\n\t[msgctxt: string]: { [msgId: string]: GetTextTranslation };\n}\n\n/**\n * The JSON data returned by the `makeJson` command.\n * @param {string} domain\n * @param {Record<string, unknown>} locale_data\n */\nexport interface JedData {\n\t[domain: string]: {\n\t\t[key: string]: string | string[];\n\t};\n}\n\nexport interface MakeJson {\n\tdomain: string;\n\tgenerator: string;\n\t\"translation-revision-date\": string;\n\tsource: string;\n\tlocale_data: JedData;\n}\n\n/**\n * The header data of the current plugin / theme as returned by the `extractHeaders` command.\n */\nexport interface I18nHeaders {\n\tauthorString: string;\n\tbugs: string;\n\tlicense: string;\n\tauthor?: string;\n\txDomain: string;\n\tlanguage: string;\n\tversion: string;\n\tslug: string;\n\temail: string | undefined;\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;AAAA;AAAA;",
6
+ "names": []
7
+ }
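The JSDoc restored above documents the option objects the two commands consume. Two illustrative literals matching those interfaces; the values are made up, only the field names and their descriptions come from the typings:

  // Patterns: controls which files are scanned. Globs, bare directory names and bare file
  // names are accepted; node_modules, .git, vendor, *.min.js and similar are always excluded.
  const patterns = {
    include: ["src", "foo-*.php"],
    exclude: [".github", "myfile.php"],
  };

  // MakeJsonArgs: input for the make-json command.
  const makeJsonArgs = {
    slug: "my-plugin",
    source: "languages",      // the source directory
    destination: "languages", // output directory (defaults to the source)
    purge: false,             // merge into existing JSON files instead of removing them
    prettyPrint: true,
    debug: false,
    timeStart: Date.now(),
    paths: { cwd: process.cwd() },
  };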
package/lib/utils/common.js CHANGED
@@ -1,3 +1,4 @@
1
- "use strict";var l=Object.create;var s=Object.defineProperty;var m=Object.getOwnPropertyDescriptor;var f=Object.getOwnPropertyNames;var d=Object.getPrototypeOf,x=Object.prototype.hasOwnProperty;var k=(e,t)=>{for(var n in t)s(e,n,{get:t[n],enumerable:!0})},u=(e,t,n,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let r of f(t))!x.call(e,r)&&r!==n&&s(e,r,{get:()=>t[r],enumerable:!(o=m(t,r))||o.enumerable});return e};var a=(e,t,n)=>(n=e!=null?l(d(e)):{},u(t||!e||!e.__esModule?s(n,"default",{value:e,enumerable:!0}):n,e)),h=e=>u(s({},"__esModule",{value:!0}),e);var b={};k(b,{detectPatternType:()=>F,getCommentBlock:()=>y,getCopyright:()=>M,getPkgJsonData:()=>p,printMakePotModuleInfo:()=>$,printStats:()=>T,printTimeElapsed:()=>D,removeCommentMarkup:()=>P,reverseSlashes:()=>S,stringstring:()=>C,stripTranslationMarkup:()=>v});module.exports=h(b);var g=a(require("node:fs")),i=require("node:os"),c=a(require("node:path"));function y(e){const t=e.match(/\/\*\*?[\s\S]*?\*\//);return t!==null?t[0]:e}function P(e){return e.match(/[a-zA-Z].*/gm)}function v(e){const t=/\/\*\*?\s*(?:translators:)\s*([\s\S]*?)\s*\*\/|\/\/\s*(?:translators:)\s*(.*)$/i,n=e.match(t);return n?n[1]:e}function C(e){return typeof e=="string"?e.includes(",")?e.split(","):[e]:[]}function F(e){const t=e.includes("."),n=e.includes(c.default.sep);return e.includes("*")?"glob":!t&&!n?"directory":t&&!n?"file":"glob"}function M(e,t="GPL v2 or later"){return`# Copyright (C) ${new Date().getFullYear()} ${e}
2
- # This file is distributed under the ${t} license.`}function S(e){return e.replace(/\\/g,"/")}function p(...e){const t={},n=c.default.join(__dirname,"..","..","package.json"),o=g.default.existsSync(n)?require(n):{name:"makepot",version:""};for(const r of e)o[r]&&(t[r]=o[r]);return t}function $(){const{version:e,name:t}=p("name","version");console.log(`${t} version: ${e}`)}function D(e,t=new Date){console.log(`\u{1F680} Make-Pot: Job completed! Pot file created in ${t.getTime()-e.getTime()}ms`)}function T(){console.log("Memory usage:",(process.memoryUsage().heapUsed/1024/1024).toFixed(2),"MB (Free:",((0,i.totalmem)()/1024/1024/1024).toFixed(2),`GB)
1
+ "use strict";var f=Object.create;var s=Object.defineProperty;var d=Object.getOwnPropertyDescriptor;var x=Object.getOwnPropertyNames;var h=Object.getPrototypeOf,k=Object.prototype.hasOwnProperty;var y=(e,t)=>{for(var n in t)s(e,n,{get:t[n],enumerable:!0})},a=(e,t,n,o)=>{if(t&&typeof t=="object"||typeof t=="function")for(let r of x(t))!k.call(e,r)&&r!==n&&s(e,r,{get:()=>t[r],enumerable:!(o=d(t,r))||o.enumerable});return e};var g=(e,t,n)=>(n=e!=null?f(h(e)):{},a(t||!e||!e.__esModule?s(n,"default",{value:e,enumerable:!0}):n,e)),$=e=>a(s({},"__esModule",{value:!0}),e);var J={};y(J,{detectPatternType:()=>S,getCommentBlock:()=>M,getCopyright:()=>v,getPkgJsonData:()=>m,printMakePotModuleInfo:()=>D,printStats:()=>b,printTimeElapsed:()=>T,removeCommentMarkup:()=>P,reverseSlashes:()=>w,stringstring:()=>F,stripTranslationMarkup:()=>C});module.exports=$(J);var l=g(require("node:fs")),i=require("node:os"),u=g(require("node:path")),p=require("../const.js");function M(e){const t=e.match(/\/\*\*?[\s\S]*?\*\//);return t!==null?t[0]:e}function P(e){return e.match(/[a-zA-Z].*/gm)}function C(e){const t=/\/\*\*?\s*(?:translators:)\s*([\s\S]*?)\s*\*\/|\/\/\s*(?:translators:)\s*(.*)$/i,n=e.match(t);return n?n[1]:e}function F(e){return typeof e=="string"?e.includes(",")?e.split(","):[e]:[]}function S(e){const t=e.includes("."),n=e.includes(u.default.sep);return e.includes("*")?"glob":!t&&!n?"directory":t&&!n?"file":"glob"}function v(e,t="GPL v2 or later"){return`# Copyright (C) ${new Date().getFullYear()} ${e}
2
+ # This file is distributed under the ${t} license.`}function w(e){return e.replace(/\\/g,"/")}function m(e,...t){const n={},o=u.default.join(e||process.cwd(),"package.json"),r=l.default.existsSync(o)?require(o):{};for(const c of t)r[c]&&(n[c]=r[c]);return n}function D(){const{version:e,name:t}=m(p.modulePath,"name","version");console.log(`${t} version: ${e}`)}function T(e,t,n=new Date){console.log(`
3
+ \u{1F680} ${e}: Job completed! ${e.split("-")[1]} file created in ${n.getTime()-t.getTime()}ms`)}function b(){console.log("Memory usage:",(process.memoryUsage().heapUsed/1024/1024).toFixed(2),"MB (Free:",((0,i.totalmem)()/1024/1024/1024).toFixed(2),`GB)
3
4
  Cpu User:`,(process.cpuUsage().user/1e6).toFixed(2),"ms Cpu System:",(process.cpuUsage().system/1e6).toFixed(2),"ms of",(0,i.cpus)().length,"cores")}0&&(module.exports={detectPatternType,getCommentBlock,getCopyright,getPkgJsonData,printMakePotModuleInfo,printStats,printTimeElapsed,removeCommentMarkup,reverseSlashes,stringstring,stripTranslationMarkup});
package/lib/utils/common.js.map ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../src/utils/common.ts"],
4
+ "sourcesContent": ["import fs from \"node:fs\";\nimport { cpus, totalmem } from \"node:os\";\nimport path from \"node:path\";\nimport { modulePath } from \"../const.js\";\n\n/**\n * A function that removes comment markup from a given string.\n *\n * @param {string} input - The input string with comment markup.\n * @return {string} - The input string without comment markup.\n */\nexport function getCommentBlock(input: string): string {\n\tconst commentBlock = input.match(/\\/\\*\\*?[\\s\\S]*?\\*\\//);\n\treturn commentBlock !== null ? commentBlock[0] : input;\n}\n\n/**\n * A function that starts to capture the text after the first letter.\n *\n * @param {string} input - The input string with comment markup.\n * @return {string} - The input string without comment markup.\n */\nexport function removeCommentMarkup(input: string): string[] | null {\n\treturn input.match(/[a-zA-Z].*/gm);\n}\n\n/**\n * Removes the markup from a comment string.\n *\n * @param {string} comment - The comment string to remove markup from.\n * @return {string} The comment text without the markers.\n */\nexport function stripTranslationMarkup(comment: string): string {\n\tconst commentPattern =\n\t\t/\\/\\*\\*?\\s*(?:translators:)\\s*([\\s\\S]*?)\\s*\\*\\/|\\/\\/\\s*(?:translators:)\\s*(.*)$/i;\n\tconst matches = comment.match(commentPattern);\n\treturn matches ? matches[1] : comment;\n}\n\n/**\n * Splits a string into an array of strings based on the presence of a comma.\n *\n * @param {string} string - The string to be split.\n * @return {string[]} An array of strings after splitting the input string.\n */\nexport function stringstring(string: string | string[] | undefined): string[] {\n\tif (typeof string === \"string\") {\n\t\tif (string.includes(\",\")) {\n\t\t\treturn string.split(\",\");\n\t\t}\n\t\treturn [string];\n\t}\n\treturn [];\n}\n\n/**\n * Determines if a pattern represents a file, a directory, or a glob pattern.\n * @param pattern - The pattern string to evaluate.\n * @returns 'file', 'directory', or 'glob'.\n */\nexport function detectPatternType(\n\tpattern: string,\n): \"file\" | \"directory\" | \"glob\" {\n\tconst containsFileExtension = pattern.includes(\".\");\n\tconst containsDirectorySeparator = pattern.includes(path.sep);\n\n\tif (pattern.includes(\"*\")) {\n\t\treturn \"glob\";\n\t}\n\tif (!containsFileExtension && !containsDirectorySeparator) {\n\t\treturn \"directory\";\n\t}\n\tif (containsFileExtension && !containsDirectorySeparator) {\n\t\treturn \"file\";\n\t}\n\treturn \"glob\";\n}\n\n/**\n * Generates a copyright comment for the specified slug and license.\n *\n * @param slug - The slug to include in the copyright comment\n * @param [license='GPL v2 or later'] - The license to use in the copyright comment\n * @return The generated copyright comment\n */\nexport function getCopyright(\n\tslug: string,\n\tlicense = \"GPL v2 or later\",\n): string {\n\treturn (\n\t\t`# Copyright (C) ${new Date().getFullYear()} ${slug}\\n` +\n\t\t`# This file is distributed under the ${license} license.`\n\t);\n}\n\n/**\n * Reverse slashes in a path, and replace backward slashes with forward slashes\n *\n * @param filePath - The path to be reversed.\n * @return {string} The reversed path.\n */\nexport function reverseSlashes(filePath: string): string {\n\t// Replace backward slashes with forward slashes\n\treturn filePath.replace(/\\\\/g, \"/\");\n}\n\n/**\n * The makepot package.json file data\n * @arguments {string[]} fields - The fields to extract\n * @return {Record<string, unknown>} - The 
package.json data\n */\nexport function getPkgJsonData(\n\tlocation?: string,\n\t...fields: string[]\n): Record<string, unknown> {\n\tconst requested: Record<string, unknown> = {};\n\t// read the package.json file the is in the root directory\n\tconst pkgJsonPath = path.join(location || process.cwd(), \"package.json\");\n\t// read the package.json file or return an empty object\n\tconst pkgJson: Record<string, unknown> = fs.existsSync(pkgJsonPath)\n\t\t? require(pkgJsonPath)\n\t\t: {};\n\t// extract the requested fields from the package.json\n\tfor (const field of fields) {\n\t\tif (pkgJson[field]) {\n\t\t\trequested[field] = pkgJson[field];\n\t\t}\n\t}\n\treturn requested;\n}\n\n/**\n * Print the module header with the current version and name\n */\nexport function printMakePotModuleInfo() {\n\tconst { version, name } = getPkgJsonData(modulePath, \"name\", \"version\");\n\t/* print the version */\n\tconsole.log(`${name} version: ${version}`);\n}\n\n/**\n * Output to the console the time elapsed in milliseconds between two dates\n * @param scriptName the name of the script\n * @param timeStart the start time\n * @param timeEnd the end time\n */\nexport function printTimeElapsed(\n\tscriptName: \"Make-Pot\" | \"Make-Json\",\n\ttimeStart: Date,\n\ttimeEnd: Date = new Date(),\n) {\n\tconsole.log(\n\t\t`\\n\uD83D\uDE80 ${scriptName}: Job completed! ${scriptName.split(\"-\")[1]} file created in ${\n\t\t\ttimeEnd.getTime() - timeStart.getTime()\n\t\t}ms`,\n\t);\n}\n\n/**\n/**\n* Prints the memory usage and cpu usage of the system\n */\nexport function printStats() {\n\tconsole.log(\n\t\t\"Memory usage:\",\n\t\t(process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2),\n\t\t\"MB (Free:\",\n\t\t(totalmem() / 1024 / 1024 / 1024).toFixed(2),\n\t\t\"GB)\\nCpu User:\",\n\t\t(process.cpuUsage().user / 1000000).toFixed(2),\n\t\t\"ms Cpu System:\",\n\t\t(process.cpuUsage().system / 1000000).toFixed(2),\n\t\t\"ms of\",\n\t\tcpus().length,\n\t\t\"cores\",\n\t);\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAAe;AACf,qBAA+B;AAC/B,uBAAiB;AACjB,mBAA2B;AAQpB,SAAS,gBAAgB,OAAuB;AACtD,QAAM,eAAe,MAAM,MAAM,qBAAqB;AACtD,SAAO,iBAAiB,OAAO,aAAa,CAAC,IAAI;AAClD;AAQO,SAAS,oBAAoB,OAAgC;AACnE,SAAO,MAAM,MAAM,cAAc;AAClC;AAQO,SAAS,uBAAuB,SAAyB;AAC/D,QAAM,iBACL;AACD,QAAM,UAAU,QAAQ,MAAM,cAAc;AAC5C,SAAO,UAAU,QAAQ,CAAC,IAAI;AAC/B;AAQO,SAAS,aAAa,QAAiD;AAC7E,MAAI,OAAO,WAAW,UAAU;AAC/B,QAAI,OAAO,SAAS,GAAG,GAAG;AACzB,aAAO,OAAO,MAAM,GAAG;AAAA,IACxB;AACA,WAAO,CAAC,MAAM;AAAA,EACf;AACA,SAAO,CAAC;AACT;AAOO,SAAS,kBACf,SACgC;AAChC,QAAM,wBAAwB,QAAQ,SAAS,GAAG;AAClD,QAAM,6BAA6B,QAAQ,SAAS,iBAAAA,QAAK,GAAG;AAE5D,MAAI,QAAQ,SAAS,GAAG,GAAG;AAC1B,WAAO;AAAA,EACR;AACA,MAAI,CAAC,yBAAyB,CAAC,4BAA4B;AAC1D,WAAO;AAAA,EACR;AACA,MAAI,yBAAyB,CAAC,4BAA4B;AACzD,WAAO;AAAA,EACR;AACA,SAAO;AACR;AASO,SAAS,aACf,MACA,UAAU,mBACD;AACT,SACC,oBAAmB,oBAAI,KAAK,GAAE,YAAY,CAAC,IAAI,IAAI;AAAA,uCACX,OAAO;AAEjD;AAQO,SAAS,eAAe,UAA0B;AAExD,SAAO,SAAS,QAAQ,OAAO,GAAG;AACnC;AAOO,SAAS,eACf,aACG,QACuB;AAC1B,QAAM,YAAqC,CAAC;AAE5C,QAAM,cAAc,iBAAAA,QAAK,KAAK,YAAY,QAAQ,IAAI,GAAG,cAAc;AAEvE,QAAM,UAAmC,eAAAC,QAAG,WAAW,WAAW,IAC/D,QAAQ,WAAW,IACnB,CAAC;AAEJ,aAAW,SAAS,QAAQ;AAC3B,QAAI,QAAQ,KAAK,GAAG;AACnB,gBAAU,KAAK,IAAI,QAAQ,KAAK;AAAA,IACjC;AAAA,EACD;AACA,SAAO;AACR;AAKO,SAAS,yBAAyB;AACxC,QAAM,EAAE,SAAS,KAAK,IAAI,eAAe,yBAAY,QAAQ,SAAS;AAEtE,UAAQ,IAAI,GAAG,IAAI,aAAa,OAAO,EAAE;AAC1C;AAQO,SAAS,iBACf,YACA,WACA,UAAgB,oBAAI,KAAK,GACxB;AACD,UAAQ;AAAA,IACP;AAAA,YAAQ,UAAU,oBAAoB,WAAW,MAAM,GAAG,EAAE,CAAC,CAAC,oBAC7D,QAAQ,QAAQ,IAAI,UAAU,QAAQ,CACvC;AAAA,EACD;AACD;AAMO,SAAS,aAAa;AAC5B,UAAQ;AAAA,IACP;AAAA,KACC,QAAQ,YAAY,EAAE,WAAW,OAAO,MAAM,QAAQ,CAAC;AAAA,IACxD;AAAA,SACC,yBAAS,IAAI,OAAO,OAAO,MAAM,QAAQ,CAAC;AAAA,IAC3C;AAAA,KACC,QAAQ,SAAS,EAAE,OAAO,KAAS,QAAQ,CAAC;AAAA,IAC7C;AAAA,KACC,QAAQ,SAAS,EAAE,SAAS,KAAS,QAAQ,CAAC;AAAA,IAC/C;AAAA,QACA,qBAAK,EAAE;AAAA,IACP;AAAA,EACD;AACD;",
6
+ "names": ["path", "fs"]
7
+ }
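The utils/common changes are easiest to see from the call sites: printTimeElapsed now receives the script name before the start time, and getPkgJsonData now takes the directory containing the package.json as its first argument. A small sketch; the deep require path is illustrative:

  const {
    printTimeElapsed,
    getPkgJsonData,
    detectPatternType,
    stringstring,
  } = require("@wp-blocks/make-pot/lib/utils/common.js");

  const timeStart = new Date();
  // ... run the extraction ...
  printTimeElapsed("Make-Pot", timeStart); // "🚀 Make-Pot: Job completed! Pot file created in …ms"

  // Reads the named fields from <dir>/package.json (empty object if the file is missing).
  console.log(getPkgJsonData(process.cwd(), "name", "version"));

  console.log(detectPatternType("foo-*.php")); // "glob"
  console.log(detectPatternType("src"));       // "directory"
  console.log(stringstring("src,tests"));      // ["src", "tests"]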
package/lib/utils/extractors.js ADDED
@@ -0,0 +1 @@
1
+ "use strict";var c=Object.defineProperty;var m=Object.getOwnPropertyDescriptor;var d=Object.getOwnPropertyNames;var g=Object.prototype.hasOwnProperty;var l=(n,t)=>{for(var s in t)c(n,s,{get:t[s],enumerable:!0})},u=(n,t,s,e)=>{if(t&&typeof t=="object"||typeof t=="function")for(let o of d(t))!g.call(n,o)&&o!==s&&c(n,o,{get:()=>t[o],enumerable:!(e=m(t,o))||e.enumerable});return n};var k=n=>u(c({},"__esModule",{value:!0}),n);var x={};l(x,{buildBlock:()=>f,getKeyByValue:()=>B,yieldParsedData:()=>a});module.exports=k(x);var r=require("gettext-merger");function B(n,t){return Object.keys(n).find(s=>n[s]===t)??void 0}const f=(n,t,s=void 0)=>{const e=new r.Block([]);return e.msgctxt=void 0,e.msgid=t,e.msgid_plural="",e.msgstr=[],e.comments={},n&&(e.comments.extracted=[n]),s?.length&&(e.comments.reference=s),e};function a(n,t,s){const e=new r.SetOfBlocks([],s);if(n.length===0)return e;e.path=s;for(const o of n){const i=f(o.msgid,o.msgctxt,o.comments?.reference);i&&e.blocks.push(i)}return e}0&&(module.exports={buildBlock,getKeyByValue,yieldParsedData});
package/lib/utils/extractors.js.map ADDED
@@ -0,0 +1,7 @@
1
+ {
2
+ "version": 3,
3
+ "sources": ["../../src/utils/extractors.ts"],
4
+ "sourcesContent": ["import { Block, SetOfBlocks } from \"gettext-merger\";\n\n/**\n * Returns the key of an object based on its value\n *\n * @param object the object that contains the key\n * @param value the key that we want to get\n * @return {Record<string, string>} the filtered keys\n */\nexport function getKeyByValue(\n\tobject: Record<string, unknown>,\n\tvalue: string,\n): string | undefined {\n\treturn Object.keys(object).find((key) => object[key] === value) ?? undefined;\n}\n\n/**\n * Returns a gettext translation object\n *\n * @param label the label of the translation\n * @param string the string of the translation\n * @param filePath the file path of the translation\n */\nexport const buildBlock = (\n\tlabel: string,\n\tstring: string,\n\tfilePath: string[] | undefined = undefined,\n): Block => {\n\tconst block = new Block([]);\n\tblock.msgctxt = undefined;\n\tblock.msgid = string;\n\tblock.msgid_plural = \"\";\n\tblock.msgstr = [];\n\tblock.comments = {};\n\tif (label) {\n\t\tblock.comments.extracted = [label];\n\t}\n\tif (filePath?.length) {\n\t\tblock.comments.reference = filePath;\n\t}\n\treturn block;\n};\n\n/**\n * Extracts strings from parsed JSON data.\n *\n * @param {Record<string, any> | Parser.SyntaxNode} parsed - The parsed JSON data or syntax node.\n * @param {string | Parser} filename - The filename or parser.\n * @param filepath - the path to the file being parsed\n * @return {SetOfBlocks} An array of translation strings.\n */\nexport function yieldParsedData(\n\tparsed: Block[],\n\tfilename: \"block.json\" | \"theme.json\",\n\tfilepath: string,\n): SetOfBlocks {\n\tconst gettextTranslations: SetOfBlocks = new SetOfBlocks([], filepath);\n\n\tif (parsed.length === 0) {\n\t\treturn gettextTranslations;\n\t}\n\n\t// set the path of the translation\n\tgettextTranslations.path = filepath;\n\n\tfor (const item of parsed) {\n\t\tconst block = buildBlock(\n\t\t\titem.msgid,\n\t\t\titem.msgctxt as string,\n\t\t\titem.comments?.reference,\n\t\t);\n\n\t\tif (block) {\n\t\t\tgettextTranslations.blocks.push(block);\n\t\t}\n\t}\n\n\treturn gettextTranslations;\n}\n"],
5
+ "mappings": ";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAAmC;AAS5B,SAAS,cACf,QACA,OACqB;AACrB,SAAO,OAAO,KAAK,MAAM,EAAE,KAAK,CAAC,QAAQ,OAAO,GAAG,MAAM,KAAK,KAAK;AACpE;AASO,MAAM,aAAa,CACzB,OACA,QACA,WAAiC,WACtB;AACX,QAAM,QAAQ,IAAI,4BAAM,CAAC,CAAC;AAC1B,QAAM,UAAU;AAChB,QAAM,QAAQ;AACd,QAAM,eAAe;AACrB,QAAM,SAAS,CAAC;AAChB,QAAM,WAAW,CAAC;AAClB,MAAI,OAAO;AACV,UAAM,SAAS,YAAY,CAAC,KAAK;AAAA,EAClC;AACA,MAAI,UAAU,QAAQ;AACrB,UAAM,SAAS,YAAY;AAAA,EAC5B;AACA,SAAO;AACR;AAUO,SAAS,gBACf,QACA,UACA,UACc;AACd,QAAM,sBAAmC,IAAI,kCAAY,CAAC,GAAG,QAAQ;AAErE,MAAI,OAAO,WAAW,GAAG;AACxB,WAAO;AAAA,EACR;AAGA,sBAAoB,OAAO;AAE3B,aAAW,QAAQ,QAAQ;AAC1B,UAAM,QAAQ;AAAA,MACb,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK,UAAU;AAAA,IAChB;AAEA,QAAI,OAAO;AACV,0BAAoB,OAAO,KAAK,KAAK;AAAA,IACtC;AAAA,EACD;AAEA,SAAO;AACR;",
6
+ "names": []
7
+ }
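lib/utils/extractors.js is new in this release and bundles small helpers for building gettext blocks from parsed JSON data (block.json / theme.json), per the JSDoc restored above. A quick sketch of the three exports; the require path is illustrative:

  const {
    buildBlock,
    getKeyByValue,
    yieldParsedData,
  } = require("@wp-blocks/make-pot/lib/utils/extractors.js");

  // getKeyByValue returns the first key whose value matches.
  console.log(getKeyByValue({ title: "My Block" }, "My Block")); // "title"

  // buildBlock(label, string, reference?) builds a gettext-merger Block: the label becomes an
  // extracted comment, the string becomes the msgid, the reference becomes the file reference.
  const block = buildBlock("block title", "My Block", ["src/block.json"]);

  // yieldParsedData collects Block-like items into a SetOfBlocks bound to one file path.
  const set = yieldParsedData([block], "block.json", "src/block.json");
  console.log(set.blocks.length); // 1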
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@wp-blocks/make-pot",
3
- "version": "1.5.0",
3
+ "version": "1.5.1",
4
4
  "license": "GPL-3.0-or-later",
5
5
  "homepage": "https://wp-blocks.github.io/make-pot/",
6
6
  "description": "A Node.js script for generating a POT file from source code",
@@ -9,13 +9,13 @@
9
9
  "John Hooks <bitmachina@outlook.com> (https://johnhooks.io/)"
10
10
  ],
11
11
  "type": "commonjs",
12
- "main": "lib/cli.js",
12
+ "main": "lib/index.js",
13
13
  "bin": {
14
- "default": "lib/cli.js",
15
- "make-pot": "lib/makePot.js",
16
- "makepot": "lib/makePot.js",
17
- "make-json": "lib/makeJson.js",
18
- "makejson": "lib/makeJson.js"
14
+ "default": "./lib/cli.js",
15
+ "make-pot": "./lib/makePot.js",
16
+ "makepot": "./lib/makePot.js",
17
+ "make-json": "./lib/makeJson.js",
18
+ "makejson": "./lib/makeJson.js"
19
19
  },
20
20
  "engines": {
21
21
  "node": ">=16.0.0"
@@ -48,12 +48,13 @@
48
48
  "scripts": {
49
49
  "postinstall": "npm rebuild tree-sitter tree-sitter-typescript tree-sitter-php tree-sitter-javascript --force",
50
50
  "build": "npx esbuild ./src/**/* --format=cjs --minify --outdir=lib --platform=node",
51
- "build:ci": "npx esbuild ./src/index.ts --format=cjs --outdir=lib --bundle --external:tree-sitter --external:tree-sitter-typescript --external:tree-sitter-php --external:tree-sitter-javascript --platform=node",
52
51
  "watch": "tsc --watch",
53
- "lint": "npx @biomejs/biome check --apply src",
52
+ "lint": "npx @biomejs/biome check --write src",
54
53
  "rm": "rmdir /s /q lib",
55
- "test": "node --test",
56
- "test:ci": "npm run build:ci && npm run test",
54
+ "test:build": "npx esbuild ./src/**/* --format=cjs --sourcemap --outdir=lib --platform=node",
55
+ "test": "npm run test:build && node --test",
56
+ "build:build-ci": "npx esbuild ./src/index.ts --format=cjs --outdir=lib --bundle --external:tree-sitter --external:tree-sitter-typescript --external:tree-sitter-php --external:tree-sitter-javascript --platform=node",
57
+ "test:ci": "npm run build:build-ci && npm run test",
57
58
  "test:watch": "node --test --watch",
58
59
  "test:coverage": "node --test --experimental-test-coverage"
59
60
  },
@@ -62,11 +63,11 @@
62
63
  "gettext-merger": "^1.2.1",
63
64
  "gettext-parser": "^4.0.4",
64
65
  "glob": "^11.0.2",
66
+ "tannin": "^1.2.0",
65
67
  "tree-sitter": "^0.20.6",
66
68
  "tree-sitter-javascript": "^0.20.4",
67
69
  "tree-sitter-php": "^0.20.0",
68
70
  "tree-sitter-typescript": "^0.20.5",
69
- "tannin": "^1.2.0",
70
71
  "yargs": "^17.7.1"
71
72
  },
72
73
  "devDependencies": {
package/tests/extract-headers.test.js ADDED
@@ -0,0 +1,54 @@
1
+ const { describe, it } = require("node:test");
2
+ const { join } = require("node:path");
3
+ const assert = require("node:assert");
4
+ const { extractMainFileData } = require("../lib");
5
+
6
+ describe("should parse plugin main file", () => {
7
+ describe("should parse plugin.php", () => {
8
+ it("correctly extracts plugin headers", async () => {
9
+ const fileParsed = extractMainFileData({
10
+ domain: "plugin",
11
+ paths: {
12
+ cwd: join(process.cwd(), "tests/fixtures/plugin"),
13
+ },
14
+ slug: "plugin",
15
+ });
16
+ assert.deepStrictEqual(fileParsed, {
17
+ author: "Erik yo",
18
+ authorUri: "https://codekraft.it",
19
+ description:
20
+ "An example plugin to demo how to do internationalization in a WordPress plugin.",
21
+ domainPath: "/languages",
22
+ license: "GPL3+",
23
+ licenseUri: "http://www.gnu.org/licenses/gpl-3.0.html",
24
+ name: "plugin",
25
+ textDomain: "i18n-example",
26
+ version: "1.0.0",
27
+ });
28
+ });
29
+ });
30
+ });
31
+
32
+ describe("should parse theme main file", () => {
33
+ describe("should parse style.css", () => {
34
+ it("correctly extracts theme headers", async () => {
35
+ const fileParsed = extractMainFileData({
36
+ domain: "theme",
37
+ paths: {
38
+ cwd: join(process.cwd(), "tests/fixtures/theme"),
39
+ },
40
+ slug: "fabled-sunset",
41
+ });
42
+ assert.deepStrictEqual(fileParsed, {
43
+ name: "Theme name",
44
+ description: "Custom theme description...",
45
+ version: "1.0.0",
46
+ author: "Author Name",
47
+ tags: "block-patterns, full-site-editing",
48
+ textDomain: "fabled-sunset",
49
+ domainPath: "/assets/lang",
50
+ license: "GNU General Public License v2.0 or later",
51
+ });
52
+ });
53
+ });
54
+ });
package/tests/extract.test.js CHANGED
@@ -283,7 +283,7 @@ describe("getStrings wp cli", () => {
283
283
 
284
284
  const result = doTree(content, filename);
285
285
 
286
- assert.strictEqual(result.blocks.length, 27);
286
+ assert.strictEqual(result.blocks.length, 26);
287
287
  });
288
288
 
289
289
  /** see wp cli tests */