unplugin-keywords 2.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2026 cueaz
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,256 @@
1
+ # unplugin-keywords
2
+
3
+ [![NPM][npm-badge]][npm-url]
4
+ [![Github CI][ci-badge]][ci-url]
5
+ [![MIT licensed][license-badge]][license-url]
6
+
7
+ [npm-badge]: https://img.shields.io/npm/v/unplugin-keywords.svg
8
+ [npm-url]: https://www.npmjs.com/package/unplugin-keywords
9
+ [ci-badge]: https://github.com/cueaz/unplugin-keywords/actions/workflows/check.yaml/badge.svg
10
+ [ci-url]: https://github.com/cueaz/unplugin-keywords/actions/workflows/check.yaml
11
+ [license-badge]: https://img.shields.io/badge/license-MIT-blue.svg
12
+ [license-url]: https://github.com/cueaz/unplugin-keywords/blob/main/LICENSE
13
+
14
+ A build plugin for structural string literal minification and obfuscation.
15
+
16
+ `unplugin-keywords` addresses a fundamental limitation in JavaScript minification: the inability to safely mangle string literals used as object keys, custom event types, or structural constants. By explicitly importing these identifiers from a virtual module, the plugin extracts them at the AST level and maps them to deterministic, short hashes during the build process. This explicit opt-in mechanism empowers bundlers to inline and obfuscate application internals without breaking semantic contracts.
17
+
18
+ ## Motivation vs. Property Mangling
19
+
20
+ Traditional JavaScript minifiers rely on property mangling (e.g., Terser's `mangle.properties`) to reduce structural identifiers. `unplugin-keywords` provides a module-based alternative that addresses the structural limitations of global mangling.
21
+
22
+ * **Explicit Opt-In:**
23
+ Traditional property mangling requires maintaining complex, global exclusion rules (e.g., [`mangle.json`](https://github.com/preactjs/signals/blob/main/mangle.json)), which are fragile and hard to scale. `unplugin-keywords` utilizes explicit imports (`import * as K`). Developers unambiguously declare which identifiers are safe to obfuscate directly in the source code.
24
+ * **Gradual Adoption:**
25
+ Unlike global mangling flags that affect the entire codebase simultaneously, installing this plugin alters nothing by default. It allows incremental adoption on a per-file or per-module basis.
26
+ * **Cross-Boundary Consistency:**
27
+ Standard mangled properties cannot safely cross package boundaries; a property mangled to `a` in Package A will not map to `a` in Package B. Because `virtual:keywords` relies on deterministic hashing, identical keys inherently produce identical hashes across independent builds (provided they share the same `secret` configuration), preserving structural contracts.
28
+ * **Universal Application:**
29
+ Standard minifiers only mangle object keys, leaving string literal values intact. This plugin processes both keys and values uniformly (e.g., `[K.type]: K.SET_USER`). It extends obfuscation to literal types (`const mode: typeof K.extract | typeof K.transform = K.extract`) and even arbitrary static strings (`throw new Error(K['Invalid State'])`).
30
+ * **Trade-offs:**
31
+ This explicit approach sacrifices some source code readability. Furthermore, as demonstrated in the benchmarks below, standard gzip compression handles unmodified semantic strings highly effectively. If reducing the gzipped network payload is the sole objective, the architectural overhead of this plugin outweighs the minimal payload reduction.
32
+
33
+ ## Visual Demo: `@preact/signals-core`
34
+
35
+ A side-by-side comparison of minified bundles:
36
+
37
+ | [Unmodified](https://github.com/cueaz/unplugin-keywords/blob/main/demo/signals/src/original.ts) (Standard Minification) | [Keywordified](https://github.com/cueaz/unplugin-keywords/blob/main/demo/signals/src/keywordified.ts) (Literal Obfuscation) |
38
+ |:---:|:---:|
39
+ | <picture><source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/cueaz/unplugin-keywords/refs/heads/main/demo/signals/dist_sample/original.min.js.light.png" width="400"><img src="https://raw.githubusercontent.com/cueaz/unplugin-keywords/refs/heads/main/demo/signals/dist_sample/original.min.js.dark.png" width="400" alt="Original"></picture> | <picture><source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/cueaz/unplugin-keywords/refs/heads/main/demo/signals/dist_sample/keywordified.min.js.light.png" width="400"><img src="https://raw.githubusercontent.com/cueaz/unplugin-keywords/refs/heads/main/demo/signals/dist_sample/keywordified.min.js.dark.png" width="400" alt="Keywordified"></picture> |
40
+ | 6.86 kB │ gzip: **2.09 kB** | **5.40 kB** │ gzip: 2.03 kB |
41
+
42
+ > [!NOTE]
43
+ > **Baseline Metrics:** The "Unmodified" metrics represent standard `tsdown` minification. The official [`@preact/signals-core@1.14.1`](https://bundlephobia.com/package/@preact/signals-core@1.14.1) release achieves a smaller footprint (5.4 kB Minified / 1.9 kB Gzipped) by employing a hand-crafted [`mangle.json`](https://github.com/preactjs/signals/blob/main/mangle.json) for manual property obfuscation.
44
+ >
45
+ > **Compression Efficiency:** While the raw bundle size is reduced by 21.3%, the gzipped size is only 2.9% smaller. This demonstrates the effectiveness of standard gzip compression on unmodified code: if minimizing the gzipped network payload is the sole objective, adopting this plugin is unnecessary.
46
+
47
+ *For more information, see the [demo documentation](https://github.com/cueaz/unplugin-keywords/blob/main/demo/signals/README.md).*
48
+
49
+ ## How It Works
50
+
51
+ Standard minifiers operate exclusively on variable bindings and function names, leaving structural strings intact. While this preserves the semantic contract, it inflates bundle size and exposes internal state architecture (e.g., Redux action types, state machine nodes).
52
+
53
+ `unplugin-keywords` shifts this paradigm by treating structural strings as imported module bindings.
54
+
55
+ **1. Source Code (Development):**
56
+ Developers reference strings via a virtual module. The strongly recommended pattern is to use a namespace import (`import * as K`), which clearly demarcates keyword usage throughout the file.
57
+
58
+ ```ts
59
+ import * as K from 'virtual:keywords';
60
+
61
+ const action = {
62
+ [K.type]: K.SET_USER,
63
+ [K.payload]: data,
64
+ };
65
+ ```
66
+
67
+ **2. AST Transformation:**
68
+ During the build phase, the plugin traverses the AST, resolving bindings and statically resolving member expressions. It replaces valid identifier access with a generated AST node pointing to a deterministic base62 hash or a minimal lexical sequence.
69
+
70
+ **3. Minified Output (Production):**
71
+ The bundler receives the transformed code and processes the hashed literals. Depending on the frequency of usage, the minifier will either inline the strings directly or extract them into single-character variables to save bytes.
72
+
73
+ ```ts
74
+ // Example of minifier output: strings may be inlined or assigned to variables if used multiple times
75
+ const _="z2pL21k";const a={a3fB9zX:_,k1Mw8pA:data};
76
+ ```
77
+
78
+ ## Dual-Module Architecture
79
+
80
+ `unplugin-keywords` provides two distinct virtual modules. While exclusively using `K.*` is a perfectly valid and robust approach, the dual-module system allows further bundle size reduction.
81
+
82
+ * **`virtual:keywords` (Stable Hash):**
83
+ Generates deterministic, key-derived hashes (e.g., `"z2pL21k"`). Designed for **public-facing APIs** and structural contracts that must remain consistent across package boundaries (e.g., `package.json` exports).
84
+ *Convention:* `import * as K from 'virtual:keywords';`
85
+
86
+ * **`virtual:keywords/local` (Lexical Counter):**
87
+ Generates the shortest possible sequential identifiers via bijective numeration (e.g., `"_a"`, `"_b"`, `"_c"`). Strictly designated for **internal and local** implementations where cross-boundary stability is irrelevant.
88
+ *Convention:* `import * as L from 'virtual:keywords/local';`
89
+
90
+ **Module Separation:**
91
+ To minimize bundle size, identifiers can be partitioned: bind public interfaces to `K.*`, and obscure all internal state and private members behind `L.*`.
92
+
93
+ ## Integration
94
+
95
+ Install the package:
96
+
97
+ ```bash
98
+ npm install -D unplugin-keywords
99
+ ```
100
+
101
+ Configure your bundler. Example for Vite:
102
+
103
+ ```ts
104
+ import { defineConfig } from 'vite';
105
+ import keywords from 'unplugin-keywords/vite';
106
+
107
+ export default defineConfig(({ mode }) => ({
108
+ plugins: [
109
+ keywords({
110
+ // Preserves keyword suffix in development for debugging (e.g., "zXpL21k.SET_USER")
111
+ isDev: mode === 'development',
112
+ // Initializes the hashing algorithm. Modify to rotate hashes globally.
113
+ secret: 'my-secret-key',
114
+ }),
115
+ ],
116
+ }));
117
+ ```
118
+
119
+ To enable type checking and IDE auto-completion, execute the CLI and register the output in `tsconfig.json`:
120
+
121
+ ```bash
122
+ npx keywords
123
+ ```
124
+
125
+ ```jsonc
126
+ {
127
+ "compilerOptions": {
128
+ "paths": {
129
+ "virtual:keywords": ["./node_modules/.keywords/index.d.ts"],
130
+ "virtual:keywords/local": ["./node_modules/.keywords/local.d.ts"]
131
+ }
132
+ }
133
+ }
134
+ ```
135
+
136
+ > [!TIP]
137
+ > During development, the plugin automatically runs a background type generation process while the bundler is running. Manual CLI execution is only necessary for pre-flight type checking (e.g., in CI) before the bundler runs.
138
+
139
+ ## Example: Class-Based Architectures
140
+
141
+ The namespace import pattern is applicable in class-based architectures where structural symbols are heavily used for internal state and lifecycle methods.
142
+
143
+ > [!IMPORTANT]
144
+ > Overriding lifecycle methods (e.g., `[K.render]`) requires a modified base class—such as a custom build of Lit—compiled with `unplugin-keywords` to dispatch the hashed keys. Sharing this dictionary across the ecosystem enables consistent obfuscation.
145
+
146
+ ```ts
147
+ // Source: https://github.com/lit/lit/blob/main/packages/lit-html/src/directives/async-replace.ts
148
+ /**
149
+ * @license
150
+ * Copyright 2017 Google LLC
151
+ * SPDX-License-Identifier: BSD-3-Clause
152
+ */
153
+ import * as K from 'virtual:keywords';
154
+ import * as L from 'virtual:keywords/local';
155
+ import {
156
+ AsyncDirective,
157
+ type DirectiveParameters,
158
+ } from '../async-directive.js';
159
+ import { type ChildPart, noChange } from '../lit-html.js';
160
+ import { forAwaitOf, Pauser, PseudoWeakRef } from './private-async-helpers.js';
161
+
162
+ type Mapper<T> = (v: T, index?: number) => unknown;
163
+
164
+ export class AsyncReplaceDirective extends AsyncDirective {
165
+ private [L.__value]?: AsyncIterable<unknown>;
166
+ private [L.__weakThis] = new PseudoWeakRef(this);
167
+ private [L.__pauser] = new Pauser();
168
+
169
+ [K.render]<T>(_value: AsyncIterable<T>, _mapper?: Mapper<T>) {
170
+ return noChange;
171
+ }
172
+
173
+ override [K.update](_part: ChildPart, [value, mapper]: DirectiveParameters<this>) {
174
+ if (!this[K.isConnected]) {
175
+ this[K.disconnected]();
176
+ }
177
+
178
+ if (value === this[L.__value]) {
179
+ return noChange;
180
+ }
181
+ this[L.__value] = value;
182
+ let i = 0;
183
+ const { [L.__weakThis]: weakThis, [L.__pauser]: pauser } = this;
184
+
185
+ forAwaitOf(value, async (v: unknown) => {
186
+ while (pauser[L.get]()) {
187
+ await pauser[L.get]();
188
+ }
189
+
190
+ const _this = weakThis[L.deref]();
191
+ if (_this !== undefined) {
192
+ if (_this[L.__value] !== value) {
193
+ return false;
194
+ }
195
+ if (mapper !== undefined) {
196
+ v = mapper(v, i);
197
+ }
198
+ _this[K.commitValue](v, i);
199
+ i++;
200
+ }
201
+ return true;
202
+ });
203
+
204
+ return noChange;
205
+ }
206
+
207
+ protected [K.commitValue](value: unknown, _index: number) {
208
+ this[K.setValue](value);
209
+ }
210
+
211
+ override [K.disconnected]() {
212
+ this[L.__weakThis][L.disconnect]();
213
+ this[L.__pauser][L.pause]();
214
+ }
215
+
216
+ override [K.reconnected]() {
217
+ this[L.__weakThis][L.reconnect](this);
218
+ this[L.__pauser][L.resume]();
219
+ }
220
+ }
221
+ ```
222
+ *In production, all internal properties (e.g., `__value`, `__pauser`) will be completely minified to short sequence identifiers (via `virtual:keywords/local`), obfuscating internal property names from the bundled Lit component.*
223
+
224
+ > [!TIP]
225
+ > Native ECMAScript private fields (`#prop`) are safely mangled by standard minifiers, eliminating the need for plugin obfuscation for internal class state.
226
+
227
+
228
+ ## Other Supported Patterns
229
+
230
+ ```ts
231
+ // Modular Imports
232
+ import { type, 'kebab-case' as kebab } from 'virtual:keywords';
233
+
234
+ // JSX Injection
235
+ const View = () => (
236
+ <K.Container>
237
+ <div />
238
+ </K.Container>
239
+ );
240
+
241
+ // Advanced TypeScript Inference
242
+ interface StateMachine {
243
+ [K.idle]: typeof K.active;
244
+ value: (typeof K)['kebab-case'];
245
+ }
246
+
247
+ // Module Re-exports
248
+ export { internalState as state } from 'virtual:keywords';
249
+
250
+ // UNSUPPORTED: Export All (Lacks static traceability)
251
+ export * from 'virtual:keywords';
252
+ ```
253
+
254
+ ## License
255
+
256
+ MIT
package/bin/cli.js ADDED
@@ -0,0 +1,4 @@
1
+ import { createRunner } from '../dist/api.js';
2
+
3
+ const runner = createRunner();
4
+ await runner.run();
package/dist/api.d.ts ADDED
@@ -0,0 +1,33 @@
1
+ import { BabelFileResult } from "@babel/core";
2
+
3
+ //#region src/internal/transform.d.ts
4
+ interface KeywordSet {
5
+ main: Set<string>;
6
+ local: Set<string>;
7
+ }
8
+ declare const transformCode: (code: string, id: string) => {
9
+ code: string;
10
+ map: NonNullable<BabelFileResult["map"]> | null;
11
+ keywords: KeywordSet;
12
+ } | null;
13
+ declare const extractKeywords: (code: string) => KeywordSet | null;
14
+ //#endregion
15
+ //#region src/internal/cli.d.ts
16
+ interface RunnerOptions {
17
+ root: string;
18
+ silent: boolean;
19
+ outDir: string;
20
+ }
21
+ declare const createRunner: (options?: Partial<RunnerOptions>) => {
22
+ collect(): Promise<KeywordSet>;
23
+ save(keywords: KeywordSet): Promise<void>;
24
+ run(): Promise<void>;
25
+ };
26
+ //#endregion
27
+ //#region src/internal/hash.d.ts
28
+ type Hasher = (input: string) => string;
29
+ declare const createHasher: (secret: string) => Hasher;
30
+ declare const createCounter: () => Hasher;
31
+ //#endregion
32
+ export { createCounter, createHasher, createRunner, extractKeywords, transformCode };
33
+ //# sourceMappingURL=api.d.ts.map
package/dist/api.js ADDED
@@ -0,0 +1,2 @@
1
+ import { a as transformCode, i as extractKeywords, n as createHasher, r as createRunner, t as createCounter } from "./hash-DMsO2VqQ.js";
2
+ export { createCounter, createHasher, createRunner, extractKeywords, transformCode };
@@ -0,0 +1,7 @@
1
+ import { t as Options } from "./plugin-Coz4K9xU.js";
2
+
3
+ //#region src/esbuild.d.ts
4
+ declare const _default: (options: Options) => EsbuildPlugin;
5
+ //#endregion
6
+ export { type Options, _default as default };
7
+ //# sourceMappingURL=esbuild.d.ts.map
@@ -0,0 +1,8 @@
1
+ import { t as unpluginFactory } from "./plugin-Cj1x2KK1.js";
2
+ import { createEsbuildPlugin } from "unplugin";
3
+ //#region src/esbuild.ts
4
+ var esbuild_default = createEsbuildPlugin(unpluginFactory);
5
+ //#endregion
6
+ export { esbuild_default as default };
7
+
8
+ //# sourceMappingURL=esbuild.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"esbuild.js","names":[],"sources":["../src/esbuild.ts"],"sourcesContent":["import { createEsbuildPlugin } from 'unplugin';\nimport { type Options, unpluginFactory } from './internal/plugin.js';\n\nexport default createEsbuildPlugin(unpluginFactory);\nexport type { Options };\n"],"mappings":";;;AAGA,IAAA,kBAAe,oBAAoB,gBAAgB"}
@@ -0,0 +1,360 @@
1
+ import { mkdir, readFile, writeFile } from "node:fs/promises";
2
+ import * as path from "node:path";
3
+ import { globby } from "globby";
4
+ import pLimit from "p-limit";
5
+ import { transformSync, types } from "@babel/core";
6
+ import { createHmac } from "node:crypto";
7
+ //#region src/internal/constants.ts
8
+ const VIRTUAL_MODULE_ID = "virtual:keywords";
9
+ const VIRTUAL_LOCAL_MODULE_ID = "virtual:keywords/local";
10
+ const PLUGIN_NAME = "unplugin-keywords";
11
+ //#endregion
12
+ //#region src/internal/encode.ts
13
+ const encodeIdentifier = (identifier) => {
14
+ let encoded = "";
15
+ for (let i = 0; i < identifier.length; i++) {
16
+ const c = identifier[i];
17
+ if (/[a-zA-Z0-9_]/.test(c)) encoded += c;
18
+ else if (c === "$") encoded += "$$";
19
+ else encoded += `$${c.charCodeAt(0).toString(16).padStart(4, "0")}`;
20
+ }
21
+ return encoded;
22
+ };
23
+ const toSafeVarName = (encoded) => `_$${encoded}`;
24
+ //#endregion
25
+ //#region src/internal/transform.ts
26
+ const isPureTypeSpace = (path) => {
27
+ let current = path;
28
+ while (current) {
29
+ const parent = current.parentPath;
30
+ if (!parent) break;
31
+ if (parent.isTSTypeQuery()) return false;
32
+ if ("computed" in parent.node && parent.node.computed) {
33
+ if (current.key === "key" || current.key === "property") return false;
34
+ }
35
+ if (parent.isTSType() || parent.isTSTypeParameterDeclaration() || parent.isTSTypeParameterInstantiation() || parent.isTSExpressionWithTypeArguments()) return true;
36
+ if (parent.isTSInterfaceDeclaration() || parent.isTSTypeAliasDeclaration() || parent.isTSEnumDeclaration() || parent.isTSModuleDeclaration()) {
37
+ if (current.key === "id") return true;
38
+ }
39
+ if (parent.isTSQualifiedName() || parent.isTSEntityName()) {
40
+ current = current.parentPath;
41
+ continue;
42
+ }
43
+ if (parent.isExpression() || parent.isStatement()) break;
44
+ current = current.parentPath;
45
+ }
46
+ return false;
47
+ };
48
+ const transformPlugin = (mode) => {
49
+ return {
50
+ name: `${PLUGIN_NAME}:${mode}`,
51
+ visitor: {
52
+ Program: {
53
+ enter(_, state) {
54
+ state.keywords = {
55
+ main: /* @__PURE__ */ new Set(),
56
+ local: /* @__PURE__ */ new Set()
57
+ };
58
+ state.keywordUids = {
59
+ main: /* @__PURE__ */ new Map(),
60
+ local: /* @__PURE__ */ new Map()
61
+ };
62
+ },
63
+ exit(path, state) {
64
+ const metadata = state.file.metadata;
65
+ metadata.keywords = {
66
+ main: Array.from(state.keywords.main),
67
+ local: Array.from(state.keywords.local)
68
+ };
69
+ if (mode === "transform") {
70
+ const newImports = [];
71
+ for (const [keyword, safeId] of state.keywordUids.main.entries()) {
72
+ const encoded = encodeIdentifier(keyword);
73
+ newImports.push(types.importDeclaration([types.importDefaultSpecifier(safeId)], types.stringLiteral(`${VIRTUAL_MODULE_ID}/_/${encoded}`)));
74
+ }
75
+ for (const [keyword, safeId] of state.keywordUids.local.entries()) {
76
+ const encoded = encodeIdentifier(keyword);
77
+ newImports.push(types.importDeclaration([types.importDefaultSpecifier(safeId)], types.stringLiteral(`${VIRTUAL_LOCAL_MODULE_ID}/_/${encoded}`)));
78
+ }
79
+ if (newImports.length > 0) path.unshiftContainer("body", newImports);
80
+ }
81
+ }
82
+ },
83
+ ImportDeclaration(path, state) {
84
+ const sourceValue = path.node.source.value;
85
+ if (sourceValue !== "virtual:keywords" && sourceValue !== "virtual:keywords/local") return;
86
+ const isLocal = sourceValue === VIRTUAL_LOCAL_MODULE_ID;
87
+ const targetSet = isLocal ? state.keywords.local : state.keywords.main;
88
+ const targetMap = isLocal ? state.keywordUids.local : state.keywordUids.main;
89
+ const programScope = path.scope.getProgramParent();
90
+ const processKeyword = (keyword) => {
91
+ targetSet.add(keyword);
92
+ if (mode === "extract") return null;
93
+ if (targetMap.has(keyword)) return targetMap.get(keyword);
94
+ const safeName = toSafeVarName(encodeIdentifier(keyword));
95
+ const uid = programScope.generateUidIdentifier(safeName);
96
+ targetMap.set(keyword, uid);
97
+ return uid;
98
+ };
99
+ for (const specifierPath of path.get("specifiers")) {
100
+ const localName = specifierPath.node.local.name;
101
+ const binding = path.scope.getBinding(localName);
102
+ if (!binding) continue;
103
+ if (specifierPath.isImportDefaultSpecifier() || specifierPath.isImportSpecifier()) {
104
+ let keyword;
105
+ if (specifierPath.isImportDefaultSpecifier()) keyword = "default";
106
+ else {
107
+ const imported = specifierPath.node.imported;
108
+ keyword = types.isIdentifier(imported) ? imported.name : imported.value;
109
+ }
110
+ const uidNode = processKeyword(keyword);
111
+ if (!uidNode) continue;
112
+ for (const refPath of binding.referencePaths) {
113
+ if (isPureTypeSpace(refPath)) continue;
114
+ if (refPath.isJSXIdentifier()) refPath.replaceWith(types.jsxIdentifier(uidNode.name));
115
+ else refPath.replaceWith(types.cloneNode(uidNode));
116
+ }
117
+ path.parentPath.traverse({ TSTypeQuery(tsPath) {
118
+ if (types.isIdentifier(tsPath.node.exprName) && tsPath.node.exprName.name === localName && tsPath.scope.getBinding(localName) === binding) tsPath.get("exprName").replaceWith(types.cloneNode(uidNode));
119
+ } });
120
+ } else if (specifierPath.isImportNamespaceSpecifier()) {
121
+ for (const refPath of binding.referencePaths) {
122
+ if (isPureTypeSpace(refPath)) continue;
123
+ const parentPath = refPath.parentPath;
124
+ if (!parentPath) continue;
125
+ if (parentPath.isMemberExpression() && parentPath.node.object === refPath.node) {
126
+ const propNode = parentPath.node.property;
127
+ let keyword;
128
+ if (!parentPath.node.computed && types.isIdentifier(propNode)) keyword = propNode.name;
129
+ else if (parentPath.node.computed && types.isStringLiteral(propNode)) keyword = propNode.value;
130
+ if (keyword) {
131
+ const uidNode = processKeyword(keyword);
132
+ if (uidNode) parentPath.replaceWith(types.cloneNode(uidNode));
133
+ }
134
+ } else if (parentPath.isJSXMemberExpression() && parentPath.node.object === refPath.node) {
135
+ const keyword = parentPath.node.property.name;
136
+ const uidNode = processKeyword(keyword);
137
+ if (uidNode) parentPath.replaceWith(types.jsxIdentifier(uidNode.name));
138
+ }
139
+ }
140
+ path.parentPath.traverse({
141
+ TSTypeQuery(tsPath) {
142
+ const expr = tsPath.node.exprName;
143
+ if (types.isTSQualifiedName(expr) && types.isIdentifier(expr.left) && expr.left.name === localName && tsPath.scope.getBinding(localName) === binding) {
144
+ const keyword = expr.right.name;
145
+ const uidNode = processKeyword(keyword);
146
+ if (uidNode) tsPath.get("exprName").replaceWith(types.cloneNode(uidNode));
147
+ }
148
+ },
149
+ TSIndexedAccessType(tsPath) {
150
+ let objPath = tsPath.get("objectType");
151
+ while (objPath.isTSParenthesizedType()) objPath = objPath.get("typeAnnotation");
152
+ if (objPath.isTSTypeQuery() && types.isIdentifier(objPath.node.exprName) && objPath.node.exprName.name === localName && tsPath.scope.getBinding(localName) === binding) {
153
+ const indexNode = tsPath.node.indexType;
154
+ if (types.isTSLiteralType(indexNode) && types.isStringLiteral(indexNode.literal)) {
155
+ const keyword = indexNode.literal.value;
156
+ const uidNode = processKeyword(keyword);
157
+ if (uidNode) tsPath.replaceWith(types.tsTypeQuery(types.cloneNode(uidNode)));
158
+ }
159
+ }
160
+ }
161
+ });
162
+ }
163
+ }
164
+ if (mode === "transform") path.remove();
165
+ },
166
+ ExportNamedDeclaration(path, state) {
167
+ const sourceValue = path.node.source?.value;
168
+ if (sourceValue !== "virtual:keywords" && sourceValue !== "virtual:keywords/local") return;
169
+ const targetSet = sourceValue === "virtual:keywords/local" ? state.keywords.local : state.keywords.main;
170
+ if (mode === "extract") {
171
+ for (const specifierPath of path.get("specifiers")) if (specifierPath.isExportSpecifier()) {
172
+ const local = specifierPath.node.local;
173
+ const keyword = types.isIdentifier(local) ? local.name : local.value;
174
+ targetSet.add(keyword);
175
+ }
176
+ return;
177
+ }
178
+ const newExports = path.get("specifiers").map((specifierPath) => {
179
+ if (specifierPath.isExportSpecifier()) {
180
+ const local = specifierPath.node.local;
181
+ const keyword = types.isIdentifier(local) ? local.name : local.value;
182
+ targetSet.add(keyword);
183
+ const encoded = encodeIdentifier(keyword);
184
+ return types.exportNamedDeclaration(null, [types.exportSpecifier(types.identifier("default"), specifierPath.node.exported)], types.stringLiteral(`${sourceValue}/_/${encoded}`));
185
+ }
186
+ return null;
187
+ }).filter((node) => node !== null);
188
+ if (newExports.length > 0) path.replaceWithMultiple(newExports);
189
+ else path.remove();
190
+ }
191
+ }
192
+ };
193
+ };
194
+ const transformCode = (code, id) => {
195
+ if (!code.includes("virtual:keywords") && !code.includes("virtual:keywords/local")) return null;
196
+ const result = transformSync(code, {
197
+ babelrc: false,
198
+ configFile: false,
199
+ filename: id,
200
+ sourceMaps: true,
201
+ ast: false,
202
+ plugins: [transformPlugin("transform")],
203
+ parserOpts: { plugins: ["jsx", "typescript"] }
204
+ });
205
+ if (!result) return null;
206
+ const metadata = result.metadata;
207
+ const keywords = {
208
+ main: new Set(metadata?.keywords?.main ?? []),
209
+ local: new Set(metadata?.keywords?.local ?? [])
210
+ };
211
+ return {
212
+ code: result.code ?? "",
213
+ map: result.map ?? null,
214
+ keywords
215
+ };
216
+ };
217
+ const extractKeywords = (code) => {
218
+ if (!code.includes("virtual:keywords") && !code.includes("virtual:keywords/local")) return null;
219
+ let result;
220
+ try {
221
+ result = transformSync(code, {
222
+ babelrc: false,
223
+ configFile: false,
224
+ sourceMaps: false,
225
+ ast: false,
226
+ code: false,
227
+ plugins: [transformPlugin("extract")],
228
+ parserOpts: {
229
+ plugins: ["jsx", "typescript"],
230
+ errorRecovery: true
231
+ }
232
+ });
233
+ } catch {
234
+ return null;
235
+ }
236
+ if (!result) return null;
237
+ const metadata = result.metadata;
238
+ return {
239
+ main: new Set(metadata?.keywords?.main ?? []),
240
+ local: new Set(metadata?.keywords?.local ?? [])
241
+ };
242
+ };
243
+ //#endregion
244
+ //#region src/internal/typegen.ts
245
+ const generateTypeDeclaration = (keywords, isLocal = false) => {
246
+ const sortedKeywords = Array.from(keywords).sort();
247
+ const content = [];
248
+ for (const keyword of sortedKeywords) {
249
+ const safeName = toSafeVarName(encodeIdentifier(keyword));
250
+ const value = `${isLocal ? "==" : "*".repeat(7)}.${keyword}`;
251
+ content.push(`declare const ${safeName}: ${JSON.stringify(value)};`);
252
+ }
253
+ content.push("");
254
+ content.push("export {");
255
+ for (const keyword of sortedKeywords) {
256
+ const safeName = toSafeVarName(encodeIdentifier(keyword));
257
+ content.push(` ${safeName} as ${JSON.stringify(keyword)},`);
258
+ }
259
+ content.push("};");
260
+ content.push("");
261
+ return content.join("\n");
262
+ };
263
+ //#endregion
264
+ //#region src/internal/cli.ts
265
+ const collectKeywordsFromRoot = async (root, silent, ignoredDirs = [], concurrency = 100) => {
266
+ const collectedKeywords = {
267
+ main: /* @__PURE__ */ new Set(),
268
+ local: /* @__PURE__ */ new Set()
269
+ };
270
+ const start = performance.now();
271
+ if (!silent) console.error("Scanning project files for keywords...");
272
+ const files = await globby("**/*.{js,ts,mjs,mts,jsx,tsx,mjsx,mtsx}", {
273
+ cwd: root,
274
+ absolute: false,
275
+ ignore: ["**/node_modules/**", ...ignoredDirs.map((dir) => `${dir}/**`)],
276
+ gitignore: true
277
+ });
278
+ let processed = 0;
279
+ await pLimit({ concurrency }).map(files, async (file) => {
280
+ try {
281
+ const keywords = extractKeywords(await readFile(file, "utf-8"));
282
+ if (!keywords) return;
283
+ for (const keyword of keywords.main) collectedKeywords.main.add(keyword);
284
+ for (const keyword of keywords.local) collectedKeywords.local.add(keyword);
285
+ processed++;
286
+ } catch (error) {
287
+ if (!silent) console.error(`Failed to process ${file}: ${error}`);
288
+ }
289
+ });
290
+ const elapsed = performance.now() - start;
291
+ if (!silent) console.error(`Scan complete: ${processed}/${files.length} files, ${collectedKeywords.main.size} main, ${collectedKeywords.local.size} local keywords (${elapsed.toFixed(2)}ms).`);
292
+ return collectedKeywords;
293
+ };
294
+ const createRunner = (options) => {
295
+ const { root = process.cwd(), silent = false, outDir = path.join("node_modules", ".keywords") } = options ?? {};
296
+ return {
297
+ async collect() {
298
+ return collectKeywordsFromRoot(root, silent);
299
+ },
300
+ async save(keywords) {
301
+ const content = generateTypeDeclaration(keywords.main);
302
+ const localContent = generateTypeDeclaration(keywords.local, true);
303
+ const outPath = path.join(root, outDir);
304
+ await mkdir(outPath, { recursive: true });
305
+ await writeFile(path.join(outPath, "index.d.ts"), `${content.trim()}\n`);
306
+ await writeFile(path.join(outPath, "local.d.ts"), `${localContent.trim()}\n`);
307
+ },
308
+ async run() {
309
+ const keywords = await this.collect();
310
+ await this.save(keywords);
311
+ }
312
+ };
313
+ };
314
+ //#endregion
315
+ //#region src/internal/hash.ts
316
+ const ALPHA_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
317
+ const DIGIT_CHARS = "0123456789";
318
+ const BASE62_CHARS = ALPHA_CHARS + DIGIT_CHARS;
319
+ const ALPHA_LEN = BigInt(52);
320
+ const DIGIT_LEN = BigInt(10);
321
+ const BASE62_LEN = BigInt(BASE62_CHARS.length);
322
+ const createHasher = (secret) => {
323
+ const base62TailLength = 5;
324
+ if (base62TailLength < 0 || base62TailLength > 9) throw new Error("Invalid MAX_HASH_LENGTH");
325
+ const cache = /* @__PURE__ */ new Map();
326
+ return (input) => {
327
+ if (cache.has(input)) return cache.get(input);
328
+ let entropy = createHmac("sha256", secret).update(input).digest().readBigUInt64BE(0);
329
+ let result = "";
330
+ result += ALPHA_CHARS[Number(entropy % ALPHA_LEN)];
331
+ entropy /= ALPHA_LEN;
332
+ result += DIGIT_CHARS[Number(entropy % DIGIT_LEN)];
333
+ entropy /= DIGIT_LEN;
334
+ for (let i = 0; i < base62TailLength; i++) {
335
+ result += BASE62_CHARS[Number(entropy % BASE62_LEN)];
336
+ entropy /= BASE62_LEN;
337
+ }
338
+ cache.set(input, result);
339
+ return result;
340
+ };
341
+ };
342
+ const createCounter = () => {
343
+ let index = 0;
344
+ return () => {
345
+ let result = "_";
346
+ result += ALPHA_CHARS[index % 52];
347
+ let remain = Math.floor(index / 52);
348
+ while (remain > 0) {
349
+ remain--;
350
+ result += BASE62_CHARS[remain % BASE62_CHARS.length];
351
+ remain = Math.floor(remain / BASE62_CHARS.length);
352
+ }
353
+ index++;
354
+ return result;
355
+ };
356
+ };
357
+ //#endregion
358
+ export { transformCode as a, VIRTUAL_LOCAL_MODULE_ID as c, extractKeywords as i, VIRTUAL_MODULE_ID as l, createHasher as n, encodeIdentifier as o, createRunner as r, PLUGIN_NAME as s, createCounter as t };
359
+
360
+ //# sourceMappingURL=hash-DMsO2VqQ.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"hash-DMsO2VqQ.js","names":["t"],"sources":["../src/internal/constants.ts","../src/internal/encode.ts","../src/internal/transform.ts","../src/internal/typegen.ts","../src/internal/cli.ts","../src/internal/hash.ts"],"sourcesContent":["export const VIRTUAL_MODULE_ID = 'virtual:keywords';\nexport const VIRTUAL_LOCAL_MODULE_ID = 'virtual:keywords/local';\n\nexport const PLUGIN_NAME = 'unplugin-keywords';\n\nexport const HASH_LENGTH = 7;\n\nexport const KEYWORD_ROUTE_SEGMENT = '_';\n\n// URL-safe so that K.abc can be used in URL\nexport const DEBUG_SEPARATOR = '.';\n","declare const __encoded__: unique symbol;\ntype Encoded = string & { [__encoded__]: never };\n\nexport const encodeIdentifier = (identifier: string): Encoded => {\n let encoded = '';\n for (let i = 0; i < identifier.length; i++) {\n const c = identifier[i] as string;\n if (/[a-zA-Z0-9_]/.test(c)) {\n encoded += c;\n } else if (c === '$') {\n encoded += '$$';\n } else {\n encoded += `$${c.charCodeAt(0).toString(16).padStart(4, '0')}`;\n }\n }\n return encoded as Encoded;\n};\n\nexport const toSafeVarName = (encoded: Encoded): string => `_$${encoded}`;\n","import {\n type BabelFileResult,\n type NodePath,\n type PluginObj,\n type PluginPass,\n types as t,\n transformSync,\n} from '@babel/core';\nimport {\n KEYWORD_ROUTE_SEGMENT,\n PLUGIN_NAME,\n VIRTUAL_LOCAL_MODULE_ID,\n VIRTUAL_MODULE_ID,\n} from './constants.js';\nimport { encodeIdentifier, toSafeVarName } from './encode.js';\n\nexport interface KeywordSet {\n main: Set<string>;\n local: Set<string>;\n}\n\nconst isPureTypeSpace = (path: NodePath): boolean => {\n let current: NodePath | null = path;\n while (current) {\n const parent = current.parentPath;\n if (!parent) {\n break;\n }\n // 1. Value crossings via `typeof`\n if (parent.isTSTypeQuery()) {\n return false;\n }\n // 2. 
Computed keys (e.g., interface I { [Abc]: string })\n if ('computed' in parent.node && parent.node.computed) {\n if (current.key === 'key' || current.key === 'property') {\n return false;\n }\n }\n // 3-A. Definitive Type Contexts\n if (\n parent.isTSType() ||\n parent.isTSTypeParameterDeclaration() ||\n parent.isTSTypeParameterInstantiation() ||\n parent.isTSExpressionWithTypeArguments()\n ) {\n return true;\n }\n // 3-B. Type Declaration Identifiers (e.g., interface Abc {}, type Abc = {})\n if (\n parent.isTSInterfaceDeclaration() ||\n parent.isTSTypeAliasDeclaration() ||\n parent.isTSEnumDeclaration() ||\n parent.isTSModuleDeclaration()\n ) {\n if (current.key === 'id') {\n return true;\n }\n }\n // 4. Continue up structural TS nodes (A.B.C)\n if (parent.isTSQualifiedName() || parent.isTSEntityName()) {\n current = current.parentPath;\n continue;\n }\n // 5. If we reach standard JS statements/expressions, it implies Value Space.\n if (parent.isExpression() || parent.isStatement()) {\n break;\n }\n current = current.parentPath;\n }\n return false;\n};\n\ninterface TransformState extends PluginPass {\n keywords: KeywordSet;\n keywordUids: {\n main: Map<string, t.Identifier>;\n local: Map<string, t.Identifier>;\n };\n}\n\ninterface TransformMetadata {\n keywords?: { main: string[]; local: string[] };\n}\n\nconst transformPlugin = (\n mode: 'extract' | 'transform',\n): PluginObj<TransformState> => {\n return {\n name: `${PLUGIN_NAME}:${mode}`,\n\n visitor: {\n Program: {\n enter(_, state) {\n state.keywords = { main: new Set(), local: new Set() };\n state.keywordUids = { main: new Map(), local: new Map() };\n },\n\n exit(path, state) {\n const metadata = state.file.metadata as TransformMetadata;\n metadata.keywords = {\n main: Array.from(state.keywords.main),\n local: Array.from(state.keywords.local),\n };\n\n if (mode === 'transform') {\n const newImports = [];\n for (const [keyword, safeId] of state.keywordUids.main.entries()) {\n const encoded = 
encodeIdentifier(keyword);\n newImports.push(\n t.importDeclaration(\n [t.importDefaultSpecifier(safeId)],\n t.stringLiteral(\n `${VIRTUAL_MODULE_ID}/${KEYWORD_ROUTE_SEGMENT}/${encoded}`,\n ),\n ),\n );\n }\n for (const [keyword, safeId] of state.keywordUids.local.entries()) {\n const encoded = encodeIdentifier(keyword);\n newImports.push(\n t.importDeclaration(\n [t.importDefaultSpecifier(safeId)],\n t.stringLiteral(\n `${VIRTUAL_LOCAL_MODULE_ID}/${KEYWORD_ROUTE_SEGMENT}/${encoded}`,\n ),\n ),\n );\n }\n if (newImports.length > 0) {\n path.unshiftContainer('body', newImports);\n }\n }\n },\n },\n\n ImportDeclaration(path, state) {\n const sourceValue = path.node.source.value;\n if (\n sourceValue !== VIRTUAL_MODULE_ID &&\n sourceValue !== VIRTUAL_LOCAL_MODULE_ID\n ) {\n return;\n }\n const isLocal = sourceValue === VIRTUAL_LOCAL_MODULE_ID;\n const targetSet = isLocal ? state.keywords.local : state.keywords.main;\n const targetMap = isLocal\n ? state.keywordUids.local\n : state.keywordUids.main;\n\n const programScope = path.scope.getProgramParent();\n const processKeyword = (keyword: string): t.Identifier | null => {\n targetSet.add(keyword);\n if (mode === 'extract') {\n return null;\n }\n if (targetMap.has(keyword)) {\n return targetMap.get(keyword) as t.Identifier;\n }\n const encoded = encodeIdentifier(keyword);\n const safeName = toSafeVarName(encoded);\n const uid = programScope.generateUidIdentifier(safeName);\n targetMap.set(keyword, uid);\n return uid;\n };\n\n for (const specifierPath of path.get('specifiers')) {\n const localName = specifierPath.node.local.name;\n const binding = path.scope.getBinding(localName);\n if (!binding) {\n continue;\n }\n\n // Case A: Default & Named Imports\n if (\n specifierPath.isImportDefaultSpecifier() ||\n specifierPath.isImportSpecifier()\n ) {\n let keyword: string;\n if (specifierPath.isImportDefaultSpecifier()) {\n keyword = 'default';\n } else {\n const imported = specifierPath.node.imported;\n keyword = 
t.isIdentifier(imported)\n ? imported.name\n : imported.value;\n }\n const uidNode = processKeyword(keyword);\n if (!uidNode) {\n continue;\n }\n\n // 1) Fast Path: Values & JSX\n for (const refPath of binding.referencePaths) {\n if (isPureTypeSpace(refPath)) {\n continue;\n }\n if (refPath.isJSXIdentifier()) {\n refPath.replaceWith(t.jsxIdentifier(uidNode.name));\n } else {\n refPath.replaceWith(t.cloneNode(uidNode));\n }\n }\n\n // 2) Slow Path: TS Types\n // NOTE: Can be skipped due to type erasure, but for consistency\n path.parentPath.traverse({\n // e.g., type T = typeof abc;\n TSTypeQuery(tsPath) {\n if (\n t.isIdentifier(tsPath.node.exprName) &&\n tsPath.node.exprName.name === localName &&\n tsPath.scope.getBinding(localName) === binding\n ) {\n tsPath.get('exprName').replaceWith(t.cloneNode(uidNode));\n }\n },\n });\n }\n\n // Case B: Namespace Imports\n else if (specifierPath.isImportNamespaceSpecifier()) {\n // 1) Fast Path: JS Values & JSX accesses\n for (const refPath of binding.referencePaths) {\n if (isPureTypeSpace(refPath)) {\n continue;\n }\n const parentPath = refPath.parentPath;\n if (!parentPath) {\n continue;\n }\n if (\n parentPath.isMemberExpression() &&\n parentPath.node.object === refPath.node\n ) {\n const propNode = parentPath.node.property;\n let keyword: string | undefined;\n if (!parentPath.node.computed && t.isIdentifier(propNode)) {\n keyword = propNode.name;\n } else if (\n parentPath.node.computed &&\n t.isStringLiteral(propNode)\n ) {\n keyword = propNode.value;\n }\n if (keyword) {\n const uidNode = processKeyword(keyword);\n if (uidNode) {\n parentPath.replaceWith(t.cloneNode(uidNode));\n }\n }\n } else if (\n parentPath.isJSXMemberExpression() &&\n parentPath.node.object === refPath.node\n ) {\n const keyword = parentPath.node.property.name;\n const uidNode = processKeyword(keyword);\n if (uidNode) {\n parentPath.replaceWith(t.jsxIdentifier(uidNode.name));\n }\n }\n }\n\n // 2) Slow Path: TS Namespace Types\n 
path.parentPath.traverse({\n // e.g., type T = typeof A.abc;\n TSTypeQuery(tsPath) {\n const expr = tsPath.node.exprName;\n if (\n t.isTSQualifiedName(expr) &&\n t.isIdentifier(expr.left) &&\n expr.left.name === localName &&\n tsPath.scope.getBinding(localName) === binding\n ) {\n const keyword = expr.right.name;\n const uidNode = processKeyword(keyword);\n if (uidNode) {\n tsPath.get('exprName').replaceWith(t.cloneNode(uidNode));\n }\n }\n },\n\n // e.g., type T = ((typeof A))['prop'];\n TSIndexedAccessType(tsPath) {\n let objPath = tsPath.get('objectType') as NodePath;\n // Unpack highly nested parentheses gracefully\n while (objPath.isTSParenthesizedType()) {\n objPath = objPath.get('typeAnnotation') as NodePath;\n }\n if (\n objPath.isTSTypeQuery() &&\n t.isIdentifier(objPath.node.exprName) &&\n objPath.node.exprName.name === localName &&\n tsPath.scope.getBinding(localName) === binding\n ) {\n const indexNode = tsPath.node.indexType;\n if (\n t.isTSLiteralType(indexNode) &&\n t.isStringLiteral(indexNode.literal)\n ) {\n const keyword = indexNode.literal.value;\n const uidNode = processKeyword(keyword);\n if (uidNode) {\n tsPath.replaceWith(t.tsTypeQuery(t.cloneNode(uidNode)));\n }\n }\n }\n },\n });\n }\n }\n\n if (mode === 'transform') {\n path.remove();\n }\n },\n\n ExportNamedDeclaration(path, state) {\n const sourceValue = path.node.source?.value;\n if (\n sourceValue !== VIRTUAL_MODULE_ID &&\n sourceValue !== VIRTUAL_LOCAL_MODULE_ID\n ) {\n return;\n }\n const isLocal = sourceValue === VIRTUAL_LOCAL_MODULE_ID;\n const targetSet = isLocal ? state.keywords.local : state.keywords.main;\n\n if (mode === 'extract') {\n for (const specifierPath of path.get('specifiers')) {\n if (specifierPath.isExportSpecifier()) {\n const local = specifierPath.node.local as\n | t.Identifier\n | t.StringLiteral; // local can be a StringLiteral in ES2022\n const keyword = t.isIdentifier(local) ? 
local.name : local.value;\n targetSet.add(keyword);\n }\n }\n return;\n }\n\n const newExports = path\n .get('specifiers')\n .map((specifierPath) => {\n if (specifierPath.isExportSpecifier()) {\n const local = specifierPath.node.local as\n | t.Identifier\n | t.StringLiteral; // local can be a StringLiteral in ES2022\n const keyword = t.isIdentifier(local) ? local.name : local.value;\n targetSet.add(keyword);\n const encoded = encodeIdentifier(keyword);\n return t.exportNamedDeclaration(\n null,\n [\n t.exportSpecifier(\n t.identifier('default'),\n specifierPath.node.exported,\n ),\n ],\n t.stringLiteral(\n `${sourceValue}/${KEYWORD_ROUTE_SEGMENT}/${encoded}`,\n ),\n );\n }\n return null;\n })\n .filter((node): node is t.ExportNamedDeclaration => node !== null);\n\n if (newExports.length > 0) {\n path.replaceWithMultiple(newExports);\n } else {\n path.remove();\n }\n },\n },\n };\n};\n\nexport const transformCode = (\n code: string,\n id: string,\n): {\n code: string;\n map: NonNullable<BabelFileResult['map']> | null;\n keywords: KeywordSet;\n} | null => {\n if (\n !code.includes(VIRTUAL_MODULE_ID) &&\n !code.includes(VIRTUAL_LOCAL_MODULE_ID)\n ) {\n return null;\n }\n const result = transformSync(code, {\n babelrc: false,\n configFile: false,\n filename: id,\n sourceMaps: true,\n ast: false,\n plugins: [transformPlugin('transform')],\n parserOpts: {\n plugins: ['jsx', 'typescript'],\n },\n });\n if (!result) {\n return null;\n }\n const metadata = result.metadata as TransformMetadata | undefined;\n const keywords: KeywordSet = {\n main: new Set(metadata?.keywords?.main ?? []),\n local: new Set(metadata?.keywords?.local ?? []),\n };\n return {\n code: result.code ?? '',\n map: result.map ?? 
null,\n keywords,\n };\n};\n\nexport const extractKeywords = (code: string): KeywordSet | null => {\n if (\n !code.includes(VIRTUAL_MODULE_ID) &&\n !code.includes(VIRTUAL_LOCAL_MODULE_ID)\n ) {\n return null;\n }\n let result: BabelFileResult | null;\n try {\n result = transformSync(code, {\n babelrc: false,\n configFile: false,\n sourceMaps: false,\n ast: false,\n code: false,\n plugins: [transformPlugin('extract')],\n parserOpts: {\n plugins: ['jsx', 'typescript'],\n errorRecovery: true,\n },\n });\n } catch {\n return null;\n }\n if (!result) {\n return null;\n }\n const metadata = result.metadata as TransformMetadata | undefined;\n return {\n main: new Set(metadata?.keywords?.main ?? []),\n local: new Set(metadata?.keywords?.local ?? []),\n };\n};\n","import { DEBUG_SEPARATOR, HASH_LENGTH } from './constants.js';\nimport { encodeIdentifier, toSafeVarName } from './encode.js';\n\nexport const generateTypeDeclaration = (\n keywords: Set<string>,\n isLocal: boolean = false,\n): string => {\n const sortedKeywords = Array.from(keywords).sort();\n const content = [];\n // content.push(\n // 'type Keyword<K extends string, V extends string> = V & { readonly __keyword__: K };',\n // );\n // content.push('');\n\n for (const keyword of sortedKeywords) {\n const encoded = encodeIdentifier(keyword);\n const safeName = toSafeVarName(encoded);\n const hash = isLocal ? 
'==' : '*'.repeat(HASH_LENGTH);\n const value = `${hash}${DEBUG_SEPARATOR}${keyword}`;\n // content.push(\n // `declare const ${safeName}: Keyword<${JSON.stringify(keyword)}, ${JSON.stringify(value)}>;`,\n // );\n content.push(`declare const ${safeName}: ${JSON.stringify(value)};`);\n }\n content.push('');\n\n content.push('export {');\n for (const keyword of sortedKeywords) {\n const encoded = encodeIdentifier(keyword);\n const safeName = toSafeVarName(encoded);\n content.push(` ${safeName} as ${JSON.stringify(keyword)},`);\n }\n content.push('};');\n content.push('');\n\n return content.join('\\n');\n};\n","import { mkdir, readFile, writeFile } from 'node:fs/promises';\nimport * as path from 'node:path';\nimport { globby } from 'globby';\nimport pLimit from 'p-limit';\nimport { extractKeywords, type KeywordSet } from './transform.js';\nimport { generateTypeDeclaration } from './typegen.js';\n\nconst collectKeywordsFromRoot = async (\n root: string,\n silent: boolean,\n ignoredDirs: string[] = [],\n concurrency: number = 100,\n): Promise<KeywordSet> => {\n const collectedKeywords: KeywordSet = { main: new Set(), local: new Set() };\n\n const start = performance.now();\n if (!silent) {\n console.error('Scanning project files for keywords...');\n }\n\n const files = await globby('**/*.{js,ts,mjs,mts,jsx,tsx,mjsx,mtsx}', {\n cwd: root,\n absolute: false,\n ignore: ['**/node_modules/**', ...ignoredDirs.map((dir) => `${dir}/**`)],\n gitignore: true,\n });\n\n let processed = 0;\n const limit = pLimit({ concurrency });\n await limit.map(files, async (file) => {\n try {\n const code = await readFile(file, 'utf-8');\n const keywords = extractKeywords(code);\n if (!keywords) {\n return;\n }\n for (const keyword of keywords.main) {\n collectedKeywords.main.add(keyword);\n }\n for (const keyword of keywords.local) {\n collectedKeywords.local.add(keyword);\n }\n processed++;\n } catch (error) {\n if (!silent) {\n console.error(`Failed to process ${file}: ${error}`);\n }\n }\n 
});\n\n const elapsed = performance.now() - start;\n if (!silent) {\n console.error(\n `Scan complete: ${processed}/${files.length} files, ${collectedKeywords.main.size} main, ${collectedKeywords.local.size} local keywords (${elapsed.toFixed(2)}ms).`,\n );\n }\n\n return collectedKeywords;\n};\n\ninterface RunnerOptions {\n root: string;\n silent: boolean;\n outDir: string;\n}\n\nexport const createRunner = (options?: Partial<RunnerOptions>) => {\n const {\n root = process.cwd(),\n silent = false,\n outDir = path.join('node_modules', '.keywords'),\n } = options ?? {};\n return {\n async collect(): Promise<KeywordSet> {\n return collectKeywordsFromRoot(root, silent);\n },\n\n async save(keywords: KeywordSet): Promise<void> {\n const content = generateTypeDeclaration(keywords.main);\n const localContent = generateTypeDeclaration(keywords.local, true);\n const outPath = path.join(root, outDir);\n await mkdir(outPath, { recursive: true });\n await writeFile(path.join(outPath, 'index.d.ts'), `${content.trim()}\\n`);\n await writeFile(\n path.join(outPath, 'local.d.ts'),\n `${localContent.trim()}\\n`,\n );\n },\n\n async run(): Promise<void> {\n const keywords = await this.collect();\n await this.save(keywords);\n },\n };\n};\n","import { createHmac } from 'node:crypto';\nimport { HASH_LENGTH } from './constants.js';\n\nconst ALPHA_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';\nconst DIGIT_CHARS = '0123456789';\nconst BASE62_CHARS = ALPHA_CHARS + DIGIT_CHARS;\n\nconst ALPHA_LEN = BigInt(ALPHA_CHARS.length); // 52n\nconst DIGIT_LEN = BigInt(DIGIT_CHARS.length); // 10n\nconst BASE62_LEN = BigInt(BASE62_CHARS.length); // 62n\n\nexport type Hasher = (input: string) => string;\n\n// Format: [1 Alpha] + [1 Digit] + [N Base62]\n// Avoids any collisions with standard JS API identifiers\nexport const createHasher = (secret: string): Hasher => {\n const base62TailLength = HASH_LENGTH - 2;\n if (base62TailLength < 0 || base62TailLength > 9) {\n // 520 * 62^9 < 
2^64 < 520 * 62^10\n throw new Error('Invalid MAX_HASH_LENGTH');\n }\n\n const cache = new Map<string, string>();\n return (input) => {\n if (cache.has(input)) {\n return cache.get(input) as string;\n }\n\n const hasher = createHmac('sha256', secret);\n const buffer = hasher.update(input).digest();\n\n let entropy = buffer.readBigUInt64BE(0);\n let result = '';\n\n result += ALPHA_CHARS[Number(entropy % ALPHA_LEN)];\n entropy /= ALPHA_LEN;\n result += DIGIT_CHARS[Number(entropy % DIGIT_LEN)];\n entropy /= DIGIT_LEN;\n for (let i = 0; i < base62TailLength; i++) {\n result += BASE62_CHARS[Number(entropy % BASE62_LEN)];\n entropy /= BASE62_LEN;\n }\n\n cache.set(input, result);\n return result;\n };\n};\n\nexport const createCounter = (): Hasher => {\n let index = 0;\n return () => {\n let result = '_';\n\n result += ALPHA_CHARS[index % ALPHA_CHARS.length];\n let remain = Math.floor(index / ALPHA_CHARS.length);\n while (remain > 0) {\n remain--;\n result += BASE62_CHARS[remain % BASE62_CHARS.length];\n remain = Math.floor(remain / BASE62_CHARS.length);\n }\n\n index++;\n return result;\n 
};\n};\n"],"mappings":";;;;;;;AAAA,MAAa,oBAAoB;AACjC,MAAa,0BAA0B;AAEvC,MAAa,cAAc;;;ACA3B,MAAa,oBAAoB,eAAgC;CAC/D,IAAI,UAAU;CACd,KAAK,IAAI,IAAI,GAAG,IAAI,WAAW,QAAQ,KAAK;EAC1C,MAAM,IAAI,WAAW;EACrB,IAAI,eAAe,KAAK,EAAE,EACxB,WAAW;OACN,IAAI,MAAM,KACf,WAAW;OAEX,WAAW,IAAI,EAAE,WAAW,EAAE,CAAC,SAAS,GAAG,CAAC,SAAS,GAAG,IAAI;;CAGhE,OAAO;;AAGT,MAAa,iBAAiB,YAA6B,KAAK;;;ACGhE,MAAM,mBAAmB,SAA4B;CACnD,IAAI,UAA2B;CAC/B,OAAO,SAAS;EACd,MAAM,SAAS,QAAQ;EACvB,IAAI,CAAC,QACH;EAGF,IAAI,OAAO,eAAe,EACxB,OAAO;EAGT,IAAI,cAAc,OAAO,QAAQ,OAAO,KAAK;OACvC,QAAQ,QAAQ,SAAS,QAAQ,QAAQ,YAC3C,OAAO;;EAIX,IACE,OAAO,UAAU,IACjB,OAAO,8BAA8B,IACrC,OAAO,gCAAgC,IACvC,OAAO,iCAAiC,EAExC,OAAO;EAGT,IACE,OAAO,0BAA0B,IACjC,OAAO,0BAA0B,IACjC,OAAO,qBAAqB,IAC5B,OAAO,uBAAuB;OAE1B,QAAQ,QAAQ,MAClB,OAAO;;EAIX,IAAI,OAAO,mBAAmB,IAAI,OAAO,gBAAgB,EAAE;GACzD,UAAU,QAAQ;GAClB;;EAGF,IAAI,OAAO,cAAc,IAAI,OAAO,aAAa,EAC/C;EAEF,UAAU,QAAQ;;CAEpB,OAAO;;AAeT,MAAM,mBACJ,SAC8B;CAC9B,OAAO;EACL,MAAM,GAAG,YAAY,GAAG;EAExB,SAAS;GACP,SAAS;IACP,MAAM,GAAG,OAAO;KACd,MAAM,WAAW;MAAE,sBAAM,IAAI,KAAK;MAAE,uBAAO,IAAI,KAAK;MAAE;KACtD,MAAM,cAAc;MAAE,sBAAM,IAAI,KAAK;MAAE,uBAAO,IAAI,KAAK;MAAE;;IAG3D,KAAK,MAAM,OAAO;KAChB,MAAM,WAAW,MAAM,KAAK;KAC5B,SAAS,WAAW;MAClB,MAAM,MAAM,KAAK,MAAM,SAAS,KAAK;MACrC,OAAO,MAAM,KAAK,MAAM,SAAS,MAAM;MACxC;KAED,IAAI,SAAS,aAAa;MACxB,MAAM,aAAa,EAAE;MACrB,KAAK,MAAM,CAAC,SAAS,WAAW,MAAM,YAAY,KAAK,SAAS,EAAE;OAChE,MAAM,UAAU,iBAAiB,QAAQ;OACzC,WAAW,KACTA,MAAE,kBACA,CAACA,MAAE,uBAAuB,OAAO,CAAC,EAClCA,MAAE,cACA,GAAG,kBAAkB,KAA4B,UAClD,CACF,CACF;;MAEH,KAAK,MAAM,CAAC,SAAS,WAAW,MAAM,YAAY,MAAM,SAAS,EAAE;OACjE,MAAM,UAAU,iBAAiB,QAAQ;OACzC,WAAW,KACTA,MAAE,kBACA,CAACA,MAAE,uBAAuB,OAAO,CAAC,EAClCA,MAAE,cACA,GAAG,wBAAwB,KAA4B,UACxD,CACF,CACF;;MAEH,IAAI,WAAW,SAAS,GACtB,KAAK,iBAAiB,QAAQ,WAAW;;;IAIhD;GAED,kBAAkB,MAAM,OAAO;IAC7B,MAAM,cAAc,KAAK,KAAK,OAAO;IACrC,IACE,gBAAA,sBACA,gBAAA,0BAEA;IAEF,MAAM,UAAU,gBAAgB;IAChC,MAAM,YAAY,UAAU,MAAM,SAAS,QAAQ,MAAM,SAAS;IAClE,MAAM,YAAY,UACd,MAAM,YAAY,QAClB,MAAM,YAAY;IAEtB,MAAM,eAAe,KAAK,MAAM,kBAAkB;IAClD,MAAM,kBAAkB,YAAyC;KAC
/D,UAAU,IAAI,QAAQ;KACtB,IAAI,SAAS,WACX,OAAO;KAET,IAAI,UAAU,IAAI,QAAQ,EACxB,OAAO,UAAU,IAAI,QAAQ;KAG/B,MAAM,WAAW,cADD,iBAAiB,QACK,CAAC;KACvC,MAAM,MAAM,aAAa,sBAAsB,SAAS;KACxD,UAAU,IAAI,SAAS,IAAI;KAC3B,OAAO;;IAGT,KAAK,MAAM,iBAAiB,KAAK,IAAI,aAAa,EAAE;KAClD,MAAM,YAAY,cAAc,KAAK,MAAM;KAC3C,MAAM,UAAU,KAAK,MAAM,WAAW,UAAU;KAChD,IAAI,CAAC,SACH;KAIF,IACE,cAAc,0BAA0B,IACxC,cAAc,mBAAmB,EACjC;MACA,IAAI;MACJ,IAAI,cAAc,0BAA0B,EAC1C,UAAU;WACL;OACL,MAAM,WAAW,cAAc,KAAK;OACpC,UAAUA,MAAE,aAAa,SAAS,GAC9B,SAAS,OACT,SAAS;;MAEf,MAAM,UAAU,eAAe,QAAQ;MACvC,IAAI,CAAC,SACH;MAIF,KAAK,MAAM,WAAW,QAAQ,gBAAgB;OAC5C,IAAI,gBAAgB,QAAQ,EAC1B;OAEF,IAAI,QAAQ,iBAAiB,EAC3B,QAAQ,YAAYA,MAAE,cAAc,QAAQ,KAAK,CAAC;YAElD,QAAQ,YAAYA,MAAE,UAAU,QAAQ,CAAC;;MAM7C,KAAK,WAAW,SAAS,EAEvB,YAAY,QAAQ;OAClB,IACEA,MAAE,aAAa,OAAO,KAAK,SAAS,IACpC,OAAO,KAAK,SAAS,SAAS,aAC9B,OAAO,MAAM,WAAW,UAAU,KAAK,SAEvC,OAAO,IAAI,WAAW,CAAC,YAAYA,MAAE,UAAU,QAAQ,CAAC;SAG7D,CAAC;YAIC,IAAI,cAAc,4BAA4B,EAAE;MAEnD,KAAK,MAAM,WAAW,QAAQ,gBAAgB;OAC5C,IAAI,gBAAgB,QAAQ,EAC1B;OAEF,MAAM,aAAa,QAAQ;OAC3B,IAAI,CAAC,YACH;OAEF,IACE,WAAW,oBAAoB,IAC/B,WAAW,KAAK,WAAW,QAAQ,MACnC;QACA,MAAM,WAAW,WAAW,KAAK;QACjC,IAAI;QACJ,IAAI,CAAC,WAAW,KAAK,YAAYA,MAAE,aAAa,SAAS,EACvD,UAAU,SAAS;aACd,IACL,WAAW,KAAK,YAChBA,MAAE,gBAAgB,SAAS,EAE3B,UAAU,SAAS;QAErB,IAAI,SAAS;SACX,MAAM,UAAU,eAAe,QAAQ;SACvC,IAAI,SACF,WAAW,YAAYA,MAAE,UAAU,QAAQ,CAAC;;cAG3C,IACL,WAAW,uBAAuB,IAClC,WAAW,KAAK,WAAW,QAAQ,MACnC;QACA,MAAM,UAAU,WAAW,KAAK,SAAS;QACzC,MAAM,UAAU,eAAe,QAAQ;QACvC,IAAI,SACF,WAAW,YAAYA,MAAE,cAAc,QAAQ,KAAK,CAAC;;;MAM3D,KAAK,WAAW,SAAS;OAEvB,YAAY,QAAQ;QAClB,MAAM,OAAO,OAAO,KAAK;QACzB,IACEA,MAAE,kBAAkB,KAAK,IACzBA,MAAE,aAAa,KAAK,KAAK,IACzB,KAAK,KAAK,SAAS,aACnB,OAAO,MAAM,WAAW,UAAU,KAAK,SACvC;SACA,MAAM,UAAU,KAAK,MAAM;SAC3B,MAAM,UAAU,eAAe,QAAQ;SACvC,IAAI,SACF,OAAO,IAAI,WAAW,CAAC,YAAYA,MAAE,UAAU,QAAQ,CAAC;;;OAM9D,oBAAoB,QAAQ;QAC1B,IAAI,UAAU,OAAO,IAAI,aAAa;QAEtC,OAAO,QAAQ,uBAAuB,EACpC,UAAU,QAAQ,IAAI,iBAAiB;QAEzC,IACE,QAAQ,eAAe,IACvBA,MAAE,aAAa,QAAQ,KAAK,SAAS,IACrC,QAAQ,KAAK,SAAS,SAAS,aAC/B,OAAO,MAAM,WAAW,UAAU,KA
AK,SACvC;SACA,MAAM,YAAY,OAAO,KAAK;SAC9B,IACEA,MAAE,gBAAgB,UAAU,IAC5BA,MAAE,gBAAgB,UAAU,QAAQ,EACpC;UACA,MAAM,UAAU,UAAU,QAAQ;UAClC,MAAM,UAAU,eAAe,QAAQ;UACvC,IAAI,SACF,OAAO,YAAYA,MAAE,YAAYA,MAAE,UAAU,QAAQ,CAAC,CAAC;;;;OAKhE,CAAC;;;IAIN,IAAI,SAAS,aACX,KAAK,QAAQ;;GAIjB,uBAAuB,MAAM,OAAO;IAClC,MAAM,cAAc,KAAK,KAAK,QAAQ;IACtC,IACE,gBAAA,sBACA,gBAAA,0BAEA;IAGF,MAAM,YADU,gBAAA,2BACY,MAAM,SAAS,QAAQ,MAAM,SAAS;IAElE,IAAI,SAAS,WAAW;KACtB,KAAK,MAAM,iBAAiB,KAAK,IAAI,aAAa,EAChD,IAAI,cAAc,mBAAmB,EAAE;MACrC,MAAM,QAAQ,cAAc,KAAK;MAGjC,MAAM,UAAUA,MAAE,aAAa,MAAM,GAAG,MAAM,OAAO,MAAM;MAC3D,UAAU,IAAI,QAAQ;;KAG1B;;IAGF,MAAM,aAAa,KAChB,IAAI,aAAa,CACjB,KAAK,kBAAkB;KACtB,IAAI,cAAc,mBAAmB,EAAE;MACrC,MAAM,QAAQ,cAAc,KAAK;MAGjC,MAAM,UAAUA,MAAE,aAAa,MAAM,GAAG,MAAM,OAAO,MAAM;MAC3D,UAAU,IAAI,QAAQ;MACtB,MAAM,UAAU,iBAAiB,QAAQ;MACzC,OAAOA,MAAE,uBACP,MACA,CACEA,MAAE,gBACAA,MAAE,WAAW,UAAU,EACvB,cAAc,KAAK,SACpB,CACF,EACDA,MAAE,cACA,GAAG,YAAY,KAA4B,UAC5C,CACF;;KAEH,OAAO;MACP,CACD,QAAQ,SAA2C,SAAS,KAAK;IAEpE,IAAI,WAAW,SAAS,GACtB,KAAK,oBAAoB,WAAW;SAEpC,KAAK,QAAQ;;GAGlB;EACF;;AAGH,MAAa,iBACX,MACA,OAKU;CACV,IACE,CAAC,KAAK,SAAA,mBAA2B,IACjC,CAAC,KAAK,SAAA,yBAAiC,EAEvC,OAAO;CAET,MAAM,SAAS,cAAc,MAAM;EACjC,SAAS;EACT,YAAY;EACZ,UAAU;EACV,YAAY;EACZ,KAAK;EACL,SAAS,CAAC,gBAAgB,YAAY,CAAC;EACvC,YAAY,EACV,SAAS,CAAC,OAAO,aAAa,EAC/B;EACF,CAAC;CACF,IAAI,CAAC,QACH,OAAO;CAET,MAAM,WAAW,OAAO;CACxB,MAAM,WAAuB;EAC3B,MAAM,IAAI,IAAI,UAAU,UAAU,QAAQ,EAAE,CAAC;EAC7C,OAAO,IAAI,IAAI,UAAU,UAAU,SAAS,EAAE,CAAC;EAChD;CACD,OAAO;EACL,MAAM,OAAO,QAAQ;EACrB,KAAK,OAAO,OAAO;EACnB;EACD;;AAGH,MAAa,mBAAmB,SAAoC;CAClE,IACE,CAAC,KAAK,SAAA,mBAA2B,IACjC,CAAC,KAAK,SAAA,yBAAiC,EAEvC,OAAO;CAET,IAAI;CACJ,IAAI;EACF,SAAS,cAAc,MAAM;GAC3B,SAAS;GACT,YAAY;GACZ,YAAY;GACZ,KAAK;GACL,MAAM;GACN,SAAS,CAAC,gBAAgB,UAAU,CAAC;GACrC,YAAY;IACV,SAAS,CAAC,OAAO,aAAa;IAC9B,eAAe;IAChB;GACF,CAAC;SACI;EACN,OAAO;;CAET,IAAI,CAAC,QACH,OAAO;CAET,MAAM,WAAW,OAAO;CACxB,OAAO;EACL,MAAM,IAAI,IAAI,UAAU,UAAU,QAAQ,EAAE,CAAC;EAC7C,OAAO,IAAI,IAAI,UAAU,UAAU,SAAS,EAAE,CAAC;EAChD;;;;AC7bH,MAAa,2BACX,UACA,U
AAmB,UACR;CACX,MAAM,iBAAiB,MAAM,KAAK,SAAS,CAAC,MAAM;CAClD,MAAM,UAAU,EAAE;CAMlB,KAAK,MAAM,WAAW,gBAAgB;EAEpC,MAAM,WAAW,cADD,iBAAiB,QACK,CAAC;EAEvC,MAAM,QAAQ,GADD,UAAU,OAAO,IAAI,OAAA,EAAmB,IACX;EAI1C,QAAQ,KAAK,iBAAiB,SAAS,IAAI,KAAK,UAAU,MAAM,CAAC,GAAG;;CAEtE,QAAQ,KAAK,GAAG;CAEhB,QAAQ,KAAK,WAAW;CACxB,KAAK,MAAM,WAAW,gBAAgB;EAEpC,MAAM,WAAW,cADD,iBAAiB,QACK,CAAC;EACvC,QAAQ,KAAK,KAAK,SAAS,MAAM,KAAK,UAAU,QAAQ,CAAC,GAAG;;CAE9D,QAAQ,KAAK,KAAK;CAClB,QAAQ,KAAK,GAAG;CAEhB,OAAO,QAAQ,KAAK,KAAK;;;;AC5B3B,MAAM,0BAA0B,OAC9B,MACA,QACA,cAAwB,EAAE,EAC1B,cAAsB,QACE;CACxB,MAAM,oBAAgC;EAAE,sBAAM,IAAI,KAAK;EAAE,uBAAO,IAAI,KAAK;EAAE;CAE3E,MAAM,QAAQ,YAAY,KAAK;CAC/B,IAAI,CAAC,QACH,QAAQ,MAAM,yCAAyC;CAGzD,MAAM,QAAQ,MAAM,OAAO,0CAA0C;EACnE,KAAK;EACL,UAAU;EACV,QAAQ,CAAC,sBAAsB,GAAG,YAAY,KAAK,QAAQ,GAAG,IAAI,KAAK,CAAC;EACxE,WAAW;EACZ,CAAC;CAEF,IAAI,YAAY;CAEhB,MADc,OAAO,EAAE,aAAa,CACzB,CAAC,IAAI,OAAO,OAAO,SAAS;EACrC,IAAI;GAEF,MAAM,WAAW,gBAAgB,MADd,SAAS,MAAM,QAAQ,CACJ;GACtC,IAAI,CAAC,UACH;GAEF,KAAK,MAAM,WAAW,SAAS,MAC7B,kBAAkB,KAAK,IAAI,QAAQ;GAErC,KAAK,MAAM,WAAW,SAAS,OAC7B,kBAAkB,MAAM,IAAI,QAAQ;GAEtC;WACO,OAAO;GACd,IAAI,CAAC,QACH,QAAQ,MAAM,qBAAqB,KAAK,IAAI,QAAQ;;GAGxD;CAEF,MAAM,UAAU,YAAY,KAAK,GAAG;CACpC,IAAI,CAAC,QACH,QAAQ,MACN,kBAAkB,UAAU,GAAG,MAAM,OAAO,UAAU,kBAAkB,KAAK,KAAK,SAAS,kBAAkB,MAAM,KAAK,mBAAmB,QAAQ,QAAQ,EAAE,CAAC,MAC/J;CAGH,OAAO;;AAST,MAAa,gBAAgB,YAAqC;CAChE,MAAM,EACJ,OAAO,QAAQ,KAAK,EACpB,SAAS,OACT,SAAS,KAAK,KAAK,gBAAgB,YAAY,KAC7C,WAAW,EAAE;CACjB,OAAO;EACL,MAAM,UAA+B;GACnC,OAAO,wBAAwB,MAAM,OAAO;;EAG9C,MAAM,KAAK,UAAqC;GAC9C,MAAM,UAAU,wBAAwB,SAAS,KAAK;GACtD,MAAM,eAAe,wBAAwB,SAAS,OAAO,KAAK;GAClE,MAAM,UAAU,KAAK,KAAK,MAAM,OAAO;GACvC,MAAM,MAAM,SAAS,EAAE,WAAW,MAAM,CAAC;GACzC,MAAM,UAAU,KAAK,KAAK,SAAS,aAAa,EAAE,GAAG,QAAQ,MAAM,CAAC,IAAI;GACxE,MAAM,UACJ,KAAK,KAAK,SAAS,aAAa,EAChC,GAAG,aAAa,MAAM,CAAC,IACxB;;EAGH,MAAM,MAAqB;GACzB,MAAM,WAAW,MAAM,KAAK,SAAS;GACrC,MAAM,KAAK,KAAK,SAAS;;EAE5B;;;;AC1FH,MAAM,cAAc;AACpB,MAAM,cAAc;AACpB,MAAM,eAAe,cAAc;AAEnC,MAAM,YAAY,OAAO,GAAmB;AAC5C,MAAM,YAAY,OAAO,GAAmB;AAC5C,MAAM,a
AAa,OAAO,aAAa,OAAO;AAM9C,MAAa,gBAAgB,WAA2B;CACtD,MAAM,mBAAA;CACN,IAAI,mBAAmB,KAAK,mBAAmB,GAE7C,MAAM,IAAI,MAAM,0BAA0B;CAG5C,MAAM,wBAAQ,IAAI,KAAqB;CACvC,QAAQ,UAAU;EAChB,IAAI,MAAM,IAAI,MAAM,EAClB,OAAO,MAAM,IAAI,MAAM;EAMzB,IAAI,UAHW,WAAW,UAAU,OACf,CAAC,OAAO,MAAM,CAAC,QAEhB,CAAC,gBAAgB,EAAE;EACvC,IAAI,SAAS;EAEb,UAAU,YAAY,OAAO,UAAU,UAAU;EACjD,WAAW;EACX,UAAU,YAAY,OAAO,UAAU,UAAU;EACjD,WAAW;EACX,KAAK,IAAI,IAAI,GAAG,IAAI,kBAAkB,KAAK;GACzC,UAAU,aAAa,OAAO,UAAU,WAAW;GACnD,WAAW;;EAGb,MAAM,IAAI,OAAO,OAAO;EACxB,OAAO;;;AAIX,MAAa,sBAA8B;CACzC,IAAI,QAAQ;CACZ,aAAa;EACX,IAAI,SAAS;EAEb,UAAU,YAAY,QAAQ;EAC9B,IAAI,SAAS,KAAK,MAAM,QAAQ,GAAmB;EACnD,OAAO,SAAS,GAAG;GACjB;GACA,UAAU,aAAa,SAAS,aAAa;GAC7C,SAAS,KAAK,MAAM,SAAS,aAAa,OAAO;;EAGnD;EACA,OAAO"}
@@ -0,0 +1,8 @@
1
+ import { t as Options } from "./plugin-Coz4K9xU.js";
2
+ import * as _$unplugin from "unplugin";
3
+
4
//#region src/index.d.ts
/**
 * Default export of the package: an unplugin instance parameterized by the
 * plugin's `Options` (imported above from the shared plugin chunk).
 */
declare const _default: _$unplugin.UnpluginInstance<Options, boolean>;
//#endregion
export { type Options, _default as default };
8
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.js ADDED
@@ -0,0 +1,8 @@
1
+ import { t as unpluginFactory } from "./plugin-Cj1x2KK1.js";
2
+ import { createUnplugin } from "unplugin";
3
//#region src/index.ts
// Package entry point: wrap the shared factory with unplugin's generic
// adapter to produce the default-exported plugin instance.
var src_default = createUnplugin(unpluginFactory);
//#endregion
export { src_default as default };
7
+
8
+ //# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"index.js","names":[],"sources":["../src/index.ts"],"sourcesContent":["import { createUnplugin } from 'unplugin';\nimport { type Options, unpluginFactory } from './internal/plugin.js';\n\nexport default createUnplugin(unpluginFactory);\nexport type { Options };\n"],"mappings":";;;AAGA,IAAA,cAAe,eAAe,gBAAgB"}
@@ -0,0 +1,125 @@
1
+ import { a as transformCode, c as VIRTUAL_LOCAL_MODULE_ID, i as extractKeywords, l as VIRTUAL_MODULE_ID, n as createHasher, o as encodeIdentifier, r as createRunner, s as PLUGIN_NAME, t as createCounter } from "./hash-DMsO2VqQ.js";
2
+ import { readFile } from "node:fs/promises";
3
+ import pLimit from "p-limit";
4
//#region src/internal/plugin.ts
// Prefix an id with "\0" — the bundler convention for marking a module id
// as resolved by this plugin.
const resolveId = (id) => `\0${id}`;
// Split "path?query" into [path, query]; query is undefined when absent.
const splitQuery = (id) => {
	const qs = id.indexOf("?");
	return qs < 0 ? [id, void 0] : [id.slice(0, qs), id.slice(qs + 1)];
};
// Build the include-filter regexes for a virtual module id prefix.
const toIncludes = (id) => [new RegExp(`^${id}/`)];
// Transformable source files: .js/.ts/.jsx/.tsx and their .m* variants.
const SUFFIX_REGEX = /\.m?[jt]sx?$/;
// Paths never processed by any hook.
const COMMON_EXCLUDES = [/\/node_modules\//];
14
// Plugin factory consumed by createUnplugin. Options (per the original
// source docs): `isDev` keeps the human-readable keyword as a suffix on
// generated values for debugging; `secret` keys the deterministic HMAC
// hasher so independent builds sharing a secret produce identical hashes.
const unpluginFactory = ({ isDev, secret }) => {
	// Project scanner / type-declaration writer; silent so it never logs.
	const runner = createRunner({ silent: true });
	// Serializes runner work — concurrency: 1 means scan/save tasks queue up.
	const runnerLimit = pLimit({ concurrency: 1 });
	// Every keyword observed so far during this build/watch session.
	const allKeywords = {
		main: /* @__PURE__ */ new Set(),
		local: /* @__PURE__ */ new Set()
	};
	let isInitialized = false;
	// One-shot full-project scan: merge collected keywords into allKeywords
	// and persist the generated .d.ts files. Failures are swallowed — the
	// scan is best-effort and the transform path works without it.
	const runInit = async () => {
		try {
			const keywords = await runner.collect();
			for (const keyword of keywords.main) allKeywords.main.add(keyword);
			for (const keyword of keywords.local) allKeywords.local.add(keyword);
			await runner.save(allKeywords);
			isInitialized = true;
		} catch {}
	};
	// Per-build state; (re)created in buildStart below.
	let hasherMain;
	let hasherLocal;
	let resolvedMap;
	return {
		name: PLUGIN_NAME,
		buildStart() {
			// Secret-keyed deterministic hashes for shared ("main") keywords.
			hasherMain = createHasher(secret);
			// Sequential counter for "local" keywords.
			hasherLocal = createCounter();
			// resolved virtual module id -> generated module source code.
			resolvedMap = /* @__PURE__ */ new Map();
			// Kick off the initial scan without blocking the build.
			runnerLimit(async () => {
				if (!isInitialized) await runInit();
			});
		},
		resolveId: {
			filter: { id: {
				include: [...toIncludes(VIRTUAL_MODULE_ID), ...toIncludes(VIRTUAL_LOCAL_MODULE_ID)],
				exclude: COMMON_EXCLUDES
			} },
			// Claim virtual keyword ids by "\0"-prefixing them.
			handler(id) {
				return resolveId(id);
			}
		},
		load: {
			filter: { id: {
				include: [...toIncludes(resolveId(VIRTUAL_MODULE_ID)), ...toIncludes(resolveId(VIRTUAL_LOCAL_MODULE_ID))],
				exclude: COMMON_EXCLUDES
			} },
			handler(id) {
				// Drop any "?query" suffix before looking up the module source.
				const [validId] = splitQuery(id);
				// Sources are registered by the transform handler below.
				if (resolvedMap.has(validId)) return resolvedMap.get(validId);
				return null;
			}
		},
		transform: {
			filter: {
				id: {
					include: [SUFFIX_REGEX],
					exclude: COMMON_EXCLUDES
				},
				// Cheap pre-filter: only touch files that mention a virtual id.
				code: { include: [VIRTUAL_MODULE_ID, VIRTUAL_LOCAL_MODULE_ID] }
			},
			handler(code, id) {
				// Rewrites keyword imports into per-keyword virtual imports and
				// reports which keywords the module used.
				const result = transformCode(code, id);
				if (!result) return null;
				const { code: transformed, map, keywords } = result;
				// Register a virtual module exporting the hashed value for each
				// keyword, so the load hook can serve it.
				for (const keyword of keywords.main) {
					const resolvedId = resolveId(`${VIRTUAL_MODULE_ID}/_/${encodeIdentifier(keyword)}`);
					const hash = hasherMain(keyword);
					// In dev, append the original keyword for debuggability.
					const value = isDev ? `${hash}.${keyword}` : hash;
					resolvedMap.set(resolvedId, `export default ${JSON.stringify(value)};\n`);
				}
				for (const keyword of keywords.local) {
					const resolvedId = resolveId(`${VIRTUAL_LOCAL_MODULE_ID}/_/${encodeIdentifier(keyword)}`);
					// NOTE(review): the counter ignores its argument, so each
					// occurrence gets a fresh value — presumably intentional for
					// "local" keywords; confirm against the package docs.
					const hash = hasherLocal(keyword);
					const value = isDev ? `${hash}.${keyword}` : hash;
					resolvedMap.set(resolvedId, `export default ${JSON.stringify(value)};\n`);
				}
				return {
					code: transformed,
					map
				};
			}
		},
		// Watch mode: re-extract keywords from changed files and persist the
		// type declarations when anything new appears.
		async watchChange(id, { event }) {
			if (!SUFFIX_REGEX.test(id) || COMMON_EXCLUDES.some((regex) => regex.test(id)) || event === "delete") return;
			let code;
			try {
				code = await readFile(id, "utf-8");
			} catch {
				// File may be gone or unreadable by now; nothing to do.
				return;
			}
			const keywords = extractKeywords(code);
			if (!keywords) return;
			// Merge, tracking whether any keyword is genuinely new.
			let isAdded = false;
			for (const keyword of keywords.main) if (!allKeywords.main.has(keyword)) {
				allKeywords.main.add(keyword);
				isAdded = true;
			}
			for (const keyword of keywords.local) if (!allKeywords.local.has(keyword)) {
				allKeywords.local.add(keyword);
				isAdded = true;
			}
			// Queue behind any in-flight runner task: finish init first, or
			// just re-save the declarations when new keywords were added.
			if (!isInitialized || isAdded) runnerLimit(async () => {
				if (!isInitialized) await runInit();
				else if (isAdded) try {
					await runner.save(allKeywords);
				} catch {}
			});
		}
	};
};
122
+ //#endregion
123
+ export { unpluginFactory as t };
124
+
125
+ //# sourceMappingURL=plugin-Cj1x2KK1.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"plugin-Cj1x2KK1.js","names":[],"sources":["../src/internal/plugin.ts"],"sourcesContent":["import { readFile } from 'node:fs/promises';\nimport pLimit from 'p-limit';\nimport type { UnpluginFactory } from 'unplugin';\nimport { createRunner } from './cli.js';\nimport {\n DEBUG_SEPARATOR,\n KEYWORD_ROUTE_SEGMENT,\n PLUGIN_NAME,\n VIRTUAL_LOCAL_MODULE_ID,\n VIRTUAL_MODULE_ID,\n} from './constants.js';\nimport { encodeIdentifier } from './encode.js';\nimport { createCounter, createHasher, type Hasher } from './hash.js';\nimport {\n extractKeywords,\n type KeywordSet,\n transformCode,\n} from './transform.js';\n\nconst resolveId = (id: string): string => `\\0${id}`;\n\nconst splitQuery = (id: string): [string, string | undefined] => {\n const index = id.indexOf('?');\n if (index === -1) {\n return [id, undefined];\n }\n return [id.slice(0, index), id.slice(index + 1)];\n};\n\nconst toIncludes = (id: string): RegExp[] => [new RegExp(`^${id}/`)];\n\nconst SUFFIX_REGEX = /\\.m?[jt]sx?$/;\nconst COMMON_EXCLUDES = [/\\/node_modules\\//];\n\nexport interface Options {\n /**\n * If true, preserves the original keyword as a suffix in the generated\n * identifier for easier debugging (e.g., `\"zXpL21k.SET_USER\"`).\n */\n isDev: boolean;\n /**\n * The cryptographic key used to initialize the deterministic HMAC algorithm.\n * Modifying this value will globally rotate all generated hashes.\n * To ensure cross-boundary consistency between independent builds,\n * they must share the same secret key.\n */\n secret: string;\n}\n\nexport const unpluginFactory: UnpluginFactory<Options> = ({\n isDev,\n secret,\n}) => {\n const runner = createRunner({ silent: true });\n const runnerLimit = pLimit({ concurrency: 1 });\n const allKeywords: KeywordSet = { main: new Set(), local: new Set() };\n\n let isInitialized = false;\n const runInit = async () => {\n try {\n const keywords = await runner.collect();\n for (const keyword of keywords.main) {\n 
allKeywords.main.add(keyword);\n }\n for (const keyword of keywords.local) {\n allKeywords.local.add(keyword);\n }\n await runner.save(allKeywords);\n isInitialized = true;\n } catch {}\n };\n\n let hasherMain: Hasher;\n let hasherLocal: Hasher;\n let resolvedMap: Map<string, string>;\n\n return {\n name: PLUGIN_NAME,\n\n buildStart() {\n hasherMain = createHasher(secret);\n hasherLocal = createCounter();\n resolvedMap = new Map();\n runnerLimit(async () => {\n if (!isInitialized) {\n await runInit();\n }\n });\n },\n\n resolveId: {\n filter: {\n id: {\n include: [\n ...toIncludes(VIRTUAL_MODULE_ID),\n ...toIncludes(VIRTUAL_LOCAL_MODULE_ID),\n ],\n exclude: COMMON_EXCLUDES,\n },\n },\n handler(id) {\n return resolveId(id);\n },\n },\n\n load: {\n filter: {\n id: {\n include: [\n ...toIncludes(resolveId(VIRTUAL_MODULE_ID)),\n ...toIncludes(resolveId(VIRTUAL_LOCAL_MODULE_ID)),\n ],\n exclude: COMMON_EXCLUDES,\n },\n },\n handler(id) {\n const [validId] = splitQuery(id);\n if (resolvedMap.has(validId)) {\n return resolvedMap.get(validId);\n }\n return null;\n },\n },\n\n transform: {\n filter: {\n id: {\n include: [SUFFIX_REGEX],\n exclude: COMMON_EXCLUDES,\n },\n code: {\n include: [VIRTUAL_MODULE_ID, VIRTUAL_LOCAL_MODULE_ID],\n },\n },\n handler(code, id) {\n const result = transformCode(code, id);\n if (!result) {\n return null;\n }\n const { code: transformed, map, keywords } = result;\n for (const keyword of keywords.main) {\n const encoded = encodeIdentifier(keyword);\n const resolvedId = resolveId(\n `${VIRTUAL_MODULE_ID}/${KEYWORD_ROUTE_SEGMENT}/${encoded}`,\n );\n const hash = hasherMain(keyword);\n const value = isDev ? 
`${hash}${DEBUG_SEPARATOR}${keyword}` : hash;\n resolvedMap.set(\n resolvedId,\n `export default ${JSON.stringify(value)};\\n`,\n );\n }\n for (const keyword of keywords.local) {\n const encoded = encodeIdentifier(keyword);\n const resolvedId = resolveId(\n `${VIRTUAL_LOCAL_MODULE_ID}/${KEYWORD_ROUTE_SEGMENT}/${encoded}`,\n );\n const hash = hasherLocal(keyword);\n const value = isDev ? `${hash}${DEBUG_SEPARATOR}${keyword}` : hash;\n resolvedMap.set(\n resolvedId,\n `export default ${JSON.stringify(value)};\\n`,\n );\n }\n return { code: transformed, map };\n },\n },\n\n async watchChange(id, { event }) {\n if (\n !SUFFIX_REGEX.test(id) ||\n COMMON_EXCLUDES.some((regex) => regex.test(id)) ||\n event === 'delete'\n ) {\n return;\n }\n let code: string;\n try {\n code = await readFile(id, 'utf-8');\n } catch {\n return;\n }\n const keywords = extractKeywords(code);\n if (!keywords) {\n return;\n }\n let isAdded = false;\n for (const keyword of keywords.main) {\n if (!allKeywords.main.has(keyword)) {\n allKeywords.main.add(keyword);\n isAdded = true;\n }\n }\n for (const keyword of keywords.local) {\n if (!allKeywords.local.has(keyword)) {\n allKeywords.local.add(keyword);\n isAdded = true;\n }\n }\n if (!isInitialized || isAdded) {\n runnerLimit(async () => {\n if (!isInitialized) {\n await runInit();\n } else if (isAdded) {\n try {\n await runner.save(allKeywords);\n } catch {}\n }\n });\n }\n },\n 
};\n};\n"],"mappings":";;;;AAmBA,MAAM,aAAa,OAAuB,KAAK;AAE/C,MAAM,cAAc,OAA6C;CAC/D,MAAM,QAAQ,GAAG,QAAQ,IAAI;CAC7B,IAAI,UAAU,IACZ,OAAO,CAAC,IAAI,KAAA,EAAU;CAExB,OAAO,CAAC,GAAG,MAAM,GAAG,MAAM,EAAE,GAAG,MAAM,QAAQ,EAAE,CAAC;;AAGlD,MAAM,cAAc,OAAyB,CAAC,IAAI,OAAO,IAAI,GAAG,GAAG,CAAC;AAEpE,MAAM,eAAe;AACrB,MAAM,kBAAkB,CAAC,mBAAmB;AAiB5C,MAAa,mBAA6C,EACxD,OACA,aACI;CACJ,MAAM,SAAS,aAAa,EAAE,QAAQ,MAAM,CAAC;CAC7C,MAAM,cAAc,OAAO,EAAE,aAAa,GAAG,CAAC;CAC9C,MAAM,cAA0B;EAAE,sBAAM,IAAI,KAAK;EAAE,uBAAO,IAAI,KAAK;EAAE;CAErE,IAAI,gBAAgB;CACpB,MAAM,UAAU,YAAY;EAC1B,IAAI;GACF,MAAM,WAAW,MAAM,OAAO,SAAS;GACvC,KAAK,MAAM,WAAW,SAAS,MAC7B,YAAY,KAAK,IAAI,QAAQ;GAE/B,KAAK,MAAM,WAAW,SAAS,OAC7B,YAAY,MAAM,IAAI,QAAQ;GAEhC,MAAM,OAAO,KAAK,YAAY;GAC9B,gBAAgB;UACV;;CAGV,IAAI;CACJ,IAAI;CACJ,IAAI;CAEJ,OAAO;EACL,MAAM;EAEN,aAAa;GACX,aAAa,aAAa,OAAO;GACjC,cAAc,eAAe;GAC7B,8BAAc,IAAI,KAAK;GACvB,YAAY,YAAY;IACtB,IAAI,CAAC,eACH,MAAM,SAAS;KAEjB;;EAGJ,WAAW;GACT,QAAQ,EACN,IAAI;IACF,SAAS,CACP,GAAG,WAAW,kBAAkB,EAChC,GAAG,WAAW,wBAAwB,CACvC;IACD,SAAS;IACV,EACF;GACD,QAAQ,IAAI;IACV,OAAO,UAAU,GAAG;;GAEvB;EAED,MAAM;GACJ,QAAQ,EACN,IAAI;IACF,SAAS,CACP,GAAG,WAAW,UAAU,kBAAkB,CAAC,EAC3C,GAAG,WAAW,UAAU,wBAAwB,CAAC,CAClD;IACD,SAAS;IACV,EACF;GACD,QAAQ,IAAI;IACV,MAAM,CAAC,WAAW,WAAW,GAAG;IAChC,IAAI,YAAY,IAAI,QAAQ,EAC1B,OAAO,YAAY,IAAI,QAAQ;IAEjC,OAAO;;GAEV;EAED,WAAW;GACT,QAAQ;IACN,IAAI;KACF,SAAS,CAAC,aAAa;KACvB,SAAS;KACV;IACD,MAAM,EACJ,SAAS,CAAC,mBAAmB,wBAAwB,EACtD;IACF;GACD,QAAQ,MAAM,IAAI;IAChB,MAAM,SAAS,cAAc,MAAM,GAAG;IACtC,IAAI,CAAC,QACH,OAAO;IAET,MAAM,EAAE,MAAM,aAAa,KAAK,aAAa;IAC7C,KAAK,MAAM,WAAW,SAAS,MAAM;KAEnC,MAAM,aAAa,UACjB,GAAG,kBAAkB,KAFP,iBAAiB,QAEyB,GACzD;KACD,MAAM,OAAO,WAAW,QAAQ;KAChC,MAAM,QAAQ,QAAQ,GAAG,QAAyB,YAAY;KAC9D,YAAY,IACV,YACA,kBAAkB,KAAK,UAAU,MAAM,CAAC,KACzC;;IAEH,KAAK,MAAM,WAAW,SAAS,OAAO;KAEpC,MAAM,aAAa,UACjB,GAAG,wBAAwB,KAFb,iBAAiB,QAE+B,GAC/D;KACD,MAAM,OAAO,YAAY,QAAQ;KACjC,MAAM,QAAQ,QAAQ,GAAG,QAAyB,YAAY;KAC9D,YAAY,IACV,YACA,kBAAkB,KAAK,UAAU,MAAM,CAAC,KACzC;;IAEH,OAAO;KAAE,MAAM;KAAa;KAAK;;GAEpC;EAED,MAAM,YAAY,IAAI,EA
AE,SAAS;GAC/B,IACE,CAAC,aAAa,KAAK,GAAG,IACtB,gBAAgB,MAAM,UAAU,MAAM,KAAK,GAAG,CAAC,IAC/C,UAAU,UAEV;GAEF,IAAI;GACJ,IAAI;IACF,OAAO,MAAM,SAAS,IAAI,QAAQ;WAC5B;IACN;;GAEF,MAAM,WAAW,gBAAgB,KAAK;GACtC,IAAI,CAAC,UACH;GAEF,IAAI,UAAU;GACd,KAAK,MAAM,WAAW,SAAS,MAC7B,IAAI,CAAC,YAAY,KAAK,IAAI,QAAQ,EAAE;IAClC,YAAY,KAAK,IAAI,QAAQ;IAC7B,UAAU;;GAGd,KAAK,MAAM,WAAW,SAAS,OAC7B,IAAI,CAAC,YAAY,MAAM,IAAI,QAAQ,EAAE;IACnC,YAAY,MAAM,IAAI,QAAQ;IAC9B,UAAU;;GAGd,IAAI,CAAC,iBAAiB,SACpB,YAAY,YAAY;IACtB,IAAI,CAAC,eACH,MAAM,SAAS;SACV,IAAI,SACT,IAAI;KACF,MAAM,OAAO,KAAK,YAAY;YACxB;KAEV;;EAGP"}
@@ -0,0 +1,18 @@
1
+ //#region src/internal/plugin.d.ts
2
+ interface Options {
3
+ /**
4
+ * If true, preserves the original keyword as a suffix in the generated
5
+ * identifier for easier debugging (e.g., `"zXpL21k.SET_USER"`).
6
+ */
7
+ isDev: boolean;
8
+ /**
9
+ * The cryptographic key used to initialize the deterministic HMAC algorithm.
10
+ * Modifying this value will globally rotate all generated hashes.
11
+ * To ensure cross-boundary consistency between independent builds,
12
+ * they must share the same secret key.
13
+ */
14
+ secret: string;
15
+ }
16
+ //#endregion
17
+ export { Options as t };
18
+ //# sourceMappingURL=plugin-Coz4K9xU.d.ts.map
@@ -0,0 +1,7 @@
1
+ import { t as Options } from "./plugin-Coz4K9xU.js";
2
+
3
+ //#region src/rollup.d.ts
4
+ declare const _default: (options: Options) => any;
5
+ //#endregion
6
+ export { type Options, _default as default };
7
+ //# sourceMappingURL=rollup.d.ts.map
package/dist/rollup.js ADDED
@@ -0,0 +1,8 @@
1
+ import { t as unpluginFactory } from "./plugin-Cj1x2KK1.js";
2
+ import { createRollupPlugin } from "unplugin";
3
+ //#region src/rollup.ts
4
+ var rollup_default = createRollupPlugin(unpluginFactory);
5
+ //#endregion
6
+ export { rollup_default as default };
7
+
8
+ //# sourceMappingURL=rollup.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"rollup.js","names":[],"sources":["../src/rollup.ts"],"sourcesContent":["import { createRollupPlugin } from 'unplugin';\nimport { type Options, unpluginFactory } from './internal/plugin.js';\n\nexport default createRollupPlugin(unpluginFactory);\nexport type { Options };\n"],"mappings":";;;AAGA,IAAA,iBAAe,mBAAmB,gBAAgB"}
package/dist/vite.d.ts ADDED
@@ -0,0 +1,8 @@
1
+ import { t as Options } from "./plugin-Coz4K9xU.js";
2
+ import * as _$unplugin from "unplugin";
3
+
4
+ //#region src/vite.d.ts
5
+ declare const _default: (options: Options) => _$unplugin.VitePlugin<any> | _$unplugin.VitePlugin<any>[];
6
+ //#endregion
7
+ export { type Options, _default as default };
8
+ //# sourceMappingURL=vite.d.ts.map
package/dist/vite.js ADDED
@@ -0,0 +1,8 @@
1
+ import { t as unpluginFactory } from "./plugin-Cj1x2KK1.js";
2
+ import { createVitePlugin } from "unplugin";
3
+ //#region src/vite.ts
4
+ var vite_default = createVitePlugin(unpluginFactory);
5
+ //#endregion
6
+ export { vite_default as default };
7
+
8
+ //# sourceMappingURL=vite.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"vite.js","names":[],"sources":["../src/vite.ts"],"sourcesContent":["import { createVitePlugin } from 'unplugin';\nimport { type Options, unpluginFactory } from './internal/plugin.js';\n\nexport default createVitePlugin(unpluginFactory);\nexport type { Options };\n"],"mappings":";;;AAGA,IAAA,eAAe,iBAAiB,gBAAgB"}
@@ -0,0 +1,7 @@
1
+ import { t as Options } from "./plugin-Coz4K9xU.js";
2
+
3
+ //#region src/webpack.d.ts
4
+ declare const _default: (options: Options) => WebpackPluginInstance;
5
+ //#endregion
6
+ export { type Options, _default as default };
7
+ //# sourceMappingURL=webpack.d.ts.map
@@ -0,0 +1,8 @@
1
+ import { t as unpluginFactory } from "./plugin-Cj1x2KK1.js";
2
+ import { createWebpackPlugin } from "unplugin";
3
+ //#region src/webpack.ts
4
+ var webpack_default = createWebpackPlugin(unpluginFactory);
5
+ //#endregion
6
+ export { webpack_default as default };
7
+
8
+ //# sourceMappingURL=webpack.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"webpack.js","names":[],"sources":["../src/webpack.ts"],"sourcesContent":["import { createWebpackPlugin } from 'unplugin';\nimport { type Options, unpluginFactory } from './internal/plugin.js';\n\nexport default createWebpackPlugin(unpluginFactory);\nexport type { Options };\n"],"mappings":";;;AAGA,IAAA,kBAAe,oBAAoB,gBAAgB"}
package/package.json ADDED
@@ -0,0 +1,83 @@
1
+ {
2
+ "name": "unplugin-keywords",
3
+ "version": "2.5.1",
4
+ "description": "A build plugin for structural string literal minification and obfuscation.",
5
+ "keywords": [
6
+ "unplugin",
7
+ "rollup",
8
+ "rollup-plugin",
9
+ "vite",
10
+ "vite-plugin",
11
+ "minification",
12
+ "obfuscation"
13
+ ],
14
+ "repository": {
15
+ "type": "git",
16
+ "url": "git+https://github.com/cueaz/unplugin-keywords.git"
17
+ },
18
+ "license": "MIT",
19
+ "author": "cueaz",
20
+ "type": "module",
21
+ "exports": {
22
+ ".": {
23
+ "types": "./dist/index.d.ts",
24
+ "default": "./dist/index.js"
25
+ },
26
+ "./api": {
27
+ "types": "./dist/api.d.ts",
28
+ "default": "./dist/api.js"
29
+ },
30
+ "./esbuild": {
31
+ "types": "./dist/esbuild.d.ts",
32
+ "default": "./dist/esbuild.js"
33
+ },
34
+ "./rollup": {
35
+ "types": "./dist/rollup.d.ts",
36
+ "default": "./dist/rollup.js"
37
+ },
38
+ "./vite": {
39
+ "types": "./dist/vite.d.ts",
40
+ "default": "./dist/vite.js"
41
+ },
42
+ "./webpack": {
43
+ "types": "./dist/webpack.d.ts",
44
+ "default": "./dist/webpack.js"
45
+ },
46
+ "./cli": {
47
+ "types": "./dist/cli.d.ts",
48
+ "default": "./dist/cli.js"
49
+ },
50
+ "./package.json": "./package.json"
51
+ },
52
+ "bin": {
53
+ "keywords": "./bin/cli.js"
54
+ },
55
+ "files": [
56
+ "bin",
57
+ "dist"
58
+ ],
59
+ "dependencies": {
60
+ "@babel/core": "^7.29.0",
61
+ "globby": "^16.2.0",
62
+ "p-limit": "^7.3.0",
63
+ "unplugin": "^3.0.0"
64
+ },
65
+ "devDependencies": {
66
+ "@biomejs/biome": "^2.4.14",
67
+ "@tsconfig/bases": "^1.0.23",
68
+ "@types/babel__core": "^7.20.5",
69
+ "@types/node": "^25.6.0",
70
+ "tsdown": "^0.22.0",
71
+ "typescript": "^6.0.3",
72
+ "vitest": "^4.1.5"
73
+ },
74
+ "scripts": {
75
+ "precheck": "pnpm --filter \"./demo/*\" typegen",
76
+ "check": "tsc --build && biome check",
77
+ "format": "biome check --write",
78
+ "build": "tsdown",
79
+ "build:demo": "pnpm --filter \"./demo/*\" build",
80
+ "test": "vitest run",
81
+ "test:demo": "pnpm --filter \"./demo/*\" test"
82
+ }
83
+ }