binja 0.3.4 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,6 +1,286 @@
  // @bun
+ var __defProp = Object.defineProperty;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __moduleCache = /* @__PURE__ */ new WeakMap;
+ var __toCommonJS = (from) => {
+ var entry = __moduleCache.get(from), desc;
+ if (entry)
+ return entry;
+ entry = __defProp({}, "__esModule", { value: true });
+ if (from && typeof from === "object" || typeof from === "function")
+ __getOwnPropNames(from).map((key) => !__hasOwnProp.call(entry, key) && __defProp(entry, key, {
+ get: () => from[key],
+ enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+ }));
+ __moduleCache.set(from, entry);
+ return entry;
+ };
+ var __export = (target, all) => {
+ for (var name in all)
+ __defProp(target, name, {
+ get: all[name],
+ enumerable: true,
+ configurable: true,
+ set: (newValue) => all[name] = () => newValue
+ });
+ };
+ var __esm = (fn, res) => () => (fn && (res = fn(fn = 0)), res);
  var __require = import.meta.require;

+ // src/native/index.ts
+ var exports_native = {};
+ __export(exports_native, {
+ tokenizeCount: () => tokenizeCount,
+ tokenize: () => tokenize,
+ nativeVersion: () => nativeVersion,
+ isNativeAvailable: () => isNativeAvailable,
+ TokenType: () => TokenType2,
+ NativeLexer: () => NativeLexer
+ });
+ import { dlopen, FFIType, ptr, CString } from "bun:ffi";
+ import { join } from "path";
+ import { existsSync } from "fs";
+ function getLibraryPath() {
+ const platform = process.platform;
+ const arch = process.arch;
+ const libExt = platform === "darwin" ? "dylib" : platform === "win32" ? "dll" : "so";
+ const libName = `libbinja.${libExt}`;
+ const projectRoot = join(import.meta.dir, "..", "..");
+ const searchPaths = [
+ join(projectRoot, "native", `${platform}-${arch}`, libName),
+ join(projectRoot, "native", libName),
+ join(projectRoot, "zig-native", "zig-out", "lib", libName),
+ join(projectRoot, "zig-native", libName),
+ join(import.meta.dir, libName)
+ ];
+ for (const p of searchPaths) {
+ if (existsSync(p)) {
+ return p;
+ }
+ }
+ return null;
+ }
+ function loadLibrary() {
+ if (_loadAttempted) {
+ return _lib;
+ }
+ _loadAttempted = true;
+ const libPath = getLibraryPath();
+ if (!libPath) {
+ console.warn("[binja] Native library not found, using pure JS fallback");
+ return null;
+ }
+ try {
+ _lib = dlopen(libPath, symbols);
+ _nativeAvailable = true;
+ return _lib;
+ } catch (e) {
+ console.warn(`[binja] Failed to load native library: ${e}`);
+ return null;
+ }
+ }
+ function isNativeAvailable() {
+ loadLibrary();
+ return _nativeAvailable;
+ }
+ function nativeVersion() {
+ const lib = loadLibrary();
+ if (!lib)
+ return null;
+ const versionPtr = lib.symbols.binja_version();
+ if (!versionPtr)
+ return null;
+ return new CString(versionPtr).toString();
+ }
+ function tokenizeCount(source) {
+ if (source.length === 0) {
+ return 1;
+ }
+ const lib = loadLibrary();
+ if (!lib) {
+ throw new Error("Native library not available");
+ }
+ const bytes = new TextEncoder().encode(source);
+ return Number(lib.symbols.binja_tokenize_count(ptr(bytes), bytes.length));
+ }
+ function tokenize(source) {
+ const lexer = new NativeLexer(source);
+ try {
+ return lexer.getAllTokens();
+ } finally {
+ lexer.free();
+ }
+ }
+ var TokenType2, symbols, _lib = null, _loadAttempted = false, _nativeAvailable = false, NativeLexer;
+ var init_native = __esm(() => {
+ TokenType2 = {
+ TEXT: 0,
+ VAR_START: 1,
+ VAR_END: 2,
+ BLOCK_START: 3,
+ BLOCK_END: 4,
+ COMMENT_START: 5,
+ COMMENT_END: 6,
+ IDENTIFIER: 7,
+ STRING: 8,
+ NUMBER: 9,
+ OPERATOR: 10,
+ DOT: 11,
+ COMMA: 12,
+ PIPE: 13,
+ COLON: 14,
+ LPAREN: 15,
+ RPAREN: 16,
+ LBRACKET: 17,
+ RBRACKET: 18,
+ LBRACE: 19,
+ RBRACE: 20,
+ ASSIGN: 21,
+ EOF: 22
+ };
+ symbols = {
+ binja_lexer_new: {
+ args: [FFIType.ptr, FFIType.u64],
+ returns: FFIType.ptr
+ },
+ binja_lexer_free: {
+ args: [FFIType.ptr],
+ returns: FFIType.void
+ },
+ binja_lexer_token_count: {
+ args: [FFIType.ptr],
+ returns: FFIType.u64
+ },
+ binja_lexer_token_type: {
+ args: [FFIType.ptr, FFIType.u64],
+ returns: FFIType.u8
+ },
+ binja_lexer_token_start: {
+ args: [FFIType.ptr, FFIType.u64],
+ returns: FFIType.u32
+ },
+ binja_lexer_token_end: {
+ args: [FFIType.ptr, FFIType.u64],
+ returns: FFIType.u32
+ },
+ binja_lexer_has_error: {
+ args: [FFIType.ptr],
+ returns: FFIType.bool
+ },
+ binja_lexer_error_code: {
+ args: [FFIType.ptr],
+ returns: FFIType.u8
+ },
+ binja_lexer_error_line: {
+ args: [FFIType.ptr],
+ returns: FFIType.u32
+ },
+ binja_tokenize_count: {
+ args: [FFIType.ptr, FFIType.u64],
+ returns: FFIType.u64
+ },
+ binja_version: {
+ args: [],
+ returns: FFIType.ptr
+ }
+ };
+ NativeLexer = class NativeLexer {
+ lexerPtr = 0;
+ source;
+ sourceBuffer;
+ lib;
+ _tokenCount = 0;
+ _isEmpty = false;
+ constructor(source) {
+ const lib = loadLibrary();
+ if (!lib) {
+ throw new Error("Native library not available. Use isNativeAvailable() to check first.");
+ }
+ this.lib = lib;
+ this.source = source;
+ if (source.length === 0) {
+ this._isEmpty = true;
+ this._tokenCount = 1;
+ this.sourceBuffer = new Uint8Array(0);
+ return;
+ }
+ this.sourceBuffer = new TextEncoder().encode(source);
+ const result = this.lib.symbols.binja_lexer_new(ptr(this.sourceBuffer), this.sourceBuffer.length);
+ if (!result) {
+ throw new Error("Failed to create native lexer");
+ }
+ this.lexerPtr = result;
+ this._tokenCount = Number(this.lib.symbols.binja_lexer_token_count(this.lexerPtr));
+ }
+ get tokenCount() {
+ return this._tokenCount;
+ }
+ getTokenType(index) {
+ if (this._isEmpty)
+ return TokenType2.EOF;
+ return Number(this.lib.symbols.binja_lexer_token_type(this.lexerPtr, index));
+ }
+ getTokenStart(index) {
+ if (this._isEmpty)
+ return 0;
+ return Number(this.lib.symbols.binja_lexer_token_start(this.lexerPtr, index));
+ }
+ getTokenEnd(index) {
+ if (this._isEmpty)
+ return 0;
+ return Number(this.lib.symbols.binja_lexer_token_end(this.lexerPtr, index));
+ }
+ hasError() {
+ if (this._isEmpty)
+ return false;
+ return Boolean(this.lib.symbols.binja_lexer_has_error(this.lexerPtr));
+ }
+ getErrorCode() {
+ if (this._isEmpty)
+ return 0;
+ return Number(this.lib.symbols.binja_lexer_error_code(this.lexerPtr));
+ }
+ getErrorLine() {
+ if (this._isEmpty)
+ return 1;
+ return Number(this.lib.symbols.binja_lexer_error_line(this.lexerPtr));
+ }
+ getTokenValue(index) {
+ if (this._isEmpty)
+ return "";
+ const start = this.getTokenStart(index);
+ const end = this.getTokenEnd(index);
+ return new TextDecoder().decode(this.sourceBuffer.slice(start, end));
+ }
+ getToken(index) {
+ return {
+ type: this.getTokenType(index),
+ start: this.getTokenStart(index),
+ end: this.getTokenEnd(index),
+ value: this.getTokenValue(index)
+ };
+ }
+ getAllTokens() {
+ const tokens = [];
+ for (let i = 0;i < this._tokenCount; i++) {
+ tokens.push(this.getToken(i));
+ }
+ return tokens;
+ }
+ free() {
+ if (this.lexerPtr) {
+ this.lib.symbols.binja_lexer_free(this.lexerPtr);
+ this.lexerPtr = null;
+ }
+ }
+ [Symbol.dispose]() {
+ this.free();
+ }
+ };
+ });
+
  // src/lexer/tokens.ts
  var TokenType;
  ((TokenType2) => {
@@ -56,6 +336,135 @@ var KEYWORDS = {
  in: "NAME" /* NAME */
  };

+ // src/lexer/hybrid.ts
+ var _nativeChecked = false;
+ var _nativeAvailable2 = false;
+ var NativeLexerClass = null;
+ function checkNative() {
+ if (_nativeChecked)
+ return _nativeAvailable2;
+ _nativeChecked = true;
+ try {
+ const native = (init_native(), __toCommonJS(exports_native));
+ if (typeof native.isNativeAvailable === "function" && native.isNativeAvailable()) {
+ _nativeAvailable2 = true;
+ NativeLexerClass = native.NativeLexer;
+ return true;
+ }
+ } catch {}
+ return false;
+ }
+ var NATIVE_TO_TS = {
+ 0: "TEXT" /* TEXT */,
+ 1: "VARIABLE_START" /* VARIABLE_START */,
+ 2: "VARIABLE_END" /* VARIABLE_END */,
+ 3: "BLOCK_START" /* BLOCK_START */,
+ 4: "BLOCK_END" /* BLOCK_END */,
+ 5: "COMMENT_START" /* COMMENT_START */,
+ 6: "COMMENT_END" /* COMMENT_END */,
+ 7: "NAME" /* NAME */,
+ 8: "STRING" /* STRING */,
+ 9: "NUMBER" /* NUMBER */,
+ 10: "NAME" /* NAME */,
+ 11: "DOT" /* DOT */,
+ 12: "COMMA" /* COMMA */,
+ 13: "PIPE" /* PIPE */,
+ 14: "COLON" /* COLON */,
+ 15: "LPAREN" /* LPAREN */,
+ 16: "RPAREN" /* RPAREN */,
+ 17: "LBRACKET" /* LBRACKET */,
+ 18: "RBRACKET" /* RBRACKET */,
+ 19: "LBRACE" /* LBRACE */,
+ 20: "RBRACE" /* RBRACE */,
+ 21: "ASSIGN" /* ASSIGN */,
+ 22: "EOF" /* EOF */
+ };
+ var OPERATOR_TO_TYPE = {
+ "==": "EQ" /* EQ */,
+ "!=": "NE" /* NE */,
+ "<": "LT" /* LT */,
+ ">": "GT" /* GT */,
+ "<=": "LE" /* LE */,
+ ">=": "GE" /* GE */,
+ "+": "ADD" /* ADD */,
+ "-": "SUB" /* SUB */,
+ "*": "MUL" /* MUL */,
+ "/": "DIV" /* DIV */,
+ "%": "MOD" /* MOD */,
+ "~": "TILDE" /* TILDE */
+ };
+ var KEYWORD_TO_TYPE = {
+ and: "AND" /* AND */,
+ or: "OR" /* OR */,
+ not: "NOT" /* NOT */
+ };
+ var ERROR_MESSAGES = {
+ 1: "Unterminated string",
+ 2: "Unclosed template tag",
+ 3: "Invalid operator",
+ 4: "Unexpected character"
+ };
+ function isNativeAccelerated() {
+ return checkNative();
+ }
+ function tokenizeNative(source) {
+ if (!checkNative() || !NativeLexerClass)
+ return null;
+ if (source.length === 0) {
+ return [{ type: "EOF" /* EOF */, value: "", line: 1, column: 1 }];
+ }
+ const lexer = new NativeLexerClass(source);
+ try {
+ if (lexer.hasError()) {
+ const errorCode = lexer.getErrorCode();
+ const errorLine = lexer.getErrorLine();
+ const message = ERROR_MESSAGES[errorCode] ?? "Unknown error";
+ throw new Error(`${message} at line ${errorLine}`);
+ }
+ const tokens = [];
+ const count = lexer.tokenCount;
+ const lineStarts = [0];
+ for (let i = 0;i < source.length; i++) {
+ if (source[i] === `
+ `)
+ lineStarts.push(i + 1);
+ }
+ for (let i = 0;i < count; i++) {
+ const nativeType = lexer.getTokenType(i);
+ const value = lexer.getTokenValue(i);
+ const start = lexer.getTokenStart(i);
+ let lo = 0, hi = lineStarts.length - 1;
+ while (lo < hi) {
+ const mid = lo + hi + 1 >> 1;
+ if (lineStarts[mid] <= start)
+ lo = mid;
+ else
+ hi = mid - 1;
+ }
+ const line = lo + 1;
+ const column = start - lineStarts[lo] + 1;
+ let type = NATIVE_TO_TS[nativeType] ?? "NAME" /* NAME */;
+ let finalValue = value;
+ if (nativeType === 10 && OPERATOR_TO_TYPE[value]) {
+ type = OPERATOR_TO_TYPE[value];
+ } else if (type === "NAME" /* NAME */ && KEYWORD_TO_TYPE[value]) {
+ type = KEYWORD_TO_TYPE[value];
+ }
+ if (type === "STRING" /* STRING */ && finalValue.length >= 2) {
+ const first = finalValue[0];
+ const last = finalValue[finalValue.length - 1];
+ if (first === '"' && last === '"' || first === "'" && last === "'") {
+ finalValue = finalValue.slice(1, -1);
+ }
+ }
+ tokens.push({ type, value: finalValue, line, column });
+ }
+ return tokens;
+ } finally {
+ lexer.free();
+ }
+ }
+
  // src/lexer/index.ts
  class Lexer {
  state;
@@ -65,6 +474,7 @@ class Lexer {
  blockEnd;
  commentStart;
  commentEnd;
+ useNative;
  constructor(source, options = {}) {
  this.state = {
  source,
@@ -79,8 +489,15 @@ class Lexer {
  this.blockEnd = options.blockEnd ?? "%}";
  this.commentStart = options.commentStart ?? "{#";
  this.commentEnd = options.commentEnd ?? "#}";
+ const hasCustomDelimiters = options.variableStart !== undefined || options.variableEnd !== undefined || options.blockStart !== undefined || options.blockEnd !== undefined || options.commentStart !== undefined || options.commentEnd !== undefined;
+ this.useNative = !hasCustomDelimiters && isNativeAccelerated();
  }
  tokenize() {
+ if (this.useNative) {
+ const nativeTokens = tokenizeNative(this.state.source);
+ if (nativeTokens)
+ return nativeTokens;
+ }
  while (!this.isAtEnd()) {
  this.scanToken();
  }
@@ -1589,7 +2006,7 @@ var last = (value) => {
  return value[value.length - 1];
  return value;
  };
- var join = (value, separator = "") => {
+ var join2 = (value, separator = "") => {
  if (Array.isArray(value))
  return value.join(separator);
  return String(value);
@@ -2141,7 +2558,7 @@ var builtinFilters = {
  length_is,
  first,
  last,
- join,
+ join: join2,
  slice,
  reverse,
  sort,
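
The dist changes above wire native acceleration into the existing Lexer: the constructor opts out whenever custom delimiters are supplied, and tokenize() quietly falls back to the TypeScript scanner when the Zig library cannot be loaded. A minimal usage sketch of that behavior follows; it assumes Lexer and isNativeAccelerated are reachable from the package root, which this diff does not confirm.

  // Sketch only: import specifiers are assumed, not shown in this diff.
  import { Lexer, isNativeAccelerated } from "binja";

  // Default delimiters: the constructor enables useNative when libbinja.* loads via dlopen.
  const fast = new Lexer("Hello {{ name }}!");
  console.log("native:", isNativeAccelerated());

  // Custom delimiters trip the hasCustomDelimiters check, forcing the pure-TypeScript path.
  const custom = new Lexer("Hello [[ name ]]!", { variableStart: "[[", variableEnd: "]]" });

  // Both paths return the same token shape ({ type, value, line, column }).
  console.log(fast.tokenize().length, custom.tokenize().length);
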
package/dist/lexer/hybrid.d.ts ADDED
@@ -0,0 +1,13 @@
+ /**
+ * Hybrid Lexer - Uses Zig FFI when available, falls back to TypeScript
+ */
+ import { Token } from './tokens';
+ /**
+ * Check if native acceleration is available
+ */
+ export declare function isNativeAccelerated(): boolean;
+ /**
+ * Tokenize using native FFI (returns null if not available)
+ */
+ export declare function tokenizeNative(source: string): Token[] | null;
+ //# sourceMappingURL=hybrid.d.ts.map
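
Per these declarations, tokenizeNative returns null rather than throwing when the native library is unavailable, so callers can branch without a try/catch. A sketch of that pattern, using a hypothetical import specifier (the package's exports map is not part of this diff):

  // Hypothetical paths; resolve against the package's actual exports.
  import { isNativeAccelerated, tokenizeNative } from "binja/dist/lexer/hybrid";
  import { Lexer } from "binja";

  function tokenizeTemplate(source: string) {
    // null means the Zig FFI library was not found or failed to load.
    const native = tokenizeNative(source);
    if (native) return native;
    // Fall back to the pure-TypeScript lexer on the same source.
    return new Lexer(source).tokenize();
  }

  console.log(isNativeAccelerated(), tokenizeTemplate("{% if user %}{{ user.name }}{% endif %}").length);
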
package/dist/lexer/hybrid.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"hybrid.d.ts","sourceRoot":"","sources":["../../src/lexer/hybrid.ts"],"names":[],"mappings":"AAAA;;GAEG;AACH,OAAO,EAAE,KAAK,EAAa,MAAM,UAAU,CAAA;AA0E3C;;GAEG;AACH,wBAAgB,mBAAmB,IAAI,OAAO,CAE7C;AAED;;GAEG;AACH,wBAAgB,cAAc,CAAC,MAAM,EAAE,MAAM,GAAG,KAAK,EAAE,GAAG,IAAI,CAkE7D"}
package/dist/lexer/index.d.ts CHANGED
@@ -1,6 +1,8 @@
  /**
  * Jinja2/DTL Lexer - Tokenizes template source into tokens
  * Compatible with both Jinja2 and Django Template Language
+ *
+ * Automatically uses Zig FFI acceleration when available and using default delimiters.
  */
  import { Token } from './tokens';
  export declare class Lexer {
@@ -11,6 +13,7 @@ export declare class Lexer {
  private blockEnd;
  private commentStart;
  private commentEnd;
+ private useNative;
  constructor(source: string, options?: {
  variableStart?: string;
  variableEnd?: string;
@@ -46,4 +49,5 @@ export declare class Lexer {
  }
  export { TokenType, KEYWORDS } from './tokens';
  export type { Token, LexerState } from './tokens';
+ export { isNativeAccelerated } from './hybrid';
  //# sourceMappingURL=index.d.ts.map
package/dist/lexer/index.d.ts.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/lexer/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AACH,OAAO,EAAE,KAAK,EAAmC,MAAM,UAAU,CAAA;AAEjE,qBAAa,KAAK;IAChB,OAAO,CAAC,KAAK,CAAY;IACzB,OAAO,CAAC,aAAa,CAAQ;IAC7B,OAAO,CAAC,WAAW,CAAQ;IAC3B,OAAO,CAAC,UAAU,CAAQ;IAC1B,OAAO,CAAC,QAAQ,CAAQ;IACxB,OAAO,CAAC,YAAY,CAAQ;IAC5B,OAAO,CAAC,UAAU,CAAQ;gBAGxB,MAAM,EAAE,MAAM,EACd,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,MAAM,CAAA;QACtB,WAAW,CAAC,EAAE,MAAM,CAAA;QACpB,UAAU,CAAC,EAAE,MAAM,CAAA;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;QACjB,YAAY,CAAC,EAAE,MAAM,CAAA;QACrB,UAAU,CAAC,EAAE,MAAM,CAAA;KACf;IAmBR,QAAQ,IAAI,KAAK,EAAE;IASnB,OAAO,CAAC,SAAS;IAsCjB,OAAO,CAAC,SAAS;IAYjB,OAAO,CAAC,YAAY;IA0EpB,OAAO,CAAC,QAAQ;IAmChB,OAAO,CAAC,cAAc;IAwBtB,OAAO,CAAC,mBAAmB;IA6B3B,OAAO,CAAC,UAAU;IAyBlB,OAAO,CAAC,UAAU;IAmBlB,OAAO,CAAC,cAAc;IAatB,OAAO,CAAC,YAAY;IAuDpB,OAAO,CAAC,WAAW;IAgBnB,OAAO,CAAC,OAAO;IAIf,OAAO,CAAC,IAAI;IAKZ,OAAO,CAAC,QAAQ;IAKhB,OAAO,CAAC,OAAO;IAOf,OAAO,CAAC,KAAK;IAkBb,OAAO,CAAC,KAAK;IAab,OAAO,CAAC,cAAc;IAUtB,OAAO,CAAC,YAAY;IAIpB,OAAO,CAAC,OAAO;IAIf,OAAO,CAAC,OAAO;IAIf,OAAO,CAAC,cAAc;IAItB,OAAO,CAAC,QAAQ;CAQjB;AAED,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAC9C,YAAY,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAA"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/lexer/index.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AACH,OAAO,EAAE,KAAK,EAAmC,MAAM,UAAU,CAAA;AAGjE,qBAAa,KAAK;IAChB,OAAO,CAAC,KAAK,CAAY;IACzB,OAAO,CAAC,aAAa,CAAQ;IAC7B,OAAO,CAAC,WAAW,CAAQ;IAC3B,OAAO,CAAC,UAAU,CAAQ;IAC1B,OAAO,CAAC,QAAQ,CAAQ;IACxB,OAAO,CAAC,YAAY,CAAQ;IAC5B,OAAO,CAAC,UAAU,CAAQ;IAC1B,OAAO,CAAC,SAAS,CAAS;gBAGxB,MAAM,EAAE,MAAM,EACd,OAAO,GAAE;QACP,aAAa,CAAC,EAAE,MAAM,CAAA;QACtB,WAAW,CAAC,EAAE,MAAM,CAAA;QACpB,UAAU,CAAC,EAAE,MAAM,CAAA;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;QACjB,YAAY,CAAC,EAAE,MAAM,CAAA;QACrB,UAAU,CAAC,EAAE,MAAM,CAAA;KACf;IA8BR,QAAQ,IAAI,KAAK,EAAE;IAgBnB,OAAO,CAAC,SAAS;IAsCjB,OAAO,CAAC,SAAS;IAYjB,OAAO,CAAC,YAAY;IA0EpB,OAAO,CAAC,QAAQ;IAmChB,OAAO,CAAC,cAAc;IAwBtB,OAAO,CAAC,mBAAmB;IA6B3B,OAAO,CAAC,UAAU;IAyBlB,OAAO,CAAC,UAAU;IAmBlB,OAAO,CAAC,cAAc;IAatB,OAAO,CAAC,YAAY;IAuDpB,OAAO,CAAC,WAAW;IAgBnB,OAAO,CAAC,OAAO;IAIf,OAAO,CAAC,IAAI;IAKZ,OAAO,CAAC,QAAQ;IAKhB,OAAO,CAAC,OAAO;IAOf,OAAO,CAAC,KAAK;IAkBb,OAAO,CAAC,KAAK;IAab,OAAO,CAAC,cAAc;IAUtB,OAAO,CAAC,YAAY;IAIpB,OAAO,CAAC,OAAO;IAIf,OAAO,CAAC,OAAO;IAIf,OAAO,CAAC,cAAc;IAItB,OAAO,CAAC,QAAQ;CAQjB;AAED,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAC9C,YAAY,EAAE,KAAK,EAAE,UAAU,EAAE,MAAM,UAAU,CAAA;AACjD,OAAO,EAAE,mBAAmB,EAAE,MAAM,UAAU,CAAA"}
package/dist/native/index.d.ts ADDED
@@ -0,0 +1,72 @@
+ export interface NativeToken {
+ type: number;
+ start: number;
+ end: number;
+ value: string;
+ }
+ export declare const TokenType: {
+ readonly TEXT: 0;
+ readonly VAR_START: 1;
+ readonly VAR_END: 2;
+ readonly BLOCK_START: 3;
+ readonly BLOCK_END: 4;
+ readonly COMMENT_START: 5;
+ readonly COMMENT_END: 6;
+ readonly IDENTIFIER: 7;
+ readonly STRING: 8;
+ readonly NUMBER: 9;
+ readonly OPERATOR: 10;
+ readonly DOT: 11;
+ readonly COMMA: 12;
+ readonly PIPE: 13;
+ readonly COLON: 14;
+ readonly LPAREN: 15;
+ readonly RPAREN: 16;
+ readonly LBRACKET: 17;
+ readonly RBRACKET: 18;
+ readonly LBRACE: 19;
+ readonly RBRACE: 20;
+ readonly ASSIGN: 21;
+ readonly EOF: 22;
+ };
+ /**
+ * Check if native Zig library is available
+ */
+ export declare function isNativeAvailable(): boolean;
+ /**
+ * Get native library version
+ */
+ export declare function nativeVersion(): string | null;
+ /**
+ * Native Lexer - uses Zig FFI for maximum performance
+ */
+ export declare class NativeLexer {
+ private lexerPtr;
+ private source;
+ private sourceBuffer;
+ private lib;
+ private _tokenCount;
+ private _isEmpty;
+ constructor(source: string);
+ get tokenCount(): number;
+ getTokenType(index: number): number;
+ getTokenStart(index: number): number;
+ getTokenEnd(index: number): number;
+ hasError(): boolean;
+ getErrorCode(): number;
+ getErrorLine(): number;
+ getTokenValue(index: number): string;
+ getToken(index: number): NativeToken;
+ getAllTokens(): NativeToken[];
+ free(): void;
+ [Symbol.dispose](): void;
+ }
+ /**
+ * Quick tokenize - returns token count only (fastest)
+ */
+ export declare function tokenizeCount(source: string): number;
+ /**
+ * Tokenize with native lexer, auto-cleanup
+ */
+ export declare function tokenize(source: string): NativeToken[];
+ //# sourceMappingURL=index.d.ts.map
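
The native module's surface, per the declarations above: guard with isNativeAvailable() before constructing NativeLexer (its constructor throws when the library is missing), and release the Zig-side state with free() or via Symbol.dispose. A usage sketch, again assuming an import specifier this diff does not confirm:

  // Import path is an assumption; the diff only shows dist/native/index.d.ts.
  import { isNativeAvailable, nativeVersion, tokenizeCount, NativeLexer } from "binja/native";

  const source = "{{ items | join(', ') }}";

  if (isNativeAvailable()) {
    console.log("libbinja", nativeVersion());           // version string from binja_version()
    console.log("token count:", tokenizeCount(source)); // count only, no token objects built

    const lexer = new NativeLexer(source);
    try {
      for (let i = 0; i < lexer.tokenCount; i++) {
        const { type, start, end, value } = lexer.getToken(i);
        console.log(type, start, end, JSON.stringify(value));
      }
    } finally {
      lexer.free(); // or rely on Symbol.dispose with a `using` declaration
    }
  }
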
package/dist/native/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/native/index.ts"],"names":[],"mappings":"AAcA,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,MAAM,CAAA;IACX,KAAK,EAAE,MAAM,CAAA;CACd;AAED,eAAO,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;CAwBZ,CAAA;AA8HV;;GAEG;AACH,wBAAgB,iBAAiB,IAAI,OAAO,CAG3C;AAED;;GAEG;AACH,wBAAgB,aAAa,IAAI,MAAM,GAAG,IAAI,CAO7C;AAED;;GAEG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,QAAQ,CAAS;IACzB,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,YAAY,CAAY;IAChC,OAAO,CAAC,GAAG,CAAS;IACpB,OAAO,CAAC,WAAW,CAAY;IAC/B,OAAO,CAAC,QAAQ,CAAiB;gBAErB,MAAM,EAAE,MAAM;IAgC1B,IAAI,UAAU,IAAI,MAAM,CAEvB;IAED,YAAY,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM;IAKnC,aAAa,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM;IAKpC,WAAW,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM;IAKlC,QAAQ,IAAI,OAAO;IAKnB,YAAY,IAAI,MAAM;IAKtB,YAAY,IAAI,MAAM;IAKtB,aAAa,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM;IAQpC,QAAQ,CAAC,KAAK,EAAE,MAAM,GAAG,WAAW;IASpC,YAAY,IAAI,WAAW,EAAE;IAQ7B,IAAI,IAAI,IAAI;IAOZ,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,IAAI;CAGzB;AAED;;GAEG;AACH,wBAAgB,aAAa,CAAC,MAAM,EAAE,MAAM,GAAG,MAAM,CAYpD;AAED;;GAEG;AACH,wBAAgB,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,WAAW,EAAE,CAOtD"}