@projectwallace/css-parser 0.5.0 → 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -474,6 +474,36 @@ class Lexer {
474
474
  column: this.token_column
475
475
  };
476
476
  }
477
+ /**
478
+ * Save complete lexer state for backtracking
479
+ * @returns Object containing all lexer state
480
+ */
481
+ save_position() {
482
+ return {
483
+ pos: this.pos,
484
+ line: this.line,
485
+ column: this.column,
486
+ token_type: this.token_type,
487
+ token_start: this.token_start,
488
+ token_end: this.token_end,
489
+ token_line: this.token_line,
490
+ token_column: this.token_column
491
+ };
492
+ }
493
+ /**
494
+ * Restore lexer state from saved position
495
+ * @param saved The saved position to restore
496
+ */
497
+ restore_position(saved) {
498
+ this.pos = saved.pos;
499
+ this.line = saved.line;
500
+ this.column = saved.column;
501
+ this.token_type = saved.token_type;
502
+ this.token_start = saved.token_start;
503
+ this.token_end = saved.token_end;
504
+ this.token_line = saved.token_line;
505
+ this.token_column = saved.token_column;
506
+ }
477
507
  }
478
508
 
479
509
  export { Lexer as L, TOKEN_IDENT as T, TOKEN_FUNCTION as a, TOKEN_AT_KEYWORD as b, TOKEN_HASH as c, TOKEN_STRING as d, TOKEN_BAD_STRING as e, TOKEN_URL as f, TOKEN_BAD_URL as g, TOKEN_DELIM as h, TOKEN_NUMBER as i, TOKEN_PERCENTAGE as j, TOKEN_DIMENSION as k, TOKEN_WHITESPACE as l, TOKEN_CDO as m, TOKEN_CDC as n, TOKEN_COLON as o, TOKEN_SEMICOLON as p, TOKEN_COMMA as q, TOKEN_LEFT_BRACKET as r, TOKEN_RIGHT_BRACKET as s, TOKEN_LEFT_PAREN as t, TOKEN_RIGHT_PAREN as u, TOKEN_LEFT_BRACE as v, TOKEN_RIGHT_BRACE as w, TOKEN_COMMENT as x, TOKEN_EOF as y };
package/dist/lexer.d.ts CHANGED
@@ -1,4 +1,14 @@
1
1
  import { type Token, type TokenType } from './token-types';
2
/**
 * Snapshot of the Lexer's mutable state, produced by
 * `Lexer.save_position()` and consumed by `Lexer.restore_position()`.
 * Holding one of these lets a parser backtrack to an earlier point.
 */
export interface LexerPosition {
    /** Read-cursor offset into the source string. */
    pos: number;
    /** Line counter at the cursor. */
    line: number;
    /** Column counter at the cursor. */
    column: number;
    /** Type of the current token. */
    token_type: TokenType;
    /** Start offset of the current token. */
    token_start: number;
    /** End offset of the current token. */
    token_end: number;
    /** Line on which the current token starts. */
    token_line: number;
    /** Column at which the current token starts. */
    token_column: number;
}
2
12
  export declare class Lexer {
3
13
  source: string;
4
14
  pos: number;
@@ -24,4 +34,14 @@ export declare class Lexer {
24
34
  peek(offset?: number): number;
25
35
  make_token(type: TokenType, start: number, end: number, line?: number, column?: number): TokenType;
26
36
  next_token(skip_whitespace?: boolean): Token | null;
37
+ /**
38
+ * Save complete lexer state for backtracking
39
+ * @returns Object containing all lexer state
40
+ */
41
+ save_position(): LexerPosition;
42
+ /**
43
+ * Restore lexer state from saved position
44
+ * @param saved The saved position to restore
45
+ */
46
+ restore_position(saved: LexerPosition): void;
27
47
  }
@@ -0,0 +1,22 @@
1
+ import { CSSDataArena } from './arena';
2
+ import { CSSNode } from './css-node';
3
/**
 * Parser for the CSS An+B microsyntax (e.g. "odd", "even", "3", "n",
 * "-n", "2n", "2n+1", "-3n-5"). Produces nodes in a CSSDataArena and
 * returns their indices rather than allocating plain objects.
 */
export declare class ANplusBParser {
    private lexer;
    private arena;
    private source;
    private expr_end;
    /**
     * @param arena Arena that will own the nodes this parser creates.
     * @param source Full source text that all offsets refer to.
     */
    constructor(arena: CSSDataArena, source: string);
    /**
     * Parse An+B expression
     * Examples: odd, even, 3, n, -n, 2n, 2n+1, -3n-5
     * @param start Offset in the source where the expression begins.
     * @param end Exclusive end offset of the expression.
     * @param line Line number the expression starts on (defaults to 1).
     * @returns Arena node index, or null when the text is not a valid
     *          An+B expression.
     */
    parse_anplusb(start: number, end: number, line?: number): number | null;
    /**
     * Parse the b part after 'n'
     * Handles: +5, -3, whitespace variations
     */
    private parse_b_part;
    private skip_whitespace;
    private create_anplusb_node;
}
22
/**
 * Parse a standalone An+B expression string (e.g. "odd", "2n+1").
 * @param expr Source text of the expression.
 * @returns A CSSNode wrapping the parsed node, or null when `expr`
 *          is not a valid An+B expression.
 */
export declare function parse_anplusb(expr: string): CSSNode | null;
@@ -0,0 +1,220 @@
1
+ import { L as Lexer, T as TOKEN_IDENT, h as TOKEN_DELIM, k as TOKEN_DIMENSION, i as TOKEN_NUMBER } from './lexer-CtBKgfVv.js';
2
+ import { am as CHAR_MINUS_HYPHEN, a3 as CHAR_PLUS, ak as skip_whitespace_forward, H as NODE_SELECTOR_NTH, C as CSSNode, W as CSSDataArena } from './css-node-CIM4dthB.js';
3
+
4
/**
 * Parser for the CSS An+B microsyntax ("odd", "even", "3", "n", "-n",
 * "2n", "2n+1", "-3n-5", ...). Nodes are tagged NODE_SELECTOR_NTH and
 * stored in a CSSDataArena: the "a" part is recorded as the node's
 * content span and the "b" part as its value span (see
 * create_anplusb_node).
 */
class ANplusBParser {
  lexer;     // Lexer over `source`; repositioned on each parse_anplusb call
  arena;     // CSSDataArena that owns the produced nodes
  source;    // full source text that all offsets refer to
  expr_end;  // exclusive end offset of the expression being parsed
  constructor(arena, source) {
    this.arena = arena;
    this.source = source;
    this.lexer = new Lexer(source, true);
    this.expr_end = 0;
  }
  /**
   * Parse An+B expression
   * Examples: odd, even, 3, n, -n, 2n, 2n+1, -3n-5
   * Returns an arena node index, or null when the text in
   * source[start, end) is not a valid An+B expression.
   */
  parse_anplusb(start, end, line = 1) {
    this.expr_end = end;
    this.lexer.pos = start;
    this.lexer.line = line;
    // a = coefficient part (text including the trailing 'n'),
    // b = offset part; the *_start/*_end pairs are source offsets.
    let a = null;
    let b = null;
    let a_start = start;
    let a_end = start;
    let b_start = start;
    let b_end = start;
    const node_start = start;
    this.skip_whitespace();
    if (this.lexer.pos >= this.expr_end) {
      return null;
    }
    // NOTE(review): next_token_fast comes from the bundled Lexer (not
    // visible in this chunk); presumably it advances to the next token
    // with the flag skipping whitespace — confirm against the lexer.
    this.lexer.next_token_fast(true);
    if (this.lexer.token_type === TOKEN_IDENT) {
      const text = this.source.substring(this.lexer.token_start, this.lexer.token_end).toLowerCase();
      // Keyword forms: "odd" / "even" (matched case-insensitively,
      // stored with original casing as the a-part).
      if (text === "odd" || text === "even") {
        a = this.source.substring(this.lexer.token_start, this.lexer.token_end);
        a_start = this.lexer.token_start;
        a_end = this.lexer.token_end;
        return this.create_anplusb_node(node_start, a, null, a_start, a_end, 0, 0);
      }
      const first_char = this.source.charCodeAt(this.lexer.token_start);
      const second_char = this.lexer.token_end > this.lexer.token_start + 1 ? this.source.charCodeAt(this.lexer.token_start + 1) : 0;
      // "-n..." forms (110 is the char code of 'n').
      if (first_char === CHAR_MINUS_HYPHEN && second_char === 110) {
        if (this.lexer.token_end > this.lexer.token_start + 2) {
          const third_char = this.source.charCodeAt(this.lexer.token_start + 2);
          // "-n-5": the b part is glued onto the same ident token.
          if (third_char === CHAR_MINUS_HYPHEN) {
            a = "-n";
            a_start = this.lexer.token_start;
            a_end = this.lexer.token_start + 2;
            b = this.source.substring(this.lexer.token_start + 2, this.lexer.token_end);
            b_start = this.lexer.token_start + 2;
            b_end = this.lexer.token_end;
            return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
          }
        }
        // "-n" followed by an optional separate "+5" / "- 3" part.
        a = "-n";
        a_start = this.lexer.token_start;
        a_end = this.lexer.token_start + 2;
        b = this.parse_b_part();
        if (b !== null) {
          b_start = this.lexer.token_start;
          b_end = this.lexer.token_end;
        }
        return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
      }
      // "n..." forms (ident starting with 'n').
      if (first_char === 110) {
        if (this.lexer.token_end > this.lexer.token_start + 1) {
          const second_char2 = this.source.charCodeAt(this.lexer.token_start + 1);
          // "n-5": b part glued onto the same ident token.
          if (second_char2 === CHAR_MINUS_HYPHEN) {
            a = "n";
            a_start = this.lexer.token_start;
            a_end = this.lexer.token_start + 1;
            b = this.source.substring(this.lexer.token_start + 1, this.lexer.token_end);
            b_start = this.lexer.token_start + 1;
            b_end = this.lexer.token_end;
            return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
          }
        }
        // Bare "n" with an optional separate b part.
        a = "n";
        a_start = this.lexer.token_start;
        a_end = this.lexer.token_start + 1;
        b = this.parse_b_part();
        if (b !== null) {
          b_start = this.lexer.token_start;
          b_end = this.lexer.token_end;
        }
        return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
      }
      // Any other identifier is not a valid An+B expression.
      return null;
    }
    // "+n..." forms: a '+' delim token followed by an 'n...' ident.
    if (this.lexer.token_type === TOKEN_DELIM && this.source.charCodeAt(this.lexer.token_start) === CHAR_PLUS) {
      const saved = this.lexer.save_position();
      this.lexer.next_token_fast(true);
      if (this.lexer.token_type === TOKEN_IDENT) {
        const text = this.source.substring(this.lexer.token_start, this.lexer.token_end);
        const first_char = text.charCodeAt(0);
        if (first_char === 110) {
          a = "+n";
          // saved.pos sits just past the consumed '+' delim, so the
          // a-part span starts one character back, at the '+' itself
          // (presumed from save_position semantics — confirm).
          a_start = saved.pos - 1;
          a_end = this.lexer.token_start + 1;
          if (this.lexer.token_end > this.lexer.token_start + 1) {
            const second_char = this.source.charCodeAt(this.lexer.token_start + 1);
            // "+n-5": b part glued onto the ident token.
            if (second_char === CHAR_MINUS_HYPHEN) {
              b = this.source.substring(this.lexer.token_start + 1, this.lexer.token_end);
              b_start = this.lexer.token_start + 1;
              b_end = this.lexer.token_end;
              return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
            }
          }
          b = this.parse_b_part();
          if (b !== null) {
            b_start = this.lexer.token_start;
            b_end = this.lexer.token_end;
          }
          return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
        }
      }
      // '+' was not followed by an n-ident: backtrack and fall through
      // to the dimension/number checks below.
      this.lexer.restore_position(saved);
    }
    // Dimension forms like "2n", "2n-5", "-3n" (number+ident lexed as
    // one TOKEN_DIMENSION whose unit contains 'n').
    if (this.lexer.token_type === TOKEN_DIMENSION) {
      const token_text = this.source.substring(this.lexer.token_start, this.lexer.token_end);
      const n_index = token_text.toLowerCase().indexOf("n");
      if (n_index !== -1) {
        // Everything up to and including the 'n' is the a part.
        a = token_text.substring(0, n_index + 1);
        a_start = this.lexer.token_start;
        a_end = this.lexer.token_start + n_index + 1;
        if (n_index + 1 < token_text.length) {
          const remainder = token_text.substring(n_index + 1);
          // "2n-5": b part glued onto the same dimension token.
          if (remainder.charCodeAt(0) === CHAR_MINUS_HYPHEN) {
            b = remainder;
            b_start = this.lexer.token_start + n_index + 1;
            b_end = this.lexer.token_end;
            return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
          }
        }
        b = this.parse_b_part();
        if (b !== null) {
          b_start = this.lexer.token_start;
          b_end = this.lexer.token_end;
        }
        return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
      }
    }
    // Plain integer form, e.g. "3": only a b part, no coefficient.
    if (this.lexer.token_type === TOKEN_NUMBER) {
      let num_text = this.source.substring(this.lexer.token_start, this.lexer.token_end);
      b = num_text;
      b_start = this.lexer.token_start;
      b_end = this.lexer.token_end;
      return this.create_anplusb_node(node_start, a, b, a_start, a_end, b_start, b_end);
    }
    return null;
  }
  /**
   * Parse the b part after 'n'
   * Handles: +5, -3, whitespace variations
   * Returns the normalized b text (leading '+' stripped, '-' kept),
   * or null when no b part follows. Note: on failure after consuming
   * a sign token the lexer is NOT rewound — callers rely on the
   * current-token fields only when the return value is non-null.
   */
  parse_b_part() {
    this.skip_whitespace();
    if (this.lexer.pos >= this.expr_end) {
      return null;
    }
    this.lexer.next_token_fast(true);
    // Case 1: separate sign delim, then a number ("+ 5", "- 3").
    if (this.lexer.token_type === TOKEN_DELIM) {
      const ch = this.source.charCodeAt(this.lexer.token_start);
      if (ch === CHAR_PLUS || ch === CHAR_MINUS_HYPHEN) {
        const sign = ch === CHAR_MINUS_HYPHEN ? "-" : "";
        this.skip_whitespace();
        this.lexer.next_token_fast(true);
        if (this.lexer.token_type === TOKEN_NUMBER) {
          let num_text = this.source.substring(this.lexer.token_start, this.lexer.token_end);
          // Drop a redundant '+' inside the number token itself.
          if (num_text.charCodeAt(0) === CHAR_PLUS) {
            num_text = num_text.substring(1);
          }
          return sign === "-" ? sign + num_text : num_text;
        }
      }
    }
    // Case 2: a signed number token ("+5" / "-3" lexed as one token).
    if (this.lexer.token_type === TOKEN_NUMBER) {
      let num_text = this.source.substring(this.lexer.token_start, this.lexer.token_end);
      const first_char = num_text.charCodeAt(0);
      if (first_char === CHAR_PLUS || first_char === CHAR_MINUS_HYPHEN) {
        if (first_char === CHAR_PLUS) {
          num_text = num_text.substring(1);
        }
        return num_text;
      }
    }
    return null;
  }
  // Advance the lexer cursor past whitespace, clamped to expr_end.
  skip_whitespace() {
    this.lexer.pos = skip_whitespace_forward(this.source, this.lexer.pos, this.expr_end);
  }
  // Allocate an NODE_SELECTOR_NTH arena node spanning [start, lexer.pos);
  // the a part is stored as the content span, the b part as the value span.
  create_anplusb_node(start, a, b, a_start, a_end, b_start, b_end) {
    const node = this.arena.create_node();
    this.arena.set_type(node, NODE_SELECTOR_NTH);
    this.arena.set_start_offset(node, start);
    this.arena.set_length(node, this.lexer.pos - start);
    this.arena.set_start_line(node, this.lexer.line);
    if (a !== null) {
      this.arena.set_content_start(node, a_start);
      this.arena.set_content_length(node, a_end - a_start);
    }
    if (b !== null) {
      this.arena.set_value_start(node, b_start);
      this.arena.set_value_length(node, b_end - b_start);
    }
    return node;
  }
}
212
/**
 * Convenience wrapper: parse a standalone An+B expression string.
 * Allocates a fresh 64-slot arena, runs ANplusBParser over the whole
 * string, and wraps the result.
 * @param expr Source text of the expression (e.g. "odd", "2n+1").
 * @returns A CSSNode over the parsed node, or null when invalid.
 */
function parse_anplusb(expr) {
  const arena = new CSSDataArena(64);
  const nodeIndex = new ANplusBParser(arena, expr).parse_anplusb(0, expr.length);
  return nodeIndex === null ? null : new CSSNode(arena, expr, nodeIndex);
}
219
+
220
+ export { ANplusBParser, parse_anplusb };
@@ -1,4 +1,28 @@
1
+ import { CSSDataArena } from './arena';
1
2
  import { CSSNode } from './css-node';
3
/**
 * Parser for at-rule preludes. The private member list shows dedicated
 * handling for media query lists, container queries, supports queries,
 * layer names, and @import preludes (url/layer/supports clauses).
 */
export declare class AtRulePreludeParser {
    private lexer;
    private arena;
    private source;
    private prelude_end;
    /**
     * @param arena Arena that will own the nodes this parser creates.
     * @param source Full source text that all offsets refer to.
     */
    constructor(arena: CSSDataArena, source: string);
    /**
     * Parse the prelude of the named at-rule found in source[start, end).
     * @param at_rule_name Name of the at-rule (e.g. "media", "supports", "layer").
     * @param start Offset in the source where the prelude begins.
     * @param end End offset of the prelude.
     * @param line Line number the prelude starts on.
     * @param column Column the prelude starts at.
     * @returns Array of parsed node indices (presumably arena indices —
     *          confirm against the implementation).
     */
    parse_prelude(at_rule_name: string, start: number, end: number, line?: number, column?: number): number[];
    private parse_media_query_list;
    private is_and_or_not;
    private parse_single_media_query;
    private parse_media_feature;
    private parse_container_query;
    private parse_supports_query;
    private parse_layer_names;
    private parse_identifier;
    private parse_import_prelude;
    private parse_import_url;
    private parse_import_layer;
    private parse_import_supports;
    private skip_whitespace;
    private peek_token_type;
    private next_token;
}
2
26
  /**
3
27
  * Parse an at-rule prelude string and return an array of AST nodes
4
28
  * @param at_rule_name - The name of the at-rule (e.g., "media", "supports", "layer")