jpsx 0.1.16 → 0.1.18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +243 -242
- package/dist/api/__tests__/compile.test.js +2 -2
- package/dist/api/__tests__/runtime.test.js +17 -11
- package/dist/api/index.d.ts.map +1 -1
- package/dist/api/index.js +3 -0
- package/dist/generator/generator.js +2 -2
- package/dist/lexer/tokenizer.d.ts +4 -2
- package/dist/lexer/tokenizer.d.ts.map +1 -1
- package/dist/lexer/tokenizer.js +71 -29
- package/dist/parser/grammar.d.ts.map +1 -1
- package/dist/parser/grammar.js +148 -74
- package/dist/parser/parser.js +1 -1
- package/package.json +31 -2
package/dist/lexer/tokenizer.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/lexer/tokenizer.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,SAAS,GACjB,QAAQ,GACR,QAAQ,GACR,SAAS,GACT,KAAK,GACL,IAAI,GACJ,MAAM,GACN,KAAK,GACL,IAAI,GACJ,OAAO,GACP,QAAQ,GACR,KAAK,GACL,OAAO,GACP,KAAK,GACL,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,KAAK,GACL,KAAK,GACL,IAAI,GACJ,QAAQ,GACR,IAAI,GACJ,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,SAAS,GACT,QAAQ,GACR,QAAQ,GACR,OAAO,GACP,OAAO,GACP,IAAI,GACJ,UAAU,GACV,UAAU,GACV,MAAM,GACN,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,OAAO,GACP,KAAK,CAAC;AAEV,MAAM,MAAM,KAAK,GAAG;IAClB,IAAI,EAAE,SAAS,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,
+{"version":3,"file":"tokenizer.d.ts","sourceRoot":"","sources":["../../src/lexer/tokenizer.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,SAAS,GACjB,QAAQ,GACR,QAAQ,GACR,SAAS,GACT,KAAK,GACL,IAAI,GACJ,MAAM,GACN,KAAK,GACL,IAAI,GACJ,OAAO,GACP,QAAQ,GACR,KAAK,GACL,OAAO,GACP,KAAK,GACL,QAAQ,GACR,QAAQ,GACR,QAAQ,GACR,MAAM,GACN,KAAK,GACL,KAAK,GACL,IAAI,GACJ,QAAQ,GACR,IAAI,GACJ,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,SAAS,GACT,QAAQ,GACR,QAAQ,GACR,OAAO,GACP,OAAO,GACP,IAAI,GACJ,UAAU,GACV,UAAU,GACV,MAAM,GACN,OAAO,GACP,QAAQ,GACR,QAAQ,GACR,OAAO,GACP,KAAK,CAAC;AAEV,MAAM,MAAM,KAAK,GAAG;IAClB,IAAI,EAAE,SAAS,CAAC;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;CACb,CAAC;AAqCF,wBAAgB,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,KAAK,EAAE,CAmQhD"}
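The regenerated declaration map above corresponds to a widened Token type. Judging from the emitted dist/lexer/tokenizer.js changes below, each token now carries its raw source text and an absolute offset in addition to line and column. A hedged TypeScript sketch of that shape, inferred from the runtime code rather than copied from tokenizer.d.ts:

// Sketch only: field names and types are inferred from the tokenizer.js diff in this
// release, not from the published tokenizer.d.ts, so treat the details as assumptions.
export type Token = {
    type: string;   // one of the tokenizer's token-type names ("IDENT", "STRING", "INDENT", ...)
    value: string;  // logical value (e.g. string contents without the surrounding quotes)
    text: string;   // raw source slice for the token; synthetic tokens fall back to value
    offset: number; // absolute character offset from the start of the source
    line: number;   // 1-based line number
    col: number;    // 1-based column number
};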
package/dist/lexer/tokenizer.js
CHANGED
@@ -34,20 +34,40 @@ export function tokenize(source) {
     const tokens = [];
     const lines = source.replace(/\r\n/g, "\n").split("\n");
     const indentStack = [0];
+    let absoluteOffset = 0;
     let inMultilineString = false;
     let multilineStringValue = "";
     let multilineStartLine = 0;
     let multilineStartCol = 0;
+    let multilineStartOffset = 0;
     lines.forEach((lineText, lineIndex) => {
         const lineNumber = lineIndex + 1;
         let i = 0;
+        const pushToken = (type, value, col, len) => {
+            const text = lineText.slice(col - 1, col - 1 + len);
+            tokens.push({
+                type,
+                value,
+                text: text || value, // Fallback to value if text is empty (newlines etc)
+                offset: absoluteOffset + (col - 1),
+                line: lineNumber,
+                col
+            });
+        };
         // --- State: Inside Multiline String ---
         if (inMultilineString) {
             const closingIndex = lineText.indexOf('"""');
             if (closingIndex !== -1) {
                 // Found closing delimiter
                 multilineStringValue += lineText.slice(0, closingIndex);
-                tokens.push({
+                tokens.push({
+                    type: "STRING",
+                    value: multilineStringValue,
+                    text: '"""' + multilineStringValue + '"""', // Approximation
+                    offset: multilineStartOffset,
+                    line: multilineStartLine,
+                    col: multilineStartCol
+                });
                 inMultilineString = false;
                 multilineStringValue = "";
                 i = closingIndex + 3; // Resume processing after """
@@ -55,17 +75,20 @@ export function tokenize(source) {
             else {
                 // No closing delimiter, consume entire line
                 multilineStringValue += lineText + "\n";
-
+                absoluteOffset += lineText.length + 1;
+                return; // Proceed to next line
             }
         }
         else {
             // --- State: Normal (Start of Line) ---
             // 1. Skip empty lines (only if not inside multiline string)
             if (lineText.trim().length === 0) {
+                absoluteOffset += lineText.length + 1;
                 return;
             }
             // 2. Skip full-line comments
             if (lineText.trim().startsWith("#")) {
+                absoluteOffset += lineText.length + 1;
                 return;
             }
             // 3. Handle Indentation
@@ -74,12 +97,12 @@ export function tokenize(source) {
             i = indent; // Start processing after indentation
             if (indent > indentStack[indentStack.length - 1]) {
                 indentStack.push(indent);
-
+                pushToken("INDENT", " ", 1, indent);
             }
             else {
                 while (indent < indentStack[indentStack.length - 1]) {
                     indentStack.pop();
-
+                    pushToken("DEDENT", "", 1, 0);
                 }
             }
         }
@@ -93,14 +116,15 @@ export function tokenize(source) {
             }
             // Handle F-String
             if (char === 'f' && lineText[i + 1] === '"') {
-                let start = i
-                i += 2;
+                let start = i;
+                i += 2; // skip f"
+                let valStart = i;
                 while (i < lineText.length && lineText[i] !== '"') {
                     i += 1;
                 }
-                const value = lineText.slice(
-                i += 1;
-
+                const value = lineText.slice(valStart, i);
+                i += 1; // skip "
+                pushToken("FSTRING", value, start + 1, i - start);
                 continue;
             }
             // Handle Multiline String Start
@@ -108,6 +132,7 @@ export function tokenize(source) {
                 inMultilineString = true;
                 multilineStartLine = lineNumber;
                 multilineStartCol = i + 1;
+                multilineStartOffset = absoluteOffset + i;
                 multilineStringValue = "";
                 i += 3;
                 // Try to find closing on same line
@@ -115,13 +140,21 @@ export function tokenize(source) {
                 const closingIndex = remainder.indexOf('"""');
                 if (closingIndex !== -1) {
                     multilineStringValue = remainder.slice(0, closingIndex);
-                    tokens.push({
+                    tokens.push({
+                        type: "STRING",
+                        value: multilineStringValue,
+                        text: '"""' + multilineStringValue + '"""',
+                        offset: multilineStartOffset,
+                        line: multilineStartLine,
+                        col: multilineStartCol
+                    });
                     inMultilineString = false;
                     multilineStringValue = "";
                     i += closingIndex + 3;
                 }
                 else {
                     multilineStringValue += remainder + "\n";
+                    absoluteOffset += lineText.length + 1;
                     return; // Stop processing this line, iterate to next
                 }
                 continue;
@@ -139,10 +172,10 @@ export function tokenize(source) {
                 const text = lineText.slice(start, i);
                 const keyword = keywords[text];
                 if (keyword) {
-
+                    pushToken(keyword, text, start + 1, i - start);
                 }
                 else {
-
+                    pushToken("IDENT", text, start + 1, i - start);
                 }
                 continue;
             }
@@ -158,83 +191,92 @@ export function tokenize(source) {
                         i += 1;
                     }
                 }
-
+                pushToken("NUMBER", lineText.slice(start, i), start + 1, i - start);
                 continue;
             }
             if (char === '"') {
-                let start = i
+                let start = i;
                 i += 1;
+                let valStart = i;
                 while (i < lineText.length && lineText[i] !== '"') {
                     i += 1;
                 }
-                const value = lineText.slice(
+                const value = lineText.slice(valStart, i);
                 i += 1;
-
+                pushToken("STRING", value, start + 1, i - start);
                 continue;
             }
             if (char === "(") {
-
+                pushToken("LPAREN", "(", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === ")") {
-
+                pushToken("RPAREN", ")", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === ":") {
-
+                pushToken("COLON", ":", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === ",") {
-
+                pushToken("COMMA", ",", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === ".") {
-
+                pushToken("DOT", ".", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === "[") {
-
+                pushToken("LBRACKET", "[", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === "]") {
-
+                pushToken("RBRACKET", "]", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === "{") {
-
+                pushToken("LBRACE", "{", i + 1, 1);
                 i += 1;
                 continue;
             }
             if (char === "}") {
-
+                pushToken("RBRACE", "}", i + 1, 1);
                 i += 1;
                 continue;
             }
             const twoChar = lineText.slice(i, i + 2);
             if (operators.includes(twoChar)) {
-
+                pushToken("OP", twoChar, i + 1, 2);
                 i += 2;
                 continue;
             }
             if (operators.includes(char)) {
-
+                pushToken("OP", char, i + 1, 1);
                 i += 1;
                 continue;
             }
             throw new Error(`Unexpected character '${char}' at ${lineNumber}:${i + 1}`);
         }
-
+        pushToken("NEWLINE", "\n", lineText.length + 1, 1);
+        absoluteOffset += lineText.length + 1;
     });
     while (indentStack.length > 1) {
         indentStack.pop();
-        tokens.push({
+        tokens.push({
+            type: "DEDENT",
+            value: "",
+            text: "",
+            offset: absoluteOffset,
+            line: lines.length,
+            col: 1
+        });
     }
     return tokens;
 }
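Taken together, the tokenizer.js changes thread an absoluteOffset counter through the line loop and route most token construction through the new pushToken helper, so every emitted token records its raw text and absolute position. A minimal sketch of observing the new fields from the compiled output; the deep import path and the sample source are assumptions for illustration, not documented API:

// Assumes the compiled file is importable directly from the published dist layout
// shown in this diff; adjust the path if jpsx restricts its package exports.
import { tokenize } from "jpsx/dist/lexer/tokenizer.js";

const source = 'x = 1\ny = "hi"\n'; // hypothetical two-line snippet, purely illustrative

for (const token of tokenize(source)) {
    // In 0.1.18 tokens expose `text` (raw source slice) and `offset` (absolute position)
    // alongside the `line` and `col` fields that were already present in 0.1.16.
    console.log(token.type, JSON.stringify(token.value), token.offset, `${token.line}:${token.col}`);
}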
package/dist/parser/grammar.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"grammar.d.ts","sourceRoot":"","sources":["../../src/parser/grammar.ts"],"names":[],"mappings":"AAwDA,UAAU,YAAY;IACpB,KAAK,EAAE,GAAG,CAAC;IACX,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,UAAU,YAAY;IACpB,KAAK,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAC1C,IAAI,EAAE,MAAM,YAAY,GAAG,SAAS,CAAC;IACrC,IAAI,EAAE,MAAM,GAAG,CAAC;IAChB,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,MAAM,CAAC;IACtC,GAAG,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC;CACrC;AAED,UAAU,WAAW;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,aAAa,EAAE,CAAC;IACzB,WAAW,CAAC,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,EAAE,KAAK,GAAG,CAAC;CAC5D;AAED,KAAK,aAAa,GAAG,MAAM,GAAG;IAAE,OAAO,EAAE,GAAG,CAAA;CAAE,GAAG;IAAE,IAAI,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,OAAO,CAAA;CAAE,CAAC;AAEnF,UAAU,OAAO;IACf,KAAK,EAAE,YAAY,GAAG,SAAS,CAAC;IAChC,WAAW,EAAE,WAAW,EAAE,CAAC;IAC3B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,QAAA,MAAM,OAAO,EAAE,
+{"version":3,"file":"grammar.d.ts","sourceRoot":"","sources":["../../src/parser/grammar.ts"],"names":[],"mappings":"AAwDA,UAAU,YAAY;IACpB,KAAK,EAAE,GAAG,CAAC;IACX,CAAC,GAAG,EAAE,MAAM,GAAG,GAAG,CAAC;CACpB;AAED,UAAU,YAAY;IACpB,KAAK,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAC1C,IAAI,EAAE,MAAM,YAAY,GAAG,SAAS,CAAC;IACrC,IAAI,EAAE,MAAM,GAAG,CAAC;IAChB,WAAW,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,MAAM,CAAC;IACtC,GAAG,EAAE,CAAC,SAAS,EAAE,MAAM,KAAK,OAAO,CAAC;CACrC;AAED,UAAU,WAAW;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,aAAa,EAAE,CAAC;IACzB,WAAW,CAAC,EAAE,CAAC,CAAC,EAAE,GAAG,EAAE,EAAE,GAAG,CAAC,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE,EAAE,KAAK,GAAG,CAAC;CAC5D;AAED,KAAK,aAAa,GAAG,MAAM,GAAG;IAAE,OAAO,EAAE,GAAG,CAAA;CAAE,GAAG;IAAE,IAAI,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,OAAO,CAAA;CAAE,CAAC;AAEnF,UAAU,OAAO;IACf,KAAK,EAAE,YAAY,GAAG,SAAS,CAAC;IAChC,WAAW,EAAE,WAAW,EAAE,CAAC;IAC3B,WAAW,EAAE,MAAM,CAAC;CACrB;AAED,QAAA,MAAM,OAAO,EAAE,OA6Wd,CAAC;AAEF,eAAe,OAAO,CAAC"}