skir 0.0.1
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/README.md +447 -0
- package/dist/casing.d.ts +8 -0
- package/dist/casing.d.ts.map +1 -0
- package/dist/casing.js +49 -0
- package/dist/casing.js.map +1 -0
- package/dist/casing.test.d.ts +2 -0
- package/dist/casing.test.d.ts.map +1 -0
- package/dist/casing.test.js +134 -0
- package/dist/casing.test.js.map +1 -0
- package/dist/command_line_parser.d.ts +33 -0
- package/dist/command_line_parser.d.ts.map +1 -0
- package/dist/command_line_parser.js +171 -0
- package/dist/command_line_parser.js.map +1 -0
- package/dist/command_line_parser.test.d.ts +2 -0
- package/dist/command_line_parser.test.d.ts.map +1 -0
- package/dist/command_line_parser.test.js +302 -0
- package/dist/command_line_parser.test.js.map +1 -0
- package/dist/compatibility_checker.d.ts +68 -0
- package/dist/compatibility_checker.d.ts.map +1 -0
- package/dist/compatibility_checker.js +328 -0
- package/dist/compatibility_checker.js.map +1 -0
- package/dist/compatibility_checker.test.d.ts +2 -0
- package/dist/compatibility_checker.test.d.ts.map +1 -0
- package/dist/compatibility_checker.test.js +528 -0
- package/dist/compatibility_checker.test.js.map +1 -0
- package/dist/compiler.d.ts +3 -0
- package/dist/compiler.d.ts.map +1 -0
- package/dist/compiler.js +358 -0
- package/dist/compiler.js.map +1 -0
- package/dist/config.d.ts +47 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +23 -0
- package/dist/config.js.map +1 -0
- package/dist/definition_finder.d.ts +12 -0
- package/dist/definition_finder.d.ts.map +1 -0
- package/dist/definition_finder.js +180 -0
- package/dist/definition_finder.js.map +1 -0
- package/dist/definition_finder.test.d.ts +2 -0
- package/dist/definition_finder.test.d.ts.map +1 -0
- package/dist/definition_finder.test.js +164 -0
- package/dist/definition_finder.test.js.map +1 -0
- package/dist/encoding.d.ts +2 -0
- package/dist/encoding.d.ts.map +1 -0
- package/dist/encoding.js +38 -0
- package/dist/encoding.js.map +1 -0
- package/dist/encoding.test.d.ts +2 -0
- package/dist/encoding.test.d.ts.map +1 -0
- package/dist/encoding.test.js +23 -0
- package/dist/encoding.test.js.map +1 -0
- package/dist/error_renderer.d.ts +10 -0
- package/dist/error_renderer.d.ts.map +1 -0
- package/dist/error_renderer.js +247 -0
- package/dist/error_renderer.js.map +1 -0
- package/dist/formatter.d.ts +3 -0
- package/dist/formatter.d.ts.map +1 -0
- package/dist/formatter.js +263 -0
- package/dist/formatter.js.map +1 -0
- package/dist/formatter.test.d.ts +2 -0
- package/dist/formatter.test.d.ts.map +1 -0
- package/dist/formatter.test.js +156 -0
- package/dist/formatter.test.js.map +1 -0
- package/dist/index.d.ts +6 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +5 -0
- package/dist/index.js.map +1 -0
- package/dist/index.test.d.ts +2 -0
- package/dist/index.test.d.ts.map +1 -0
- package/dist/index.test.js +14 -0
- package/dist/index.test.js.map +1 -0
- package/dist/io.d.ts +13 -0
- package/dist/io.d.ts.map +1 -0
- package/dist/io.js +22 -0
- package/dist/io.js.map +1 -0
- package/dist/language_server.d.ts +15 -0
- package/dist/language_server.d.ts.map +1 -0
- package/dist/language_server.js +248 -0
- package/dist/language_server.js.map +1 -0
- package/dist/literals.d.ts +13 -0
- package/dist/literals.d.ts.map +1 -0
- package/dist/literals.js +100 -0
- package/dist/literals.js.map +1 -0
- package/dist/literals.test.d.ts +2 -0
- package/dist/literals.test.d.ts.map +1 -0
- package/dist/literals.test.js +149 -0
- package/dist/literals.test.js.map +1 -0
- package/dist/module_collector.d.ts +3 -0
- package/dist/module_collector.d.ts.map +1 -0
- package/dist/module_collector.js +22 -0
- package/dist/module_collector.js.map +1 -0
- package/dist/module_set.d.ts +44 -0
- package/dist/module_set.d.ts.map +1 -0
- package/dist/module_set.js +1025 -0
- package/dist/module_set.js.map +1 -0
- package/dist/module_set.test.d.ts +2 -0
- package/dist/module_set.test.d.ts.map +1 -0
- package/dist/module_set.test.js +1330 -0
- package/dist/module_set.test.js.map +1 -0
- package/dist/parser.d.ts +6 -0
- package/dist/parser.d.ts.map +1 -0
- package/dist/parser.js +971 -0
- package/dist/parser.js.map +1 -0
- package/dist/parser.test.d.ts +2 -0
- package/dist/parser.test.d.ts.map +1 -0
- package/dist/parser.test.js +1366 -0
- package/dist/parser.test.js.map +1 -0
- package/dist/snapshotter.d.ts +6 -0
- package/dist/snapshotter.d.ts.map +1 -0
- package/dist/snapshotter.js +107 -0
- package/dist/snapshotter.js.map +1 -0
- package/dist/tokenizer.d.ts +4 -0
- package/dist/tokenizer.d.ts.map +1 -0
- package/dist/tokenizer.js +192 -0
- package/dist/tokenizer.js.map +1 -0
- package/dist/tokenizer.test.d.ts +2 -0
- package/dist/tokenizer.test.d.ts.map +1 -0
- package/dist/tokenizer.test.js +425 -0
- package/dist/tokenizer.test.js.map +1 -0
- package/dist/types.d.ts +375 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/types.js.map +1 -0
- package/package.json +63 -0
- package/src/casing.ts +64 -0
- package/src/command_line_parser.ts +249 -0
- package/src/compatibility_checker.ts +470 -0
- package/src/compiler.ts +435 -0
- package/src/config.ts +28 -0
- package/src/definition_finder.ts +221 -0
- package/src/encoding.ts +32 -0
- package/src/error_renderer.ts +278 -0
- package/src/formatter.ts +274 -0
- package/src/index.ts +6 -0
- package/src/io.ts +33 -0
- package/src/language_server.ts +301 -0
- package/src/literals.ts +120 -0
- package/src/module_collector.ts +22 -0
- package/src/module_set.ts +1175 -0
- package/src/parser.ts +1122 -0
- package/src/snapshotter.ts +136 -0
- package/src/tokenizer.ts +216 -0
- package/src/types.ts +518 -0
package/dist/module_set.js
@@ -0,0 +1,1025 @@
+import * as paths from "path";
+import { isStringLiteral, literalValueToDenseJson, literalValueToIdentity, unquoteAndUnescape, valueHasPrimitiveType, } from "./literals.js";
+import { parseModule } from "./parser.js";
+import { tokenizeModule } from "./tokenizer.js";
+export class ModuleSet {
+    static create(fileReader, rootPath) {
+        return new ModuleSet(new DefaultModuleParser(fileReader, rootPath));
+    }
+    static fromMap(map) {
+        return new ModuleSet(new MapBasedModuleParser(map));
+    }
+    constructor(moduleParser) {
+        this.moduleParser = moduleParser;
+        this.modules = new Map();
+        this.mutableRecordMap = new Map();
+        this.mutableResolvedModules = [];
+        this.numberToRecord = new Map();
+        this.numberToMethod = new Map();
+        this.mutableErrors = [];
+    }
+    parseAndResolve(modulePath, inProgressSet) {
+        const inMap = this.modules.get(modulePath);
+        if (inMap !== undefined) {
+            return inMap;
+        }
+        const result = this.doParseAndResolve(modulePath, inProgressSet || new Set());
+        this.modules.set(modulePath, result);
+        this.mutableErrors.push(...result.errors);
+        return result;
+    }
+    /** Called by `parseAndResolve` when the module is not in the map already. */
+    doParseAndResolve(modulePath, inProgressSet) {
+        const errors = [];
+        let module;
+        {
+            const parseResult = this.moduleParser.parseModule(modulePath);
+            if (parseResult.result === null) {
+                return parseResult;
+            }
+            errors.push(...parseResult.errors);
+            module = parseResult.result;
+        }
+        // Process all imports.
+        const pathToImports = new Map();
+        for (const declaration of module.declarations) {
+            if (declaration.kind !== "import" &&
+                declaration.kind !== "import-alias") {
+                continue;
+            }
+            const otherModulePath = resolveModulePath(declaration.modulePath, modulePath, errors);
+            declaration.resolvedModulePath = otherModulePath;
+            if (otherModulePath === undefined) {
+                // An error was already registered.
+                continue;
+            }
+            let imports = pathToImports.get(otherModulePath);
+            if (!imports) {
+                imports = [];
+                pathToImports.set(otherModulePath, imports);
+            }
+            imports.push(declaration);
+            // Add the imported module to the module set.
+            const circularDependencyMessage = "Circular dependency between modules";
+            if (inProgressSet.has(modulePath)) {
+                errors.push({
+                    token: declaration.modulePath,
+                    message: circularDependencyMessage,
+                });
+                continue;
+            }
+            inProgressSet.add(modulePath);
+            const otherModule = this.parseAndResolve(otherModulePath, inProgressSet);
+            inProgressSet.delete(modulePath);
+            if (otherModule.result === null) {
+                errors.push({
+                    token: declaration.modulePath,
+                    message: "Module not found",
+                });
+            }
+            else if (otherModule.errors.length !== 0) {
+                const hasCircularDependency = otherModule.errors.some((e) => e.message === circularDependencyMessage);
+                if (hasCircularDependency) {
+                    errors.push({
+                        token: declaration.modulePath,
+                        message: circularDependencyMessage,
+                    });
+                }
+                else {
+                    errors.push({
+                        token: declaration.modulePath,
+                        message: "Imported module has errors",
+                        errorIsInOtherModule: true,
+                    });
+                }
+            }
+        }
+        const pathToImportedNames = module.pathToImportedNames;
+        for (const [path, imports] of pathToImports.entries()) {
+            const importsNoAlias = imports.filter((i) => i.kind === "import");
+            const importsWithAlias = imports.filter((i) => i.kind === "import-alias");
+            if (importsNoAlias.length && importsWithAlias.length) {
+                for (const importNoAlias of importsNoAlias) {
+                    errors.push({
+                        token: importNoAlias.modulePath,
+                        message: "Module already imported with an alias",
+                    });
+                }
+                continue;
+            }
+            if (importsWithAlias.length >= 2) {
+                for (const importWithAlias of importsWithAlias.slice(1)) {
+                    errors.push({
+                        token: importWithAlias.modulePath,
+                        message: "Module already imported with a different alias",
+                    });
+                }
+                continue;
+            }
+            if (importsNoAlias.length) {
+                const names = new Set();
+                for (const importNoAlias of importsNoAlias) {
+                    for (const importedName of importNoAlias.importedNames) {
+                        names.add(importedName.text);
+                    }
+                }
+                pathToImportedNames[path] = {
+                    kind: "some",
+                    names: names,
+                };
+            }
+            else {
+                const alias = importsWithAlias[0].name.text;
+                pathToImportedNames[path] = {
+                    kind: "all",
+                    alias: alias,
+                };
+            }
+        }
+        const result = {
+            result: module,
+            errors: errors,
+        };
+        if (errors.length) {
+            return result;
+        }
+        this.mutableResolvedModules.push(module);
+        // We can't merge these 3 loops into a single one, each operation must run
+        // after the last operation ran on the whole map.
+        // Loop 1: merge the module records map into the cross-module record map.
+        for (const record of module.records) {
+            const { key } = record.record;
+            this.mutableRecordMap.set(key, record);
+            const { recordNumber } = record.record;
+            if (recordNumber != null) {
+                const existing = this.numberToRecord.get(recordNumber);
+                if (existing === undefined) {
+                    this.numberToRecord.set(recordNumber, key);
+                }
+                else {
+                    const otherRecord = this.recordMap.get(existing);
+                    const otherRecordName = otherRecord.record.name.text;
+                    const otherModulePath = otherRecord.modulePath;
+                    errors.push({
+                        token: record.record.name,
+                        message: `Same number as ${otherRecordName} in ${otherModulePath}`,
+                    });
+                }
+            }
+        }
+        // Loop 2: resolve every field type of every record in the module.
+        // Store the result in the Field object.
+        const usedImports = new Set();
+        const typeResolver = new TypeResolver(module, modulePath, this.modules, usedImports, errors);
+        for (const record of module.records) {
+            this.storeResolvedFieldTypes(record, typeResolver);
+        }
+        // Loop 3: once all the types of record fields have been resolved.
+        for (const moduleRecord of module.records) {
+            const { record } = moduleRecord;
+            // For every field, determine if the field is recursive, i.e. the field
+            // type depends on the record where the field is defined.
+            // Store the result in the Field object.
+            this.storeFieldRecursivity(record);
+            // If the record has explicit numbering, register an error if any field
+            // has a direct dependency on a record with implicit numbering.
+            this.verifyNumberingConstraint(record, errors);
+            // Verify that the `key` field of every array type is valid.
+            for (const field of record.fields) {
+                const { type } = field;
+                if (type) {
+                    this.validateArrayKeys(type, errors);
+                }
+            }
+        }
+        // Resolve every request/response type of every method in the module.
+        // Store the result in the Method object.
+        for (const method of module.methods) {
+            {
+                const request = method.unresolvedRequestType;
+                const requestType = typeResolver.resolve(request, "top-level");
+                method.requestType = requestType;
+                if (requestType) {
+                    this.validateArrayKeys(requestType, errors);
+                }
+            }
+            {
+                const response = method.unresolvedResponseType;
+                const responseType = typeResolver.resolve(response, "top-level");
+                method.responseType = responseType;
+                if (responseType) {
+                    this.validateArrayKeys(responseType, errors);
+                }
+            }
+            const { number } = method;
+            const existing = this.numberToMethod.get(number);
+            if (existing === undefined) {
+                this.numberToMethod.set(number, method);
+            }
+            else {
+                const otherMethodName = existing.name.text;
+                const otherModulePath = existing.name.line.modulePath;
+                errors.push({
+                    token: method.name,
+                    message: `Same number as ${otherMethodName} in ${otherModulePath}`,
+                });
+            }
+        }
+        // Resolve every constant type. Store the result in the constant object.
+        for (const constant of module.constants) {
+            const { unresolvedType } = constant;
+            const type = typeResolver.resolve(unresolvedType, "top-level");
+            constant.type = type;
+            if (type) {
+                this.validateArrayKeys(type, errors);
+                constant.valueAsDenseJson = //
+                    this.valueToDenseJson(constant.value, type, errors);
+            }
+        }
+        ensureAllImportsAreUsed(module, usedImports, errors);
+        return result;
+    }
+    storeResolvedFieldTypes(record, typeResolver) {
+        for (const field of record.record.fields) {
+            if (field.unresolvedType === undefined) {
+                // A constant enum field.
+                continue;
+            }
+            field.type = typeResolver.resolve(field.unresolvedType, record);
+        }
+    }
+    storeFieldRecursivity(record) {
+        for (const field of record.fields) {
+            if (!field.type)
+                continue;
+            const modes = record.recordType === "struct" ? ["hard", "soft"] : ["soft"];
+            for (const mode of modes) {
+                const deps = new Set();
+                this.collectTypeDeps(field.type, mode, deps);
+                if (deps.has(record.key)) {
+                    field.isRecursive = mode;
+                    break;
+                }
+            }
+        }
+    }
+    collectTypeDeps(input, mode, out) {
+        switch (input.kind) {
+            case "record": {
+                const { key } = input;
+                if (out.has(key))
+                    return;
+                out.add(key);
+                // Recursively add deps of all fields of the record.
+                const record = this.recordMap.get(key).record;
+                if (mode === "hard" && record.recordType === "enum") {
+                    return;
+                }
+                for (const field of record.fields) {
+                    if (field.type === undefined)
+                        continue;
+                    this.collectTypeDeps(field.type, mode, out);
+                }
+                break;
+            }
+            case "array": {
+                if (mode === "hard")
+                    break;
+                this.collectTypeDeps(input.item, mode, out);
+                break;
+            }
+            case "optional": {
+                if (mode === "hard")
+                    break;
+                this.collectTypeDeps(input.other, mode, out);
+                break;
+            }
+        }
+    }
+    /**
+     * If the record has explicit numbering, register an error if any field has a
+     * direct dependency on a record with implicit numbering.
+     */
+    verifyNumberingConstraint(record, errors) {
+        if (record.numbering !== "explicit") {
+            return;
+        }
+        for (const field of record.fields) {
+            if (!field.type)
+                continue;
+            const invalidRef = this.referencesImplicitlyNumberedRecord(field.type);
+            if (invalidRef) {
+                errors.push({
+                    token: invalidRef.refToken,
+                    message: `Field type references a ${invalidRef.recordType} with implicit ` +
+                        `numbering, but field belongs to a ${record.recordType} with ` +
+                        `explicit numbering`,
+                });
+            }
+        }
+    }
+    referencesImplicitlyNumberedRecord(input) {
+        switch (input.kind) {
+            case "array":
+                return this.referencesImplicitlyNumberedRecord(input.item);
+            case "optional":
+                return this.referencesImplicitlyNumberedRecord(input.other);
+            case "primitive":
+                return false;
+            case "record": {
+                const record = this.recordMap.get(input.key).record;
+                return record.numbering === "implicit" && input;
+            }
+        }
+    }
+    /**
+     * Verifies that the `key` field of every array type found in `topLevelType`
+     * is valid. Populates the `keyType` field of every field path.
+     */
+    validateArrayKeys(topLevelType, errors) {
+        const validate = (type) => {
+            const { key, item } = type;
+            if (!key) {
+                return;
+            }
+            const { path } = key;
+            // Iterate the fields in the sequence.
+            let currentType = item;
+            let enumRef;
+            for (let i = 0; i < path.length; ++i) {
+                const pathItem = path[i];
+                const fieldName = pathItem.name;
+                if (currentType.kind !== "record") {
+                    if (i === 0) {
+                        errors.push({
+                            token: key.pipeToken,
+                            message: "Item must have struct type",
+                        });
+                    }
+                    else {
+                        const previousFieldName = path[i - 1].name;
+                        errors.push({
+                            token: previousFieldName,
+                            message: "Must have struct type",
+                        });
+                    }
+                    return;
+                }
+                const record = this.recordMap.get(currentType.key).record;
+                if (record.recordType === "struct") {
+                    const field = record.nameToDeclaration[fieldName.text];
+                    if (!field || field.kind !== "field") {
+                        errors.push({
+                            token: fieldName,
+                            message: `Field not found in struct ${record.name.text}`,
+                        });
+                        return undefined;
+                    }
+                    pathItem.declaration = field;
+                    if (!field.type) {
+                        // An error was already registered.
+                        return;
+                    }
+                    currentType = field.type;
+                }
+                else {
+                    // An enum.
+                    if (fieldName.text !== "kind") {
+                        errors.push({
+                            token: fieldName,
+                            expected: '"kind"',
+                        });
+                        return undefined;
+                    }
+                    enumRef = currentType;
+                    currentType = {
+                        kind: "primitive",
+                        primitive: "string",
+                    };
+                }
+            }
+            if (currentType.kind !== "primitive") {
+                errors.push({
+                    token: path.at(-1).name,
+                    message: "Does not have primitive type",
+                });
+                return;
+            }
+            // If the last field name of the `kind` field of an enum, we store a
+            // reference to the enum in the `keyType` field of the array type.
+            key.keyType = enumRef || currentType;
+        };
+        const traverseType = (type) => {
+            switch (type.kind) {
+                case "array":
+                    validate(type);
+                    return traverseType(type.item);
+                case "optional":
+                    return traverseType(type.other);
+            }
+        };
+        traverseType(topLevelType);
+    }
+    valueToDenseJson(value, expectedType, errors) {
+        switch (expectedType.kind) {
+            case "optional": {
+                if (value.kind === "literal" && value.token.text === "null") {
+                    value.type = { kind: "null" };
+                    return null;
+                }
+                return this.valueToDenseJson(value, expectedType.other, errors);
+            }
+            case "array": {
+                if (value.kind !== "array") {
+                    errors.push({
+                        token: value.token,
+                        expected: "array",
+                    });
+                    return undefined;
+                }
+                const json = [];
+                let allGood = true;
+                for (const item of value.items) {
+                    const itemJson = //
+                        this.valueToDenseJson(item, expectedType.item, errors);
+                    if (itemJson !== undefined) {
+                        json.push(itemJson);
+                    }
+                    else {
+                        // Even if we could return now, better to verify the type of the
+                        // other items.
+                        allGood = false;
+                    }
+                }
+                if (!allGood) {
+                    return undefined;
+                }
+                const { key } = expectedType;
+                value.key = key;
+                if (key) {
+                    validateKeyedItems(value.items, key, errors);
+                }
+                return json;
+            }
+            case "record": {
+                const record = this.recordMap.get(expectedType.key);
+                if (!record) {
+                    // An error was already registered.
+                    return undefined;
+                }
+                return record.record.recordType === "struct"
+                    ? this.structValueToDenseJson(value, record.record, errors)
+                    : this.enumValueToDenseJson(value, record.record, errors);
+            }
+            case "primitive": {
+                const { token } = value;
+                const { primitive } = expectedType;
+                if (value.kind !== "literal" ||
+                    !valueHasPrimitiveType(token.text, primitive)) {
+                    errors.push({
+                        token: value.token,
+                        expected: primitive,
+                    });
+                    return undefined;
+                }
+                value.type = expectedType;
+                return literalValueToDenseJson(token.text, expectedType.primitive);
+            }
+        }
+    }
+    structValueToDenseJson(value, expectedStruct, errors) {
+        const { token } = value;
+        if (value.kind !== "object") {
+            errors.push({
+                token: token,
+                expected: "object",
+            });
+            return undefined;
+        }
+        const json = Array(expectedStruct.numSlotsInclRemovedNumbers).fill(0);
+        let allGood = true;
+        for (const [fieldName, fieldEntry] of Object.entries(value.entries)) {
+            const field = expectedStruct.nameToDeclaration[fieldName];
+            if (!field || field.kind !== "field") {
+                errors.push({
+                    token: fieldEntry.name,
+                    message: `Field not found in struct ${expectedStruct.name.text}`,
+                });
+                allGood = false;
+                continue;
+            }
+        }
+        let arrayLen = 0;
+        for (const field of expectedStruct.fields) {
+            const { type } = field;
+            if (!type) {
+                allGood = false;
+                continue;
+            }
+            const fieldEntry = value.entries[field.name.text];
+            let valueJson;
+            if (fieldEntry) {
+                valueJson = this.valueToDenseJson(fieldEntry.value, type, errors);
+            }
+            else {
+                // Unless the object is declared partial, all fields are required.
+                if (value.partial) {
+                    valueJson = this.getDefaultJson(type);
+                }
+                else {
+                    errors.push({
+                        token: token,
+                        message: `Missing entry: ${field.name.text}`,
+                    });
+                }
+            }
+            if (valueJson === undefined) {
+                allGood = false;
+                continue;
+            }
+            json[field.number] = valueJson;
+            const hasDefaultValue = type.kind === "optional"
+                ? valueJson === null
+                : !valueJson ||
+                    (Array.isArray(valueJson) && !valueJson.length) ||
+                    (type.kind === "primitive" &&
+                        (type.primitive === "int64" || type.primitive === "uint64") &&
+                        valueJson === "0");
+            if (!hasDefaultValue) {
+                arrayLen = Math.max(arrayLen, field.number + 1);
+            }
+        }
+        if (!allGood) {
+            return undefined;
+        }
+        value.type = expectedStruct.key;
+        return json.slice(0, arrayLen);
+    }
+    enumValueToDenseJson(value, expectedEnum, errors) {
+        const { token } = value;
+        if (value.kind === "literal" && isStringLiteral(token.text)) {
+            // The value is a string.
+            // It must match the name of one of the constants defined in the enum.
+            const fieldName = unquoteAndUnescape(token.text);
+            if (fieldName === "?") {
+                // Present on every enum.
+                return 0;
+            }
+            const field = expectedEnum.nameToDeclaration[fieldName];
+            if (!field || field.kind !== "field") {
+                errors.push({
+                    token: token,
+                    message: `Field not found in enum ${expectedEnum.name.text}`,
+                });
+                return undefined;
+            }
+            if (field.type) {
+                errors.push({
+                    token: token,
+                    message: "Refers to a wrapper field",
+                });
+                return undefined;
+            }
+            value.type = {
+                kind: "enum",
+                key: expectedEnum.key,
+            };
+            return field.number;
+        }
+        else if (value.kind === "object") {
+            // The value is an object. It must have exactly two entries:
+            // · 'kind' must match the name of one of the wrapper fields defined in
+            // the enum
+            // · 'value' must match the type of the wrapper field
+            const entries = { ...value.entries };
+            const kindEntry = entries.kind;
+            if (!kindEntry) {
+                errors.push({
+                    token: token,
+                    message: "Missing entry: kind",
+                });
+                return undefined;
+            }
+            delete entries.kind;
+            const kindValueToken = kindEntry.value.token;
+            if (kindEntry.value.kind !== "literal" ||
+                !isStringLiteral(kindValueToken.text)) {
+                errors.push({
+                    token: kindValueToken,
+                    expected: "string",
+                });
+                return undefined;
+            }
+            const fieldName = unquoteAndUnescape(kindValueToken.text);
+            const field = expectedEnum.nameToDeclaration[fieldName];
+            if (!field || field.kind !== "field") {
+                errors.push({
+                    token: kindValueToken,
+                    message: `Field not found in enum ${expectedEnum.name.text}`,
+                });
+                return undefined;
+            }
+            if (!field.type) {
+                errors.push({
+                    token: kindValueToken,
+                    message: "Refers to a constant field",
+                });
+                return undefined;
+            }
+            const enumValue = entries.value;
+            if (!enumValue) {
+                errors.push({
+                    token: token,
+                    message: "Missing entry: value",
+                });
+                return undefined;
+            }
+            delete entries.value;
+            const valueJson = //
+                this.valueToDenseJson(enumValue.value, field.type, errors);
+            if (valueJson === undefined) {
+                return undefined;
+            }
+            const extraEntries = Object.values(entries);
+            if (extraEntries.length !== 0) {
+                const extraEntry = extraEntries[0];
+                errors.push({
+                    token: extraEntry.name,
+                    message: "Extraneous entry",
+                });
+                return undefined;
+            }
+            value.type = expectedEnum.key;
+            // Return an array of length 2.
+            return [field.number, valueJson];
+        }
+        else {
+            // The value is neither a string nor an object. It can't be of enum type.
+            errors.push({
+                token: token,
+                expected: "string or object",
+            });
+            return undefined;
+        }
+    }
+    getDefaultJson(type) {
+        switch (type.kind) {
+            case "primitive": {
+                switch (type.primitive) {
+                    case "bool":
+                    case "int32":
+                    case "int64":
+                    case "uint64":
+                    case "float32":
+                    case "float64":
+                    case "timestamp":
+                        return 0;
+                    case "string":
+                    case "bytes":
+                        return "";
+                    default: {
+                        const _ = type.primitive;
+                        throw new TypeError(_);
+                    }
+                }
+            }
+            case "array":
+                return [];
+            case "optional":
+                return null;
+            case "record": {
+                const record = this.recordMap.get(type.key);
+                switch (record.record.recordType) {
+                    case "struct":
+                        return [];
+                    case "enum":
+                        return 0;
+                }
+            }
+        }
+    }
+    get recordMap() {
+        return this.mutableRecordMap;
+    }
+    get resolvedModules() {
+        return this.mutableResolvedModules;
+    }
+    findRecordByNumber(recordNumber) {
+        const recordKey = this.numberToRecord.get(recordNumber);
+        if (recordKey === undefined) {
+            return undefined;
+        }
+        return this.recordMap.get(recordKey);
+    }
+    findMethodByNumber(methodNumber) {
+        return this.numberToMethod.get(methodNumber);
+    }
+    get errors() {
+        return this.mutableErrors;
+    }
+}
+/**
+ * If the array type is keyed, the array value must satisfy two conditions.
+ * First: the key field of every item must be set.
+ * Second: not two items can have the same key.
+ */
+function validateKeyedItems(items, fieldPath, errors) {
+    const { keyType, path } = fieldPath;
+    const tryExtractKeyFromItem = (item) => {
+        let value = item;
+        for (const pathItem of path) {
+            const fieldName = pathItem.name;
+            if (value.kind === "literal" && fieldName.text === "kind") {
+                // An enum constant.
+                return value;
+            }
+            if (value.kind !== "object") {
+                // An error was already registered.
+                return undefined;
+            }
+            const entry = value.entries[fieldName.text];
+            if (!entry) {
+                errors.push({
+                    token: value.token,
+                    message: `Missing entry: ${fieldName.text}`,
+                });
+                return;
+            }
+            value = entry.value;
+        }
+        return value;
+    };
+    const keyIdentityToKeys = new Map();
+    for (const item of items) {
+        const key = tryExtractKeyFromItem(item);
+        if (!key) {
+            return;
+        }
+        if (key.kind !== "literal") {
+            // Cannot happen.
+            return;
+        }
+        let keyIdentity;
+        const keyToken = key.token.text;
+        if (keyType.kind === "primitive") {
+            const { primitive } = keyType;
+            if (!valueHasPrimitiveType(keyToken, primitive)) {
+                continue;
+            }
+            keyIdentity = literalValueToIdentity(keyToken, primitive);
+        }
+        else {
+            // The key is an enum, use the enum field name as the key identity.
+            if (!isStringLiteral(keyToken)) {
+                continue;
+            }
+            keyIdentity = unquoteAndUnescape(keyToken);
+        }
+        if (keyIdentityToKeys.has(keyIdentity)) {
+            keyIdentityToKeys.get(keyIdentity).push(key);
+        }
+        else {
+            keyIdentityToKeys.set(keyIdentity, [key]);
+        }
+    }
+    // Verify that every key in `keyIdentityToItems` has a single value.
+    for (const duplicateKeys of keyIdentityToKeys.values()) {
+        if (duplicateKeys.length <= 1) {
+            continue;
+        }
+        for (const key of duplicateKeys) {
+            errors.push({
+                token: key.token,
+                message: "Duplicate key",
+            });
+        }
+    }
+}
+class TypeResolver {
+    constructor(module, modulePath, modules, usedImports, errors) {
+        this.module = module;
+        this.modulePath = modulePath;
+        this.modules = modules;
+        this.usedImports = usedImports;
+        this.errors = errors;
+    }
+    resolve(input, recordOrigin) {
+        switch (input.kind) {
+            case "primitive":
+                return input;
+            case "array": {
+                const item = this.resolve(input.item, recordOrigin);
+                if (!item) {
+                    return undefined;
+                }
+                return { kind: "array", item: item, key: input.key };
+            }
+            case "optional": {
+                const value = this.resolve(input.other, recordOrigin);
+                if (!value) {
+                    return undefined;
+                }
+                return { kind: "optional", other: value };
+            }
+            case "record": {
+                return this.resolveRecordRef(input, recordOrigin);
+            }
+        }
+    }
+    /**
+     * Finds the definition of the actual record referenced from a value type.
+     * This is where we implement the name resolution algorithm.
+     */
+    resolveRecordRef(recordRef, recordOrigin) {
+        const firstNamePart = recordRef.nameParts[0];
+        // The most nested record/module which contains the first name in the record
+        // reference, or the module if the record reference is absolute (starts with
+        // a dot).
+        let start;
+        const { errors, module, modules, usedImports } = this;
+        if (recordOrigin !== "top-level") {
+            if (!recordRef.absolute) {
+                // Traverse the chain of ancestors from most nested to top-level.
+                for (const fromRecord of [...recordOrigin.recordAncestors].reverse()) {
+                    const matchMaybe = fromRecord.nameToDeclaration[firstNamePart.text];
+                    if (matchMaybe && matchMaybe.kind === "record") {
+                        start = fromRecord;
+                        break;
+                    }
+                }
+            }
+            if (!start) {
+                start = module;
+            }
+        }
+        else {
+            start = module;
+        }
+        const makeNotARecordError = (name) => ({
+            token: name,
+            message: "Does not refer to a struct or an enum",
+        });
+        const makeCannotFindNameError = (name) => ({
+            token: name,
+            message: `Cannot find name '${name.text}'`,
+        });
+        let it = start;
+        const nameParts = [];
+        for (let i = 0; i < recordRef.nameParts.length; ++i) {
+            const namePart = recordRef.nameParts[i];
+            const name = namePart.text;
+            let newIt = it.nameToDeclaration[name];
+            if (newIt === undefined) {
+                errors.push(makeCannotFindNameError(namePart));
+                return undefined;
+            }
+            else if (newIt.kind === "record") {
+                it = newIt;
+            }
+            else if (newIt.kind === "import" || newIt.kind === "import-alias") {
+                const cannotReimportError = () => ({
+                    token: namePart,
+                    message: `Cannot reimport imported name '${name}'`,
+                });
+                if (i !== 0) {
+                    errors.push(cannotReimportError());
+                    return undefined;
+                }
+                usedImports.add(name);
+                const newModulePath = newIt.resolvedModulePath;
+                if (newModulePath === undefined) {
+                    return undefined;
+                }
+                const newModuleResult = modules.get(newModulePath);
+                if (newModuleResult === undefined || newModuleResult.result === null) {
+                    // The module was not found or has errors: an error was already
+                    // registered, no need to register a new one.
+                    return undefined;
+                }
+                const newModule = newModuleResult.result;
+                if (newIt.kind === "import") {
+                    newIt = newModule.nameToDeclaration[name];
+                    if (!newIt) {
+                        errors.push(makeCannotFindNameError(namePart));
+                        return undefined;
+                    }
+                    if (!newIt || newIt.kind !== "record") {
+                        this.errors.push(newIt.kind === "import" || newIt.kind === "import-alias"
+                            ? cannotReimportError()
+                            : makeNotARecordError(namePart));
+                        return undefined;
+                    }
+                    it = newIt;
+                }
+                else {
+                    it = newModule;
+                }
+            }
+            else {
+                this.errors.push(makeNotARecordError(namePart));
+                return undefined;
+            }
+            nameParts.push({ token: namePart, declaration: newIt });
+        }
+        if (it.kind !== "record") {
+            const name = recordRef.nameParts[0];
+            this.errors.push(makeNotARecordError(name));
+            return undefined;
+        }
+        return {
+            kind: "record",
+            key: it.key,
+            recordType: it.recordType,
+            nameParts: nameParts,
+            refToken: recordRef.nameParts.at(-1),
+        };
+    }
+}
+function ensureAllImportsAreUsed(module, usedImports, errors) {
+    for (const declaration of module.declarations) {
+        if (declaration.kind === "import") {
+            for (const importedName of declaration.importedNames) {
+                if (!usedImports.has(importedName.text)) {
+                    errors.push({
+                        token: importedName,
+                        message: "Unused import",
+                    });
+                }
+            }
+        }
+        else if (declaration.kind === "import-alias") {
+            if (!usedImports.has(declaration.name.text)) {
+                errors.push({
+                    token: declaration.name,
+                    message: "Unused import alias",
+                });
+            }
+        }
+    }
+}
+class ModuleParserBase {
+    parseModule(modulePath) {
+        const code = this.readSourceCode(modulePath);
+        if (code === undefined) {
+            return {
+                result: null,
+                errors: [],
+            };
+        }
+        const tokens = tokenizeModule(code, modulePath);
+        if (tokens.errors.length !== 0) {
+            return {
+                result: null,
+                errors: tokens.errors,
+            };
+        }
+        return parseModule(tokens.result, modulePath, code);
+    }
+}
+class DefaultModuleParser extends ModuleParserBase {
+    constructor(fileReader, rootPath) {
+        super();
+        this.fileReader = fileReader;
+        this.rootPath = rootPath;
+    }
+    readSourceCode(modulePath) {
+        return this.fileReader.readTextFile(paths.join(this.rootPath, modulePath));
+    }
+}
+class MapBasedModuleParser extends ModuleParserBase {
+    constructor(moduleMap) {
+        super();
+        this.moduleMap = moduleMap;
+    }
+    readSourceCode(modulePath) {
+        return this.moduleMap.get(modulePath);
+    }
+}
+function resolveModulePath(pathToken, originModulePath, errors) {
+    let modulePath = unquoteAndUnescape(pathToken.text);
+    if (/\\/.test(modulePath)) {
+        errors.push({
+            token: pathToken,
+            message: "Replace backslash with slash",
+        });
+        return undefined;
+    }
+    if (modulePath.startsWith("./") || modulePath.startsWith("../")) {
+        // This is a relative path from the module. Let's transform it into a
+        // relative path from root.
+        modulePath = paths.join(originModulePath, "..", modulePath);
+    }
+    // "a/./b/../c" => "a/c"
+    // Note that `paths.normalize` will use backslashes on Windows.
+    // We don't want that.
+    modulePath = paths.normalize(modulePath).replace(/\\/g, "/");
+    if (modulePath.startsWith(`../`)) {
+        errors.push({
+            token: pathToken,
+            message: "Module path must point to a file within root",
+        });
+        return undefined;
+    }
+    return modulePath;
+}
+//# sourceMappingURL=module_set.js.map
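For orientation, here is a minimal usage sketch of the `ModuleSet` API from the compiled `module_set.js` shown above. It is not part of the package diff: the import path, the file name, and the placeholder source text are illustrative assumptions, since the diff does not show the schema language or how the package entry point re-exports this class.

```js
// Hypothetical sketch; the import path, module file name, and source text
// below are placeholders, not taken from the package.
import { ModuleSet } from "./dist/module_set.js";

// fromMap() backs the ModuleSet with an in-memory map of module path -> source
// code, instead of reading files from a root directory via ModuleSet.create().
const sources = new Map([
  ["main.skir", "/* module source would go here */"],
]);
const moduleSet = ModuleSet.fromMap(sources);

// Parses the root module and, transitively, every module it imports.
const { result } = moduleSet.parseAndResolve("main.skir");

if (result === null || moduleSet.errors.length !== 0) {
  // Each error carries the offending token plus a message or expected value.
  for (const error of moduleSet.errors) {
    console.error(error);
  }
} else {
  console.log(`Resolved ${moduleSet.resolvedModules.length} module(s)`);
}
```

The `ModuleSet.create(fileReader, rootPath)` factory follows the same flow, but reads module sources from disk through the provided file reader instead of an in-memory map.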