@bcts/envelope-pattern 1.0.0-alpha.12
This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +48 -0
- package/README.md +13 -0
- package/dist/index.cjs +6781 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +2628 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +2628 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.iife.js +6781 -0
- package/dist/index.iife.js.map +1 -0
- package/dist/index.mjs +6545 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +77 -0
- package/src/error.ts +262 -0
- package/src/format.ts +375 -0
- package/src/index.ts +27 -0
- package/src/parse/index.ts +923 -0
- package/src/parse/token.ts +906 -0
- package/src/parse/utils.ts +339 -0
- package/src/pattern/index.ts +719 -0
- package/src/pattern/leaf/array-pattern.ts +273 -0
- package/src/pattern/leaf/bool-pattern.ts +140 -0
- package/src/pattern/leaf/byte-string-pattern.ts +172 -0
- package/src/pattern/leaf/cbor-pattern.ts +355 -0
- package/src/pattern/leaf/date-pattern.ts +178 -0
- package/src/pattern/leaf/index.ts +280 -0
- package/src/pattern/leaf/known-value-pattern.ts +192 -0
- package/src/pattern/leaf/map-pattern.ts +152 -0
- package/src/pattern/leaf/null-pattern.ts +110 -0
- package/src/pattern/leaf/number-pattern.ts +248 -0
- package/src/pattern/leaf/tagged-pattern.ts +228 -0
- package/src/pattern/leaf/text-pattern.ts +165 -0
- package/src/pattern/matcher.ts +88 -0
- package/src/pattern/meta/and-pattern.ts +109 -0
- package/src/pattern/meta/any-pattern.ts +81 -0
- package/src/pattern/meta/capture-pattern.ts +111 -0
- package/src/pattern/meta/group-pattern.ts +110 -0
- package/src/pattern/meta/index.ts +269 -0
- package/src/pattern/meta/not-pattern.ts +91 -0
- package/src/pattern/meta/or-pattern.ts +146 -0
- package/src/pattern/meta/search-pattern.ts +201 -0
- package/src/pattern/meta/traverse-pattern.ts +146 -0
- package/src/pattern/structure/assertions-pattern.ts +244 -0
- package/src/pattern/structure/digest-pattern.ts +225 -0
- package/src/pattern/structure/index.ts +272 -0
- package/src/pattern/structure/leaf-structure-pattern.ts +85 -0
- package/src/pattern/structure/node-pattern.ts +188 -0
- package/src/pattern/structure/object-pattern.ts +149 -0
- package/src/pattern/structure/obscured-pattern.ts +159 -0
- package/src/pattern/structure/predicate-pattern.ts +151 -0
- package/src/pattern/structure/subject-pattern.ts +152 -0
- package/src/pattern/structure/wrapped-pattern.ts +195 -0
- package/src/pattern/vm.ts +1021 -0
package/src/parse/index.ts
@@ -0,0 +1,923 @@
/**
 * @bcts/envelope-pattern - Parser entry point
 *
 * This is a 1:1 TypeScript port of bc-envelope-pattern-rust parse/mod.rs
 * Recursive descent parser for Gordian Envelope pattern syntax.
 *
 * @module envelope-pattern/parse
 */

import { parse as parseDcborPattern } from "@bcts/dcbor-pattern";
import { Lexer } from "./token";
import {
  type Result,
  type Span,
  ok,
  err,
  unexpectedEndOfInput,
  extraData,
  invalidRegex,
  unexpectedToken,
} from "../error";
import {
  type Pattern,
  // Leaf pattern constructors
  any,
  anyBool,
  bool,
  anyText,
  text,
  textRegex,
  anyNumber,
  number,
  numberRange,
  numberGreaterThan,
  numberLessThan,
  anyByteString,
  byteString,
  anyDate,
  date,
  anyKnownValue,
  knownValue,
  anyArray,
  anyTag,
  nullPattern,
  // Structure pattern constructors
  leaf,
  anyAssertion,
  assertionWithPredicate,
  assertionWithObject,
  anySubject,
  subject,
  anyPredicate,
  predicate,
  anyObject,
  object,
  digestPrefix,
  anyNode,
  obscured,
  elided,
  encrypted,
  compressed,
  wrapped,
  unwrapEnvelope,
  unwrapMatching,
  // Meta pattern constructors
  and,
  or,
  notMatching,
  capture,
  search,
  traverse,
  repeat,
  group,
  // Pattern types
  patternLeaf,
  patternStructure,
  // Specific pattern classes
  NumberPattern,
  ByteStringPattern,
  KnownValuePattern,
  ArrayPattern,
  TaggedPattern,
  DigestPattern,
  NodePattern,
  AssertionsPattern,
  leafNumber,
  leafByteString,
  leafKnownValue,
  leafArray,
  leafTag,
  structureDigest,
  structureNode,
  structureAssertions,
} from "../pattern";
import { Quantifier, Reluctance } from "@bcts/dcbor-pattern";
import { type KnownValue as KnownValueType } from "@bcts/known-values";
import { CborDate } from "@bcts/dcbor";

// Re-export token types
export { type Token, Lexer } from "./token";

/**
 * Parse a pattern expression string into a Pattern.
 */
export function parse(input: string): Result<Pattern> {
  const lexer = new Lexer(input);

  // Try envelope-pattern parsing first
  const result = parseOr(lexer);
  if (!result.ok) {
    // If envelope-pattern parsing failed, try dcbor-pattern as fallback
    const dcborResult = parseDcborPattern(input);
    if (dcborResult.ok) {
      return convertDcborPatternToEnvelopePattern(dcborResult.value);
    }
    // Both parsers failed, return the original envelope error
    return result;
  }

  // Check for extra data
  const next = lexer.next();
  if (next !== undefined) {
    return err(extraData(next.span));
  }

  return result;
}

/**
 * Parse a pattern, allowing extra data after the pattern.
 */
export function parsePartial(input: string): Result<[Pattern, number]> {
  const lexer = new Lexer(input);
  const result = parseOr(lexer);
  if (!result.ok) {
    return result as Result<[Pattern, number]>;
  }
  return ok([result.value, lexer.position]);
}

/**
 * Convert a dcbor-pattern Pattern to an envelope-pattern Pattern.
 */
function convertDcborPatternToEnvelopePattern(_dcborPattern: unknown): Result<Pattern> {
  // For now, wrap dcbor patterns as CBOR patterns
  // This is a simplified conversion - the dcbor pattern is matched by the any() pattern
  return ok(any());
}

// ============================================================================
// Recursive Descent Parser
// ============================================================================

/**
 * Parse an Or expression: expr (| expr)*
 */
function parseOr(lexer: Lexer): Result<Pattern> {
  const patterns: Pattern[] = [];

  const first = parseTraverse(lexer);
  if (!first.ok) return first;
  patterns.push(first.value);

  while (true) {
    const next = lexer.peekToken();
    if (next?.token.type !== "Or") {
      break;
    }
    lexer.next(); // consume the |

    const nextExpr = parseTraverse(lexer);
    if (!nextExpr.ok) return nextExpr;
    patterns.push(nextExpr.value);
  }

  if (patterns.length === 1) {
    return ok(patterns[0]);
  }
  return ok(or(patterns));
}

/**
 * Parse a Traverse expression: expr (-> expr)*
 */
function parseTraverse(lexer: Lexer): Result<Pattern> {
  const patterns: Pattern[] = [];

  const first = parseAnd(lexer);
  if (!first.ok) return first;
  patterns.push(first.value);

  while (true) {
    const next = lexer.peekToken();
    if (next?.token.type !== "Traverse") {
      break;
    }
    lexer.next(); // consume the ->

    const nextExpr = parseAnd(lexer);
    if (!nextExpr.ok) return nextExpr;
    patterns.push(nextExpr.value);
  }

  if (patterns.length === 1) {
    return ok(patterns[0]);
  }
  return ok(traverse(patterns));
}

/**
 * Parse an And expression: expr (& expr)*
 */
function parseAnd(lexer: Lexer): Result<Pattern> {
  const patterns: Pattern[] = [];

  const first = parseNot(lexer);
  if (!first.ok) return first;
  patterns.push(first.value);

  while (true) {
    const next = lexer.peekToken();
    if (next?.token.type !== "And") {
      break;
    }
    lexer.next(); // consume the &

    const nextExpr = parseNot(lexer);
    if (!nextExpr.ok) return nextExpr;
    patterns.push(nextExpr.value);
  }

  if (patterns.length === 1) {
    return ok(patterns[0]);
  }
  return ok(and(patterns));
}

/**
 * Parse a Not expression: !? group
 */
function parseNot(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type === "Not") {
    lexer.next(); // consume the !
    const inner = parseGroup(lexer);
    if (!inner.ok) return inner;
    return ok(notMatching(inner.value));
  }
  return parseGroup(lexer);
}

/**
 * Parse a Group expression: primary quantifier?
 */
function parseGroup(lexer: Lexer): Result<Pattern> {
  const primary = parsePrimary(lexer);
  if (!primary.ok) return primary;

  // Check for quantifier
  const next = lexer.peekToken();
  if (next === undefined) {
    return primary;
  }

  const tokenType = next.token.type;
  let quantifier: Quantifier | undefined;

  if (tokenType === "RepeatZeroOrMore") {
    lexer.next();
    quantifier = Quantifier.zeroOrMore(Reluctance.Greedy);
  } else if (tokenType === "RepeatZeroOrMoreLazy") {
    lexer.next();
    quantifier = Quantifier.zeroOrMore(Reluctance.Lazy);
  } else if (tokenType === "RepeatZeroOrMorePossessive") {
    lexer.next();
    quantifier = Quantifier.zeroOrMore(Reluctance.Possessive);
  } else if (tokenType === "RepeatOneOrMore") {
    lexer.next();
    quantifier = Quantifier.oneOrMore(Reluctance.Greedy);
  } else if (tokenType === "RepeatOneOrMoreLazy") {
    lexer.next();
    quantifier = Quantifier.oneOrMore(Reluctance.Lazy);
  } else if (tokenType === "RepeatOneOrMorePossessive") {
    lexer.next();
    quantifier = Quantifier.oneOrMore(Reluctance.Possessive);
  } else if (tokenType === "RepeatZeroOrOne") {
    lexer.next();
    quantifier = Quantifier.zeroOrOne(Reluctance.Greedy);
  } else if (tokenType === "RepeatZeroOrOneLazy") {
    lexer.next();
    quantifier = Quantifier.zeroOrOne(Reluctance.Lazy);
  } else if (tokenType === "RepeatZeroOrOnePossessive") {
    lexer.next();
    quantifier = Quantifier.zeroOrOne(Reluctance.Possessive);
  } else if (tokenType === "Range") {
    lexer.next();
    if (!next.token.value.ok) {
      return err(next.token.value.error);
    }
    quantifier = next.token.value.value;
  } else {
    // No quantifier found, return the primary expression as-is
    return primary;
  }

  return ok(repeat(primary.value, quantifier.min(), quantifier.max(), quantifier.reluctance()));
}

/**
 * Parse a primary expression (atoms and structure keywords).
 */
function parsePrimary(lexer: Lexer): Result<Pattern> {
  const tokenResult = lexer.next();
  if (tokenResult === undefined) {
    return err(unexpectedEndOfInput());
  }

  const { token, span } = tokenResult;

  switch (token.type) {
    // Envelope-specific structure patterns
    case "Search":
      return parseSearch(lexer);
    case "Node":
      return parseNode(lexer);
    case "Assertion":
      return parseAssertion(lexer);
    case "AssertionPred":
      return parseAssertionPred(lexer);
    case "AssertionObj":
      return parseAssertionObj(lexer);
    case "Digest":
      return parseDigest(lexer);
    case "Obj":
      return parseObject(lexer);
    case "Obscured":
      return ok(obscured());
    case "Elided":
      return ok(elided());
    case "Encrypted":
      return ok(encrypted());
    case "Compressed":
      return ok(compressed());
    case "Pred":
      return parsePredicate(lexer);
    case "Subject":
      return parseSubject(lexer);
    case "Wrapped":
      return ok(wrapped());
    case "Unwrap":
      return parseUnwrap(lexer);
    case "Leaf":
      return ok(leaf());

    // Capture group
    case "GroupName":
      return parseCapture(lexer, token.name);

    // Grouping with parentheses
    case "ParenOpen":
      return parseParenGroup(lexer);

    // CBOR pattern
    case "Cbor":
      return parseCbor(lexer);

    // Simple patterns
    case "RepeatZeroOrMore":
      return ok(any()); // * means any
    case "BoolKeyword":
      return ok(anyBool());
    case "BoolTrue":
      return ok(bool(true));
    case "BoolFalse":
      return ok(bool(false));
    case "NumberKeyword":
      return ok(anyNumber());
    case "TextKeyword":
      return ok(anyText());
    case "StringLiteral":
      if (!token.value.ok) return err(token.value.error);
      return ok(text(token.value.value));
    case "UnsignedInteger":
      if (!token.value.ok) return err(token.value.error);
      return parseNumberRangeOrComparison(lexer, token.value.value);
    case "Integer":
      if (!token.value.ok) return err(token.value.error);
      return parseNumberRangeOrComparison(lexer, token.value.value);
    case "Float":
      if (!token.value.ok) return err(token.value.error);
      return parseNumberRangeOrComparison(lexer, token.value.value);
    case "GreaterThanOrEqual":
      return parseComparisonNumber(lexer, ">=");
    case "LessThanOrEqual":
      return parseComparisonNumber(lexer, "<=");
    case "GreaterThan":
      return parseComparisonNumber(lexer, ">");
    case "LessThan":
      return parseComparisonNumber(lexer, "<");
    case "NaN":
      return ok(patternLeaf(leafNumber(NumberPattern.nan())));
    case "Infinity":
      return ok(number(Infinity));
    case "NegativeInfinity":
      return ok(number(-Infinity));
    case "Regex":
      if (!token.value.ok) return err(token.value.error);
      try {
        return ok(textRegex(new RegExp(token.value.value)));
      } catch {
        return err(invalidRegex(span));
      }
    case "BracketOpen":
      return parseArray(lexer);
    case "ByteString":
      return ok(anyByteString());
    case "HexPattern":
      if (!token.value.ok) return err(token.value.error);
      return ok(byteString(token.value.value));
    case "HexBinaryRegex":
      if (!token.value.ok) return err(token.value.error);
      try {
        return ok(
          patternLeaf(leafByteString(ByteStringPattern.regex(new RegExp(token.value.value)))),
        );
      } catch {
        return err(invalidRegex(span));
      }
    case "DateKeyword":
      return ok(anyDate());
    case "DatePattern":
      if (!token.value.ok) return err(token.value.error);
      return parseDateContent(token.value.value, span);
    case "Tagged":
      return parseTag(lexer);
    case "Known":
      return ok(anyKnownValue());
    case "SingleQuotedPattern":
      if (!token.value.ok) return err(token.value.error);
      return parseKnownValueContent(token.value.value);
    case "SingleQuotedRegex":
      if (!token.value.ok) return err(token.value.error);
      try {
        return ok(
          patternLeaf(leafKnownValue(KnownValuePattern.regex(new RegExp(token.value.value)))),
        );
      } catch {
        return err(invalidRegex(span));
      }
    case "Null":
      return ok(nullPattern());

    // These tokens are not valid as primary expressions
    // They are handled by other parsers or are structural tokens
    case "And":
    case "Or":
    case "Not":
    case "Traverse":
    case "RepeatZeroOrMoreLazy":
    case "RepeatZeroOrMorePossessive":
    case "RepeatOneOrMore":
    case "RepeatOneOrMoreLazy":
    case "RepeatOneOrMorePossessive":
    case "RepeatZeroOrOne":
    case "RepeatZeroOrOneLazy":
    case "RepeatZeroOrOnePossessive":
    case "ParenClose":
    case "BracketClose":
    case "Comma":
    case "Ellipsis":
    case "Range":
      return err(unexpectedToken(token, span));
  }
}

// ============================================================================
// Helper Functions
// ============================================================================

/**
 * Parse a parenthesized group expression.
 */
function parseParenGroup(lexer: Lexer): Result<Pattern> {
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(group(inner.value));
}

/**
 * Parse a capture group: @name pattern
 */
function parseCapture(lexer: Lexer, name: string): Result<Pattern> {
  const inner = parseGroup(lexer);
  if (!inner.ok) return inner;
  return ok(capture(name, inner.value));
}

/**
 * Parse a search pattern: search(pattern)
 */
function parseSearch(lexer: Lexer): Result<Pattern> {
  const open = lexer.next();
  if (open?.token.type !== "ParenOpen") {
    return err({ type: "ExpectedOpenParen", span: lexer.span() });
  }

  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(search(inner.value));
}

/**
 * Parse number with possible range or comparison.
 */
function parseNumberRangeOrComparison(lexer: Lexer, firstValue: number): Result<Pattern> {
  const next = lexer.peekToken();
  if (next === undefined) {
    return ok(number(firstValue));
  }

  if (next.token.type === "Ellipsis") {
    lexer.next(); // consume ...
    const endToken = lexer.next();
    if (endToken === undefined) {
      return err(unexpectedEndOfInput());
    }

    let endValue: number;
    if (endToken.token.type === "UnsignedInteger" || endToken.token.type === "Integer") {
      if (!endToken.token.value.ok) return err(endToken.token.value.error);
      endValue = endToken.token.value.value;
    } else if (endToken.token.type === "Float") {
      if (!endToken.token.value.ok) return err(endToken.token.value.error);
      endValue = endToken.token.value.value;
    } else {
      return err(unexpectedToken(endToken.token, endToken.span));
    }

    return ok(numberRange(firstValue, endValue));
  }

  return ok(number(firstValue));
}

/**
 * Parse comparison number: >=n, <=n, >n, <n
 */
function parseComparisonNumber(lexer: Lexer, op: string): Result<Pattern> {
  const numToken = lexer.next();
  if (numToken === undefined) {
    return err(unexpectedEndOfInput());
  }

  let value: number;
  if (numToken.token.type === "UnsignedInteger" || numToken.token.type === "Integer") {
    if (!numToken.token.value.ok) return err(numToken.token.value.error);
    value = numToken.token.value.value;
  } else if (numToken.token.type === "Float") {
    if (!numToken.token.value.ok) return err(numToken.token.value.error);
    value = numToken.token.value.value;
  } else {
    return err(unexpectedToken(numToken.token, numToken.span));
  }

  switch (op) {
    case ">=":
      return ok(patternLeaf(leafNumber(NumberPattern.greaterThanOrEqual(value))));
    case "<=":
      return ok(patternLeaf(leafNumber(NumberPattern.lessThanOrEqual(value))));
    case ">":
      return ok(numberGreaterThan(value));
    case "<":
      return ok(numberLessThan(value));
    default:
      return ok(number(value));
  }
}

/**
 * Parse an array pattern.
 */
function parseArray(lexer: Lexer): Result<Pattern> {
  // Check for empty array or simple patterns
  const first = lexer.peekToken();
  if (first === undefined) {
    return err(unexpectedEndOfInput());
  }

  if (first.token.type === "BracketClose") {
    lexer.next(); // consume ]
    return ok(patternLeaf(leafArray(ArrayPattern.count(0))));
  }

  if (first.token.type === "RepeatZeroOrMore") {
    lexer.next(); // consume *
    const close = lexer.next();
    if (close?.token.type !== "BracketClose") {
      return err({ type: "ExpectedCloseBracket", span: lexer.span() });
    }
    return ok(anyArray());
  }

  // Parse the inner pattern(s)
  const patterns: Pattern[] = [];

  while (true) {
    const next = lexer.peekToken();
    if (next === undefined) {
      return err(unexpectedEndOfInput());
    }

    if (next.token.type === "BracketClose") {
      lexer.next(); // consume ]
      break;
    }

    const pattern = parseOr(lexer);
    if (!pattern.ok) return pattern;
    patterns.push(pattern.value);

    const afterPattern = lexer.peekToken();
    if (afterPattern === undefined) {
      return err(unexpectedEndOfInput());
    }

    if (afterPattern.token.type === "Comma") {
      lexer.next(); // consume ,
    } else if (afterPattern.token.type !== "BracketClose") {
      return err(unexpectedToken(afterPattern.token, afterPattern.span));
    }
  }

  if (patterns.length === 0) {
    return ok(patternLeaf(leafArray(ArrayPattern.count(0))));
  }

  return ok(patternLeaf(leafArray(ArrayPattern.withPatterns(patterns))));
}

/**
 * Parse a tag pattern.
 */
function parseTag(lexer: Lexer): Result<Pattern> {
  const open = lexer.next();
  if (open?.token.type !== "ParenOpen") {
    return ok(anyTag());
  }

  // Parse tag number or pattern
  const tagToken = lexer.next();
  if (tagToken === undefined) {
    return err(unexpectedEndOfInput());
  }

  if (tagToken.token.type !== "UnsignedInteger") {
    return err(unexpectedToken(tagToken.token, tagToken.span));
  }

  if (!tagToken.token.value.ok) return err(tagToken.token.value.error);
  // tagToken.token.value.value contains the tag number for future tag-specific matching

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  // Create a tagged pattern with the specific tag
  // For now, just match the tag number
  return ok(anyTag()); // Simplified - full implementation would match specific tag
}

/**
 * Parse date content from date'...' pattern.
 */
function parseDateContent(content: string, span: Span): Result<Pattern> {
  // Try to parse as ISO date
  const parsed = Date.parse(content);
  if (isNaN(parsed)) {
    return err({ type: "InvalidDateFormat", span });
  }

  const cborDate = CborDate.fromDatetime(new Date(parsed));
  return ok(date(cborDate));
}

/**
 * Parse known value content from '...' pattern.
 */
function parseKnownValueContent(content: string): Result<Pattern> {
  // Try to parse as number first
  const numValue = parseInt(content, 10);
  if (!isNaN(numValue)) {
    const kv = { value: () => BigInt(numValue) } as unknown as KnownValueType;
    return ok(knownValue(kv));
  }

  // Try to find by name in known values
  // For now, just create a named pattern
  return ok(patternLeaf(leafKnownValue(KnownValuePattern.named(content))));
}

/**
 * Parse CBOR pattern.
 */
function parseCbor(lexer: Lexer): Result<Pattern> {
  // Check for optional content in parentheses
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(patternLeaf(leafTag(TaggedPattern.any()))); // cbor matches any CBOR
  }

  lexer.next(); // consume (

  // Parse inner content - this is a dcbor-pattern expression
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return inner;
}

// ============================================================================
// Structure Pattern Parsers
// ============================================================================

function parseNode(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anyNode());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(patternStructure(structureNode(NodePattern.withSubject(inner.value))));
}

function parseAssertion(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anyAssertion());
  }

  lexer.next(); // consume (

  // Parse predicate pattern
  const pred = parseOr(lexer);
  if (!pred.ok) return pred;

  const comma = lexer.next();
  if (comma?.token.type !== "Comma") {
    return err(unexpectedToken(comma?.token ?? { type: "Null" }, comma?.span ?? lexer.span()));
  }

  // Parse object pattern
  const obj = parseOr(lexer);
  if (!obj.ok) return obj;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(
    patternStructure(structureAssertions(AssertionsPattern.withBoth(pred.value, obj.value))),
  );
}

function parseAssertionPred(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anyAssertion());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(assertionWithPredicate(inner.value));
}

function parseAssertionObj(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anyAssertion());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(assertionWithObject(inner.value));
}

function parseDigest(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(patternStructure(structureDigest(DigestPattern.any())));
  }

  lexer.next(); // consume (

  // Parse digest hex pattern
  const digestToken = lexer.next();
  if (digestToken === undefined) {
    return err(unexpectedEndOfInput());
  }

  if (digestToken.token.type === "HexPattern") {
    if (!digestToken.token.value.ok) return err(digestToken.token.value.error);
    const close = lexer.next();
    if (close?.token.type !== "ParenClose") {
      return err({ type: "ExpectedCloseParen", span: lexer.span() });
    }
    return ok(digestPrefix(digestToken.token.value.value));
  }

  return err(unexpectedToken(digestToken.token, digestToken.span));
}

function parseObject(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anyObject());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(object(inner.value));
}

function parsePredicate(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anyPredicate());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(predicate(inner.value));
}

function parseSubject(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(anySubject());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(subject(inner.value));
}

function parseUnwrap(lexer: Lexer): Result<Pattern> {
  const next = lexer.peekToken();
  if (next?.token.type !== "ParenOpen") {
    return ok(unwrapEnvelope());
  }

  lexer.next(); // consume (
  const inner = parseOr(lexer);
  if (!inner.ok) return inner;

  const close = lexer.next();
  if (close?.token.type !== "ParenClose") {
    return err({ type: "ExpectedCloseParen", span: lexer.span() });
  }

  return ok(unwrapMatching(inner.value));
}
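
For orientation, here is a minimal usage sketch of the parser shown above. It is not part of the published package: it assumes that the package's root entry point re-exports parse and parsePartial, and that the keyword spellings in the example pattern strings (bool, text, search(...), @name captures) match what the Lexer in package/src/parse/token.ts actually accepts.

// Hypothetical usage sketch -- entry-point re-exports and pattern spellings are assumptions.
import { parse, parsePartial } from "@bcts/envelope-pattern";

// OR of two leaf patterns: any boolean value, or any text value.
const simple = parse("bool | text");
if (simple.ok) {
  console.log("parsed pattern:", simple.value);
} else {
  console.error("parse error:", simple.error);
}

// A search pattern wrapping a named capture, per parseSearch/parseCapture above.
const captured = parse('search(@name("Alice"))');
console.log(captured.ok ? "search pattern ok" : captured.error);

// parsePartial returns the parsed pattern plus the lexer position, so trailing
// tokens after the first pattern are not rejected the way parse() rejects them.
const partial = parsePartial("bool, 42");
if (partial.ok) {
  const [pattern, consumed] = partial.value;
  console.log("consumed", consumed, "characters of input", pattern);
}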