@matter/model 0.12.0-alpha.0-20241228-9f74a0273 → 0.12.0-alpha.0-20241231-14ac774ba

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/dist/cjs/aspects/Constraint.d.ts +24 -15
  2. package/dist/cjs/aspects/Constraint.d.ts.map +1 -1
  3. package/dist/cjs/aspects/Constraint.js +268 -198
  4. package/dist/cjs/aspects/Constraint.js.map +2 -2
  5. package/dist/cjs/common/FieldValue.d.ts +10 -4
  6. package/dist/cjs/common/FieldValue.d.ts.map +1 -1
  7. package/dist/cjs/common/FieldValue.js +1 -1
  8. package/dist/cjs/common/FieldValue.js.map +1 -1
  9. package/dist/cjs/common/Metatype.d.ts +19 -1
  10. package/dist/cjs/common/Metatype.d.ts.map +1 -1
  11. package/dist/cjs/common/Metatype.js +171 -170
  12. package/dist/cjs/common/Metatype.js.map +1 -1
  13. package/dist/cjs/common/Specification.d.ts +1 -1
  14. package/dist/cjs/common/Specification.d.ts.map +1 -1
  15. package/dist/cjs/logic/ModelDiff.d.ts +40 -0
  16. package/dist/cjs/logic/ModelDiff.d.ts.map +1 -0
  17. package/dist/cjs/logic/ModelDiff.js +119 -0
  18. package/dist/cjs/logic/ModelDiff.js.map +6 -0
  19. package/dist/cjs/logic/definition-validation/ValueValidator.js +1 -1
  20. package/dist/cjs/logic/definition-validation/ValueValidator.js.map +1 -1
  21. package/dist/cjs/logic/index.d.ts +1 -0
  22. package/dist/cjs/logic/index.d.ts.map +1 -1
  23. package/dist/cjs/logic/index.js +1 -0
  24. package/dist/cjs/logic/index.js.map +1 -1
  25. package/dist/cjs/parser/Lexer.d.ts +3 -3
  26. package/dist/cjs/parser/Lexer.d.ts.map +1 -1
  27. package/dist/cjs/parser/Lexer.js +35 -31
  28. package/dist/cjs/parser/Lexer.js.map +1 -1
  29. package/dist/cjs/parser/Token.d.ts +5 -2
  30. package/dist/cjs/parser/Token.d.ts.map +1 -1
  31. package/dist/cjs/parser/TokenStream.js +2 -2
  32. package/dist/esm/aspects/Constraint.d.ts +24 -15
  33. package/dist/esm/aspects/Constraint.d.ts.map +1 -1
  34. package/dist/esm/aspects/Constraint.js +269 -199
  35. package/dist/esm/aspects/Constraint.js.map +2 -2
  36. package/dist/esm/common/FieldValue.d.ts +10 -4
  37. package/dist/esm/common/FieldValue.d.ts.map +1 -1
  38. package/dist/esm/common/FieldValue.js +1 -1
  39. package/dist/esm/common/FieldValue.js.map +1 -1
  40. package/dist/esm/common/Metatype.d.ts +19 -1
  41. package/dist/esm/common/Metatype.d.ts.map +1 -1
  42. package/dist/esm/common/Metatype.js +171 -170
  43. package/dist/esm/common/Metatype.js.map +1 -1
  44. package/dist/esm/common/Specification.d.ts +1 -1
  45. package/dist/esm/common/Specification.d.ts.map +1 -1
  46. package/dist/esm/logic/ModelDiff.d.ts +40 -0
  47. package/dist/esm/logic/ModelDiff.d.ts.map +1 -0
  48. package/dist/esm/logic/ModelDiff.js +99 -0
  49. package/dist/esm/logic/ModelDiff.js.map +6 -0
  50. package/dist/esm/logic/definition-validation/ValueValidator.js +1 -1
  51. package/dist/esm/logic/definition-validation/ValueValidator.js.map +1 -1
  52. package/dist/esm/logic/index.d.ts +1 -0
  53. package/dist/esm/logic/index.d.ts.map +1 -1
  54. package/dist/esm/logic/index.js +1 -0
  55. package/dist/esm/logic/index.js.map +1 -1
  56. package/dist/esm/parser/Lexer.d.ts +3 -3
  57. package/dist/esm/parser/Lexer.d.ts.map +1 -1
  58. package/dist/esm/parser/Lexer.js +35 -31
  59. package/dist/esm/parser/Lexer.js.map +1 -1
  60. package/dist/esm/parser/Token.d.ts +5 -2
  61. package/dist/esm/parser/Token.d.ts.map +1 -1
  62. package/dist/esm/parser/TokenStream.js +2 -2
  63. package/package.json +4 -4
  64. package/src/aspects/Constraint.ts +340 -215
  65. package/src/common/FieldValue.ts +10 -5
  66. package/src/common/Metatype.ts +200 -181
  67. package/src/common/Specification.ts +1 -1
  68. package/src/logic/ModelDiff.ts +150 -0
  69. package/src/logic/definition-validation/ValueValidator.ts +1 -1
  70. package/src/logic/index.ts +1 -0
  71. package/src/parser/Lexer.ts +38 -40
  72. package/src/parser/Token.ts +11 -1
  73. package/src/parser/TokenStream.ts +2 -2

package/src/logic/index.ts
@@ -12,6 +12,7 @@ export * from "./cluster-variance/VarianceCondition.js";
 export * from "./ClusterVariance.js";
 export * from "./DefaultValue.js";
 export * from "./MergedModel.js";
+export * from "./ModelDiff.js";
 export * from "./ModelVariantTraversal.js";
 export * from "./Scope.js";
 export * from "./ValidateModel.js";

package/src/parser/Lexer.ts
@@ -16,10 +16,10 @@ function isNameChar(c: string) {
  *
  * Tokenizes simple text dialects. Currently sufficient for Matter conformance and constraint tokenization.
  */
-export class Lexer<T extends BasicToken> {
+export class Lexer<T extends BasicToken<KW>, const KW extends string[] = []> {
     #keywords: Set<string>;
 
-    constructor(keywords: Iterable<string> = []) {
+    constructor(keywords?: KW) {
         if (keywords instanceof Set) {
             this.#keywords = keywords;
         } else {
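
The new signature uses a `const` type parameter so the keyword tuple passed to the constructor keeps its literal types, which lets keyword tokens be typed as a union of those literals. A minimal standalone sketch of the same pattern (MiniLexer is a hypothetical stand-in, not the package's Lexer):

// Minimal sketch of the const-keyword pattern above; illustrative only.
class MiniLexer<const KW extends string[] = []> {
    #keywords: Set<string>;

    constructor(keywords?: KW) {
        this.#keywords = new Set(keywords ?? []);
    }

    // Narrows a word to the keyword union when it was registered as a keyword.
    isKeyword(word: string): word is KW[number] {
        return this.#keywords.has(word);
    }
}

// KW is inferred as ["min", "max"] without "as const", thanks to the const modifier.
const lexer = new MiniLexer(["min", "max"]);

The real Lexer presumably uses this to produce the strongly typed BasicToken.Keyword tokens added to Token.ts further down.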
@@ -117,33 +117,7 @@ function* lex(
         return;
     }
 
-    function tokenizeNumber(sign: number) {
-        markStart();
-        if (sign === -1) {
-            // Skip "-" prefix
-            next();
-        }
-
-        if (current.value === "0") {
-            if (peeked.value === "x") {
-                next();
-                next();
-                return tokenizeDigits(16, sign, hexadecimalValueOf);
-            } else if (peeked.value === "b") {
-                next();
-                next();
-                return tokenizeDigits(2, sign, binaryValueOf);
-            }
-        }
-
-        return tokenizeDigits(10, sign, decimalValueOf);
-    }
-
-    function tokenizeDigits(
-        base: number,
-        sign: number,
-        valueOf: (digit: string[1] | undefined) => number | undefined,
-    ): BasicToken {
+    function tokenizeDigits(base: number, valueOf: (digit: string[1] | undefined) => number | undefined): BasicToken {
         // The first digit may not actually be a digit if number is hexadecimal or binary
         let num = valueOf(current.value);
         if (num === undefined) {
@@ -161,7 +135,19 @@ function* lex(
             num = num * base + digitValue;
         }
 
-        num *= sign;
+        if (base === 10 && peeked.value === ".") {
+            next();
+            let fraction = "";
+            while (true) {
+                const digitValue = valueOf(peeked.value);
+                if (digitValue === undefined) {
+                    break;
+                }
+                fraction += peeked.value;
+                next();
+            }
+            num = Number.parseFloat(`${num}.${fraction}`);
+        }
 
         // Handle specialized suffices for percents and temperatures
         if (peeked.value === "%") {
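
The added branch only fires for base-10 numbers: it consumes a "." plus any following digits and rebuilds the value with Number.parseFloat, so fractional values such as 2.5 can now be tokenized. A rough standalone sketch of the same accumulation logic (hypothetical helper, not the lexer itself):

// Hypothetical stand-alone version of the fraction handling added above.
function parseDecimal(text: string): number {
    let i = 0;
    let num = 0;
    while (i < text.length && text[i] >= "0" && text[i] <= "9") {
        num = num * 10 + (text.charCodeAt(i) - 0x30);
        i++;
    }
    if (text[i] === ".") {
        i++;
        let fraction = "";
        while (i < text.length && text[i] >= "0" && text[i] <= "9") {
            fraction += text[i];
            i++;
        }
        // Same trick as the diff: stitch integer and fraction back together as text.
        num = Number.parseFloat(`${num}.${fraction}`);
    }
    return num;
}

parseDecimal("2.5"); // 2.5
parseDecimal("40");  // 40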
@@ -169,7 +155,7 @@ function* lex(
             return { type: "value", value: FieldValue.Percent(num), startLine, startChar };
         } else if (peeked.value === "°") {
             next();
-            if (peeked.value?.toLowerCase() === "C") {
+            if (peeked.value?.toLowerCase() === "c") {
                 next();
             }
             return { type: "value", value: FieldValue.Celsius(num), startLine, startChar };
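
This is a one-character bug fix: the peeked character is lowercased before the comparison, so testing it against "C" could never match and the unit character after "°" was left unconsumed. Comparing against "c" accepts both "°C" and "°c":

// Why the old check never matched:
"C".toLowerCase() === "C"; // false: toLowerCase() yields "c"
"C".toLowerCase() === "c"; // true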
@@ -190,22 +176,34 @@ function* lex(
         case "]":
         case "(":
         case ")":
+        case "{":
+        case "}":
+        case "-":
         case "+":
         case "/":
         case "*":
             yield { type: current.value, startLine: line, startChar: char };
             break;
 
-        case "-":
-            if (peeked.value !== undefined && (peeked.value >= "0" || peeked.value <= "0")) {
-                yield tokenizeNumber(-1);
-            } else {
-                yield { type: current.value, startLine: line, startChar: char };
+        case "0":
+            markStart();
+
+            if (current.value === "0") {
+                if (peeked.value === "x") {
+                    next();
+                    next();
+                    yield tokenizeDigits(16, hexadecimalValueOf);
+                    break;
+                }
+
+                if (peeked.value === "b") {
+                    next();
+                    next();
+                    yield tokenizeDigits(2, binaryValueOf);
+                }
             }
-            break;
 
-        case "0":
-            yield tokenizeNumber(1);
+            yield tokenizeDigits(10, decimalValueOf);
             break;
 
         case "1":
@@ -217,7 +215,7 @@ function* lex(
         case "7":
         case "8":
         case "9":
-            yield tokenizeDigits(10, 1, decimalValueOf);
+            yield tokenizeDigits(10, decimalValueOf);
             break;
 
         case "!":

package/src/parser/Token.ts
@@ -18,7 +18,11 @@ export interface Token {
 /**
  * The base token produced by the tokenizer.
  */
-export type BasicToken = BasicToken.Special | BasicToken.Word | BasicToken.Number;
+export type BasicToken<KW extends string[] = []> =
+    | BasicToken.Special
+    | BasicToken.Word
+    | BasicToken.Number
+    | BasicToken.Keyword<KW>;
 
 /**
  * A {@link BasicToken} with additional keywords.
@@ -39,6 +43,8 @@ export namespace BasicToken {
         | "]"
         | "("
         | ")"
+        | "{"
+        | "}"
         | "-"
         | "+"
         | "*"
@@ -61,4 +67,8 @@ export namespace BasicToken {
         type: "value";
         value: FieldValue;
     }
+
+    export interface Keyword<T extends string[]> extends Token {
+        type: T[number];
+    }
 }
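
The new Keyword interface is what ties the lexer's const keyword tuple to the token type: its type field is T[number], the union of the supplied keyword strings. A small illustration of how that union narrows (local stand-ins, not the package's exports):

// Local stand-ins mirroring the shapes above, just to show the T[number] union.
interface Token {
    startLine?: number;
    startChar?: number;
}

interface Keyword<T extends string[]> extends Token {
    type: T[number];
}

// With T = ["min", "max"], type must be "min" | "max".
const kw: Keyword<["min", "max"]> = { type: "max" };
// const bad: Keyword<["min", "max"]> = { type: "len" }; // would not compile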

package/src/parser/TokenStream.ts
@@ -50,10 +50,10 @@ export function TokenStream<T extends Token>(iterator: Iterator<T>): TokenStream
                 return "end of statement";
 
             case "word":
-                return `word "${(this.token as unknown as BasicToken.Word).value}`;
+                return `word "${(this.token as unknown as BasicToken.Word).value}"`;
 
             case "number":
-                return `number "${(this.token as unknown as BasicToken.Number).value}`;
+                return `number "${(this.token as unknown as BasicToken.Number).value}"`;
 
             default:
                 if (this.token?.type.match(/[a-z]/i)) {