ezmedicationinput 0.1.43 → 0.1.44

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/README.md +4 -1
  2. package/dist/advice-rules.json +772 -0
  3. package/dist/advice-terminology.json +104 -0
  4. package/dist/advice.d.ts +16 -0
  5. package/dist/advice.js +1375 -0
  6. package/dist/event-trigger.d.ts +14 -0
  7. package/dist/event-trigger.js +501 -0
  8. package/dist/fhir-translations.d.ts +5 -0
  9. package/dist/fhir-translations.js +117 -0
  10. package/dist/fhir.d.ts +6 -4
  11. package/dist/fhir.js +566 -134
  12. package/dist/format.d.ts +4 -2
  13. package/dist/format.js +515 -218
  14. package/dist/i18n.d.ts +2 -2
  15. package/dist/i18n.js +641 -199
  16. package/dist/index.d.ts +0 -1
  17. package/dist/index.js +219 -168
  18. package/dist/internal-types.d.ts +5 -5
  19. package/dist/ir.d.ts +4 -0
  20. package/dist/ir.js +178 -0
  21. package/dist/lexer/lex.d.ts +2 -0
  22. package/dist/lexer/lex.js +401 -0
  23. package/dist/lexer/meaning.d.ts +71 -0
  24. package/dist/lexer/meaning.js +619 -0
  25. package/dist/lexer/surface.d.ts +2 -0
  26. package/dist/lexer/surface.js +62 -0
  27. package/dist/lexer/token-types.d.ts +36 -0
  28. package/dist/lexer/token-types.js +19 -0
  29. package/dist/maps.d.ts +6 -12
  30. package/dist/maps.js +793 -247
  31. package/dist/parser-state.d.ts +101 -0
  32. package/dist/parser-state.js +441 -0
  33. package/dist/parser.d.ts +7 -7
  34. package/dist/parser.js +3598 -1974
  35. package/dist/prn.d.ts +4 -0
  36. package/dist/prn.js +59 -0
  37. package/dist/schedule.js +230 -32
  38. package/dist/site-phrases.d.ts +35 -0
  39. package/dist/site-phrases.js +344 -0
  40. package/dist/timing-summary.d.ts +13 -3
  41. package/dist/timing-summary.js +7 -7
  42. package/dist/types.d.ts +237 -32
  43. package/dist/types.js +49 -1
  44. package/dist/utils/text.d.ts +3 -0
  45. package/dist/utils/text.js +48 -0
  46. package/package.json +1 -1
package/dist/ir.js ADDED
@@ -0,0 +1,178 @@
1
"use strict";
// Compiled CommonJS preamble for dist/ir.js: flags the module for ES-module
// interop and exports the two public entry points defined below.
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildCanonicalSigClauses = buildCanonicalSigClauses;
exports.shiftCanonicalSigClauses = shiftCanonicalSigClauses;
5
// Locate the [start, end) range of `input` with surrounding whitespace
// stripped. Returns undefined for empty, missing, or all-whitespace input.
function computeTrimmedInputRange(input) {
    if (!input) {
        return undefined;
    }
    const start = input.search(/\S/);
    if (start === -1) {
        return undefined;
    }
    let end = input.length;
    // Walk backwards past trailing whitespace; charAt past-the-end yields ""
    // which never matches \s, so the loop is safe at the boundary.
    while (end > start) {
        if (!/\s/.test(input.charAt(end - 1))) {
            break;
        }
        end -= 1;
    }
    return { start, end };
}
20
// Bound a confidence score to [0, 1] and round in-range values to two
// decimal places so reported confidences are stable and comparable.
function clampConfidence(value) {
    const bounded = Math.min(1, Math.max(0, value));
    return Number(bounded.toFixed(2));
}
29
// Shallow-copy a coding down to its three transported fields.
// Returns undefined when there is nothing meaningful to carry over
// (no code, no display, no system) or when `coding` itself is absent.
function cloneCoding(coding) {
    const { code, display, system } = coding ?? {};
    if (!code && !display && !system) {
        return undefined;
    }
    return { code, display, system };
}
39
// Copy an additional-instruction entry: coding is deep-copied via
// cloneCoding, frames get a fresh array, text is shared (immutable string).
function cloneAdditionalInstruction(instruction) {
    const frames = instruction.frames ? instruction.frames.slice() : undefined;
    return {
        text: instruction.text,
        coding: cloneCoding(instruction.coding),
        frames
    };
}
46
// Construct a source span record for [start, end) of `input`, capturing the
// covered text and an owned copy of any contributing token indices.
function buildSourceSpan(input, start, end, tokenIndices) {
    const hasIndices = Boolean(tokenIndices && tokenIndices.length);
    return {
        start,
        end,
        text: input.slice(start, end),
        // Copy defensively so later mutation of the caller's array is harmless.
        tokenIndices: hasIndices ? tokenIndices.slice() : undefined
    };
}
54
// Build a span for an optional range; a missing range means "the whole input".
function buildRangeSourceSpan(input, range, tokenIndices) {
    if (range == null) {
        return buildSourceSpan(input, 0, input.length, tokenIndices);
    }
    return buildSourceSpan(input, range.start, range.end, tokenIndices);
}
58
// Compute the union source span of the tokens named by `tokenIndices`.
// Indices that do not resolve to a token are dropped; if none resolve,
// there is no span to report and undefined is returned.
function spanFromTokenIndices(input, tokens, tokenIndices) {
    const resolved = tokenIndices.filter((i) => tokens[i]);
    if (!resolved.length) {
        return undefined;
    }
    const starts = resolved.map((i) => tokens[i].sourceStart);
    const ends = resolved.map((i) => tokens[i].sourceEnd);
    return buildSourceSpan(input, Math.min(...starts), Math.max(...ends), resolved);
}
76
// Wrap a single span into the evidence-list shape used on clause parts.
// No span → no evidence (empty list). The span and its tokenIndices are
// copied so evidence entries own their data.
function buildEvidence(rule, span, note, score) {
    if (!span) {
        return [];
    }
    const ownedSpan = Object.assign({}, span, {
        tokenIndices: span.tokenIndices ? span.tokenIndices.slice() : undefined
    });
    return [{ rule, spans: [ownedSpan], note, score }];
}
91
// Collect spans for all unconsumed tokens, grouping runs of consecutive
// token indices into a single span; a consumed token or an index gap ends
// the current run.
function collectLeftoverSpans(internal) {
    const groups = [];
    let run = [];
    const closeRun = () => {
        if (!run.length) {
            return;
        }
        const span = spanFromTokenIndices(internal.input, internal.tokens, run);
        if (span) {
            groups.push(span);
        }
        run = [];
    };
    for (const token of internal.tokens) {
        if (internal.consumed.has(token.index)) {
            closeRun();
        } else {
            // Non-adjacent index → this leftover starts a new group.
            if (run.length && token.index !== run[run.length - 1] + 1) {
                closeRun();
            }
            run.push(token.index);
        }
    }
    closeRun();
    return groups;
}
117
// Return the indices of tokens whose lowercase text satisfies `matcher`.
// NOTE(review): only tokens already marked consumed are considered —
// unconsumed tokens are skipped; confirm this inversion is intended.
function findTokensByLower(internal, matcher) {
    const matches = [];
    for (const token of internal.tokens) {
        const isConsumed = internal.consumed.has(token.index);
        if (isConsumed && matcher(token.lower)) {
            matches.push(token.index);
        }
    }
    return matches;
}
129
// Score clause confidence: start at 1 and subtract capped penalties for
// leftover token groups (0.12 each, max 0.4) and parser warnings
// (0.08 each, max 0.2), plus a small penalty when no route/site/timing
// signal was recognized at all.
function buildClauseConfidence(internal, leftovers) {
    let score = 1;
    score -= Math.min(0.4, 0.12 * leftovers.length);
    score -= Math.min(0.2, 0.08 * internal.warnings.length);
    const hasAnySignal = internal.routeCode || internal.routeText || internal.siteText || internal.timingCode;
    if (!hasAnySignal) {
        score -= 0.05;
    }
    return clampConfidence(score);
}
138
// The parser accumulates clauses on its internal state; expose that list.
function buildCanonicalSigClauses(internal) {
    const { clauses } = internal;
    return clauses;
}
141
// Shift every source offset in `clauses` by `offset`, in place. Used when a
// parsed fragment is re-anchored inside a larger input string. Covers the
// clause span, raw range, leftover spans, and every evidence span on the
// clause and its parts (dose/route/site/schedule/prn/additionalInstructions).
function shiftCanonicalSigClauses(clauses, offset) {
    const shiftEvidence = (evidence) => {
        for (const entry of evidence ?? []) {
            for (const span of entry.spans) {
                span.start += offset;
                span.end += offset;
            }
        }
    };
    for (const clause of clauses) {
        if (clause.span) {
            clause.span = {
                start: clause.span.start + offset,
                end: clause.span.end + offset
            };
        }
        clause.raw = Object.assign({}, clause.raw, {
            start: clause.raw.start + offset,
            end: clause.raw.end + offset
        });
        for (const leftover of clause.leftovers) {
            leftover.start += offset;
            leftover.end += offset;
        }
        shiftEvidence(clause.evidence);
        for (const part of [clause.dose, clause.route, clause.site, clause.schedule, clause.prn]) {
            shiftEvidence(part?.evidence);
        }
        for (const instruction of clause.additionalInstructions ?? []) {
            shiftEvidence(instruction.evidence);
        }
    }
}
@@ -0,0 +1,2 @@
1
import { LexToken } from "./token-types";
/** Tokenize raw sig text into a flat stream of lexical tokens. */
export declare function lexInput(input: string): LexToken[];
@@ -0,0 +1,401 @@
1
"use strict";
// Lexer entry point: converts raw sig text into a flat LexToken stream.
// Depends on the shared token maps and the surface scanner.
Object.defineProperty(exports, "__esModule", { value: true });
exports.lexInput = lexInput;
const maps_1 = require("../maps");
const token_types_1 = require("./token-types");
const surface_1 = require("./surface");
7
// Time-period words that may follow a "/" (e.g. "2/day", "1/wk"); used to
// rewrite the slash as a synthetic "per" token during lexing.
const PER_SLASH_UNITS = new Set([
    "d",
    "day",
    "days",
    "wk",
    "w",
    "week",
    "weeks",
    "mo",
    "month",
    "months",
    "hr",
    "hrs",
    "hour",
    "hours",
    "h",
    "min",
    "mins",
    "minute",
    "minutes"
]);
// Dose-unit words that may be glued directly to a number (e.g. "2tabs",
// "500mg"); such compounds are split into number + unit tokens.
const COMPACT_DISCRETE_UNITS_PATTERN = /^(tab|tabs|tablet|tablets|cap|caps|capsule|capsules|mg|mcg|ml|g|drops|drop|puff|puffs|spray|sprays|patch|patches)$/i;
29
// Classify token text into a lexical kind, extracting numeric values where
// applicable. NOTE: the checks run in priority order — a bare decimal such
// as "5.30" matches the Number pattern before the time-like pattern is
// reached, so only "@"-prefixed, ":"-separated, or am/pm-suffixed strings
// classify as TimeLike here.
function classifyLexKind(value) {
    const lower = value.toLowerCase();
    // Plain integer or decimal → Number carrying its parsed value.
    if (/^[0-9]+(?:\.[0-9]+)?$/.test(lower)) {
        return { kind: token_types_1.LexKind.Number, value: parseFloat(value) };
    }
    // "1-2" → NumberRange with low/high bounds.
    const rangeMatch = lower.match(/^([0-9]+(?:\.[0-9]+)?)-([0-9]+(?:\.[0-9]+)?)$/);
    if (rangeMatch) {
        return {
            kind: token_types_1.LexKind.NumberRange,
            low: parseFloat(rangeMatch[1]),
            high: parseFloat(rangeMatch[2])
        };
    }
    // "1st", "2nd", "3rd", "4th", … → Ordinal.
    if (/^[0-9]+(?:st|nd|rd|th)$/i.test(lower)) {
        return { kind: token_types_1.LexKind.Ordinal };
    }
    // "@8", "8:30", "8.30am", "8 pm" → TimeLike (see ordering note above).
    if (/^@?\d{1,2}([:.]\d{2})?\s*(am|pm)?$/i.test(lower) ||
        /^\d{1,2}\s*(am|pm)$/i.test(lower)) {
        return { kind: token_types_1.LexKind.TimeLike };
    }
    if (lower === "," || lower === ";") {
        return { kind: token_types_1.LexKind.Separator };
    }
    if (lower === "@" || lower === "&" || lower === "+") {
        return { kind: token_types_1.LexKind.Punctuation };
    }
    // Anything else is an ordinary word.
    return { kind: token_types_1.LexKind.Word };
}
57
// True for an unsigned integer or decimal string (no sign, no exponent).
function isNumericText(value) {
    return /^\d+(?:\.\d+)?$/.test(value);
}
60
// Backslash-escape every character that is special inside a regular
// expression so arbitrary text can be embedded in a pattern literally.
function escapeRegExp(value) {
    return value.replace(/[\\^$.*+?()[\]{}|]/g, (ch) => "\\" + ch);
}
63
// True only when whitespace immediately precedes AND follows the surface.
// At either string boundary the neighbor is "" which never matches \s,
// so boundary-adjacent surfaces report false.
function hasWhitespaceAround(surface, input) {
    const charBefore = surface.start > 0 ? input.charAt(surface.start - 1) : "";
    const charAfter = surface.end < input.length ? input.charAt(surface.end) : "";
    return /\s/.test(charBefore) && /\s/.test(charAfter);
}
68
// Guard: the surface exists and is a Text-kind surface token.
function isSurfaceText(surface) {
    if (!surface) {
        return false;
    }
    return surface.kind === token_types_1.SurfaceTokenKind.Text;
}
71
// Assemble a LexToken from text plus the surface tokens it came from.
// Explicit start/end offsets override the surfaces' own extent (used for
// synthetic tokens carved out of a larger surface); `derived` marks tokens
// whose text differs from what literally appeared in the input.
function buildToken(text, surfaces, input, start, end, derived) {
    const sourceStart = start ?? surfaces[0].start;
    const sourceEnd = end ?? surfaces[surfaces.length - 1].end;
    const classified = classifyLexKind(text);
    return {
        original: text,
        lower: text.toLowerCase(),
        // Real index is assigned after the full list is built (see lexInput).
        index: -1,
        kind: classified.kind,
        value: classified.value,
        low: classified.low,
        high: classified.high,
        sourceStart,
        sourceEnd,
        surfaceIndices: surfaces.map((s) => s.index),
        sourceText: input.slice(sourceStart, sourceEnd),
        derived
    };
}
92
// Append `value` to `values` unless it is undefined (other falsy values,
// e.g. 0 or "", are still pushed).
function pushIfPresent(values, value) {
    if (value === undefined) {
        return;
    }
    values.push(value);
}
97
// Build a derived token for one piece of a fraction (prefix or suffix).
// All inputs must be present and the [start, end) range non-empty;
// otherwise there is no part to emit.
function buildFractionPartToken(part, surface, input, start, end) {
    const usable = part && surface && start !== undefined && end !== undefined && end > start;
    if (!usable) {
        return undefined;
    }
    return buildToken(part, [surface], input, start, end, true);
}
103
// Turn "X/Y" spread over left/slash/right surfaces into tokens: an optional
// alpha prefix on the left (e.g. "tab1/2"), the computed numeric value, and
// an optional alpha suffix on the right (e.g. "1/2tab"). Returns undefined
// when the pieces are not a valid finite fraction (or denominator is 0).
function buildFractionTokens(left, slash, right, input) {
    const leftParts = left.lower.match(/^([a-z]*)([0-9]+(?:\.[0-9]+)?)$/i);
    const rightParts = right.lower.match(/^([0-9]+(?:\.[0-9]+)?)([a-z]*)$/i);
    if (!leftParts || !rightParts) {
        return undefined;
    }
    const numerator = parseFloat(leftParts[2]);
    const denominator = parseFloat(rightParts[1]);
    const valid = Number.isFinite(numerator) && Number.isFinite(denominator) && denominator !== 0;
    if (!valid) {
        return undefined;
    }
    const prefix = leftParts[1];
    const suffix = rightParts[2];
    const tokens = [];
    pushIfPresent(tokens, buildFractionPartToken(prefix, left, input, left.start, left.start + prefix.length));
    // The value token spans all three surfaces and is always derived.
    tokens.push(buildToken(String(numerator / denominator), [left, slash, right], input, undefined, undefined, true));
    pushIfPresent(tokens, buildFractionPartToken(suffix, right, input, right.end - suffix.length, right.end));
    return tokens;
}
123
// Emit one token per split part, locating each part inside the original
// surface so source offsets stay accurate. Parts that cannot be found
// verbatim (synthesized text) are emitted as derived tokens spanning the
// whole surface.
function pushSplitParts(output, surface, parts, input) {
    const sourceLower = surface.lower;
    let searchOffset = 0;
    for (const part of parts) {
        const pattern = new RegExp(escapeRegExp(part.toLowerCase()), "i");
        const relative = sourceLower.slice(searchOffset).search(pattern);
        if (relative === -1) {
            output.push(buildToken(part, [surface], input, undefined, undefined, true));
            continue;
        }
        const partStart = surface.start + searchOffset + relative;
        const partEnd = partStart + part.length;
        searchOffset = partEnd - surface.start;
        const derived = part !== surface.original ? true : undefined;
        output.push(buildToken(part, [surface], input, partStart, partEnd, derived));
    }
}
140
// Split a compact written form into its logical parts, e.g. "2xdaily" →
// ["2x","daily"], "poac" → ["po","ac"], "q4-6h" → ["q","4-6","h"],
// "2tabs" → ["2","tabs"]. The checks run in this exact order: each
// pattern is more specific than the ones after it.
function splitCompactToken(token) {
    // Pure numbers and pure words need no splitting.
    if (/^[0-9]+(?:\.[0-9]+)?$/.test(token)) {
        return [token];
    }
    const timesWord = /^([0-9]+(?:\.[0-9]+)?[x*])([A-Za-z]+)$/i.exec(token);
    if (timesWord) {
        return [timesWord[1], timesWord[2]];
    }
    const poMeal = /^(po)(ac|pc|c)$/i.exec(token);
    if (poMeal) {
        return [poMeal[1], poMeal[2]];
    }
    if (/^[A-Za-z]+$/.test(token)) {
        return [token];
    }
    const qRange = /^q([0-9]+(?:\.[0-9]+)?)-([0-9]+(?:\.[0-9]+)?)([A-Za-z]+)$/i.exec(token);
    if (qRange) {
        return [token.charAt(0), `${qRange[1]}-${qRange[2]}`, qRange[3]];
    }
    const numberWord = /^([0-9]+(?:\.[0-9]+)?)([A-Za-z]+)$/.exec(token);
    if (!numberWord) {
        return [token];
    }
    const numberPart = numberWord[1];
    const suffix = numberWord[2];
    if (/^(st|nd|rd|th)$/i.test(suffix)) {
        // Ordinals like "1st" stay intact.
        return [token];
    }
    const poMealSuffix = /^(po)(ac|pc|c)$/i.exec(suffix);
    if (poMealSuffix) {
        return [numberPart, poMealSuffix[1], poMealSuffix[2]];
    }
    if (/^x\d+/i.test(suffix) || /^q\d+/i.test(suffix)) {
        // Leave "x<N>…"/"q<N>…" suffixes for other recognizers.
        return [token];
    }
    return [numberPart, suffix];
}
175
// Recognize "mon - fri" style: two known day-of-week words joined by a
// bare hyphen surface. Collapses the three surfaces into one derived token.
function tryDayRangeToken(surfaces, index, input) {
    const first = surfaces[index];
    const hyphen = surfaces[index + 1];
    const last = surfaces[index + 2];
    if (!isSurfaceText(first) || !hyphen || hyphen.original !== "-" || !isSurfaceText(last)) {
        return undefined;
    }
    if (!maps_1.DAY_OF_WEEK_TOKENS[first.lower] || !maps_1.DAY_OF_WEEK_TOKENS[last.lower]) {
        return undefined;
    }
    return {
        token: buildToken(`${first.original}-${last.original}`, [first, hyphen, last], input, undefined, undefined, true),
        nextIndex: index + 3
    };
}
192
// Recognize "q4 - 6h" split across three surfaces and collapse it into a
// single derived "q4-6h" token.
function tryQRangeToken(surfaces, index, input) {
    const lead = surfaces[index];
    const dash = surfaces[index + 1];
    const tail = surfaces[index + 2];
    if (!isSurfaceText(lead) || !dash || dash.original !== "-" || !isSurfaceText(tail)) {
        return undefined;
    }
    const leadMatch = lead.lower.match(/^q([0-9]+(?:\.[0-9]+)?)$/);
    const tailMatch = tail.lower.match(/^([0-9]+(?:\.[0-9]+)?)([a-z]+)$/);
    if (!leadMatch || !tailMatch) {
        return undefined;
    }
    const combined = `q${leadMatch[1]}-${tailMatch[1]}${tailMatch[2]}`;
    return {
        token: buildToken(combined, [lead, dash, tail], input, undefined, undefined, true),
        nextIndex: index + 3
    };
}
209
// Recognize "1 - 2" with a free-standing hyphen and collapse it into a
// single derived "1-2" range token.
function tryNumericRangeToken(surfaces, index, input) {
    const low = surfaces[index];
    const dash = surfaces[index + 1];
    const high = surfaces[index + 2];
    const matchesShape =
        isSurfaceText(low) &&
        Boolean(dash) &&
        dash.original === "-" &&
        isSurfaceText(high) &&
        isNumericText(low.lower) &&
        isNumericText(high.lower);
    if (!matchesShape) {
        return undefined;
    }
    return {
        token: buildToken(`${low.original}-${high.original}`, [low, dash, high], input, undefined, undefined, true),
        nextIndex: index + 3
    };
}
226
// Recognize "<number> / <period>" (e.g. "2 / day") and emit three tokens:
// the number, a synthetic "per" in place of the slash, and the unit.
function trySlashUnitExpansion(surfaces, index, input) {
    const value = surfaces[index];
    const slash = surfaces[index + 1];
    const unit = surfaces[index + 2];
    if (!isSurfaceText(value) || !slash || slash.original !== "/") {
        return undefined;
    }
    if (!isSurfaceText(unit) || !isNumericText(value.lower) || !PER_SLASH_UNITS.has(unit.lower)) {
        return undefined;
    }
    return {
        tokens: [
            buildToken(value.original, [value], input),
            buildToken("per", [slash], input, slash.start, slash.end, true),
            buildToken(unit.original, [unit], input)
        ],
        nextIndex: index + 3
    };
}
247
// Recognize a fraction written across three surfaces ("1 / 2") and delegate
// token construction to buildFractionTokens.
function tryFractionToken(surfaces, index, input) {
    const left = surfaces[index];
    const slash = surfaces[index + 1];
    const right = surfaces[index + 2];
    const looksLikeFraction =
        isSurfaceText(left) && Boolean(slash) && slash.original === "/" && isSurfaceText(right);
    if (!looksLikeFraction) {
        return undefined;
    }
    const tokens = buildFractionTokens(left, slash, right, input);
    return tokens ? { tokens, nextIndex: index + 3 } : undefined;
}
263
// Recognize a clock time ("8:30" / "8.30") whose hour, separator, and
// minute landed in separate surfaces. The three pieces must be physically
// adjacent in the input (no whitespace between them).
function tryTimeLikeToken(surfaces, index, input) {
    const hour = surfaces[index];
    const sep = surfaces[index + 1];
    const minute = surfaces[index + 2];
    if (!isSurfaceText(hour) || !sep || !isSurfaceText(minute)) {
        return undefined;
    }
    if (sep.original !== ":" && sep.original !== ".") {
        return undefined;
    }
    if (sep.start !== hour.end || sep.end !== minute.start) {
        return undefined;
    }
    if (!/^\d{1,2}$/.test(hour.lower) || !/^\d{2}$/.test(minute.lower)) {
        return undefined;
    }
    return {
        token: buildToken(`${hour.original}${sep.original}${minute.original}`, [hour, sep, minute], input, undefined, undefined, true),
        nextIndex: index + 3
    };
}
282
// Split a free-text surface into one or more lex tokens, handling compact
// written forms that pack several meanings into one "word". Branches fall
// through in order: slash-unit ("2/day"), compact fraction ("1/2tab"),
// number+discrete-unit ("2tabs"), then the generic compact-token splitter.
function pushTextToken(output, surface, input) {
    // "2/day" written without spaces: number + synthetic "per" + unit.
    const slashUnitMatch = surface.original.match(/^([0-9]+(?:\.[0-9]+)?)\/(d|day|days|wk|w|week|weeks|mo|month|months|hr|hrs|hour|hours|h|min|mins|minute|minutes)$/i);
    if (slashUnitMatch) {
        const slashIndex = surface.original.indexOf("/");
        const valuePart = slashUnitMatch[1];
        const unitPart = slashUnitMatch[2];
        output.push(buildToken(valuePart, [surface], input, surface.start, surface.start + valuePart.length, true));
        output.push(buildToken("per", [surface], input, surface.start + slashIndex, surface.start + slashIndex + 1, true));
        output.push(buildToken(unitPart, [surface], input, surface.end - unitPart.length, surface.end, true));
        return;
    }
    // Compact fraction with optional alpha prefix/suffix, e.g. "1/2tab":
    // fabricate left / slash / right surface records pointing back into the
    // same surface index, then reuse buildFractionTokens on them.
    const fractionMatch = surface.lower.match(/^([a-z]*)([0-9]+(?:\.[0-9]+)?)\/([0-9]+(?:\.[0-9]+)?)([a-z]*)$/i);
    if (fractionMatch) {
        const slashIndex = surface.original.indexOf("/");
        const left = {
            original: surface.original.slice(0, slashIndex),
            lower: surface.lower.slice(0, slashIndex),
            index: surface.index,
            kind: surface.kind,
            start: surface.start,
            end: surface.start + slashIndex
        };
        const slash = {
            original: "/",
            lower: "/",
            index: surface.index,
            kind: token_types_1.SurfaceTokenKind.Punctuation,
            start: surface.start + slashIndex,
            end: surface.start + slashIndex + 1
        };
        const right = {
            original: surface.original.slice(slashIndex + 1),
            lower: surface.lower.slice(slashIndex + 1),
            index: surface.index,
            kind: surface.kind,
            start: surface.start + slashIndex + 1,
            end: surface.end
        };
        const tokens = buildFractionTokens(left, slash, right, input);
        // If the fraction is invalid (e.g. zero denominator) fall through to
        // the generic splitters below instead of dropping the text.
        if (tokens) {
            output.push(...tokens);
            return;
        }
    }
    // Number glued to a discrete dose unit, e.g. "2tabs" / "500mg".
    const compactDiscrete = surface.original.match(/^([0-9]+(?:\.[0-9]+)?)([A-Za-z]+)$/);
    if (compactDiscrete && COMPACT_DISCRETE_UNITS_PATTERN.test(compactDiscrete[2])) {
        pushSplitParts(output, surface, [compactDiscrete[1], compactDiscrete[2]], input);
        return;
    }
    // Generic compact-form splitting ("2xdaily", "poac", "q4-6h", ...).
    pushSplitParts(output, surface, splitCompactToken(surface.original), input);
}
333
// Lex raw input: scan surface tokens, then walk them applying multi-surface
// recognizers (day range, q-range, numeric range, slash-unit, fraction,
// time) in priority order; anything unmatched is handled per surface kind.
// Token indices are assigned once the full list is known.
function lexInput(input) {
    const surfaces = (0, surface_1.scanSurfaceTokens)(input);
    const output = [];
    // Each recognizer either returns { token | tokens, nextIndex } or undefined.
    const recognizers = [
        (i) => tryDayRangeToken(surfaces, i, input),
        (i) => tryQRangeToken(surfaces, i, input),
        (i) => tryNumericRangeToken(surfaces, i, input),
        (i) => trySlashUnitExpansion(surfaces, i, input),
        (i) => tryFractionToken(surfaces, i, input),
        (i) => tryTimeLikeToken(surfaces, i, input)
    ];
    let index = 0;
    while (index < surfaces.length) {
        let matched;
        for (const recognize of recognizers) {
            matched = recognize(index);
            if (matched) {
                break;
            }
        }
        if (matched) {
            if (matched.tokens) {
                output.push(...matched.tokens);
            } else {
                output.push(matched.token);
            }
            index = matched.nextIndex;
            continue;
        }
        const current = surfaces[index];
        if (current.kind === token_types_1.SurfaceTokenKind.Separator) {
            // Only "," and ";" survive as separator tokens; others are dropped.
            if (current.original === "," || current.original === ";") {
                output.push(buildToken(current.original, [current], input));
            }
        } else if (current.kind === token_types_1.SurfaceTokenKind.Punctuation) {
            if (current.original === "-" && hasWhitespaceAround(current, input)) {
                // A spaced hyphen acts as a clause break; normalize it to ";".
                output.push(buildToken(";", [current], input, current.start, current.end, true));
            } else if (current.original === "@" || current.original === "&" || current.original === "+") {
                output.push(buildToken(current.original, [current], input));
            }
            // Any other punctuation is discarded.
        } else {
            pushTextToken(output, current, input);
        }
        index += 1;
    }
    output.forEach((token, tokenIndex) => {
        token.index = tokenIndex;
    });
    return output;
}
@@ -0,0 +1,71 @@
1
import { FrequencyDescriptor, RouteSynonym } from "../maps";
import { EventTiming, FhirDayOfWeek, RouteCode } from "../types";
import { LexToken } from "./token-types";
/** Roles a connector word can play between neighboring tokens. */
export declare enum ConnectorRole {
    General = "GENERAL",
    SiteAnchor = "SITE_ANCHOR",
    SiteList = "SITE_LIST",
    MealContext = "MEAL_CONTEXT",
    DayRange = "DAY_RANGE"
}
/** Word-class tags attached to tokens during semantic annotation. */
export declare enum TokenWordClass {
    AdministrationVerb = "ADMINISTRATION_VERB",
    SiteSurfaceModifier = "SITE_SURFACE_MODIFIER",
    WorkflowInstruction = "WORKFLOW_INSTRUCTION",
    ApplicationVerb = "APPLICATION_VERB",
    CountKeyword = "COUNT_KEYWORD"
}
/** A candidate reading of a token as a body-site mention. */
export interface SiteMeaningCandidate {
    text: string;
    route?: RouteCode;
    source: string;
}
/** A candidate reading of a token as a route synonym, tagged with its source. */
export interface RouteMeaningCandidate extends RouteSynonym {
    source: string;
}
/** Semantic annotations layered onto a lexical token. All fields optional. */
export interface TokenAnnotations {
    eventTiming?: EventTiming;
    timingAbbreviation?: FrequencyDescriptor;
    dayOfWeek?: FhirDayOfWeek[];
    routeCandidates?: RouteMeaningCandidate[];
    siteCandidates?: SiteMeaningCandidate[];
    prn?: true;
    connectorRoles?: ConnectorRole[];
    wordClasses?: TokenWordClass[];
}
/** A LexToken enriched with optional semantic annotations. */
export interface AnnotatedLexToken extends LexToken {
    annotations?: TokenAnnotations;
}
/** Minimal token shape accepted by the meaning-lookup helpers below. */
interface Lowerable {
    lower: string;
    annotations?: TokenAnnotations;
}
/** Expand an inclusive day-of-week range (e.g. mon..fri) into the day list. */
export declare function expandDayMeaningRange(start: FhirDayOfWeek, end: FhirDayOfWeek): FhirDayOfWeek[];
/** Resolve a lowercase token to the day(s) of week it denotes, if any. */
export declare function resolveDayMeaning(tokenLower: string): FhirDayOfWeek[] | undefined;
/** Annotate a single lex token with semantic meaning. */
export declare function annotateLexToken(token: LexToken): AnnotatedLexToken;
/** Annotate every token in a list (see annotateLexToken). */
export declare function annotateLexTokens(tokens: LexToken[]): AnnotatedLexToken[];
/** True when the token carries the given connector role. */
export declare function hasConnectorRole(token: AnnotatedLexToken | undefined, role: ConnectorRole): boolean;
/** True when the token carries the given word class. */
export declare function hasTokenWordClass(token: AnnotatedLexToken | undefined, wordClass: TokenWordClass): boolean;
/* Meaning accessors: each has*/get* pair checks for / returns one kind of
 * annotation on a (possibly undefined) token. */
export declare function hasDayOfWeekMeaning(token: Lowerable | undefined): boolean;
export declare function getDayOfWeekMeaning(token: Lowerable | undefined): FhirDayOfWeek[] | undefined;
export declare function hasEventTimingMeaning(token: Lowerable | undefined): boolean;
export declare function getEventTimingMeaning(token: Lowerable | undefined): EventTiming | undefined;
export declare function hasTimingAbbreviationMeaning(token: Lowerable | undefined): boolean;
export declare function getTimingAbbreviationMeaning(token: Lowerable | undefined): FrequencyDescriptor | undefined;
export declare function hasRouteMeaning(token: Lowerable | undefined): boolean;
export declare function getRouteMeaning(token: Lowerable | undefined): RouteMeaningCandidate | undefined;
export declare function hasSiteMeaningCandidate(token: Lowerable | undefined): boolean;
export declare function getSiteMeaningCandidates(token: Lowerable | undefined): SiteMeaningCandidate[] | undefined;
export declare function getPrimarySiteMeaningCandidate(token: Lowerable | undefined): SiteMeaningCandidate | undefined;
export declare function hasPrnMeaning(token: Lowerable | undefined): boolean;
export declare function hasConnectorMeaning(token: Lowerable | undefined): boolean;
/* Word-classification predicates over bare lowercase words. */
export declare function isSiteAnchorWord(word: string): boolean;
export declare function isSiteListConnectorWord(word: string): boolean;
export declare function isSiteSurfaceModifierWord(word: string): boolean;
export declare function isWorkflowInstructionWord(word: string): boolean;
export declare function isApplicationVerbWord(word: string): boolean;
export declare function isAdministrationVerbWord(word: string): boolean;
export declare function isCountKeywordWord(word: string): boolean;
export declare function isMealContextConnectorWord(word: string): boolean;
export declare function isDayRangeConnectorWord(word: string): boolean;
export {};