@willwade/aac-processors 0.0.11 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/README.md +44 -41
  2. package/dist/cli/index.js +7 -0
  3. package/dist/core/analyze.js +1 -0
  4. package/dist/core/treeStructure.d.ts +45 -2
  5. package/dist/core/treeStructure.js +22 -3
  6. package/dist/index.d.ts +2 -1
  7. package/dist/index.js +20 -3
  8. package/dist/{analytics → optional/analytics}/history.d.ts +15 -4
  9. package/dist/{analytics → optional/analytics}/history.js +3 -3
  10. package/dist/optional/analytics/index.d.ts +30 -0
  11. package/dist/optional/analytics/index.js +78 -0
  12. package/dist/optional/analytics/metrics/comparison.d.ts +36 -0
  13. package/dist/optional/analytics/metrics/comparison.js +334 -0
  14. package/dist/optional/analytics/metrics/core.d.ts +45 -0
  15. package/dist/optional/analytics/metrics/core.js +575 -0
  16. package/dist/optional/analytics/metrics/effort.d.ts +147 -0
  17. package/dist/optional/analytics/metrics/effort.js +211 -0
  18. package/dist/optional/analytics/metrics/index.d.ts +15 -0
  19. package/dist/optional/analytics/metrics/index.js +36 -0
  20. package/dist/optional/analytics/metrics/obl-types.d.ts +93 -0
  21. package/dist/optional/analytics/metrics/obl-types.js +7 -0
  22. package/dist/optional/analytics/metrics/obl.d.ts +40 -0
  23. package/dist/optional/analytics/metrics/obl.js +287 -0
  24. package/dist/optional/analytics/metrics/sentence.d.ts +49 -0
  25. package/dist/optional/analytics/metrics/sentence.js +112 -0
  26. package/dist/optional/analytics/metrics/types.d.ts +157 -0
  27. package/dist/optional/analytics/metrics/types.js +7 -0
  28. package/dist/optional/analytics/metrics/vocabulary.d.ts +65 -0
  29. package/dist/optional/analytics/metrics/vocabulary.js +142 -0
  30. package/dist/optional/analytics/reference/index.d.ts +51 -0
  31. package/dist/optional/analytics/reference/index.js +102 -0
  32. package/dist/optional/analytics/utils/idGenerator.d.ts +59 -0
  33. package/dist/optional/analytics/utils/idGenerator.js +96 -0
  34. package/dist/optional/symbolTools.js +13 -16
  35. package/dist/processors/astericsGridProcessor.d.ts +15 -0
  36. package/dist/processors/astericsGridProcessor.js +17 -0
  37. package/dist/processors/gridset/helpers.d.ts +4 -1
  38. package/dist/processors/gridset/helpers.js +4 -0
  39. package/dist/processors/gridset/pluginTypes.js +51 -50
  40. package/dist/processors/gridset/symbolExtractor.js +3 -2
  41. package/dist/processors/gridset/symbolSearch.js +9 -7
  42. package/dist/processors/gridsetProcessor.js +82 -20
  43. package/dist/processors/index.d.ts +1 -0
  44. package/dist/processors/index.js +5 -3
  45. package/dist/processors/obfProcessor.js +37 -2
  46. package/dist/processors/obfsetProcessor.d.ts +26 -0
  47. package/dist/processors/obfsetProcessor.js +179 -0
  48. package/dist/processors/snap/helpers.d.ts +5 -1
  49. package/dist/processors/snap/helpers.js +5 -0
  50. package/dist/processors/snapProcessor.d.ts +2 -0
  51. package/dist/processors/snapProcessor.js +184 -5
  52. package/dist/processors/touchchatProcessor.js +50 -4
  53. package/dist/types/aac.d.ts +67 -0
  54. package/dist/types/aac.js +33 -0
  55. package/dist/validation/gridsetValidator.js +10 -0
  56. package/package.json +1 -1
package/dist/optional/analytics/metrics/obl.js
@@ -0,0 +1,287 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.OblAnonymizer = exports.OblUtil = void 0;
+ const treeStructure_1 = require("../../../core/treeStructure");
+ /**
+ * .obl (Open Board Logging) Utility
+ *
+ * Provides parsing and generation support for the .obl format.
+ */
+ class OblUtil {
+ /**
+ * Parse an OBL JSON string.
+ * Handles the optional /* notice * / at the start of the file.
+ */
+ static parse(json) {
+ // Remove potential comment at the start
+ let cleanJson = json.trim();
+ if (cleanJson.startsWith('/*')) {
+ const endComment = cleanJson.indexOf('*/');
+ if (endComment !== -1) {
+ cleanJson = cleanJson.substring(endComment + 2).trim();
+ }
+ }
+ return JSON.parse(cleanJson);
+ }
+ /**
+ * Stringify an OBL file object.
+ * Optionally adds the recommended notice comment.
+ */
+ static stringify(obl, includeNotice = true) {
+ const json = JSON.stringify(obl, null, 2);
+ if (includeNotice) {
+ return `/* NOTICE: The following information represents an individual's communication and should be treated respectfully and securely. */\n${json}`;
+ }
+ return json;
+ }
+ /**
+ * Convert an OBL file to internal HistoryEntry format.
+ */
+ static toHistoryEntries(obl) {
+ const entries = [];
+ const source = obl.source || 'OBL';
+ // OBL is session-based and event-based.
+ // HistoryEntry is content-based with occurrences.
+ // We'll group events by content (label/text) to match HistoryEntry structure.
+ const contentMap = new Map();
+ for (const session of obl.sessions) {
+ for (const event of session.events) {
+ let content = '';
+ const evtAny = event;
+ const occurrence = {
+ timestamp: new Date(event.timestamp),
+ modeling: event.modeling,
+ pageId: evtAny.board_id || null,
+ latitude: event.geo?.[0] || null,
+ longitude: event.geo?.[1] || null,
+ type: event.type,
+ // Store all other OBL fields in the occurrence
+ buttonId: evtAny.button_id || null,
+ boardId: evtAny.board_id || null,
+ spoken: evtAny.spoken,
+ vocalization: evtAny.vocalization,
+ imageUrl: evtAny.image_url,
+ actions: evtAny.actions,
+ };
+ if (event.type === 'button') {
+ const btn = event;
+ content = btn.vocalization || btn.label;
+ }
+ else if (event.type === 'utterance') {
+ const utt = event;
+ content = utt.text;
+ }
+ else if (event.type === 'action') {
+ const act = event;
+ content = act.action;
+ }
+ else if (event.type === 'note') {
+ const note = event;
+ content = note.text;
+ }
+ else {
+ const evtAny = event;
+ content = evtAny.label || evtAny.text || evtAny.action || 'unknown';
+ }
+ const occurrences = contentMap.get(content) || [];
+ occurrences.push(occurrence);
+ contentMap.set(content, occurrences);
+ }
+ }
+ contentMap.forEach((occurrences, content) => {
+ entries.push({
+ id: `obl:${content}`,
+ source: source,
+ content: content,
+ occurrences: occurrences.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime()),
+ });
+ });
+ return entries;
+ }
+ /**
+ * Convert HistoryEntries to an OBL file object.
+ */
+ static fromHistoryEntries(entries, userId, source) {
+ const events = [];
+ for (const entry of entries) {
+ for (const occ of entry.occurrences) {
+ const timestamp = occ.timestamp.toISOString();
+ const intent = occ.intent;
+ let oblType = occ.type || 'button';
+ let actionStr = undefined;
+ // Smart mapping based on AACSemanticIntent
+ if (intent === treeStructure_1.AACSemanticIntent.CLEAR_TEXT) {
+ oblType = 'action';
+ actionStr = ':clear';
+ }
+ else if (intent === treeStructure_1.AACSemanticIntent.GO_HOME) {
+ oblType = 'action';
+ actionStr = ':home';
+ }
+ else if (intent === treeStructure_1.AACSemanticIntent.NAVIGATE_TO) {
+ oblType = 'action';
+ actionStr = ':open_board';
+ }
+ else if (intent === treeStructure_1.AACSemanticIntent.GO_BACK) {
+ oblType = 'action';
+ actionStr = ':back';
+ }
+ else if (intent === treeStructure_1.AACSemanticIntent.DELETE_CHARACTER) {
+ oblType = 'action';
+ actionStr = ':backspace';
+ }
+ else if (intent === treeStructure_1.AACSemanticIntent.SPEAK_IMMEDIATE ||
+ intent === treeStructure_1.AACSemanticIntent.SPEAK_TEXT) {
+ // Speak could be a button or an utterance or an action
+ if (oblType !== 'utterance' && oblType !== 'button') {
+ oblType = 'action';
+ actionStr = ':speak';
+ }
+ }
+ const common = {
+ id: Math.random().toString(36).substring(2, 11),
+ timestamp,
+ modeling: occ.modeling,
+ type: oblType,
+ };
+ if (occ.latitude !== null &&
+ occ.latitude !== undefined &&
+ occ.longitude !== null &&
+ occ.longitude !== undefined) {
+ common.geo = [occ.latitude, occ.longitude];
+ }
+ if (oblType === 'utterance') {
+ events.push({
+ ...common,
+ text: entry.content,
+ });
+ }
+ else if (oblType === 'action') {
+ events.push({
+ ...common,
+ action: actionStr || entry.content,
+ destination_board_id: occ.boardId || undefined,
+ text: intent === treeStructure_1.AACSemanticIntent.SPEAK_TEXT ? entry.content : undefined,
+ });
+ }
+ else if (oblType === 'note') {
+ events.push({
+ ...common,
+ text: entry.content,
+ });
+ }
+ else {
+ // Default to button
+ events.push({
+ ...common,
+ type: 'button',
+ label: occ.vocalization ? entry.content : entry.content,
+ spoken: occ.spoken ??
+ occ.category === treeStructure_1.AACSemanticCategory.COMMUNICATION,
+ button_id: occ.buttonId || undefined,
+ board_id: occ.boardId || occ.pageId || undefined,
+ vocalization: occ.vocalization || undefined,
+ image_url: occ.imageUrl || undefined,
+ actions: occ.actions || undefined,
+ });
+ }
+ }
+ }
+ // Sort events by timestamp
+ events.sort((a, b) => a.timestamp.localeCompare(b.timestamp));
+ const started = events.length > 0 ? events[0].timestamp : new Date().toISOString();
+ const ended = events.length > 0 ? events[events.length - 1].timestamp : new Date().toISOString();
+ const session = {
+ id: 'session-1',
+ type: 'log',
+ started,
+ ended,
+ events,
+ };
+ return {
+ format: 'open-board-log-0.1',
+ user_id: userId,
+ source: source || 'aac-processors',
+ sessions: [session],
+ };
+ }
+ }
+ exports.OblUtil = OblUtil;
+ /**
+ * .obl Anonymization Utility
+ */
+ class OblAnonymizer {
+ /**
+ * Apply anonymization to an OBL file.
+ */
+ static anonymize(obl, types) {
+ const newObl = JSON.parse(JSON.stringify(obl));
+ newObl.anonymized = true;
+ for (const session of newObl.sessions) {
+ session.anonymizations = session.anonymizations || [];
+ if (types.includes('timestamp_shift')) {
+ this.applyTimestampShift(session);
+ if (!session.anonymizations.includes('timestamp_shift'))
+ session.anonymizations.push('timestamp_shift');
+ }
+ if (types.includes('geolocation_masking')) {
+ this.applyGeolocationMasking(session);
+ if (!session.anonymizations.includes('geolocation_masking'))
+ session.anonymizations.push('geolocation_masking');
+ }
+ if (types.includes('url_stripping')) {
+ this.applyUrlStripping(session);
+ if (!session.anonymizations.includes('url_stripping'))
+ session.anonymizations.push('url_stripping');
+ }
+ if (types.includes('name_masking')) {
+ this.applyNameMasking(newObl, session);
+ if (!session.anonymizations.includes('name_masking'))
+ session.anonymizations.push('name_masking');
+ }
+ }
+ return newObl;
+ }
+ static applyTimestampShift(session) {
+ if (session.events.length === 0)
+ return;
+ const firstEventTime = session.events.length > 0 ? new Date(session.events[0].timestamp).getTime() : Infinity;
+ const sessionStartTime = session.started ? new Date(session.started).getTime() : Infinity;
+ const firstTimestamp = Math.min(firstEventTime, sessionStartTime);
+ if (firstTimestamp === Infinity)
+ return;
+ const targetStart = new Date('2000-01-01T00:00:00.000Z').getTime();
+ const offset = targetStart - firstTimestamp;
+ session.started = new Date(new Date(session.started).getTime() + offset).toISOString();
+ session.ended = new Date(new Date(session.ended).getTime() + offset).toISOString();
+ for (const event of session.events) {
+ event.timestamp = new Date(new Date(event.timestamp).getTime() + offset).toISOString();
+ }
+ }
+ static applyGeolocationMasking(session) {
+ for (const event of session.events) {
+ delete event.geo;
+ delete event.location_id;
+ }
+ }
+ static applyUrlStripping(session) {
+ for (const event of session.events) {
+ if (event.type === 'button') {
+ delete event.image_url;
+ }
+ if (event.type === 'note') {
+ delete event.author_url;
+ delete event.author_email;
+ }
+ }
+ }
+ static applyNameMasking(obl, session) {
+ delete obl.user_name;
+ for (const event of session.events) {
+ if (event.type === 'note') {
+ delete event.author_name;
+ }
+ }
+ }
+ }
+ exports.OblAnonymizer = OblAnonymizer;
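For orientation, a minimal usage sketch of the new OblUtil and OblAnonymizer exports follows. The deep require path and the log file name are assumptions for illustration; the package may expose these classes through a different entry point.

const fs = require('fs');
// Assumed path: mirrors the dist layout shown above.
const { OblUtil, OblAnonymizer } = require('@willwade/aac-processors/dist/optional/analytics/metrics/obl');

const raw = fs.readFileSync('log.obl', 'utf8');            // may begin with a /* NOTICE ... */ comment
const obl = OblUtil.parse(raw);                             // the notice comment is stripped before JSON.parse
const entries = OblUtil.toHistoryEntries(obl);              // events grouped by content into HistoryEntry occurrences

// Remove identifying detail, then serialize back to .obl text (NOTICE comment is re-added by default).
const anon = OblAnonymizer.anonymize(obl, ['timestamp_shift', 'geolocation_masking', 'name_masking']);
fs.writeFileSync('log.anonymized.obl', OblUtil.stringify(anon));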
package/dist/optional/analytics/metrics/sentence.d.ts
@@ -0,0 +1,49 @@
+ /**
+ * Sentence Construction Analysis
+ *
+ * Calculates the effort required to construct test sentences
+ * from the AAC board set, including spelling fallback for missing words.
+ */
+ import { MetricsResult } from './types';
+ export interface SentenceAnalysis {
+ sentence: string;
+ words: string[];
+ effort: number;
+ total_effort: number;
+ typing: boolean;
+ missing_words: string[];
+ word_efforts: Array<{
+ word: string;
+ effort: number;
+ typed: boolean;
+ }>;
+ }
+ export declare class SentenceAnalyzer {
+ /**
+ * Analyze effort to construct a set of test sentences
+ */
+ analyzeSentences(metrics: MetricsResult, sentences: string[][]): SentenceAnalysis[];
+ /**
+ * Analyze effort to construct a single sentence
+ */
+ analyzeSentence(metrics: MetricsResult, words: string[]): SentenceAnalysis;
+ /**
+ * Reconstruct sentence from word array
+ */
+ private reconstructSentence;
+ /**
+ * Calculate statistics across all sentences
+ */
+ calculateStatistics(analyses: SentenceAnalysis[]): {
+ total_sentences: number;
+ sentences_requiring_typing: number;
+ sentences_without_typing: number;
+ average_effort: number;
+ min_effort: number;
+ max_effort: number;
+ median_effort: number;
+ total_words: number;
+ words_requiring_typing: number;
+ typing_percent: number;
+ };
+ }
package/dist/optional/analytics/metrics/sentence.js
@@ -0,0 +1,112 @@
+ "use strict";
+ /**
+ * Sentence Construction Analysis
+ *
+ * Calculates the effort required to construct test sentences
+ * from the AAC board set, including spelling fallback for missing words.
+ */
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.SentenceAnalyzer = void 0;
+ const effort_1 = require("./effort");
+ class SentenceAnalyzer {
+ /**
+ * Analyze effort to construct a set of test sentences
+ */
+ analyzeSentences(metrics, sentences) {
+ return sentences.map((words) => this.analyzeSentence(metrics, words));
+ }
+ /**
+ * Analyze effort to construct a single sentence
+ */
+ analyzeSentence(metrics, words) {
+ const wordEfforts = [];
+ let totalEffort = 0;
+ let typing = false;
+ const missingWords = [];
+ // Create word lookup map
+ const wordMap = new Map();
+ metrics.buttons.forEach((btn) => {
+ const existing = wordMap.get(btn.label.toLowerCase());
+ if (!existing || btn.effort < existing.effort) {
+ wordMap.set(btn.label.toLowerCase(), { effort: btn.effort });
+ }
+ });
+ // Calculate effort for each word
+ words.forEach((word) => {
+ const lowerWord = word.toLowerCase();
+ const found = wordMap.get(lowerWord);
+ if (found) {
+ wordEfforts.push({ word, effort: found.effort, typed: false });
+ totalEffort += found.effort;
+ }
+ else {
+ // Word not found - use spelling effort
+ const spellEffort = (0, effort_1.spellingEffort)(word);
+ wordEfforts.push({ word, effort: spellEffort, typed: true });
+ totalEffort += spellEffort;
+ typing = true;
+ missingWords.push(word);
+ }
+ });
+ const averageEffort = totalEffort / words.length;
+ // Reconstruct sentence for display
+ const sentence = this.reconstructSentence(words);
+ return {
+ sentence,
+ words,
+ effort: averageEffort,
+ total_effort: totalEffort,
+ typing,
+ missing_words: missingWords,
+ word_efforts: wordEfforts,
+ };
+ }
+ /**
+ * Reconstruct sentence from word array
+ */
+ reconstructSentence(words) {
+ return words
+ .map((word, idx) => {
+ // Capitalize first word
+ if (idx === 0) {
+ return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
+ }
+ return word.toLowerCase();
+ })
+ .join(' ');
+ }
+ /**
+ * Calculate statistics across all sentences
+ */
+ calculateStatistics(analyses) {
+ const totalSentences = analyses.length;
+ const sentencesRequiringTyping = analyses.filter((a) => a.typing).length;
+ const sentencesWithoutTyping = totalSentences - sentencesRequiringTyping;
+ const efforts = analyses.map((a) => a.effort);
+ const averageEffort = efforts.reduce((sum, e) => sum + e, 0) / efforts.length;
+ const minEffort = Math.min(...efforts);
+ const maxEffort = Math.max(...efforts);
+ // Calculate median
+ const sortedEfforts = [...efforts].sort((a, b) => a - b);
+ const medianEffort = sortedEfforts.length % 2 === 0
+ ? (sortedEfforts[sortedEfforts.length / 2 - 1] + sortedEfforts[sortedEfforts.length / 2]) /
+ 2
+ : sortedEfforts[Math.floor(sortedEfforts.length / 2)];
+ const totalWords = analyses.reduce((sum, a) => sum + a.words.length, 0);
+ const wordsRequiringTyping = analyses.reduce((sum, a) => sum + a.missing_words.length, 0);
+ const typingPercent = (wordsRequiringTyping / totalWords) * 100;
+ return {
+ total_sentences: totalSentences,
+ sentences_requiring_typing: sentencesRequiringTyping,
+ sentences_without_typing: sentencesWithoutTyping,
+ average_effort: averageEffort,
+ min_effort: minEffort,
+ max_effort: maxEffort,
+ median_effort: medianEffort,
+ total_words: totalWords,
+ words_requiring_typing: wordsRequiringTyping,
+ typing_percent: typingPercent,
+ };
+ }
+ }
+ exports.SentenceAnalyzer = SentenceAnalyzer;
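A hedged sketch of driving SentenceAnalyzer directly. The MetricsResult below is a hand-built fragment containing only the fields the analyzer reads (buttons[].label and buttons[].effort); real metrics would come from the core metrics analyzer, and the require path is an assumption.

const { SentenceAnalyzer } = require('@willwade/aac-processors/dist/optional/analytics/metrics/sentence');

// Minimal stand-in for a MetricsResult: only buttons[] is consulted by analyzeSentence.
const metrics = {
  buttons: [
    { id: 'b1', label: 'I', level: 0, effort: 1.0 },
    { id: 'b2', label: 'want', level: 1, effort: 2.5 },
    { id: 'b3', label: 'more', level: 1, effort: 2.0 },
  ],
};

const analyzer = new SentenceAnalyzer();
const analyses = analyzer.analyzeSentences(metrics, [
  ['i', 'want', 'more'],   // every word has a button, so typing stays false
  ['i', 'want', 'pizza'],  // 'pizza' is missing and falls back to spelling effort
]);
const stats = analyzer.calculateStatistics(analyses);
console.log(stats.sentences_requiring_typing, stats.typing_percent.toFixed(1));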
package/dist/optional/analytics/metrics/types.d.ts
@@ -0,0 +1,157 @@
+ /**
+ * Metrics Types and Interfaces
+ *
+ * Defines the data structures used for AAC metrics analysis
+ */
+ /**
+ * Button-level metrics result
+ */
+ export interface ButtonMetrics {
+ id: string;
+ label: string;
+ level: number;
+ effort: number;
+ count?: number;
+ semantic_id?: string;
+ clone_id?: string;
+ temporary_home_id?: string;
+ comp_level?: number;
+ comp_effort?: number;
+ }
+ /**
+ * Board/page level analysis result
+ */
+ export interface BoardAnalysis {
+ boardId: string;
+ level: number;
+ entryX: number;
+ entryY: number;
+ priorEffort?: number;
+ temporaryHomeId?: string;
+ }
+ /**
+ * Metrics analysis result
+ */
+ export interface MetricsResult {
+ analysis_version: string;
+ locale: string;
+ total_boards: number;
+ total_buttons: number;
+ total_words: number;
+ reference_counts: {
+ [id: string]: number;
+ };
+ grid: {
+ rows: number;
+ columns: number;
+ };
+ buttons: ButtonMetrics[];
+ levels: {
+ [level: number]: ButtonMetrics[];
+ };
+ alternates?: {
+ [boardId: string]: AlternateBoardMetrics;
+ };
+ obfset?: any;
+ }
+ /**
+ * Alternate board metrics (for temporary home navigation)
+ */
+ export interface AlternateBoardMetrics {
+ buttons: ButtonMetrics[];
+ levels: {
+ [level: number]: ButtonMetrics[];
+ };
+ }
+ /**
+ * Comparison result between two board sets
+ */
+ export interface ComparisonResult extends MetricsResult {
+ target_effort_score: number;
+ comp_boards: number;
+ comp_buttons: number;
+ comp_words: number;
+ comp_grid: {
+ rows: number;
+ columns: number;
+ };
+ comp_effort_score: number;
+ missing_words: string[];
+ extra_words: string[];
+ overlapping_words: string[];
+ missing: {
+ [listId: string]: {
+ name: string;
+ list: string[];
+ };
+ };
+ high_effort_words: string[];
+ low_effort_words: string[];
+ cores: {
+ [listId: string]: {
+ name: string;
+ list: string[];
+ average_effort: number;
+ comp_effort: number;
+ };
+ };
+ care_components: {
+ core: number;
+ comp_core: number;
+ sentences: number;
+ comp_sentences: number;
+ fringe: number;
+ comp_fringe: number;
+ common_fringe: number;
+ comp_common_fringe: number;
+ };
+ sentences: SentenceAnalysis[];
+ fringe_words: FringeWord[];
+ common_fringe_words: FringeWord[];
+ }
+ /**
+ * Sentence construction analysis
+ */
+ export interface SentenceAnalysis {
+ sentence: string;
+ words: string[];
+ effort: number;
+ typing: boolean;
+ comp_effort: number;
+ comp_typing: boolean;
+ }
+ /**
+ * Fringe vocabulary word analysis
+ */
+ export interface FringeWord {
+ word: string;
+ effort: number;
+ comp_effort: number;
+ }
+ /**
+ * Core vocabulary list definition
+ */
+ export interface CoreList {
+ id: string;
+ name: string;
+ url?: string;
+ locale: string;
+ words: string[];
+ }
+ /**
+ * Common words reference data
+ */
+ export interface CommonWordsData {
+ version: string;
+ files: string[];
+ words: string[];
+ efforts: {
+ [word: string]: number;
+ };
+ }
+ /**
+ * Synonym mappings
+ */
+ export interface SynonymsData {
+ [word: string]: string[];
+ }
package/dist/optional/analytics/metrics/types.js
@@ -0,0 +1,7 @@
+ "use strict";
+ /**
+ * Metrics Types and Interfaces
+ *
+ * Defines the data structures used for AAC metrics analysis
+ */
+ Object.defineProperty(exports, "__esModule", { value: true });
package/dist/optional/analytics/metrics/vocabulary.d.ts
@@ -0,0 +1,65 @@
+ /**
+ * Vocabulary Coverage Analysis
+ *
+ * Analyzes how well an AAC board set covers core vocabulary
+ * and identifies missing/extra words compared to reference lists.
+ */
+ import { MetricsResult } from './types';
+ import { ReferenceLoader } from '../reference/index';
+ export interface VocabularyAnalysis {
+ core_coverage: {
+ [listId: string]: {
+ name: string;
+ total_words: number;
+ covered: number;
+ missing: number;
+ coverage_percent: number;
+ missing_words: string[];
+ average_effort: number;
+ };
+ };
+ total_unique_words: number;
+ words_with_effort: number;
+ words_requiring_spelling: number;
+ extra_words: string[];
+ high_effort_words: Array<{
+ word: string;
+ effort: number;
+ }>;
+ low_effort_words: Array<{
+ word: string;
+ effort: number;
+ }>;
+ }
+ export declare class VocabularyAnalyzer {
+ private referenceLoader;
+ constructor(referenceLoader?: ReferenceLoader);
+ /**
+ * Analyze vocabulary coverage against core lists
+ */
+ analyze(metrics: MetricsResult, options?: {
+ locale?: string;
+ highEffortThreshold?: number;
+ lowEffortThreshold?: number;
+ }): VocabularyAnalysis;
+ /**
+ * Analyze coverage for a single core list
+ */
+ private analyzeCoreList;
+ /**
+ * Calculate coverage percentage for a specific word list
+ */
+ calculateCoverage(wordList: string[], metrics: MetricsResult): {
+ covered: string[];
+ missing: string[];
+ coverage_percent: number;
+ };
+ /**
+ * Get effort for a word, or calculate spelling effort if missing
+ */
+ getWordEffort(word: string, metrics: MetricsResult): number;
+ /**
+ * Check if a word is in the board set
+ */
+ hasWord(word: string, metrics: MetricsResult): boolean;
+ }
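Finally, a sketch of how the VocabularyAnalyzer surface declared above might be used, wrapped in a helper so the MetricsResult stays a parameter; the require path, locale value, and console output are assumptions for illustration.

const { VocabularyAnalyzer } = require('@willwade/aac-processors/dist/optional/analytics/metrics/vocabulary');

// Summarize core-list coverage for a MetricsResult produced by the core metrics analyzer.
function summarizeCoverage(metrics) {
  const analyzer = new VocabularyAnalyzer();            // optionally pass a custom ReferenceLoader
  const analysis = analyzer.analyze(metrics, { locale: 'en' });
  for (const [listId, cov] of Object.entries(analysis.core_coverage)) {
    console.log(`${listId} (${cov.name}): ${cov.coverage_percent.toFixed(1)}% covered,`,
      `missing ${cov.missing} words, e.g.`, cov.missing_words.slice(0, 5));
  }
  console.log('Words that would need spelling:', analysis.words_requiring_spelling);
  return analysis;
}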