@willwade/aac-processors 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/analytics.d.ts +7 -0
- package/dist/analytics.js +23 -0
- package/dist/browser/index.browser.js +5 -0
- package/dist/browser/metrics.js +17 -0
- package/dist/browser/processors/gridset/helpers.js +390 -0
- package/dist/browser/processors/snap/helpers.js +252 -0
- package/dist/browser/utilities/analytics/history.js +116 -0
- package/dist/browser/utilities/analytics/metrics/comparison.js +477 -0
- package/dist/browser/utilities/analytics/metrics/core.js +775 -0
- package/dist/browser/utilities/analytics/metrics/effort.js +221 -0
- package/dist/browser/utilities/analytics/metrics/obl-types.js +6 -0
- package/dist/browser/utilities/analytics/metrics/obl.js +282 -0
- package/dist/browser/utilities/analytics/metrics/sentence.js +121 -0
- package/dist/browser/utilities/analytics/metrics/types.js +6 -0
- package/dist/browser/utilities/analytics/metrics/vocabulary.js +138 -0
- package/dist/browser/utilities/analytics/reference/browser.js +67 -0
- package/dist/browser/utilities/analytics/reference/index.js +129 -0
- package/dist/browser/utils/dotnetTicks.js +17 -0
- package/dist/browser/utils/io.js +16 -2
- package/dist/browser/validation/gridsetValidator.js +7 -27
- package/dist/browser/validation/obfValidator.js +9 -4
- package/dist/browser/validation/snapValidator.js +6 -9
- package/dist/browser/validation/touchChatValidator.js +6 -7
- package/dist/index.browser.d.ts +1 -0
- package/dist/index.browser.js +18 -1
- package/dist/index.node.d.ts +2 -2
- package/dist/index.node.js +5 -5
- package/dist/metrics.d.ts +17 -0
- package/dist/metrics.js +44 -0
- package/dist/utilities/analytics/metrics/comparison.d.ts +2 -1
- package/dist/utilities/analytics/metrics/comparison.js +3 -3
- package/dist/utilities/analytics/metrics/vocabulary.d.ts +2 -2
- package/dist/utilities/analytics/reference/browser.d.ts +31 -0
- package/dist/utilities/analytics/reference/browser.js +73 -0
- package/dist/utilities/analytics/reference/index.d.ts +21 -0
- package/dist/utilities/analytics/reference/index.js +22 -46
- package/dist/utils/io.d.ts +2 -0
- package/dist/utils/io.js +18 -2
- package/dist/validation/applePanelsValidator.js +11 -28
- package/dist/validation/astericsValidator.js +11 -30
- package/dist/validation/dotValidator.js +11 -30
- package/dist/validation/excelValidator.js +5 -6
- package/dist/validation/gridsetValidator.js +29 -26
- package/dist/validation/index.d.ts +2 -1
- package/dist/validation/index.js +9 -32
- package/dist/validation/obfValidator.js +8 -3
- package/dist/validation/obfsetValidator.js +11 -30
- package/dist/validation/opmlValidator.js +11 -30
- package/dist/validation/snapValidator.js +6 -9
- package/dist/validation/touchChatValidator.js +6 -7
- package/examples/vitedemo/index.html +49 -0
- package/examples/vitedemo/src/main.ts +84 -0
- package/examples/vitedemo/vite.config.ts +26 -7
- package/package.json +9 -1
package/dist/browser/utilities/analytics/metrics/vocabulary.js
ADDED
@@ -0,0 +1,138 @@
+/**
+ * Vocabulary Coverage Analysis
+ *
+ * Analyzes how well an AAC board set covers core vocabulary
+ * and identifies missing/extra words compared to reference lists.
+ */
+import { ReferenceLoader } from '../reference/index';
+import { spellingEffort } from './effort';
+export class VocabularyAnalyzer {
+    constructor(referenceLoader) {
+        this.referenceLoader = referenceLoader || new ReferenceLoader();
+    }
+    /**
+     * Analyze vocabulary coverage against core lists
+     */
+    analyze(metrics, options) {
+        // const locale = options?.locale || metrics.locale || 'en';
+        const highEffortThreshold = options?.highEffortThreshold || 5.0;
+        const lowEffortThreshold = options?.lowEffortThreshold || 2.0;
+        // Load reference data
+        const coreLists = this.referenceLoader.loadCoreLists();
+        // Create word to effort map (using lowercase keys for matching)
+        const wordEffortMap = new Map();
+        metrics.buttons.forEach((btn) => {
+            const word = btn.label.toLowerCase();
+            const existing = wordEffortMap.get(word);
+            if (!existing || btn.effort < existing) {
+                wordEffortMap.set(word, btn.effort);
+            }
+        });
+        // Analyze each core list
+        const core_coverage = {};
+        coreLists.forEach((list) => {
+            const analysis = this.analyzeCoreList(list, wordEffortMap);
+            core_coverage[list.id] = analysis;
+        });
+        // Find extra words (words not in any core list)
+        const allCoreWords = new Set();
+        coreLists.forEach((list) => {
+            list.words.forEach((word) => allCoreWords.add(word.toLowerCase()));
+        });
+        const extraWords = [];
+        wordEffortMap.forEach((effort, word) => {
+            if (!allCoreWords.has(word.toLowerCase())) {
+                extraWords.push(word);
+            }
+        });
+        extraWords.sort((a, b) => a.localeCompare(b));
+        // Find high/low effort words
+        const highEffortWords = [];
+        const lowEffortWords = [];
+        wordEffortMap.forEach((effort, word) => {
+            if (effort > highEffortThreshold) {
+                highEffortWords.push({ word, effort });
+            }
+            else if (effort < lowEffortThreshold) {
+                lowEffortWords.push({ word, effort });
+            }
+        });
+        highEffortWords.sort((a, b) => b.effort - a.effort);
+        lowEffortWords.sort((a, b) => a.effort - b.effort);
+        return {
+            core_coverage,
+            total_unique_words: wordEffortMap.size,
+            words_with_effort: wordEffortMap.size,
+            words_requiring_spelling: 0, // Calculated during sentence analysis
+            extra_words: extraWords,
+            high_effort_words: highEffortWords.slice(0, 50), // Top 50
+            low_effort_words: lowEffortWords.slice(0, 50), // Bottom 50
+        };
+    }
+    /**
+     * Analyze coverage for a single core list
+     */
+    analyzeCoreList(list, wordEffortMap) {
+        const covered = [];
+        const missing = [];
+        let totalEffort = 0;
+        list.words.forEach((word) => {
+            const lowerWord = word.toLowerCase();
+            const effort = wordEffortMap.get(lowerWord);
+            if (effort !== undefined) {
+                covered.push(word);
+                totalEffort += effort;
+            }
+            else {
+                missing.push(word);
+            }
+        });
+        const averageEffort = covered.length > 0 ? totalEffort / covered.length : 0;
+        return {
+            name: list.name,
+            total_words: list.words.length,
+            covered: covered.length,
+            missing: missing.length,
+            coverage_percent: (covered.length / list.words.length) * 100,
+            missing_words: missing,
+            average_effort: averageEffort,
+        };
+    }
+    /**
+     * Calculate coverage percentage for a specific word list
+     */
+    calculateCoverage(wordList, metrics) {
+        const wordSet = new Set(metrics.buttons.map((btn) => btn.label.toLowerCase()));
+        const covered = [];
+        const missing = [];
+        wordList.forEach((word) => {
+            if (wordSet.has(word.toLowerCase())) {
+                covered.push(word);
+            }
+            else {
+                missing.push(word);
+            }
+        });
+        return {
+            covered,
+            missing,
+            coverage_percent: (covered.length / wordList.length) * 100,
+        };
+    }
+    /**
+     * Get effort for a word, or calculate spelling effort if missing
+     */
+    getWordEffort(word, metrics) {
+        const btn = metrics.buttons.find((b) => b.label.toLowerCase() === word.toLowerCase());
+        if (btn) {
+            return btn.effort;
+        }
+        return spellingEffort(word, metrics.spelling_effort_base, metrics.spelling_effort_per_letter);
+    }
+    /**
+     * Check if a word is in the board set
+     */
+    hasWord(word, metrics) {
+        return metrics.buttons.some((b) => b.label.toLowerCase() === word.toLowerCase());
+    }
+}
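
For orientation, a minimal usage sketch of the new VocabularyAnalyzer in TypeScript. The metrics shape (a buttons array carrying label and effort) and the reference-data field names are taken from the code above; the package-root import path and the sample values are assumptions, not documented API.

// Hypothetical usage sketch; import path and exact metrics shape are assumptions.
import { VocabularyAnalyzer, InMemoryReferenceLoader } from '@willwade/aac-processors';

// Minimal in-memory reference data (field names follow the loaders in this diff).
const loader = new InMemoryReferenceLoader({
  coreLists: [{ id: 'core-demo', name: 'Demo core list', words: ['more', 'stop', 'go'] }],
  commonWords: { words: [] }, synonyms: {}, sentences: [], fringe: [], baseWords: {},
});

const analyzer = new VocabularyAnalyzer(loader);
const result = analyzer.analyze({
  buttons: [{ label: 'More', effort: 1.5 }, { label: 'Stop', effort: 4.2 }], // per-button label/effort, as read by analyze()
});
console.log(result.core_coverage['core-demo'].coverage_percent); // ≈ 66.7 (2 of 3 core words covered)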
package/dist/browser/utilities/analytics/reference/browser.js
ADDED
@@ -0,0 +1,67 @@
+/**
+ * Browser-friendly reference data loader using fetch.
+ */
+export class InMemoryReferenceLoader {
+    constructor(data) {
+        this.data = data;
+    }
+    loadCoreLists() {
+        return this.data.coreLists;
+    }
+    loadCommonWords() {
+        return this.data.commonWords;
+    }
+    loadSynonyms() {
+        return this.data.synonyms;
+    }
+    loadSentences() {
+        return this.data.sentences;
+    }
+    loadFringe() {
+        return this.data.fringe;
+    }
+    loadBaseWords() {
+        return this.data.baseWords;
+    }
+    loadCommonFringe() {
+        const commonWords = new Set(this.data.commonWords.words.map((w) => w.toLowerCase()));
+        const coreWords = new Set();
+        this.data.coreLists.forEach((list) => {
+            list.words.forEach((word) => coreWords.add(word.toLowerCase()));
+        });
+        return Array.from(commonWords).filter((word) => !coreWords.has(word));
+    }
+    loadAll() {
+        return this.data;
+    }
+}
+export async function loadReferenceDataFromUrl(baseUrl, locale = 'en') {
+    const root = baseUrl.replace(/\/$/, '');
+    const fetchJson = async (name) => {
+        const res = await fetch(`${root}/${name}.${locale}.json`);
+        if (!res.ok) {
+            throw new Error(`Failed to load ${name}.${locale}.json`);
+        }
+        return (await res.json());
+    };
+    const [coreLists, commonWords, synonyms, sentences, fringe, baseWords] = await Promise.all([
+        fetchJson('core_lists'),
+        fetchJson('common_words'),
+        fetchJson('synonyms'),
+        fetchJson('sentences'),
+        fetchJson('fringe'),
+        fetchJson('base_words'),
+    ]);
+    return {
+        coreLists,
+        commonWords,
+        synonyms,
+        sentences,
+        fringe,
+        baseWords,
+    };
+}
+export async function createBrowserReferenceLoader(baseUrl, locale = 'en') {
+    const data = await loadReferenceDataFromUrl(baseUrl, locale);
+    return new InMemoryReferenceLoader(data);
+}
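
A sketch of wiring the fetch-based loader into a browser app. The file-name pattern comes from loadReferenceDataFromUrl above; the base URL, static hosting of the JSON files, and the package-root export path are assumptions.

// Assumes the reference JSON files are copied to /reference-data and served statically.
import { createBrowserReferenceLoader } from '@willwade/aac-processors'; // assumed export path

const loader = await createBrowserReferenceLoader('/reference-data', 'en');
// Fetches core_lists.en.json, common_words.en.json, synonyms.en.json,
// sentences.en.json, fringe.en.json and base_words.en.json in parallel.
console.log(loader.loadCoreLists().length, loader.loadCommonFringe().length);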
package/dist/browser/utilities/analytics/reference/index.js
ADDED
@@ -0,0 +1,129 @@
+/**
+ * Reference Data Loader
+ *
+ * Loads reference vocabulary lists, core lists, and sentences
+ * for AAC metrics analysis.
+ */
+import { getFs, getPath } from '../../../utils/io';
+export class ReferenceLoader {
+    constructor(dataDir, locale = 'en') {
+        this.locale = locale;
+        if (dataDir) {
+            this.dataDir = dataDir;
+        }
+        else {
+            // Resolve the data directory relative to this file's location
+            // Use __dirname which works correctly after compilation
+            this.dataDir = getPath().join(__dirname, 'data');
+        }
+    }
+    /**
+     * Load core vocabulary lists
+     */
+    loadCoreLists() {
+        const filePath = getPath().join(this.dataDir, `core_lists.${this.locale}.json`);
+        const content = getFs().readFileSync(filePath, 'utf-8');
+        return JSON.parse(String(content));
+    }
+    /**
+     * Load common words with baseline effort scores
+     */
+    loadCommonWords() {
+        const filePath = getPath().join(this.dataDir, `common_words.${this.locale}.json`);
+        const content = getFs().readFileSync(filePath, 'utf-8');
+        return JSON.parse(String(content));
+    }
+    /**
+     * Load synonym mappings
+     */
+    loadSynonyms() {
+        const filePath = getPath().join(this.dataDir, `synonyms.${this.locale}.json`);
+        const content = getFs().readFileSync(filePath, 'utf-8');
+        return JSON.parse(String(content));
+    }
+    /**
+     * Load test sentences
+     */
+    loadSentences() {
+        const filePath = getPath().join(this.dataDir, `sentences.${this.locale}.json`);
+        const content = getFs().readFileSync(filePath, 'utf-8');
+        return JSON.parse(String(content));
+    }
+    /**
+     * Load fringe vocabulary
+     */
+    loadFringe() {
+        const filePath = getPath().join(this.dataDir, `fringe.${this.locale}.json`);
+        const content = getFs().readFileSync(filePath, 'utf-8');
+        const data = JSON.parse(String(content));
+        // Flatten nested category words if needed
+        if (Array.isArray(data) && data.length > 0 && data[0].categories) {
+            const flattened = [];
+            data.forEach((list) => {
+                list.categories.forEach((cat) => {
+                    flattened.push(...cat.words);
+                });
+            });
+            return flattened;
+        }
+        return data;
+    }
+    /**
+     * Load base words hash map
+     */
+    loadBaseWords() {
+        const filePath = getPath().join(this.dataDir, `base_words.${this.locale}.json`);
+        const content = getFs().readFileSync(filePath, 'utf-8');
+        return JSON.parse(String(content));
+    }
+    /**
+     * Load common fringe vocabulary
+     * Common words that are NOT in core vocabulary lists
+     * (matching Ruby loader.rb:413-420)
+     */
+    loadCommonFringe() {
+        const commonWordsData = this.loadCommonWords();
+        const commonWords = new Set(commonWordsData.words.map((w) => w.toLowerCase()));
+        const coreLists = this.loadCoreLists();
+        const coreWords = new Set();
+        coreLists.forEach((list) => {
+            list.words.forEach((word) => coreWords.add(word.toLowerCase()));
+        });
+        // Common fringe = common words - core words
+        const commonFringe = Array.from(commonWords).filter((word) => !coreWords.has(word));
+        return commonFringe;
+    }
+    /**
+     * Get all reference data at once
+     */
+    loadAll() {
+        return {
+            coreLists: this.loadCoreLists(),
+            commonWords: this.loadCommonWords(),
+            synonyms: this.loadSynonyms(),
+            sentences: this.loadSentences(),
+            fringe: this.loadFringe(),
+            baseWords: this.loadBaseWords(),
+        };
+    }
+}
+/**
+ * Get the default reference data path
+ */
+export function getReferenceDataPath() {
+    return String(getPath().join(__dirname, 'data'));
+}
+/**
+ * Check if reference data files exist
+ */
+export function hasReferenceData() {
+    const dataPath = getReferenceDataPath();
+    const requiredFiles = [
+        'core_lists.en.json',
+        'common_words.en.json',
+        'sentences.en.json',
+        'synonyms.en.json',
+        'fringe.en.json',
+    ];
+    return requiredFiles.every((file) => getFs().existsSync(getPath().join(dataPath, file)));
+}
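
ReferenceLoader resolves the same JSON files from disk through the getFs()/getPath() shims. A brief usage sketch: running under Node is assumed here, and whether ReferenceLoader and hasReferenceData are re-exported from the package root is also an assumption.

// Node-side sketch; export path assumed.
import { ReferenceLoader, hasReferenceData } from '@willwade/aac-processors';

if (hasReferenceData()) {
  const loader = new ReferenceLoader(undefined, 'en'); // undefined dataDir → <compiled dir>/data
  const all = loader.loadAll();
  console.log(Object.keys(all)); // coreLists, commonWords, synonyms, sentences, fringe, baseWords
}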
@@ -0,0 +1,17 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Number of ticks (.NET 100ns units) between 0001-01-01 and Unix epoch.
|
|
3
|
+
*/
|
|
4
|
+
export const DOTNET_EPOCH_TICKS = 621355968000000000n;
|
|
5
|
+
/**
|
|
6
|
+
* Number of ticks per millisecond.
|
|
7
|
+
*/
|
|
8
|
+
export const TICKS_PER_MILLISECOND = 10000n;
|
|
9
|
+
/**
|
|
10
|
+
* Convert .NET ticks (100ns since 0001-01-01) to a JavaScript Date.
|
|
11
|
+
* Accepts bigint or number and rounds down to millisecond precision.
|
|
12
|
+
*/
|
|
13
|
+
export function dotNetTicksToDate(ticks) {
|
|
14
|
+
const tickValue = BigInt(ticks);
|
|
15
|
+
const ms = Number((tickValue - DOTNET_EPOCH_TICKS) / TICKS_PER_MILLISECOND);
|
|
16
|
+
return new Date(ms);
|
|
17
|
+
}
|
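
The constants above make a quick sanity check of the conversion possible; the values follow arithmetically from the definitions, but the deep import path is an assumption since this looks like an internal utility module.

import { dotNetTicksToDate, DOTNET_EPOCH_TICKS, TICKS_PER_MILLISECOND } from '@willwade/aac-processors/dist/browser/utils/dotnetTicks.js'; // internal path, assumed

dotNetTicksToDate(DOTNET_EPOCH_TICKS);                                  // 1970-01-01T00:00:00.000Z
dotNetTicksToDate(DOTNET_EPOCH_TICKS + 1000n * TICKS_PER_MILLISECOND);  // one second after the Unix epoch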
package/dist/browser/utils/io.js
CHANGED
@@ -72,8 +72,22 @@ export function isNodeRuntime() {
     return typeof process !== 'undefined' && !!process.versions?.node;
 }
 export function getBasename(filePath) {
-    const
-
+    const trimmed = filePath.replace(/[/\\]+$/, '') || filePath;
+    const parts = trimmed.split(/[/\\]/);
+    return parts[parts.length - 1] || trimmed;
+}
+export function toUint8Array(input) {
+    if (input instanceof Uint8Array) {
+        return input;
+    }
+    return new Uint8Array(input);
+}
+export function toArrayBuffer(input) {
+    if (input instanceof ArrayBuffer) {
+        return input;
+    }
+    const view = input instanceof Uint8Array ? input : new Uint8Array(input);
+    return view.buffer.slice(view.byteOffset, view.byteOffset + view.byteLength);
 }
 export function decodeText(input) {
     if (typeof Buffer !== 'undefined' && Buffer.isBuffer(input)) {
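
For reference, the behaviour of the new helpers (return values follow from the implementations above; the deep import path is an assumption, since these are internal utilities used by the validators below).

import { getBasename, toUint8Array, toArrayBuffer } from '@willwade/aac-processors/dist/browser/utils/io.js'; // internal path, assumed

getBasename('boards/demo.gridset');       // 'demo.gridset'
getBasename('C:\\boards\\demo.gridset');  // 'demo.gridset' (backslash separators handled)
toUint8Array(new ArrayBuffer(4));         // Uint8Array of length 4
toArrayBuffer(new Uint8Array([1, 2, 3])); // fresh ArrayBuffer containing the 3 bytes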
package/dist/browser/validation/gridsetValidator.js
CHANGED
@@ -2,24 +2,9 @@
 /* eslint-disable @typescript-eslint/no-unsafe-argument */
 /* eslint-disable @typescript-eslint/no-unsafe-return */
 import JSZip from 'jszip';
+import * as xml2js from 'xml2js';
 import { BaseValidator } from './baseValidator';
-import {
-let cachedXml2js = null;
-function getXml2js() {
-    if (cachedXml2js)
-        return cachedXml2js;
-    try {
-        const nodeRequire = getNodeRequire();
-        // eslint-disable-next-line @typescript-eslint/no-var-requires
-        const module = nodeRequire('xml2js');
-        const resolved = module.default || module;
-        cachedXml2js = resolved;
-        return resolved;
-    }
-    catch {
-        throw new Error('Validator requires Xml2js in this environment.');
-    }
-}
+import { decodeText, getBasename, getFs, toUint8Array } from '../utils/io';
 /**
  * Validator for Grid3/Smartbox Gridset files (.gridset, .gridsetx)
  */
@@ -33,10 +18,9 @@ export class GridsetValidator extends BaseValidator {
     static async validateFile(filePath) {
         const validator = new GridsetValidator();
         const fs = getFs();
-        const path = getPath();
         const content = fs.readFileSync(filePath);
         const stats = fs.statSync(filePath);
-        return validator.validate(content,
+        return validator.validate(content, getBasename(filePath), stats.size);
     }
     /**
      * Check if content is Gridset format
@@ -48,8 +32,7 @@ export class GridsetValidator extends BaseValidator {
         }
         // Try to parse as XML and check for gridset structure
         try {
-            const contentStr =
-            const xml2js = getXml2js();
+            const contentStr = typeof content === 'string' ? content : decodeText(toUint8Array(content));
             const parser = new xml2js.Parser();
             const result = await parser.parseStringPromise(contentStr);
             return result && (result.gridset || result.Gridset);
@@ -102,9 +85,8 @@ export class GridsetValidator extends BaseValidator {
         let xmlObj = null;
         await this.add_check('xml_parse', 'valid XML', async () => {
             try {
-                const xml2js = getXml2js();
                 const parser = new xml2js.Parser();
-                const contentStr = content
+                const contentStr = decodeText(content);
                 xmlObj = await parser.parseStringPromise(contentStr);
             }
             catch (e) {
@@ -129,7 +111,7 @@ export class GridsetValidator extends BaseValidator {
     async validateZipArchive(content, filename, _filesize) {
         let zip;
         try {
-            zip = await JSZip.loadAsync(
+            zip = await JSZip.loadAsync(toUint8Array(content));
         }
         catch (e) {
             this.err(`Failed to open ZIP archive: ${e.message}`, true);
@@ -145,7 +127,6 @@ export class GridsetValidator extends BaseValidator {
         else {
             try {
                 const gridsetXml = await gridsetEntry.async('string');
-                const xml2js = getXml2js();
                 const parser = new xml2js.Parser();
                 const xmlObj = await parser.parseStringPromise(gridsetXml);
                 const gridset = xmlObj.gridset || xmlObj.Gridset;
@@ -153,7 +134,7 @@ export class GridsetValidator extends BaseValidator {
                     this.err('Invalid gridset.xml structure', true);
                 }
                 else {
-                    await this.validateGridsetStructure(gridset, filename,
+                    await this.validateGridsetStructure(gridset, filename, new Uint8Array());
                 }
             }
            catch (e) {
@@ -170,7 +151,6 @@ export class GridsetValidator extends BaseValidator {
         else {
             try {
                 const settingsXml = await settingsEntry.async('string');
-                const xml2js = getXml2js();
                 const parser = new xml2js.Parser();
                 const xmlObj = await parser.parseStringPromise(settingsXml);
                 const settings = xmlObj.GridSetSettings || xmlObj.gridSetSettings || xmlObj.GridsetSettings;
package/dist/browser/validation/obfValidator.js
CHANGED
@@ -5,7 +5,7 @@
 /* eslint-disable @typescript-eslint/restrict-template-expressions */
 import JSZip from 'jszip';
 import { BaseValidator } from './baseValidator';
-import {
+import { decodeText, getBasename, getFs, readBinaryFromInput, toUint8Array } from '../utils/io';
 const OBF_FORMAT = 'open-board-0.1';
 const OBF_FORMAT_CURRENT_VERSION = 0.1;
 /**
@@ -22,7 +22,7 @@ export class ObfValidator extends BaseValidator {
         const validator = new ObfValidator();
         const content = readBinaryFromInput(filePath);
         const stats = getFs().statSync(filePath);
-        return validator.validate(content,
+        return validator.validate(content, getBasename(filePath), stats.size);
     }
     /**
      * Check if content is OBF format
@@ -34,7 +34,12 @@ export class ObfValidator extends BaseValidator {
         }
         // Try to parse as JSON and check format
         try {
-
+            if (typeof content !== 'string' &&
+                !(content instanceof ArrayBuffer) &&
+                !(content instanceof Uint8Array)) {
+                return false;
+            }
+            const contentStr = typeof content === 'string' ? content : decodeText(toUint8Array(content));
             const json = JSON.parse(contentStr);
             return json && json.format && json.format.startsWith('open-board-');
         }
@@ -68,7 +73,7 @@ export class ObfValidator extends BaseValidator {
         let json = null;
         await this.add_check('valid_json', 'JSON file', async () => {
             try {
-                json = JSON.parse(content
+                json = JSON.parse(decodeText(content));
             }
             catch {
                 this.err("Couldn't parse as JSON", true);
package/dist/browser/validation/snapValidator.js
CHANGED
@@ -1,10 +1,9 @@
 /* eslint-disable @typescript-eslint/require-await */
 /* eslint-disable @typescript-eslint/no-unsafe-argument */
-import * as fs from 'fs';
-import * as path from 'path';
 import * as xml2js from 'xml2js';
 import JSZip from 'jszip';
 import { BaseValidator } from './baseValidator';
+import { getBasename, getFs, readBinaryFromInput, toUint8Array } from '../utils/io';
 /**
  * Validator for Snap files (.spb, .sps)
  * Snap files are zipped packages containing XML configuration
@@ -18,9 +17,9 @@ export class SnapValidator extends BaseValidator {
      */
     static async validateFile(filePath) {
         const validator = new SnapValidator();
-        const content =
-        const stats =
-        return validator.validate(content,
+        const content = readBinaryFromInput(filePath);
+        const stats = getFs().statSync(filePath);
+        return validator.validate(content, getBasename(filePath), stats.size);
     }
     /**
      * Check if content is Snap format
@@ -33,8 +32,7 @@ export class SnapValidator extends BaseValidator {
         }
         // Try to parse as ZIP and check for Snap structure
         try {
-            const
-            const zip = await JSZip.loadAsync(buffer);
+            const zip = await JSZip.loadAsync(toUint8Array(content));
             const entries = Object.values(zip.files).filter((entry) => !entry.dir);
             return entries.some((entry) => entry.name.includes('settings') || entry.name.includes('.xml'));
         }
@@ -56,8 +54,7 @@ export class SnapValidator extends BaseValidator {
         let validZip = false;
         await this.add_check('zip', 'valid zip package', async () => {
             try {
-
-                zip = await JSZip.loadAsync(buffer);
+                zip = await JSZip.loadAsync(toUint8Array(content));
                 const entries = Object.values(zip.files);
                 validZip = entries.length > 0;
             }
package/dist/browser/validation/touchChatValidator.js
CHANGED
@@ -1,10 +1,9 @@
 /* eslint-disable @typescript-eslint/require-await */
 /* eslint-disable @typescript-eslint/no-unsafe-argument */
 /* eslint-disable @typescript-eslint/no-unsafe-return */
-import * as fs from 'fs';
-import * as path from 'path';
 import * as xml2js from 'xml2js';
 import { BaseValidator } from './baseValidator';
+import { decodeText, getBasename, getFs, readBinaryFromInput, toUint8Array } from '../utils/io';
 /**
  * Validator for TouchChat files (.ce)
  * TouchChat files are XML-based
@@ -18,9 +17,9 @@ export class TouchChatValidator extends BaseValidator {
      */
     static async validateFile(filePath) {
         const validator = new TouchChatValidator();
-        const content =
-        const stats =
-        return validator.validate(content,
+        const content = readBinaryFromInput(filePath);
+        const stats = getFs().statSync(filePath);
+        return validator.validate(content, getBasename(filePath), stats.size);
     }
     /**
      * Check if content is TouchChat format
@@ -32,7 +31,7 @@ export class TouchChatValidator extends BaseValidator {
         }
         // Try to parse as XML and check for TouchChat structure
         try {
-            const contentStr =
+            const contentStr = typeof content === 'string' ? content : decodeText(toUint8Array(content));
             const parser = new xml2js.Parser();
             const result = await parser.parseStringPromise(contentStr);
             // TouchChat files typically have specific structure
@@ -56,7 +55,7 @@ export class TouchChatValidator extends BaseValidator {
         await this.add_check('xml_parse', 'valid XML', async () => {
             try {
                 const parser = new xml2js.Parser();
-                const contentStr = content
+                const contentStr = decodeText(content);
                 xmlObj = await parser.parseStringPromise(contentStr);
             }
             catch (e) {
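
All four browser validators now route file input through the new io helpers and accept string, ArrayBuffer or Uint8Array content. A hedged sketch of browser-side use follows; the validate(content, filename, filesize) signature is taken from the hunks above, but the Validation namespace export and the report shape are assumptions.

// Hypothetical browser usage; export path and namespace are assumptions.
import { Validation } from '@willwade/aac-processors';

async function validatePickedFile(file: File) {
  const bytes = new Uint8Array(await file.arrayBuffer());
  const validator = new Validation.GridsetValidator();
  return validator.validate(bytes, file.name, file.size); // same call validateFile makes internally
}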
package/dist/index.browser.d.ts
CHANGED
@@ -22,6 +22,7 @@ export { SnapProcessor } from './processors/snapProcessor';
 export { TouchChatProcessor } from './processors/touchchatProcessor';
 export { ApplePanelsProcessor } from './processors/applePanelsProcessor';
 export { AstericsGridProcessor } from './processors/astericsGridProcessor';
+export * as Metrics from './metrics';
 import { BaseProcessor } from './core/baseProcessor';
 export { configureSqlJs } from './utils/sqlite';
 /**
package/dist/index.browser.js
CHANGED
@@ -23,11 +23,23 @@ var __createBinding = (this && this.__createBinding) || (Object.create ? (functi
     if (k2 === undefined) k2 = k;
     o[k2] = m[k];
 }));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
 var __exportStar = (this && this.__exportStar) || function(m, exports) {
     for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.configureSqlJs = exports.AstericsGridProcessor = exports.ApplePanelsProcessor = exports.TouchChatProcessor = exports.SnapProcessor = exports.GridsetProcessor = exports.ObfProcessor = exports.OpmlProcessor = exports.DotProcessor = void 0;
+exports.configureSqlJs = exports.Metrics = exports.AstericsGridProcessor = exports.ApplePanelsProcessor = exports.TouchChatProcessor = exports.SnapProcessor = exports.GridsetProcessor = exports.ObfProcessor = exports.OpmlProcessor = exports.DotProcessor = void 0;
 exports.getProcessor = getProcessor;
 exports.getSupportedExtensions = getSupportedExtensions;
 exports.isExtensionSupported = isExtensionSupported;
@@ -56,6 +68,11 @@ var applePanelsProcessor_1 = require("./processors/applePanelsProcessor");
 Object.defineProperty(exports, "ApplePanelsProcessor", { enumerable: true, get: function () { return applePanelsProcessor_1.ApplePanelsProcessor; } });
 var astericsGridProcessor_1 = require("./processors/astericsGridProcessor");
 Object.defineProperty(exports, "AstericsGridProcessor", { enumerable: true, get: function () { return astericsGridProcessor_1.AstericsGridProcessor; } });
+// ===================================================================
+// UTILITY FUNCTIONS
+// ===================================================================
+// Metrics namespace (pageset analytics)
+exports.Metrics = __importStar(require("./metrics"));
 const dotProcessor_2 = require("./processors/dotProcessor");
 const opmlProcessor_2 = require("./processors/opmlProcessor");
 const obfProcessor_2 = require("./processors/obfProcessor");
package/dist/index.node.d.ts
CHANGED
@@ -9,9 +9,9 @@ export * from './core/treeStructure';
 export * from './core/baseProcessor';
 export * from './core/stringCasing';
 export * from './processors';
-export * as Analytics from './
-export * from './utilities/analytics';
+export * as Analytics from './analytics';
 export * as Validation from './validation';
+export * as Metrics from './metrics';
 export * as Gridset from './gridset';
 export * as Snap from './snap';
 export * as OBF from './obf';
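
Both entry points now expose a Metrics namespace alongside the existing Analytics and Validation namespaces. What Metrics contains is not visible in this diff (dist/metrics.d.ts, +17 lines, is not shown here), so only the import is illustrated; the package-root specifier is an assumption.

import { Metrics, Analytics, Validation } from '@willwade/aac-processors';
// Metrics.* — new pageset-analytics helpers re-exported from dist/metrics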