@natlibfi/marc-record-validators-melinda 12.0.0-alpha.9 → 12.0.1-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/access-rights.test.js +1 -1
- package/dist/access-rights.test.js.map +1 -1
- package/dist/addMissingField337.test.js +1 -1
- package/dist/addMissingField337.test.js.map +1 -1
- package/dist/addMissingField338.test.js +1 -1
- package/dist/addMissingField338.test.js.map +1 -1
- package/dist/cyrillux-usemarcon-replacement.test.js +4 -7
- package/dist/cyrillux-usemarcon-replacement.test.js.map +2 -2
- package/dist/cyrillux.test.js +1 -1
- package/dist/cyrillux.test.js.map +1 -1
- package/dist/disambiguateSeriesStatements.test.js +1 -7
- package/dist/disambiguateSeriesStatements.test.js.map +2 -2
- package/dist/double-commas.test.js +1 -1
- package/dist/double-commas.test.js.map +1 -1
- package/dist/drop-terms.js +122 -0
- package/dist/drop-terms.js.map +7 -0
- package/dist/drop-terms.test.js +56 -0
- package/dist/drop-terms.test.js.map +7 -0
- package/dist/empty-fields.test.js +1 -1
- package/dist/empty-fields.test.js.map +1 -1
- package/dist/ending-punctuation.test.js +39 -19
- package/dist/ending-punctuation.test.js.map +2 -2
- package/dist/field-008-18-34-character-groups.test.js +2 -8
- package/dist/field-008-18-34-character-groups.test.js.map +2 -2
- package/dist/field-505-separators.test.js +1 -7
- package/dist/field-505-separators.test.js.map +2 -2
- package/dist/field-521-fix.test.js +1 -7
- package/dist/field-521-fix.test.js.map +2 -2
- package/dist/field-exclusion.test.js +11 -8
- package/dist/field-exclusion.test.js.map +2 -2
- package/dist/field-structure.test.js +1 -1
- package/dist/field-structure.test.js.map +1 -1
- package/dist/fields-present.test.js +1 -1
- package/dist/fields-present.test.js.map +1 -1
- package/dist/fix-33X.test.js +1 -1
- package/dist/fix-33X.test.js.map +1 -1
- package/dist/fix-country-codes.test.js +1 -7
- package/dist/fix-country-codes.test.js.map +2 -2
- package/dist/fix-sami-041.js +87 -0
- package/dist/fix-sami-041.js.map +7 -0
- package/dist/fix-sami-041.test.js +40 -0
- package/dist/fix-sami-041.test.js.map +7 -0
- package/dist/fixRelatorTerms.test.js +2 -8
- package/dist/fixRelatorTerms.test.js.map +2 -2
- package/dist/fixed-fields.test.js +29 -18
- package/dist/fixed-fields.test.js.map +2 -2
- package/dist/identical-fields.test.js +1 -1
- package/dist/identical-fields.test.js.map +1 -1
- package/dist/index.js +7 -1
- package/dist/index.js.map +2 -2
- package/dist/indicator-fixes.js +10 -0
- package/dist/indicator-fixes.js.map +2 -2
- package/dist/indicator-fixes.test.js +1 -7
- package/dist/indicator-fixes.test.js.map +2 -2
- package/dist/isbn-issn.js +1 -1
- package/dist/isbn-issn.js.map +2 -2
- package/dist/isbn-issn.test.js +9 -6
- package/dist/isbn-issn.test.js.map +2 -2
- package/dist/item-language.test.js +1 -1
- package/dist/item-language.test.js.map +2 -2
- package/dist/merge-fields.test.js +2 -7
- package/dist/merge-fields.test.js.map +2 -2
- package/dist/mergeField500Lisapainokset.test.js +1 -7
- package/dist/mergeField500Lisapainokset.test.js.map +2 -2
- package/dist/mergeRelatorTermFields.test.js +1 -7
- package/dist/mergeRelatorTermFields.test.js.map +2 -2
- package/dist/multiple-subfield-0.test.js +1 -7
- package/dist/multiple-subfield-0.test.js.map +2 -2
- package/dist/normalize-dashes.test.js +1 -7
- package/dist/normalize-dashes.test.js.map +2 -2
- package/dist/normalize-identifiers.test.js +1 -7
- package/dist/normalize-identifiers.test.js.map +2 -2
- package/dist/normalize-qualifying-information.test.js +1 -7
- package/dist/normalize-qualifying-information.test.js.map +2 -2
- package/dist/normalize-utf8-diacritics.test.js +1 -7
- package/dist/normalize-utf8-diacritics.test.js.map +2 -2
- package/dist/normalizeFieldForComparison.js +24 -0
- package/dist/normalizeFieldForComparison.js.map +2 -2
- package/dist/punctuation.test.js +1 -7
- package/dist/punctuation.test.js.map +2 -2
- package/dist/punctuation2.test.js +2 -8
- package/dist/punctuation2.test.js.map +2 -2
- package/dist/reindexSubfield6OccurenceNumbers.test.js +1 -7
- package/dist/reindexSubfield6OccurenceNumbers.test.js.map +2 -2
- package/dist/remove-041-zxx.js +56 -0
- package/dist/remove-041-zxx.js.map +7 -0
- package/dist/remove-041-zxx.test.js +40 -0
- package/dist/remove-041-zxx.test.js.map +7 -0
- package/dist/removeDuplicateDataFields.test.js +2 -8
- package/dist/removeDuplicateDataFields.test.js.map +2 -2
- package/dist/removeInferiorDataFields.js +1 -9
- package/dist/removeInferiorDataFields.js.map +2 -2
- package/dist/removeInferiorDataFields.test.js +1 -7
- package/dist/removeInferiorDataFields.test.js.map +2 -2
- package/dist/resolvable-ext-references-melinda.test.js +1 -1
- package/dist/resolvable-ext-references-melinda.test.js.map +2 -2
- package/dist/resolveOrphanedSubfield6s.js +1 -3
- package/dist/resolveOrphanedSubfield6s.js.map +2 -2
- package/dist/resolveOrphanedSubfield6s.test.js +1 -7
- package/dist/resolveOrphanedSubfield6s.test.js.map +2 -2
- package/dist/sanitize-vocabulary-source-codes.test.js +1 -7
- package/dist/sanitize-vocabulary-source-codes.test.js.map +2 -2
- package/dist/sort-tags.test.js +1 -1
- package/dist/sort-tags.test.js.map +1 -1
- package/dist/sortFields.js +16 -1
- package/dist/sortFields.js.map +2 -2
- package/dist/sortFields.test.js +1 -7
- package/dist/sortFields.test.js.map +2 -2
- package/dist/sortRelatorTerms.test.js +1 -7
- package/dist/sortRelatorTerms.test.js.map +2 -2
- package/dist/sortSubfields.js +3 -1
- package/dist/sortSubfields.js.map +2 -2
- package/dist/sortSubfields.test.js +1 -7
- package/dist/sortSubfields.test.js.map +2 -2
- package/dist/stripPunctuation.js +7 -4
- package/dist/stripPunctuation.js.map +2 -2
- package/dist/stripPunctuation.test.js +1 -7
- package/dist/stripPunctuation.test.js.map +2 -2
- package/dist/subfield-exclusion.test.js +1 -1
- package/dist/subfield-exclusion.test.js.map +1 -1
- package/dist/subfield6Utils.js +1 -13
- package/dist/subfield6Utils.js.map +2 -2
- package/dist/subfieldValueNormalizations.test.js +1 -7
- package/dist/subfieldValueNormalizations.test.js.map +2 -2
- package/dist/sync-007-and-300.test.js +1 -7
- package/dist/sync-007-and-300.test.js.map +2 -2
- package/dist/sync-language.js +103 -0
- package/dist/sync-language.js.map +7 -0
- package/dist/sync-language.test.js +40 -0
- package/dist/sync-language.test.js.map +7 -0
- package/dist/translate-terms.js +121 -85
- package/dist/translate-terms.js.map +3 -3
- package/dist/translate-terms.test.js +5 -8
- package/dist/translate-terms.test.js.map +2 -2
- package/dist/unicode-decomposition.test.js +1 -1
- package/dist/unicode-decomposition.test.js.map +1 -1
- package/dist/update-field-540.test.js +2 -8
- package/dist/update-field-540.test.js.map +2 -2
- package/dist/urn.test.js +2 -8
- package/dist/urn.test.js.map +2 -2
- package/package.json +18 -16
- package/src/access-rights.test.js +1 -1
- package/src/addMissingField337.test.js +1 -1
- package/src/addMissingField338.test.js +1 -1
- package/src/cyrillux-usemarcon-replacement.test.js +4 -9
- package/src/cyrillux.test.js +1 -1
- package/src/disambiguateSeriesStatements.test.js +3 -8
- package/src/double-commas.test.js +1 -1
- package/src/drop-terms.js +162 -0
- package/src/drop-terms.test.js +81 -0
- package/src/empty-fields.test.js +1 -1
- package/src/ending-punctuation.test.js +28 -20
- package/src/field-008-18-34-character-groups.test.js +4 -9
- package/src/field-505-separators.test.js +3 -8
- package/src/field-521-fix.test.js +3 -8
- package/src/field-exclusion.test.js +10 -8
- package/src/field-structure.test.js +1 -1
- package/src/fields-present.test.js +1 -1
- package/src/fix-33X.test.js +1 -1
- package/src/fix-country-codes.test.js +3 -8
- package/src/fix-sami-041.js +113 -0
- package/src/fix-sami-041.test.js +52 -0
- package/src/fixRelatorTerms.test.js +4 -9
- package/src/fixed-fields.test.js +24 -18
- package/src/identical-fields.test.js +1 -1
- package/src/index.js +8 -1
- package/src/indicator-fixes.js +12 -0
- package/src/indicator-fixes.test.js +3 -8
- package/src/isbn-issn.js +1 -1
- package/src/isbn-issn.test.js +8 -6
- package/src/item-language.test.js +2 -2
- package/src/merge-fields.test.js +3 -8
- package/src/mergeField500Lisapainokset.test.js +3 -8
- package/src/mergeRelatorTermFields.test.js +3 -8
- package/src/multiple-subfield-0.test.js +3 -8
- package/src/normalize-dashes.test.js +3 -8
- package/src/normalize-identifiers.test.js +3 -8
- package/src/normalize-qualifying-information.test.js +3 -8
- package/src/normalize-utf8-diacritics.test.js +3 -8
- package/src/normalizeFieldForComparison.js +26 -0
- package/src/punctuation.test.js +3 -8
- package/src/punctuation2.test.js +4 -9
- package/src/reindexSubfield6OccurenceNumbers.test.js +3 -8
- package/src/remove-041-zxx.js +85 -0
- package/src/remove-041-zxx.test.js +52 -0
- package/src/removeDuplicateDataFields.test.js +4 -9
- package/src/removeInferiorDataFields.js +7 -7
- package/src/removeInferiorDataFields.test.js +3 -8
- package/src/resolvable-ext-references-melinda.test.js +5 -5
- package/src/resolveOrphanedSubfield6s.js +3 -3
- package/src/resolveOrphanedSubfield6s.test.js +3 -8
- package/src/sanitize-vocabulary-source-codes.test.js +3 -8
- package/src/sort-tags.test.js +1 -1
- package/src/sortFields.js +20 -1
- package/src/sortFields.test.js +3 -8
- package/src/sortRelatorTerms.test.js +3 -8
- package/src/sortSubfields.js +3 -1
- package/src/sortSubfields.test.js +3 -8
- package/src/stripPunctuation.js +9 -6
- package/src/stripPunctuation.test.js +3 -8
- package/src/subfield-exclusion.test.js +1 -1
- package/src/subfield6Utils.js +13 -13
- package/src/subfieldValueNormalizations.test.js +3 -8
- package/src/sync-007-and-300.test.js +3 -8
- package/src/sync-language.js +148 -0
- package/src/sync-language.test.js +52 -0
- package/src/translate-terms.js +158 -103
- package/src/translate-terms.test.js +12 -16
- package/src/unicode-decomposition.test.js +1 -1
- package/src/update-field-540.test.js +4 -9
- package/src/urn.test.js +4 -9
- package/test-fixtures/drop-terms/01/expectedResult.json +31 -0
- package/test-fixtures/drop-terms/01/metadata.json +6 -0
- package/test-fixtures/drop-terms/01/record.json +35 -0
- package/test-fixtures/drop-terms/02/expectedResult.json +7 -0
- package/test-fixtures/drop-terms/02/metadata.json +6 -0
- package/test-fixtures/drop-terms/02/record.json +40 -0
- package/test-fixtures/drop-terms/03/expectedResult.json +6 -0
- package/test-fixtures/drop-terms/03/metadata.json +18 -0
- package/test-fixtures/drop-terms/03/record.json +39 -0
- package/test-fixtures/drop-terms/04/expectedResult.json +6 -0
- package/test-fixtures/drop-terms/04/metadata.json +19 -0
- package/test-fixtures/drop-terms/04/record.json +24 -0
- package/test-fixtures/fix-language-codes/02/metadata.json +1 -1
- package/test-fixtures/fix-sami-041/01/expectedResult.json +6 -0
- package/test-fixtures/fix-sami-041/01/metadata.json +4 -0
- package/test-fixtures/fix-sami-041/01/record.json +13 -0
- package/test-fixtures/fix-sami-041/02/expectedResult.json +10 -0
- package/test-fixtures/fix-sami-041/02/metadata.json +4 -0
- package/test-fixtures/fix-sami-041/02/record.json +8 -0
- package/test-fixtures/fix-sami-041/03/expectedResult.json +5 -0
- package/test-fixtures/fix-sami-041/03/metadata.json +5 -0
- package/test-fixtures/fix-sami-041/03/record.json +8 -0
- package/test-fixtures/fix-sami-041/04/expectedResult.json +7 -0
- package/test-fixtures/fix-sami-041/04/metadata.json +4 -0
- package/test-fixtures/fix-sami-041/04/record.json +10 -0
- package/test-fixtures/fix-sami-041/05/expectedResult.json +10 -0
- package/test-fixtures/fix-sami-041/05/metadata.json +6 -0
- package/test-fixtures/fix-sami-041/05/record.json +8 -0
- package/test-fixtures/indicator-fixes/11/expectedResult.json +10 -0
- package/test-fixtures/indicator-fixes/11/metadata.json +4 -0
- package/test-fixtures/indicator-fixes/11/record.json +10 -0
- package/test-fixtures/merge-fields/f05/metadata.json +1 -1
- package/test-fixtures/remove-041-zxx/01/expectedResult.json +5 -0
- package/test-fixtures/remove-041-zxx/01/metadata.json +5 -0
- package/test-fixtures/remove-041-zxx/01/record.json +10 -0
- package/test-fixtures/remove-041-zxx/02/expectedResult.json +7 -0
- package/test-fixtures/remove-041-zxx/02/metadata.json +5 -0
- package/test-fixtures/remove-041-zxx/02/record.json +9 -0
- package/test-fixtures/remove-041-zxx/11/expectedResult.json +10 -0
- package/test-fixtures/remove-041-zxx/11/metadata.json +5 -0
- package/test-fixtures/remove-041-zxx/11/record.json +9 -0
- package/test-fixtures/remove-041-zxx/12/expectedResult.json +10 -0
- package/test-fixtures/remove-041-zxx/12/metadata.json +5 -0
- package/test-fixtures/remove-041-zxx/12/record.json +9 -0
- package/test-fixtures/sort-fields/15/input.json +9 -0
- package/test-fixtures/sort-fields/15/metadata.json +5 -0
- package/test-fixtures/sort-fields/15/result.json +10 -0
- package/test-fixtures/sync-language/01/expectedResult.json +5 -0
- package/test-fixtures/sync-language/01/metadata.json +5 -0
- package/test-fixtures/sync-language/01/record.json +7 -0
- package/test-fixtures/sync-language/02/expectedResult.json +6 -0
- package/test-fixtures/sync-language/02/metadata.json +5 -0
- package/test-fixtures/sync-language/02/record.json +10 -0
- package/test-fixtures/sync-language/03/expectedResult.json +6 -0
- package/test-fixtures/sync-language/03/metadata.json +5 -0
- package/test-fixtures/sync-language/03/record.json +6 -0
- package/test-fixtures/sync-language/10/expectedResult.json +10 -0
- package/test-fixtures/sync-language/10/metadata.json +5 -0
- package/test-fixtures/sync-language/10/record.json +8 -0
- package/test-fixtures/sync-language/11/expectedResult.json +10 -0
- package/test-fixtures/sync-language/11/metadata.json +5 -0
- package/test-fixtures/sync-language/11/record.json +7 -0
- package/test-fixtures/sync-language/12/expectedResult.json +9 -0
- package/test-fixtures/sync-language/12/metadata.json +6 -0
- package/test-fixtures/sync-language/12/record.json +6 -0
- package/test-fixtures/sync-language/13/expectedResult.json +10 -0
- package/test-fixtures/sync-language/13/metadata.json +5 -0
- package/test-fixtures/sync-language/13/record.json +8 -0
- package/test-fixtures/sync-language/14/expectedResult.json +9 -0
- package/test-fixtures/sync-language/14/metadata.json +5 -0
- package/test-fixtures/sync-language/14/record.json +7 -0
- package/test-fixtures/sync-language/15/expectedResult.json +9 -0
- package/test-fixtures/sync-language/15/metadata.json +5 -0
- package/test-fixtures/sync-language/15/record.json +7 -0
- package/test-fixtures/translate-terms/05/expectedResult.json +12 -0
- package/test-fixtures/translate-terms/05/metadata.json +7 -0
- package/test-fixtures/translate-terms/05/record.json +11 -0
- package/test-fixtures/translate-terms/06/expectedResult.json +12 -0
- package/test-fixtures/translate-terms/06/metadata.json +7 -0
- package/test-fixtures/translate-terms/06/record.json +11 -0
- package/test-fixtures/translate-terms-data.js +23 -0
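
The headline changes in this release are four new validators (drop-terms, fix-sami-041, remove-041-zxx, sync-language) with their test fixtures, a reworked translate-terms, and the shared helper normalizeFieldForComparison. The diffs below show the compiled dist output. As orientation, here is a minimal sketch of how a validator from this package is driven, based on the validate/fix contract visible in the translate-terms test diff further down; the record content is illustrative only, the deep dist import path is an assumption, and translate-terms needs network access to api.finto.fi:

  import {MarcRecord} from '@natlibfi/marc-record';
  // Assumed deep import; adjust to however your project consumes the package.
  import validatorFactory from '@natlibfi/marc-record-validators-melinda/dist/translate-terms.js';

  // translate-terms resolves to {description, validate, fix}.
  const validator = await validatorFactory();

  // Illustrative record: a 650 second-indicator-7 field with a YSO term, source code and URI.
  const record = new MarcRecord({fields: [
    {tag: '650', ind1: ' ', ind2: '7', subfields: [
      {code: 'a', value: 'esimerkki'},
      {code: '2', value: 'yso/fin'},
      {code: '0', value: 'http://www.yso.fi/onto/yso/p13299'}
    ]}
  ]});

  const result = await validator.validate(record); // {message, valid}
  if (!result.valid) {
    await validator.fix(record); // inserts the derived fields into the record in place
  }
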
package/dist/translate-terms.js
CHANGED
@@ -42,8 +42,8 @@ export default function() {
       return [];
     }
     const fields = record.get(tag);
-    const finnishFields = fields.filter((f) => isRelevantField(f, "fin"));
-    const swedishFields = fields.filter((f) => isRelevantField(f, "swe"));
+    const finnishFields = fields.filter((f) => isTranslatable(f, "fin"));
+    const swedishFields = fields.filter((f) => isTranslatable(f, "swe"));
     const finnishOnly = getMisses(finnishFields, swedishFields);
     const swedishOnly = getMisses(swedishFields, finnishFields);
     return [...finnishOnly, ...swedishOnly].filter((f) => tagAndFieldAgree(f));
@@ -74,95 +74,34 @@ export default function() {
     const newField = { tag: field.tag, ind1: field.ind1, ind2: field.ind2, subfields: [sfA, sf2, sf0] };
     return newField;
   }
-  function getLexiconAndLanguage(field) {
-    const subfield2 = field.subfields.find((sf) => sf.code === "2");
-    if (subfield2.value === "slm/fin") {
-      return { "lex": "slm", "lang": "fin" };
-    }
-    if (subfield2.value === "slm/swe") {
-      return { "lex": "slm", "lang": "swe" };
-    }
-    if (subfield2.value === "yso/fin") {
-      return { "lex": "yso", "lang": "fin" };
-    }
-    if (subfield2.value === "yso/swe") {
-      return { "lex": "yso", "lang": "swe" };
-    }
-    return {};
-  }
   async function getPrefLabel(field) {
     const uri = fieldToUri(field);
     if (!uri) {
       return void 0;
     }
-    const prefLabels = await getTermData(uri);
-    if (!prefLabels) {
+    const data = await getTermData(uri);
+    if (!data) {
       nvdebug(`No labels found for ${uri}`, debug);
       return void 0;
     }
+    const prefLabels = data.prefLabel;
     const lexData = getLexiconAndLanguage(field);
     const lang = changeAbbr(lexData.lang);
     const subfieldA = field.subfields.find((sf) => sf.code === "a");
-    const prefLabel = prefLabels.find((pl) => pl.lang === lang);
-    if (prefLabel.value === subfieldA.value) {
-      nvdebug(`'${fieldToString(field)}' requires translating`, debug);
+    if (isLabel(prefLabels, subfieldA.value, lang)) {
       return prefLabels;
     }
     return void 0;
   }
-  function swapLanguageCodeBetweenLanguages(code) {
-    if (swapLanguageCode[code]) {
-      return swapLanguageCode[code];
-    }
-    return code;
-  }
-  function changeAbbr(abbr) {
-    if (changeAbbrHash[abbr]) {
-      return changeAbbrHash[abbr];
-    }
-    return abbr;
-  }
-  function swaggerQuery(uri) {
-    return `https://api.finto.fi/rest/v1/data?uri=${uri}&format=application%2Fjson`;
-  }
-  async function getTermData(uri) {
-    if (termCache[uri]) {
-      return termCache[uri];
-    }
-    const tmp = await getTermDataFromFinto(uri);
-    termCache[uri] = tmp;
-    return tmp;
-  }
-  async function getTermDataFromFinto(uri) {
-    const headers = { "Accept": "application/json" };
-    const uri2 = swaggerQuery(uri);
-    const response = await fetch(uri2, { method: "GET", headers });
-    if (!response.ok) {
-      return void 0;
-    }
-    const json = await response.json();
-    if (!json.graph) {
-      return void 0;
-    }
-    const arr = json.graph;
-    const [hit] = arr.filter((row) => row.uri === uri);
-    return hit.prefLabel;
-  }
   function fieldToUri(field) {
     const lex = mapTagToLex(field.tag);
     const subfield0 = field.subfields.find((sf) => sf.code === "0");
     const id = subfield0.value.replace(/^[^0-9]+/u, "");
-    if (lex === "yso") {
-      return `http://www.yso.fi/onto/yso/p${id}`;
-    }
-    if (lex === "slm") {
-      return `http://urn.fi/URN:NBN:fi:au:slm:s${id}`;
-    }
-    return void 0;
+    return buildUri(lex, id);
   }
-  function isRelevantField(field, lang) {
+  function isTranslatable(field, lang) {
     const fieldAsString = fieldToString(field);
-    if (!fieldAsString.match(/^... #7 ‡a [^‡]+ ‡2 [^‡]+ ‡0 [^‡]+(?: ‡9 [A-Z]+<(?:KEEP|DROP)>)*$/u)) {
+    if (!fieldAsString.match(/^... #7 (?: ‡8 [^‡]+ )*‡a [^‡]+ ‡2 [^‡]+ ‡0 [^‡]+(?: ‡9 [A-Z]+<(?:KEEP|DROP)>)*$/u)) {
       return false;
     }
     const lex = mapTagToLex(field.tag);
@@ -172,17 +111,6 @@ export default function() {
     }
     return fieldHasValidSubfield0(field);
   }
-  function fieldHasValidSubfield0(field) {
-    const lex = mapTagToLex(field.tag);
-    const subfield0 = field.subfields.find((sf) => sf.code === "0");
-    if (lex === "yso" && subfield0.value.match(/^http:\/\/www\.yso\.fi\/onto\/yso\/p[0-9]+$/u)) {
-      return true;
-    }
-    if (lex === "slm" && subfield0.value.match(/^http:\/\/urn\.fi\/URN:NBN:fi:au:slm:s[0-9]+$/u)) {
-      return true;
-    }
-    return false;
-  }
   function getMisses(fieldList1, fieldList2) {
     return fieldList1.filter((f) => !hasSubfield0Match(f, fieldList2));
   }
@@ -190,11 +118,119 @@ export default function() {
     const subfield0 = field.subfields.find((sf) => sf.code === "0");
     return pairFields.some((f) => f.subfields.some((sf) => sf.code === "0" && sf.value === subfield0.value));
   }
-  function mapTagToLex(tag) {
-    if (tag === "655") {
-      return "slm";
-    }
-    return "yso";
-  }
 }
+export function fieldHasValidSubfield0(field, defaultLex = void 0) {
+  const lex = defaultLex || mapTagToLex(field.tag);
+  return field.subfields.some((sf) => isValidSubfield0(sf, lex));
+}
+export function isValidSubfield0(subfield, lex = "???") {
+  if (subfield.code !== "0") {
+    return false;
+  }
+  if (["yso", "yso/fin", "yso/swe"].includes(lex) && subfield.value.match(/^https?:\/\/www\.yso\.fi\/onto\/yso\/p[0-9]+$/u)) {
+    return true;
+  }
+  if (["slm", "slm/fin", "slm/swe"].includes(lex) && subfield.value.match(/^https?:\/\/urn\.fi\/URN:NBN:fi:au:slm:s[0-9]+$/u)) {
+    return true;
+  }
+  return false;
+}
+export function buildUri(lex, id) {
+  if (["yso", "yso/fin", "yso/swe"].includes(lex)) {
+    return `http://www.yso.fi/onto/yso/p${id}`;
+  }
+  if (["slm", "slm/fin", "slm/swe"].includes(lex)) {
+    return `http://urn.fi/URN:NBN:fi:au:slm:s${id}`;
+  }
+  return void 0;
+}
+function mapTagToLex(tag) {
+  if (tag === "655") {
+    return "slm";
+  }
+  return "yso";
+}
+export async function getTermData(uri) {
+  nvdebug(`getTermData(${uri})`);
+  if (termCache[uri]) {
+    return termCache[uri];
+  }
+  const tmp = await getTermDataFromFinto(uri);
+  termCache[uri] = tmp;
+  return tmp;
+}
+async function getTermDataFromFinto(uri) {
+  const headers = { "Accept": "application/json" };
+  const uri2 = swaggerQuery(uri);
+  const response = await fetch(uri2, { method: "GET", headers });
+  if (!response.ok) {
+    return void 0;
+  }
+  const json = await response.json();
+  if (!json.graph) {
+    return void 0;
+  }
+  const arr = json.graph;
+  const [hit] = arr.filter((row) => row.uri === uri);
+  const subset = {
+    prefLabel: processLabel(hit?.prefLabel || void 0),
+    altLabel: processLabel(hit?.altLabel || void 0)
+  };
+  return subset;
+  function swaggerQuery(uri3) {
+    return `https://api.finto.fi/rest/v1/data?uri=${uri3}&format=application%2Fjson`;
+  }
+  function processLabel(label) {
+    if (typeof label === "object") {
+      if (Array.isArray(label)) {
+        return label;
+      }
+      return [label];
+    }
+    return [];
+  }
+}
+export function getLexiconAndLanguage(field) {
+  const subfield2 = field.subfields.find((sf) => sf.code === "2");
+  if (subfield2) {
+    if (subfield2.value === "slm/fin") {
+      return { "lex": "slm", "lang": "fin" };
+    }
+    if (subfield2.value === "slm/swe") {
+      return { "lex": "slm", "lang": "swe" };
+    }
+    if (subfield2.value === "yso/fin") {
+      return { "lex": "yso", "lang": "fin" };
+    }
+    if (subfield2.value === "yso/swe") {
+      return { "lex": "yso", "lang": "swe" };
+    }
+  }
+  return {};
+}
+export function isLabel(labels, term, lang = void 0) {
+  const twoLetterLanguageCode = lang && lang.length === 3 ? changeAbbr(lang) : lang;
+  return labels.some((l) => isMatch(l));
+  function isMatch(label) {
+    if (label.value !== term) {
+      return false;
+    }
+    if (!twoLetterLanguageCode) {
+      return true;
+    }
+    return label.lang === twoLetterLanguageCode;
+  }
+}
+function changeAbbr(abbr) {
+  if (changeAbbrHash[abbr]) {
+    return changeAbbrHash[abbr];
+  }
+  return abbr;
+}
+function swapLanguageCodeBetweenLanguages(code) {
+  if (swapLanguageCode[code]) {
+    return swapLanguageCode[code];
+  }
+  return code;
+}
 //# sourceMappingURL=translate-terms.js.map
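
Net effect of the dist changes above: the lexicon helpers move out of the default-export closure, several of them (fieldHasValidSubfield0, isValidSubfield0, buildUri, getTermData, getLexiconAndLanguage, isLabel) are now exported for reuse, the Finto response is reduced to a {prefLabel, altLabel} subset with labels normalized to arrays, and isTranslatable now tolerates leading ‡8 subfields. A standalone sketch of the same Finto lookup, assuming direct network access to api.finto.fi; the concept URI is one of those used in the test file below:

  // Minimal sketch of the lookup the reworked getTermDataFromFinto() performs.
  const uri = 'http://www.yso.fi/onto/yso/p13299';

  const response = await fetch(
    `https://api.finto.fi/rest/v1/data?uri=${uri}&format=application%2Fjson`,
    {method: 'GET', headers: {'Accept': 'application/json'}}
  );

  if (response.ok) {
    const json = await response.json();
    // The concept's own node is the graph row whose uri matches the query.
    const hit = (json.graph || []).find(row => row.uri === uri);
    const asArray = label => Array.isArray(label) ? label : label ? [label] : [];
    // Same {prefLabel, altLabel} subset shape the validator now caches per URI.
    const subset = {prefLabel: asArray(hit?.prefLabel), altLabel: asArray(hit?.altLabel)};
    console.log(subset.prefLabel); // e.g. [{lang: 'fi', value: '...'}, {lang: 'sv', value: '...'}]
  }
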
package/dist/translate-terms.js.map
CHANGED
@@ -1,7 +1,7 @@
 {
   "version": 3,
   "sources": ["../src/translate-terms.js"],
-  "sourcesContent": ["…embedded src/translate-terms.js as of 12.0.0-alpha.9…"],
-  "mappings": "…",
-  "names": []
+  "sourcesContent": ["…embedded src/translate-terms.js as of 12.0.1-alpha.1 (isTranslatable, exported module-level helpers)…"],
+  "mappings": "…",
+  "names": ["uri"]
 }
package/dist/translate-terms.test.js
CHANGED
@@ -1,5 +1,4 @@
 import assert from "node:assert";
-import createDebugLogger from "debug";
 import fetchMock from "fetch-mock";
 import validatorFactory from "./translate-terms.js";
 import { MarcRecord } from "@natlibfi/marc-record";
@@ -13,6 +12,7 @@ const uris = [
   "http://www.yso.fi/onto/yso/p6196061969",
   "http://urn.fi/URN:NBN:fi:au:slm:s161"
 ];
+const useMock = false;
 generateTests({
   callback,
   path: [import.meta.dirname, "..", "test-fixtures", "translate-terms"],
@@ -23,23 +23,20 @@ generateTests({
   },
   hooks: {
     before: async () => {
-
+      if (useMock) {
+        fetchMock.mockGlobal().get(`https://api.finto.fi/rest/v1/data?uri=${uris[0]}&format=application%2Fjson`, { status: 200, headers: {}, body: fakeData }).get(`https://api.finto.fi/rest/v1/data?uri=${uris[1]}&format=application%2Fjson`, { status: 200, headers: {}, body: fakeData }).get(`https://api.finto.fi/rest/v1/data?uri=${uris[2]}&format=application%2Fjson`, { status: 200, headers: {}, body: fakeData }).get(`https://api.finto.fi/rest/v1/data?uri=${uris[3]}&format=application%2Fjson`, { status: 200, headers: {}, body: fakeData }).get(`https://api.finto.fi/rest/v1/data?uri=${uris[4]}&format=application%2Fjson`, { status: 200, headers: {}, body: fakeData });
+      }
       testValidatorFactory();
     }
   }
 });
-const debug = createDebugLogger("@natlibfi/marc-record-validators-melinda/translate-terms:test");
 async function testValidatorFactory() {
   const validator = await validatorFactory();
   assert.equal(typeof validator, "object");
   assert.equal(typeof validator.description, "string");
   assert.equal(typeof validator.validate, "function");
 }
-async function callback({ getFixture,
-  if (enabled === false) {
-    debug("TEST SKIPPED!");
-    return;
-  }
+async function callback({ getFixture, fix = false }) {
   const validator = await validatorFactory();
   const record = new MarcRecord(getFixture("record.json"));
   const expectedResult = getFixture("expectedResult.json");
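
Note the consequence of the test change above: the fetch-mock setup is now gated behind useMock, which ships as false, so the translate-terms tests exercise the live Finto API by default; flipping the flag back to true restores the mocked responses. The gate in isolation looks like the sketch below, where fakeData stands in for the fixture exported by test-fixtures/translate-terms-data.js and only one of the five mocked URIs is shown:

  import fetchMock from 'fetch-mock';

  const useMock = true; // the shipped test file sets this to false
  const fakeData = {graph: []}; // stand-in for the real fixture

  if (useMock) {
    // Same global-mock pattern as in the test above, for a single concept URI.
    fetchMock.mockGlobal().get(
      'https://api.finto.fi/rest/v1/data?uri=http://www.yso.fi/onto/yso/p13299&format=application%2Fjson',
      {status: 200, headers: {}, body: fakeData}
    );
  }
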
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"version": 3,
|
|
3
3
|
"sources": ["../src/translate-terms.test.js"],
|
|
4
|
-
"sourcesContent": ["import assert from 'node:assert';\
|
|
5
|
-
"mappings": "AAAA,OAAO,YAAY;
|
|
4
|
+
"sourcesContent": ["import assert from 'node:assert';\n//import createDebugLogger from 'debug';\nimport fetchMock from 'fetch-mock';\n\nimport validatorFactory from './translate-terms.js';\n\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport {READERS} from '@natlibfi/fixura';\nimport generateTests from '@natlibfi/fixugen';\nimport {fakeData} from '../test-fixtures/translate-terms-data.js';\n\nconst uris = [\n 'http://www.yso.fi/onto/yso/p13299',\n 'http://www.yso.fi/onto/yso/p111739',\n 'http://www.yso.fi/onto/yso/p6197061979',\n 'http://www.yso.fi/onto/yso/p6196061969',\n 'http://urn.fi/URN:NBN:fi:au:slm:s161'\n];\n\nconst useMock = false;\n\ngenerateTests({\n callback,\n path: [import.meta.dirname, '..', 'test-fixtures', 'translate-terms'],\n useMetadataFile: true,\n recurse: false,\n fixura: {\n reader: READERS.JSON\n },\n hooks: {\n before: async () => {\n\n if (useMock){ \n fetchMock.mockGlobal()\n .get(`https://api.finto.fi/rest/v1/data?uri=${uris[0]}&format=application%2Fjson`, {status: 200, headers: {}, body: fakeData})\n .get(`https://api.finto.fi/rest/v1/data?uri=${uris[1]}&format=application%2Fjson`, {status: 200, headers: {}, body: fakeData})\n .get(`https://api.finto.fi/rest/v1/data?uri=${uris[2]}&format=application%2Fjson`, {status: 200, headers: {}, body: fakeData})\n .get(`https://api.finto.fi/rest/v1/data?uri=${uris[3]}&format=application%2Fjson`, {status: 200, headers: {}, body: fakeData})\n .get(`https://api.finto.fi/rest/v1/data?uri=${uris[4]}&format=application%2Fjson`, {status: 200, headers: {}, body: fakeData});\n }\n\n testValidatorFactory();\n }\n }\n});\n\n//const debug = createDebugLogger('@natlibfi/marc-record-validators-melinda/translate-terms:test');\n\nasync function testValidatorFactory() {\n const validator = await validatorFactory();\n\n assert.equal(typeof validator, 'object');\n assert.equal(typeof validator.description, 'string');\n assert.equal(typeof validator.validate, 'function');\n}\n\nasync function callback({getFixture, fix = false}) {\n const validator = await validatorFactory();\n const record = new MarcRecord(getFixture('record.json'));\n const expectedResult = getFixture('expectedResult.json');\n // console.log(expectedResult); // eslint-disable-line\n\n if (!fix) {\n const result = await validator.validate(record);\n assert.deepEqual(result, expectedResult);\n return;\n }\n\n await validator.fix(record);\n assert.deepEqual(record, expectedResult);\n}\n"],
+
"mappings": "AAAA,OAAO,YAAY;AAEnB,OAAO,eAAe;AAEtB,OAAO,sBAAsB;AAE7B,SAAQ,kBAAiB;AACzB,SAAQ,eAAc;AACtB,OAAO,mBAAmB;AAC1B,SAAQ,gBAAe;AAEvB,MAAM,OAAO;AAAA,EACX;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,MAAM,UAAU;AAEhB,cAAc;AAAA,EACZ;AAAA,EACA,MAAM,CAAC,YAAY,SAAS,MAAM,iBAAiB,iBAAiB;AAAA,EACpE,iBAAiB;AAAA,EACjB,SAAS;AAAA,EACT,QAAQ;AAAA,IACN,QAAQ,QAAQ;AAAA,EAClB;AAAA,EACA,OAAO;AAAA,IACL,QAAQ,YAAY;AAElB,UAAI,SAAQ;AACV,kBAAU,WAAW,EACpB,IAAI,yCAAyC,KAAK,CAAC,CAAC,8BAA8B,EAAC,QAAQ,KAAK,SAAS,CAAC,GAAG,MAAM,SAAQ,CAAC,EAC5H,IAAI,yCAAyC,KAAK,CAAC,CAAC,8BAA8B,EAAC,QAAQ,KAAK,SAAS,CAAC,GAAG,MAAM,SAAQ,CAAC,EAC5H,IAAI,yCAAyC,KAAK,CAAC,CAAC,8BAA8B,EAAC,QAAQ,KAAK,SAAS,CAAC,GAAG,MAAM,SAAQ,CAAC,EAC5H,IAAI,yCAAyC,KAAK,CAAC,CAAC,8BAA8B,EAAC,QAAQ,KAAK,SAAS,CAAC,GAAG,MAAM,SAAQ,CAAC,EAC5H,IAAI,yCAAyC,KAAK,CAAC,CAAC,8BAA8B,EAAC,QAAQ,KAAK,SAAS,CAAC,GAAG,MAAM,SAAQ,CAAC;AAAA,MAC/H;AAEA,2BAAqB;AAAA,IACvB;AAAA,EACF;AACF,CAAC;AAID,eAAe,uBAAuB;AACpC,QAAM,YAAY,MAAM,iBAAiB;AAEzC,SAAO,MAAM,OAAO,WAAW,QAAQ;AACvC,SAAO,MAAM,OAAO,UAAU,aAAa,QAAQ;AACnD,SAAO,MAAM,OAAO,UAAU,UAAU,UAAU;AACpD;AAEA,eAAe,SAAS,EAAC,YAAY,MAAM,MAAK,GAAG;AACjD,QAAM,YAAY,MAAM,iBAAiB;AACzC,QAAM,SAAS,IAAI,WAAW,WAAW,aAAa,CAAC;AACvD,QAAM,iBAAiB,WAAW,qBAAqB;AAGvD,MAAI,CAAC,KAAK;AACR,UAAM,SAAS,MAAM,UAAU,SAAS,MAAM;AAC9C,WAAO,UAAU,QAAQ,cAAc;AACvC;AAAA,EACF;AAEA,QAAM,UAAU,IAAI,MAAM;AAC1B,SAAO,UAAU,QAAQ,cAAc;AACzC;",
 "names": []
 }
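The change repeated across these compiled test files (and again in update-field-540.test.js and urn.test.js below) is the callback signature: the old `enabled`/debug-logger skip logic is dropped in favour of a `fix` option that switches between validate-only and fix-then-compare modes. A minimal sketch of the new callback, reconstructed from the embedded source above (the fixture file names and the './translate-terms.js' import are taken from that source, not invented):

import assert from 'node:assert';
import {MarcRecord} from '@natlibfi/marc-record';
import validatorFactory from './translate-terms.js';

async function callback({getFixture, fix = false}) {
  // Build the validator and load the fixture record plus the expected outcome.
  const validator = await validatorFactory();
  const record = new MarcRecord(getFixture('record.json'));
  const expectedResult = getFixture('expectedResult.json');

  // Validation-only mode: compare the validator report against the fixture.
  if (!fix) {
    const result = await validator.validate(record);
    assert.deepEqual(result, expectedResult);
    return;
  }

  // Fix mode: mutate the record in place and compare it against the fixture.
  await validator.fix(record);
  assert.deepEqual(record, expectedResult);
}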

package/dist/unicode-decomposition.test.js
CHANGED

@@ -1,6 +1,6 @@
 import assert from "node:assert";
 import { MarcRecord } from "@natlibfi/marc-record";
-import validatorFactory from "
+import validatorFactory from "./unicode-decomposition.js";
 import { describe, it } from "node:test";
 describe("unicode-decomposition", () => {
 it("Creates a validator", async () => {

package/dist/unicode-decomposition.test.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/unicode-decomposition.test.js"],
-"sourcesContent": ["import assert from 'node:assert';\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport validatorFactory from '
+
"sourcesContent": ["import assert from 'node:assert';\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport validatorFactory from './unicode-decomposition.js';\nimport {describe, it} from 'node:test';\n\ndescribe('unicode-decomposition', () => {\n it('Creates a validator', async () => {\n const validator = await validatorFactory();\n\n assert.equal(typeof validator, 'object');\n assert.equal(typeof validator.description, 'string');\n assert.equal(typeof validator.validate, 'function');\n });\n\n describe('#validate', () => {\n it('Finds the record valid', async () => {\n const validator = await validatorFactory();\n const record = new MarcRecord({\n fields: [\n {\n tag: '245',\n subfields: [\n {\n code: 'a',\n value: 'F\u00F6\u00F6, B\u00E4r'\n }\n ]\n }\n ]\n });\n const result = await validator.validate(record);\n assert.deepEqual(result, {valid: true, messages: []});\n });\n\n it('Finds the record invalid', async () => {\n const validator = await validatorFactory();\n const record = new MarcRecord({\n fields: [\n {\n tag: '001',\n ind1: ' ',\n ind2: '0',\n subfields: [\n {\n code: 'a',\n value: 'F\u00F6o\u0308, Ba\u0308r'\n }\n ]\n }\n ]\n });\n const result = await validator.validate(record);\n\n assert.deepEqual(result, {valid: false, messages: ['The following subfields are not properly decomposed: a']});\n });\n\n describe('#fix', () => {\n it('Should fix the record', async () => {\n const validator = await validatorFactory();\n\n const record = new MarcRecord({\n fields: [\n {\n tag: '245',\n subfields: [\n {\n code: 'a',\n value: 'F\u00F6o\u0308, Ba\u0308r'\n },\n {\n code: 'b',\n value: '== Fubar'\n }\n ]\n }\n ]\n });\n\n const recordOriginal = record.toObject();\n const fieldModified = {\n tag: '245',\n ind1: ' ',\n ind2: ' ',\n subfields: [\n {\n code: 'a',\n value: 'F\u00F6\u00F6, B\u00E4r'\n },\n {\n code: 'b',\n value: '== Fubar'\n }\n ]\n };\n await validator.fix(record);\n\n assert.notDeepEqual(recordOriginal, record);\n assert.deepEqual(record.fields, [fieldModified]);\n });\n });\n });\n});\n"],
"mappings": "AAAA,OAAO,YAAY;AACnB,SAAQ,kBAAiB;AACzB,OAAO,sBAAsB;AAC7B,SAAQ,UAAU,UAAS;AAE3B,SAAS,yBAAyB,MAAM;AACtC,KAAG,uBAAuB,YAAY;AACpC,UAAM,YAAY,MAAM,iBAAiB;AAEzC,WAAO,MAAM,OAAO,WAAW,QAAQ;AACvC,WAAO,MAAM,OAAO,UAAU,aAAa,QAAQ;AACnD,WAAO,MAAM,OAAO,UAAU,UAAU,UAAU;AAAA,EACpD,CAAC;AAED,WAAS,aAAa,MAAM;AAC1B,OAAG,0BAA0B,YAAY;AACvC,YAAM,YAAY,MAAM,iBAAiB;AACzC,YAAM,SAAS,IAAI,WAAW;AAAA,QAC5B,QAAQ;AAAA,UACN;AAAA,YACE,KAAK;AAAA,YACL,WAAW;AAAA,cACT;AAAA,gBACE,MAAM;AAAA,gBACN,OAAO;AAAA,cACT;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AACD,YAAM,SAAS,MAAM,UAAU,SAAS,MAAM;AAC9C,aAAO,UAAU,QAAQ,EAAC,OAAO,MAAM,UAAU,CAAC,EAAC,CAAC;AAAA,IACtD,CAAC;AAED,OAAG,4BAA4B,YAAY;AACzC,YAAM,YAAY,MAAM,iBAAiB;AACzC,YAAM,SAAS,IAAI,WAAW;AAAA,QAC5B,QAAQ;AAAA,UACN;AAAA,YACE,KAAK;AAAA,YACL,MAAM;AAAA,YACN,MAAM;AAAA,YACN,WAAW;AAAA,cACT;AAAA,gBACE,MAAM;AAAA,gBACN,OAAO;AAAA,cACT;AAAA,YACF;AAAA,UACF;AAAA,QACF;AAAA,MACF,CAAC;AACD,YAAM,SAAS,MAAM,UAAU,SAAS,MAAM;AAE9C,aAAO,UAAU,QAAQ,EAAC,OAAO,OAAO,UAAU,CAAC,wDAAwD,EAAC,CAAC;AAAA,IAC/G,CAAC;AAED,aAAS,QAAQ,MAAM;AACrB,SAAG,yBAAyB,YAAY;AACtC,cAAM,YAAY,MAAM,iBAAiB;AAEzC,cAAM,SAAS,IAAI,WAAW;AAAA,UAC5B,QAAQ;AAAA,YACN;AAAA,cACE,KAAK;AAAA,cACL,WAAW;AAAA,gBACT;AAAA,kBACE,MAAM;AAAA,kBACN,OAAO;AAAA,gBACT;AAAA,gBACA;AAAA,kBACE,MAAM;AAAA,kBACN,OAAO;AAAA,gBACT;AAAA,cACF;AAAA,YACF;AAAA,UACF;AAAA,QACF,CAAC;AAED,cAAM,iBAAiB,OAAO,SAAS;AACvC,cAAM,gBAAgB;AAAA,UACpB,KAAK;AAAA,UACL,MAAM;AAAA,UACN,MAAM;AAAA,UACN,WAAW;AAAA,YACT;AAAA,cACE,MAAM;AAAA,cACN,OAAO;AAAA,YACT;AAAA,YACA;AAAA,cACE,MAAM;AAAA,cACN,OAAO;AAAA,YACT;AAAA,UACF;AAAA,QACF;AACA,cAAM,UAAU,IAAI,MAAM;AAE1B,eAAO,aAAa,gBAAgB,MAAM;AAC1C,eAAO,UAAU,OAAO,QAAQ,CAAC,aAAa,CAAC;AAAA,MACjD,CAAC;AAAA,IACH,CAAC;AAAA,EACH,CAAC;AACH,CAAC;",
 "names": []
 }

package/dist/update-field-540.test.js
CHANGED

@@ -1,9 +1,8 @@
 import assert from "node:assert";
 import { MarcRecord } from "@natlibfi/marc-record";
-import validatorFactory from "
+import validatorFactory from "./update-field-540.js";
 import { READERS } from "@natlibfi/fixura";
 import generateTests from "@natlibfi/fixugen";
-import createDebugLogger from "debug";
 generateTests({
 callback,
 path: [import.meta.dirname, "..", "test-fixtures", "update-field-540"],
@@ -18,18 +17,13 @@ generateTests({
 }
 }
 });
-const debug = createDebugLogger("@natlibfi/marc-record-validators-melinda/update-field-540:test");
 async function testValidatorFactory() {
 const validator = await validatorFactory();
 assert(validator).to.be.an("object").that.has.any.keys("description", "validate");
 assert(validator.description).to.be.a("string");
 assert(validator.validate).to.be.a("function");
 }
-async function callback({ getFixture,
-if (enabled === false) {
-debug("TEST SKIPPED!");
-return;
-}
+async function callback({ getFixture, fix = false }) {
 const validator = await validatorFactory();
 const record = new MarcRecord(getFixture("record.json"));
 const expectedResult = getFixture("expectedResult.json");

package/dist/update-field-540.test.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/update-field-540.test.js"],
-"sourcesContent": ["import assert from 'node:assert';\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport validatorFactory from '
-"mappings": "AAAA,OAAO,YAAY;AACnB,SAAQ,kBAAiB;AACzB,OAAO,sBAAsB;AAC7B,SAAQ,eAAc;AACtB,OAAO,mBAAmB;
+
"sourcesContent": ["import assert from 'node:assert';\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport validatorFactory from './update-field-540.js';\nimport {READERS} from '@natlibfi/fixura';\nimport generateTests from '@natlibfi/fixugen';\n//import createDebugLogger from 'debug';\n\ngenerateTests({\n callback,\n path: [import.meta.dirname, '..', 'test-fixtures', 'update-field-540'],\n useMetadataFile: true,\n recurse: false,\n fixura: {\n reader: READERS.JSON\n },\n hooks: {\n before: async () => {\n testValidatorFactory();\n }\n }\n});\n\n//const debug = createDebugLogger('@natlibfi/marc-record-validators-melinda/update-field-540:test');\n\nasync function testValidatorFactory() {\n const validator = await validatorFactory();\n\n assert(validator)\n .to.be.an('object')\n .that.has.any.keys('description', 'validate');\n\n assert(validator.description).to.be.a('string');\n assert(validator.validate).to.be.a('function');\n}\n\nasync function callback({getFixture, fix = false}) {\n const validator = await validatorFactory();\n const record = new MarcRecord(getFixture('record.json'));\n const expectedResult = getFixture('expectedResult.json');\n // console.log(expectedResult); // eslint-disable-line\n\n if (!fix) {\n const result = await validator.validate(record);\n assert.deepEqual(result, expectedResult);\n return;\n }\n\n await validator.fix(record);\n assert.deepEqual(record, expectedResult);\n}\n"],
+
"mappings": "AAAA,OAAO,YAAY;AACnB,SAAQ,kBAAiB;AACzB,OAAO,sBAAsB;AAC7B,SAAQ,eAAc;AACtB,OAAO,mBAAmB;AAG1B,cAAc;AAAA,EACZ;AAAA,EACA,MAAM,CAAC,YAAY,SAAS,MAAM,iBAAiB,kBAAkB;AAAA,EACrE,iBAAiB;AAAA,EACjB,SAAS;AAAA,EACT,QAAQ;AAAA,IACN,QAAQ,QAAQ;AAAA,EAClB;AAAA,EACA,OAAO;AAAA,IACL,QAAQ,YAAY;AAClB,2BAAqB;AAAA,IACvB;AAAA,EACF;AACF,CAAC;AAID,eAAe,uBAAuB;AACpC,QAAM,YAAY,MAAM,iBAAiB;AAEzC,SAAO,SAAS,EACb,GAAG,GAAG,GAAG,QAAQ,EACjB,KAAK,IAAI,IAAI,KAAK,eAAe,UAAU;AAE9C,SAAO,UAAU,WAAW,EAAE,GAAG,GAAG,EAAE,QAAQ;AAC9C,SAAO,UAAU,QAAQ,EAAE,GAAG,GAAG,EAAE,UAAU;AAC/C;AAEA,eAAe,SAAS,EAAC,YAAY,MAAM,MAAK,GAAG;AACjD,QAAM,YAAY,MAAM,iBAAiB;AACzC,QAAM,SAAS,IAAI,WAAW,WAAW,aAAa,CAAC;AACvD,QAAM,iBAAiB,WAAW,qBAAqB;AAGvD,MAAI,CAAC,KAAK;AACR,UAAM,SAAS,MAAM,UAAU,SAAS,MAAM;AAC9C,WAAO,UAAU,QAAQ,cAAc;AACvC;AAAA,EACF;AAEA,QAAM,UAAU,IAAI,MAAM;AAC1B,SAAO,UAAU,QAAQ,cAAc;AACzC;",
 "names": []
 }

package/dist/urn.test.js
CHANGED

@@ -1,9 +1,8 @@
 import assert from "node:assert";
 import { MarcRecord } from "@natlibfi/marc-record";
-import validatorFactory from "
+import validatorFactory from "./urn.js";
 import { READERS } from "@natlibfi/fixura";
 import generateTests from "@natlibfi/fixugen";
-import createDebugLogger from "debug";
 generateTests({
 callback,
 path: [import.meta.dirname, "..", "test-fixtures", "urn"],
@@ -18,18 +17,13 @@ generateTests({
 }
 }
 });
-const debug = createDebugLogger("@natlibfi/marc-record-validators-melinda/urn:test");
 async function testValidatorFactory() {
 const validator = await validatorFactory();
 assert.equal(typeof validator, "object");
 assert.equal(typeof validator.description, "string");
 assert.equal(typeof validator.validate, "function");
 }
-async function callback({ getFixture,
-if (enabled === false) {
-debug("TEST SKIPPED!");
-return;
-}
+async function callback({ getFixture, fix = true, isLegalDeposit = false }) {
 const validator = await validatorFactory(isLegalDeposit);
 const record = new MarcRecord(getFixture("input.json"));
 const expectedResult = getFixture("result.json");

package/dist/urn.test.js.map
CHANGED

@@ -1,7 +1,7 @@
 {
 "version": 3,
 "sources": ["../src/urn.test.js"],
-"sourcesContent": ["import assert from 'node:assert';\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport validatorFactory from '
-"mappings": "AAAA,OAAO,YAAY;AACnB,SAAQ,kBAAiB;AACzB,OAAO,sBAAsB;AAC7B,SAAQ,eAAc;AACtB,OAAO,mBAAmB;
+
"sourcesContent": ["import assert from 'node:assert';\nimport {MarcRecord} from '@natlibfi/marc-record';\nimport validatorFactory from './urn.js';\nimport {READERS} from '@natlibfi/fixura';\nimport generateTests from '@natlibfi/fixugen';\n//import createDebugLogger from 'debug';\n\ngenerateTests({\n callback,\n path: [import.meta.dirname, '..', 'test-fixtures', 'urn'],\n useMetadataFile: true,\n recurse: false,\n fixura: {\n reader: READERS.JSON\n },\n hooks: {\n before: async () => {\n testValidatorFactory();\n }\n }\n});\n//const debug = createDebugLogger('@natlibfi/marc-record-validators-melinda/urn:test');\n\nasync function testValidatorFactory() {\n const validator = await validatorFactory();\n\n assert.equal(typeof validator, 'object');\n assert.equal(typeof validator.description, 'string');\n assert.equal(typeof validator.validate, 'function');\n}\n\nasync function callback({getFixture, fix = true, isLegalDeposit = false}) {\n const validator = await validatorFactory(isLegalDeposit);\n const record = new MarcRecord(getFixture('input.json'));\n const expectedResult = getFixture('result.json');\n // console.log(expectedResult); // eslint-disable-line\n\n if (!fix) {\n const result = await validator.validate(record);\n assert.deepEqual(result, expectedResult);\n return;\n }\n\n await validator.fix(record);\n assert.deepEqual(record, expectedResult);\n}\n"],
+
"mappings": "AAAA,OAAO,YAAY;AACnB,SAAQ,kBAAiB;AACzB,OAAO,sBAAsB;AAC7B,SAAQ,eAAc;AACtB,OAAO,mBAAmB;AAG1B,cAAc;AAAA,EACZ;AAAA,EACA,MAAM,CAAC,YAAY,SAAS,MAAM,iBAAiB,KAAK;AAAA,EACxD,iBAAiB;AAAA,EACjB,SAAS;AAAA,EACT,QAAQ;AAAA,IACN,QAAQ,QAAQ;AAAA,EAClB;AAAA,EACA,OAAO;AAAA,IACL,QAAQ,YAAY;AAClB,2BAAqB;AAAA,IACvB;AAAA,EACF;AACF,CAAC;AAGD,eAAe,uBAAuB;AACpC,QAAM,YAAY,MAAM,iBAAiB;AAEzC,SAAO,MAAM,OAAO,WAAW,QAAQ;AACvC,SAAO,MAAM,OAAO,UAAU,aAAa,QAAQ;AACnD,SAAO,MAAM,OAAO,UAAU,UAAU,UAAU;AACpD;AAEA,eAAe,SAAS,EAAC,YAAY,MAAM,MAAM,iBAAiB,MAAK,GAAG;AACxE,QAAM,YAAY,MAAM,iBAAiB,cAAc;AACvD,QAAM,SAAS,IAAI,WAAW,WAAW,YAAY,CAAC;AACtD,QAAM,iBAAiB,WAAW,aAAa;AAG/C,MAAI,CAAC,KAAK;AACR,UAAM,SAAS,MAAM,UAAU,SAAS,MAAM;AAC9C,WAAO,UAAU,QAAQ,cAAc;AACvC;AAAA,EACF;AAEA,QAAM,UAAU,IAAI,MAAM;AAC1B,SAAO,UAAU,QAAQ,cAAc;AACzC;",
 "names": []
 }