yomitan-core 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +485 -0
- package/dist/anki-connect-BQyCGW3O.cjs +513 -0
- package/dist/anki-connect-BQyCGW3O.cjs.map +1 -0
- package/dist/anki-connect-CPPuhyiQ.js +6 -0
- package/dist/anki-connect-DbrQHphS.js +495 -0
- package/dist/anki-connect-DbrQHphS.js.map +1 -0
- package/dist/anki-connect-DcheJrp-.cjs +6 -0
- package/dist/anki.cjs +1758 -0
- package/dist/anki.cjs.map +1 -0
- package/dist/anki.d.cts +751 -0
- package/dist/anki.d.cts.map +1 -0
- package/dist/anki.d.ts +751 -0
- package/dist/anki.d.ts.map +1 -0
- package/dist/anki.js +1751 -0
- package/dist/anki.js.map +1 -0
- package/dist/audio-D9DvYyB7.d.cts +48 -0
- package/dist/audio-D9DvYyB7.d.cts.map +1 -0
- package/dist/audio-DQulUkDM.d.ts +48 -0
- package/dist/audio-DQulUkDM.d.ts.map +1 -0
- package/dist/audio-url-generator-BXvQaqUi.cjs +4 -0
- package/dist/audio-url-generator-Dy2hb2Mm.cjs +414 -0
- package/dist/audio-url-generator-Dy2hb2Mm.cjs.map +1 -0
- package/dist/audio-url-generator-Qi0rfzHz.js +4 -0
- package/dist/audio-url-generator-pFQAB5Nb.js +390 -0
- package/dist/audio-url-generator-pFQAB5Nb.js.map +1 -0
- package/dist/audio.cjs +7 -0
- package/dist/audio.d.cts +86 -0
- package/dist/audio.d.cts.map +1 -0
- package/dist/audio.d.ts +86 -0
- package/dist/audio.d.ts.map +1 -0
- package/dist/audio.js +4 -0
- package/dist/batch-processor-BR-gB3H3.js +84 -0
- package/dist/batch-processor-BR-gB3H3.js.map +1 -0
- package/dist/batch-processor-CSF1acTw.cjs +3 -0
- package/dist/batch-processor-DFqM_L-_.cjs +91 -0
- package/dist/batch-processor-DFqM_L-_.cjs.map +1 -0
- package/dist/batch-processor-Quo9jUyf.js +3 -0
- package/dist/chunk-BCwAaXi7.cjs +31 -0
- package/dist/cjk-util-Dp0ZU0sh.cjs +167 -0
- package/dist/cjk-util-Dp0ZU0sh.cjs.map +1 -0
- package/dist/cjk-util-DubXBGDG.js +94 -0
- package/dist/cjk-util-DubXBGDG.js.map +1 -0
- package/dist/core-BUpclilG.d.cts +102 -0
- package/dist/core-BUpclilG.d.cts.map +1 -0
- package/dist/core-DFUj5GtA.d.ts +102 -0
- package/dist/core-DFUj5GtA.d.ts.map +1 -0
- package/dist/database.cjs +7 -0
- package/dist/database.d.cts +4 -0
- package/dist/database.d.ts +4 -0
- package/dist/database.js +5 -0
- package/dist/dictionary-D7l-qFt1.d.cts +316 -0
- package/dist/dictionary-D7l-qFt1.d.cts.map +1 -0
- package/dist/dictionary-_vzfBLWi.d.ts +316 -0
- package/dist/dictionary-_vzfBLWi.d.ts.map +1 -0
- package/dist/dictionary-data-util-CHnRdYZ9.cjs +378 -0
- package/dist/dictionary-data-util-CHnRdYZ9.cjs.map +1 -0
- package/dist/dictionary-data-util-CfOLfEDE.js +323 -0
- package/dist/dictionary-data-util-CfOLfEDE.js.map +1 -0
- package/dist/dictionary-database-BDC2f9zc.d.ts +58 -0
- package/dist/dictionary-database-BDC2f9zc.d.ts.map +1 -0
- package/dist/dictionary-database-CU4TsvCC.js +393 -0
- package/dist/dictionary-database-CU4TsvCC.js.map +1 -0
- package/dist/dictionary-database-DsOi04Sg.d.cts +58 -0
- package/dist/dictionary-database-DsOi04Sg.d.cts.map +1 -0
- package/dist/dictionary-database-lvFvftnO.cjs +412 -0
- package/dist/dictionary-database-lvFvftnO.cjs.map +1 -0
- package/dist/dictionary-importer-BkQQSBhm.d.ts +237 -0
- package/dist/dictionary-importer-BkQQSBhm.d.ts.map +1 -0
- package/dist/dictionary-importer-Cen1z6co.js +1821 -0
- package/dist/dictionary-importer-Cen1z6co.js.map +1 -0
- package/dist/dictionary-importer-DYmmWmcX.cjs +8 -0
- package/dist/dictionary-importer-Da3AuTZw.d.cts +237 -0
- package/dist/dictionary-importer-Da3AuTZw.d.cts.map +1 -0
- package/dist/dictionary-importer-Dhn75iZ4.cjs +1834 -0
- package/dist/dictionary-importer-Dhn75iZ4.cjs.map +1 -0
- package/dist/dictionary-importer-xWkel0h-.js +8 -0
- package/dist/dictionary-update-checker-BNE4pGTx.js +4 -0
- package/dist/dictionary-update-checker-Byjvifd2.cjs +75 -0
- package/dist/dictionary-update-checker-Byjvifd2.cjs.map +1 -0
- package/dist/dictionary-update-checker-YdpalZ41.cjs +4 -0
- package/dist/dictionary-update-checker-kKukiovj.js +69 -0
- package/dist/dictionary-update-checker-kKukiovj.js.map +1 -0
- package/dist/display-generator-BGVWiI0t.js +746 -0
- package/dist/display-generator-BGVWiI0t.js.map +1 -0
- package/dist/display-generator-BMQmG5Ov.cjs +9 -0
- package/dist/display-generator-BxZ7mBjP.js +9 -0
- package/dist/display-generator-DyP-HNzP.cjs +758 -0
- package/dist/display-generator-DyP-HNzP.cjs.map +1 -0
- package/dist/errors-BSezaJwm.cjs +35 -0
- package/dist/errors-BSezaJwm.cjs.map +1 -0
- package/dist/errors-DuuDSO5N.js +22 -0
- package/dist/errors-DuuDSO5N.js.map +1 -0
- package/dist/frequency-ranking-BXjfhhUQ.js +3 -0
- package/dist/frequency-ranking-Cx1kkIrw.cjs +3 -0
- package/dist/frequency-ranking-DEJMTMdg.js +159 -0
- package/dist/frequency-ranking-DEJMTMdg.js.map +1 -0
- package/dist/frequency-ranking-DVYxTXN-.cjs +166 -0
- package/dist/frequency-ranking-DVYxTXN-.cjs.map +1 -0
- package/dist/furigana-5HK97CY8.js +4 -0
- package/dist/furigana-9bBI9-qe.d.ts +47 -0
- package/dist/furigana-9bBI9-qe.d.ts.map +1 -0
- package/dist/furigana-B3-0y231.js +471 -0
- package/dist/furigana-B3-0y231.js.map +1 -0
- package/dist/furigana-CjOhzvZt.d.cts +47 -0
- package/dist/furigana-CjOhzvZt.d.cts.map +1 -0
- package/dist/furigana-DpZLcues.cjs +609 -0
- package/dist/furigana-DpZLcues.cjs.map +1 -0
- package/dist/furigana-h3v2ub4-.cjs +4 -0
- package/dist/import.cjs +12 -0
- package/dist/import.d.cts +107 -0
- package/dist/import.d.cts.map +1 -0
- package/dist/import.d.ts +107 -0
- package/dist/import.d.ts.map +1 -0
- package/dist/import.js +9 -0
- package/dist/index.cjs +275 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +211 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.ts +211 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +238 -0
- package/dist/index.js.map +1 -0
- package/dist/json-DGd-cunA.js +17 -0
- package/dist/json-DGd-cunA.js.map +1 -0
- package/dist/json-DKWp-B7Y.cjs +30 -0
- package/dist/json-DKWp-B7Y.cjs.map +1 -0
- package/dist/language-KN_u-nTR.d.ts +104 -0
- package/dist/language-KN_u-nTR.d.ts.map +1 -0
- package/dist/language-xAbQxgXc.d.cts +104 -0
- package/dist/language-xAbQxgXc.d.cts.map +1 -0
- package/dist/language.cjs +15626 -0
- package/dist/language.cjs.map +1 -0
- package/dist/language.d.cts +959 -0
- package/dist/language.d.cts.map +1 -0
- package/dist/language.d.ts +959 -0
- package/dist/language.d.ts.map +1 -0
- package/dist/language.js +15522 -0
- package/dist/language.js.map +1 -0
- package/dist/log-D8KtR3aP.cjs +67 -0
- package/dist/log-D8KtR3aP.cjs.map +1 -0
- package/dist/log-hgSll-dS.js +60 -0
- package/dist/log-hgSll-dS.js.map +1 -0
- package/dist/lookup.cjs +13 -0
- package/dist/lookup.d.cts +161 -0
- package/dist/lookup.d.cts.map +1 -0
- package/dist/lookup.d.ts +161 -0
- package/dist/lookup.d.ts.map +1 -0
- package/dist/lookup.js +10 -0
- package/dist/media-loader-BABA_E4W.js +3 -0
- package/dist/media-loader-Ce9cuANS.cjs +21 -0
- package/dist/media-loader-Ce9cuANS.cjs.map +1 -0
- package/dist/media-loader-qRti-Q6h.js +14 -0
- package/dist/media-loader-qRti-Q6h.js.map +1 -0
- package/dist/media-loader-xlUGaJrx.cjs +3 -0
- package/dist/multi-language-transformer-AlxOM6b3.js +637 -0
- package/dist/multi-language-transformer-AlxOM6b3.js.map +1 -0
- package/dist/multi-language-transformer-MdbQBBOt.cjs +685 -0
- package/dist/multi-language-transformer-MdbQBBOt.cjs.map +1 -0
- package/dist/multi-language-transformer-SEhcJXEB.d.ts +63 -0
- package/dist/multi-language-transformer-SEhcJXEB.d.ts.map +1 -0
- package/dist/multi-language-transformer-Ul9mbRce.d.cts +63 -0
- package/dist/multi-language-transformer-Ul9mbRce.d.cts.map +1 -0
- package/dist/pronunciation-generator-BtBc4q_V.js +397 -0
- package/dist/pronunciation-generator-BtBc4q_V.js.map +1 -0
- package/dist/pronunciation-generator-CBYdXYou.js +4 -0
- package/dist/pronunciation-generator-CFbZlf5J.cjs +445 -0
- package/dist/pronunciation-generator-CFbZlf5J.cjs.map +1 -0
- package/dist/pronunciation-generator-DOz9hEuk.cjs +4 -0
- package/dist/render.cjs +2796 -0
- package/dist/render.cjs.map +1 -0
- package/dist/render.d.cts +424 -0
- package/dist/render.d.cts.map +1 -0
- package/dist/render.d.ts +424 -0
- package/dist/render.d.ts.map +1 -0
- package/dist/render.js +2777 -0
- package/dist/render.js.map +1 -0
- package/dist/sentence-parser-BPAJNzqW.js +126 -0
- package/dist/sentence-parser-BPAJNzqW.js.map +1 -0
- package/dist/sentence-parser-BVIOI64h.cjs +132 -0
- package/dist/sentence-parser-BVIOI64h.cjs.map +1 -0
- package/dist/sentence-parser-BoHO3cHn.js +5 -0
- package/dist/sentence-parser-DQVLSW0z.cjs +5 -0
- package/dist/structured-content-generator-BtOApkTW.cjs +4 -0
- package/dist/structured-content-generator-Bx62RYa8.js +4 -0
- package/dist/structured-content-generator-CLnybumI.js +276 -0
- package/dist/structured-content-generator-CLnybumI.js.map +1 -0
- package/dist/structured-content-generator-DrwkB0-k.cjs +282 -0
- package/dist/structured-content-generator-DrwkB0-k.cjs.map +1 -0
- package/dist/text-utilities-B7PIythe.js +8 -0
- package/dist/text-utilities-B7PIythe.js.map +1 -0
- package/dist/text-utilities-Del2Ivkg.cjs +15 -0
- package/dist/text-utilities-Del2Ivkg.cjs.map +1 -0
- package/dist/translator-CRPlPzqi.cjs +1545 -0
- package/dist/translator-CRPlPzqi.cjs.map +1 -0
- package/dist/translator-CWgG5drA.js +1539 -0
- package/dist/translator-CWgG5drA.js.map +1 -0
- package/dist/translator-CaGtJvnQ.cjs +6 -0
- package/dist/translator-Cc6OGxrW.d.ts +180 -0
- package/dist/translator-Cc6OGxrW.d.ts.map +1 -0
- package/dist/translator-CcA-s-W4.d.cts +180 -0
- package/dist/translator-CcA-s-W4.d.cts.map +1 -0
- package/dist/translator-CuJOTK6l.js +6 -0
- package/dist/utilities-C-lbZaJE.cjs +52 -0
- package/dist/utilities-C-lbZaJE.cjs.map +1 -0
- package/dist/utilities-bi3EF-q5.js +33 -0
- package/dist/utilities-bi3EF-q5.js.map +1 -0
- package/package.json +102 -0
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dictionary-data-util-CHnRdYZ9.cjs","names":["dictionaryEntry: Dictionary.TermDictionaryEntry","headwordIndex: number","results: TagGroup[]","dictionaryInfo: DictionaryImporter.Summary[]","results: DictionaryFrequency<DDUTermFrequency>[]","frequencies: DDUTermFrequency[]","averageFrequencies: DDUTermFrequency[]","sourceFrequencies: Dictionary.KanjiFrequency[]","results: DictionaryFrequency<DDUKanjiFrequency>[]","frequencies: DDUKanjiFrequency[]","results: DictionaryGroupedPronunciations[]","pronunciations2: GroupedPronunciation[]","exclusiveReadings: string[]","pronunciations: Dictionary.Pronunciation[]","type: T","results: Dictionary.PronunciationGeneric<T>[]","termTags: { score: number }[]","headwords: Dictionary.TermHeadword[]","headwordIndices: number[]","allTermsSet: Set<string>","allReadingsSet: Set<string>","disambiguations: string[]","wordClasses: string[]","current: string","latest: string","reading: string","pronunciation: Dictionary.Pronunciation","groupedPronunciationList: GroupedPronunciationInternal[]","pronunciation2: Dictionary.Pronunciation","array1: T[]","array2: T[]","tagList1: Dictionary.Tag[]","tagList2: Dictionary.Tag[]","set1: Set<T>","set2: Set<T>","result: T[]","array: unknown[]"],"sources":["../src/util/dictionary-data-util.ts"],"sourcesContent":["import type * as Dictionary from '../types/dictionary';\nimport type {\n KanjiFrequency as DDUKanjiFrequency,\n TermFrequency as DDUTermFrequency,\n DictionaryFrequency,\n DictionaryGroupedPronunciations,\n FrequencyValue,\n GroupedPronunciation,\n GroupedPronunciationInternal,\n TagGroup,\n TermFrequencyType,\n} from '../types/dictionary-data-util';\nimport type * as DictionaryImporter from '../types/dictionary-importer';\n\n/**\n * Computes the harmonic mean of frequency values for a given headword in a dictionary entry.\n */\nexport function getFrequencyHarmonic(dictionaryEntry: Dictionary.TermDictionaryEntry, headwordIndex: number): number {\n const dominated = 
dictionaryEntry.frequencies\n .filter((f) => f.headwordIndex === headwordIndex)\n .map((f) => f.frequency);\n if (dominated.length === 0) {\n return 0;\n }\n let sum = 0;\n for (const freq of dominated) {\n if (freq <= 0) {\n continue;\n }\n sum += 1 / freq;\n }\n return sum > 0 ? Math.round(dominated.length / sum) : 0;\n}\n\nexport function groupTermTags(dictionaryEntry: Dictionary.TermDictionaryEntry): TagGroup[] {\n const { headwords } = dictionaryEntry;\n const headwordCount = headwords.length;\n const uniqueCheck = headwordCount > 1;\n const resultsIndexMap = new Map<string, number>();\n const results: TagGroup[] = [];\n for (let i = 0; i < headwordCount; ++i) {\n const { tags } = headwords[i];\n for (const tag of tags) {\n if (uniqueCheck) {\n const { name, category, content, dictionaries } = tag;\n const key = createMapKey([name, category, content, dictionaries]);\n const index = resultsIndexMap.get(key);\n if (typeof index !== 'undefined') {\n results[index].headwordIndices.push(i);\n continue;\n }\n resultsIndexMap.set(key, results.length);\n }\n results.push({ tag, headwordIndices: [i] });\n }\n }\n return results;\n}\n\nexport function groupTermFrequencies(\n dictionaryEntry: Dictionary.TermDictionaryEntry,\n dictionaryInfo: DictionaryImporter.Summary[],\n): DictionaryFrequency<DDUTermFrequency>[] {\n const { headwords, frequencies: sourceFrequencies } = dictionaryEntry;\n\n const map1 = new Map<\n string,\n Map<string, { term: string; reading: string | null; values: Map<string, FrequencyValue> }>\n >();\n const aliasMap = new Map<string, string>();\n for (const {\n headwordIndex,\n dictionary,\n dictionaryAlias,\n hasReading,\n frequency,\n displayValue,\n } of sourceFrequencies) {\n const { term, reading } = headwords[headwordIndex];\n\n let map2 = map1.get(dictionary);\n if (typeof map2 === 'undefined') {\n map2 = new Map();\n map1.set(dictionary, map2);\n aliasMap.set(dictionary, dictionaryAlias);\n }\n\n const readingKey = hasReading ? 
reading : null;\n const key = createMapKey([term, readingKey]);\n let frequencyData = map2.get(key);\n if (typeof frequencyData === 'undefined') {\n frequencyData = { term, reading: readingKey, values: new Map() };\n map2.set(key, frequencyData);\n }\n\n frequencyData.values.set(createMapKey([frequency, displayValue]), { frequency, displayValue });\n }\n\n const results: DictionaryFrequency<DDUTermFrequency>[] = [];\n\n for (const [dictionary, map2] of map1.entries()) {\n const frequencies: DDUTermFrequency[] = [];\n const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;\n for (const { term, reading, values } of map2.values()) {\n frequencies.push({ term, reading, values: [...values.values()] });\n }\n const currentDictionaryInfo = dictionaryInfo.find(({ title }) => title === dictionary);\n const freqCount = currentDictionaryInfo?.counts?.termMeta.freq ?? 0;\n results.push({ dictionary, frequencies, dictionaryAlias, freqCount });\n }\n\n const averageFrequencies: DDUTermFrequency[] = [];\n for (let i = 0; i < dictionaryEntry.headwords.length; i++) {\n const averageFrequency = getFrequencyHarmonic(dictionaryEntry, i);\n averageFrequencies.push({\n term: dictionaryEntry.headwords[i].term,\n reading: dictionaryEntry.headwords[i].reading,\n values: [{ frequency: averageFrequency, displayValue: averageFrequency.toString() }],\n });\n }\n\n results.push({\n dictionary: 'Average',\n frequencies: averageFrequencies,\n dictionaryAlias: 'Average',\n freqCount: averageFrequencies.length,\n });\n\n return results;\n}\n\nexport function groupKanjiFrequencies(\n sourceFrequencies: Dictionary.KanjiFrequency[],\n dictionaryInfo: DictionaryImporter.Summary[],\n): DictionaryFrequency<DDUKanjiFrequency>[] {\n const map1 = new Map<string, Map<string, { character: string; values: Map<string, FrequencyValue> }>>();\n const aliasMap = new Map<string, string>();\n for (const { dictionary, dictionaryAlias, character, frequency, displayValue } of sourceFrequencies) {\n let map2 = 
map1.get(dictionary);\n if (typeof map2 === 'undefined') {\n map2 = new Map();\n map1.set(dictionary, map2);\n aliasMap.set(dictionary, dictionaryAlias);\n }\n let frequencyData = map2.get(character);\n if (typeof frequencyData === 'undefined') {\n frequencyData = { character, values: new Map() };\n map2.set(character, frequencyData);\n }\n frequencyData.values.set(createMapKey([frequency, displayValue]), { frequency, displayValue });\n }\n\n const results: DictionaryFrequency<DDUKanjiFrequency>[] = [];\n for (const [dictionary, map2] of map1.entries()) {\n const frequencies: DDUKanjiFrequency[] = [];\n const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;\n for (const { character, values } of map2.values()) {\n frequencies.push({ character, values: [...values.values()] });\n }\n const currentDictionaryInfo = dictionaryInfo.find(({ title }) => title === dictionary);\n const freqCount = currentDictionaryInfo?.counts?.kanjiMeta.freq ?? 0;\n results.push({ dictionary, frequencies, dictionaryAlias, freqCount });\n }\n return results;\n}\n\nexport function getGroupedPronunciations(\n dictionaryEntry: Dictionary.TermDictionaryEntry,\n): DictionaryGroupedPronunciations[] {\n const { headwords, pronunciations: termPronunciations } = dictionaryEntry;\n\n const allTerms = new Set<string>();\n const allReadings = new Set<string>();\n const aliasMap = new Map<string, string>();\n for (const { term, reading } of headwords) {\n allTerms.add(term);\n allReadings.add(reading);\n }\n\n const groupedPronunciationsMap = new Map<string, GroupedPronunciationInternal[]>();\n for (const { headwordIndex, dictionary, dictionaryAlias, pronunciations } of termPronunciations) {\n const { term, reading } = headwords[headwordIndex];\n let dictionaryGroupedPronunciationList = groupedPronunciationsMap.get(dictionary);\n if (typeof dictionaryGroupedPronunciationList === 'undefined') {\n dictionaryGroupedPronunciationList = [];\n groupedPronunciationsMap.set(dictionary, 
dictionaryGroupedPronunciationList);\n aliasMap.set(dictionary, dictionaryAlias);\n }\n for (const pronunciation of pronunciations) {\n let groupedPronunciation = findExistingGroupedPronunciation(\n reading,\n pronunciation,\n dictionaryGroupedPronunciationList,\n );\n if (groupedPronunciation === null) {\n groupedPronunciation = { pronunciation, terms: new Set(), reading };\n dictionaryGroupedPronunciationList.push(groupedPronunciation);\n }\n groupedPronunciation.terms.add(term);\n }\n }\n\n const results: DictionaryGroupedPronunciations[] = [];\n const multipleReadings = allReadings.size > 1;\n for (const [dictionary, dictionaryGroupedPronunciationList] of groupedPronunciationsMap.entries()) {\n const pronunciations2: GroupedPronunciation[] = [];\n const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;\n for (const groupedPronunciation of dictionaryGroupedPronunciationList) {\n const { pronunciation, terms, reading } = groupedPronunciation;\n const exclusiveTerms = !areSetsEqual(terms, allTerms) ? 
getSetIntersection(terms, allTerms) : [];\n const exclusiveReadings: string[] = [];\n if (multipleReadings) {\n exclusiveReadings.push(reading);\n }\n pronunciations2.push({ pronunciation, terms: [...terms], reading, exclusiveTerms, exclusiveReadings });\n }\n results.push({ dictionary, dictionaryAlias, pronunciations: pronunciations2 });\n }\n return results;\n}\n\nexport function getPronunciationsOfType<T extends Dictionary.PronunciationType>(\n pronunciations: Dictionary.Pronunciation[],\n type: T,\n): Dictionary.PronunciationGeneric<T>[] {\n const results: Dictionary.PronunciationGeneric<T>[] = [];\n for (const pronunciation of pronunciations) {\n if (pronunciation.type === type) {\n results.push(pronunciation as Dictionary.PronunciationGeneric<T>);\n }\n }\n return results;\n}\n\nexport function getTermFrequency(termTags: { score: number }[]): TermFrequencyType {\n let totalScore = 0;\n for (const { score } of termTags) {\n totalScore += score;\n }\n if (totalScore > 0) {\n return 'popular';\n }\n if (totalScore < 0) {\n return 'rare';\n }\n return 'normal';\n}\n\nexport function getDisambiguations(\n headwords: Dictionary.TermHeadword[],\n headwordIndices: number[],\n allTermsSet: Set<string>,\n allReadingsSet: Set<string>,\n): string[] {\n if (allTermsSet.size <= 1 && allReadingsSet.size <= 1) {\n return [];\n }\n const terms = new Set<string>();\n const readings = new Set<string>();\n for (const headwordIndex of headwordIndices) {\n const { term, reading } = headwords[headwordIndex];\n terms.add(term);\n readings.add(reading);\n }\n const disambiguations: string[] = [];\n const addTerms = !areSetsEqual(terms, allTermsSet);\n const addReadings = !areSetsEqual(readings, allReadingsSet);\n if (addTerms) {\n disambiguations.push(...getSetIntersection(terms, allTermsSet));\n }\n if (addReadings) {\n if (addTerms) {\n for (const term of terms) {\n readings.delete(term);\n }\n }\n disambiguations.push(...getSetIntersection(readings, allReadingsSet));\n }\n return 
disambiguations;\n}\n\nexport function isNonNounVerbOrAdjective(wordClasses: string[]): boolean {\n let isVerbOrAdjective = false;\n let isSuruVerb = false;\n let isNoun = false;\n for (const wordClass of wordClasses) {\n switch (wordClass) {\n case 'v1':\n case 'v5':\n case 'vk':\n case 'vz':\n case 'adj-i':\n isVerbOrAdjective = true;\n break;\n case 'vs':\n isVerbOrAdjective = true;\n isSuruVerb = true;\n break;\n case 'n':\n isNoun = true;\n break;\n }\n }\n return isVerbOrAdjective && !(isSuruVerb && isNoun);\n}\n\nexport function compareRevisions(current: string, latest: string): boolean {\n const simpleVersionTest = /^(\\d+\\.)*\\d+$/;\n if (!simpleVersionTest.test(current) || !simpleVersionTest.test(latest)) {\n return current < latest;\n }\n const currentParts = current.split('.').map((part) => Number.parseInt(part, 10));\n const latestParts = latest.split('.').map((part) => Number.parseInt(part, 10));\n if (currentParts.length !== latestParts.length) {\n return current < latest;\n }\n for (let i = 0; i < currentParts.length; i++) {\n if (currentParts[i] !== latestParts[i]) {\n return currentParts[i] < latestParts[i];\n }\n }\n return false;\n}\n\n// Private helpers\n\nfunction findExistingGroupedPronunciation(\n reading: string,\n pronunciation: Dictionary.Pronunciation,\n groupedPronunciationList: GroupedPronunciationInternal[],\n): GroupedPronunciationInternal | null {\n return (\n groupedPronunciationList.find(\n (gp) => gp.reading === reading && arePronunciationsEquivalent(gp, pronunciation),\n ) ?? 
null\n );\n}\n\nfunction arePronunciationsEquivalent(\n { pronunciation: pronunciation1 }: GroupedPronunciationInternal,\n pronunciation2: Dictionary.Pronunciation,\n): boolean {\n if (pronunciation1.type !== pronunciation2.type || !areTagListsEqual(pronunciation1.tags, pronunciation2.tags)) {\n return false;\n }\n switch (pronunciation1.type) {\n case 'pitch-accent': {\n const pitchAccent2 = pronunciation2 as Dictionary.PitchAccent;\n return (\n pronunciation1.positions === pitchAccent2.positions &&\n areSimpleArraysEqual(pronunciation1.nasalPositions, pitchAccent2.nasalPositions) &&\n areSimpleArraysEqual(pronunciation1.devoicePositions, pitchAccent2.devoicePositions)\n );\n }\n case 'phonetic-transcription': {\n const phoneticTranscription2 = pronunciation2 as Dictionary.PhoneticTranscription;\n return pronunciation1.ipa === phoneticTranscription2.ipa;\n }\n }\n return true;\n}\n\nfunction areSimpleArraysEqual<T>(array1: T[], array2: T[]): boolean {\n const ii = array1.length;\n if (ii !== array2.length) {\n return false;\n }\n for (let i = 0; i < ii; ++i) {\n if (array1[i] !== array2[i]) {\n return false;\n }\n }\n return true;\n}\n\nfunction areTagListsEqual(tagList1: Dictionary.Tag[], tagList2: Dictionary.Tag[]): boolean {\n const ii = tagList1.length;\n if (tagList2.length !== ii) {\n return false;\n }\n for (let i = 0; i < ii; ++i) {\n const tag1 = tagList1[i];\n const tag2 = tagList2[i];\n if (tag1.name !== tag2.name || !areSimpleArraysEqual(tag1.dictionaries, tag2.dictionaries)) {\n return false;\n }\n }\n return true;\n}\n\nfunction areSetsEqual<T>(set1: Set<T>, set2: Set<T>): boolean {\n if (set1.size !== set2.size) {\n return false;\n }\n for (const value of set1) {\n if (!set2.has(value)) {\n return false;\n }\n }\n return true;\n}\n\nfunction getSetIntersection<T>(set1: Set<T>, set2: Set<T>): T[] {\n const result: T[] = [];\n for (const value of set1) {\n if (set2.has(value)) {\n result.push(value);\n }\n }\n return result;\n}\n\nfunction 
createMapKey(array: unknown[]): string {\n return JSON.stringify(array);\n}\n"],"mappings":";;;;;;AAiBA,SAAgB,qBAAqBA,iBAAiDC,eAA+B;CACjH,MAAM,YAAY,gBAAgB,YAC7B,OAAO,CAAC,MAAM,EAAE,kBAAkB,cAAc,CAChD,IAAI,CAAC,MAAM,EAAE,UAAU;AAC5B,KAAI,UAAU,WAAW,EACrB,QAAO;CAEX,IAAI,MAAM;AACV,MAAK,MAAM,QAAQ,WAAW;AAC1B,MAAI,QAAQ,EACR;AAEJ,SAAO,IAAI;CACd;AACD,QAAO,MAAM,IAAI,KAAK,MAAM,UAAU,SAAS,IAAI,GAAG;AACzD;AAED,SAAgB,cAAcD,iBAA6D;CACvF,MAAM,EAAE,WAAW,GAAG;CACtB,MAAM,gBAAgB,UAAU;CAChC,MAAM,cAAc,gBAAgB;CACpC,MAAM,kBAAkB,IAAI;CAC5B,MAAME,UAAsB,CAAE;AAC9B,MAAK,IAAI,IAAI,GAAG,IAAI,eAAe,EAAE,GAAG;EACpC,MAAM,EAAE,MAAM,GAAG,UAAU;AAC3B,OAAK,MAAM,OAAO,MAAM;AACpB,OAAI,aAAa;IACb,MAAM,EAAE,MAAM,UAAU,SAAS,cAAc,GAAG;IAClD,MAAM,MAAM,aAAa;KAAC;KAAM;KAAU;KAAS;IAAa,EAAC;IACjE,MAAM,QAAQ,gBAAgB,IAAI,IAAI;AACtC,eAAW,UAAU,aAAa;AAC9B,aAAQ,OAAO,gBAAgB,KAAK,EAAE;AACtC;IACH;AACD,oBAAgB,IAAI,KAAK,QAAQ,OAAO;GAC3C;AACD,WAAQ,KAAK;IAAE;IAAK,iBAAiB,CAAC,CAAE;GAAE,EAAC;EAC9C;CACJ;AACD,QAAO;AACV;AAED,SAAgB,qBACZF,iBACAG,gBACuC;CACvC,MAAM,EAAE,WAAW,aAAa,mBAAmB,GAAG;CAEtD,MAAM,OAAO,IAAI;CAIjB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,EACP,eACA,YACA,iBACA,YACA,WACA,cACH,IAAI,mBAAmB;EACpB,MAAM,EAAE,MAAM,SAAS,GAAG,UAAU;EAEpC,IAAI,OAAO,KAAK,IAAI,WAAW;AAC/B,aAAW,SAAS,aAAa;AAC7B,UAAO,IAAI;AACX,QAAK,IAAI,YAAY,KAAK;AAC1B,YAAS,IAAI,YAAY,gBAAgB;EAC5C;EAED,MAAM,aAAa,aAAa,UAAU;EAC1C,MAAM,MAAM,aAAa,CAAC,MAAM,UAAW,EAAC;EAC5C,IAAI,gBAAgB,KAAK,IAAI,IAAI;AACjC,aAAW,kBAAkB,aAAa;AACtC,mBAAgB;IAAE;IAAM,SAAS;IAAY,QAAQ,IAAI;GAAO;AAChE,QAAK,IAAI,KAAK,cAAc;EAC/B;AAED,gBAAc,OAAO,IAAI,aAAa,CAAC,WAAW,YAAa,EAAC,EAAE;GAAE;GAAW;EAAc,EAAC;CACjG;CAED,MAAMC,UAAmD,CAAE;AAE3D,MAAK,MAAM,CAAC,YAAY,KAAK,IAAI,KAAK,SAAS,EAAE;EAC7C,MAAMC,cAAkC,CAAE;EAC1C,MAAM,kBAAkB,SAAS,IAAI,WAAW,IAAI;AACpD,OAAK,MAAM,EAAE,MAAM,SAAS,QAAQ,IAAI,KAAK,QAAQ,CACjD,aAAY,KAAK;GAAE;GAAM;GAAS,QAAQ,CAAC,GAAG,OAAO,QAAQ,AAAC;EAAE,EAAC;EAErE,MAAM,wBAAwB,eAAe,KAAK,CAAC,EAAE,OAAO,KAAK,UAAU,WAAW;EACtF,MAAM,YAAY,uBAAuB,QAAQ,SAAS,QAAQ;AAClE,UAAQ,KAAK;GAAE;GAAY;GAAa;GAAiB;EAAW,EAAC;CACxE;CAED,MAAMC,qBAAyC,
CAAE;AACjD,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,UAAU,QAAQ,KAAK;EACvD,MAAM,mBAAmB,qBAAqB,iBAAiB,EAAE;AACjE,qBAAmB,KAAK;GACpB,MAAM,gBAAgB,UAAU,GAAG;GACnC,SAAS,gBAAgB,UAAU,GAAG;GACtC,QAAQ,CAAC;IAAE,WAAW;IAAkB,cAAc,iBAAiB,UAAU;GAAE,CAAC;EACvF,EAAC;CACL;AAED,SAAQ,KAAK;EACT,YAAY;EACZ,aAAa;EACb,iBAAiB;EACjB,WAAW,mBAAmB;CACjC,EAAC;AAEF,QAAO;AACV;AAED,SAAgB,sBACZC,mBACAJ,gBACwC;CACxC,MAAM,OAAO,IAAI;CACjB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,EAAE,YAAY,iBAAiB,WAAW,WAAW,cAAc,IAAI,mBAAmB;EACjG,IAAI,OAAO,KAAK,IAAI,WAAW;AAC/B,aAAW,SAAS,aAAa;AAC7B,UAAO,IAAI;AACX,QAAK,IAAI,YAAY,KAAK;AAC1B,YAAS,IAAI,YAAY,gBAAgB;EAC5C;EACD,IAAI,gBAAgB,KAAK,IAAI,UAAU;AACvC,aAAW,kBAAkB,aAAa;AACtC,mBAAgB;IAAE;IAAW,QAAQ,IAAI;GAAO;AAChD,QAAK,IAAI,WAAW,cAAc;EACrC;AACD,gBAAc,OAAO,IAAI,aAAa,CAAC,WAAW,YAAa,EAAC,EAAE;GAAE;GAAW;EAAc,EAAC;CACjG;CAED,MAAMK,UAAoD,CAAE;AAC5D,MAAK,MAAM,CAAC,YAAY,KAAK,IAAI,KAAK,SAAS,EAAE;EAC7C,MAAMC,cAAmC,CAAE;EAC3C,MAAM,kBAAkB,SAAS,IAAI,WAAW,IAAI;AACpD,OAAK,MAAM,EAAE,WAAW,QAAQ,IAAI,KAAK,QAAQ,CAC7C,aAAY,KAAK;GAAE;GAAW,QAAQ,CAAC,GAAG,OAAO,QAAQ,AAAC;EAAE,EAAC;EAEjE,MAAM,wBAAwB,eAAe,KAAK,CAAC,EAAE,OAAO,KAAK,UAAU,WAAW;EACtF,MAAM,YAAY,uBAAuB,QAAQ,UAAU,QAAQ;AACnE,UAAQ,KAAK;GAAE;GAAY;GAAa;GAAiB;EAAW,EAAC;CACxE;AACD,QAAO;AACV;AAED,SAAgB,yBACZT,iBACiC;CACjC,MAAM,EAAE,WAAW,gBAAgB,oBAAoB,GAAG;CAE1D,MAAM,WAAW,IAAI;CACrB,MAAM,cAAc,IAAI;CACxB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,EAAE,MAAM,SAAS,IAAI,WAAW;AACvC,WAAS,IAAI,KAAK;AAClB,cAAY,IAAI,QAAQ;CAC3B;CAED,MAAM,2BAA2B,IAAI;AACrC,MAAK,MAAM,EAAE,eAAe,YAAY,iBAAiB,gBAAgB,IAAI,oBAAoB;EAC7F,MAAM,EAAE,MAAM,SAAS,GAAG,UAAU;EACpC,IAAI,qCAAqC,yBAAyB,IAAI,WAAW;AACjF,aAAW,uCAAuC,aAAa;AAC3D,wCAAqC,CAAE;AACvC,4BAAyB,IAAI,YAAY,mCAAmC;AAC5E,YAAS,IAAI,YAAY,gBAAgB;EAC5C;AACD,OAAK,MAAM,iBAAiB,gBAAgB;GACxC,IAAI,uBAAuB,iCACvB,SACA,eACA,mCACH;AACD,OAAI,yBAAyB,MAAM;AAC/B,2BAAuB;KAAE;KAAe,OAAO,IAAI;KAAO;IAAS;AACnE,uCAAmC,KAAK,qBAAqB;GAChE;AACD,wBAAqB,MAAM,IAAI,KAAK;EACvC;CACJ;CAED,MAAMU,UAA6C,CAAE;CACrD,MAAM,mBAAmB,YAAY,OAAO;AAC5C,MAAK,MAAM,CAAC,YAAY,mCAAmC,IAAI,yBAAyB,SAAS,EAAE;EAC/F,MAAMC
,kBAA0C,CAAE;EAClD,MAAM,kBAAkB,SAAS,IAAI,WAAW,IAAI;AACpD,OAAK,MAAM,wBAAwB,oCAAoC;GACnE,MAAM,EAAE,eAAe,OAAO,SAAS,GAAG;GAC1C,MAAM,kBAAkB,aAAa,OAAO,SAAS,GAAG,mBAAmB,OAAO,SAAS,GAAG,CAAE;GAChG,MAAMC,oBAA8B,CAAE;AACtC,OAAI,iBACA,mBAAkB,KAAK,QAAQ;AAEnC,mBAAgB,KAAK;IAAE;IAAe,OAAO,CAAC,GAAG,KAAM;IAAE;IAAS;IAAgB;GAAmB,EAAC;EACzG;AACD,UAAQ,KAAK;GAAE;GAAY;GAAiB,gBAAgB;EAAiB,EAAC;CACjF;AACD,QAAO;AACV;AAED,SAAgB,wBACZC,gBACAC,MACoC;CACpC,MAAMC,UAAgD,CAAE;AACxD,MAAK,MAAM,iBAAiB,eACxB,KAAI,cAAc,SAAS,KACvB,SAAQ,KAAK,cAAoD;AAGzE,QAAO;AACV;AAED,SAAgB,iBAAiBC,UAAkD;CAC/E,IAAI,aAAa;AACjB,MAAK,MAAM,EAAE,OAAO,IAAI,SACpB,eAAc;AAElB,KAAI,aAAa,EACb,QAAO;AAEX,KAAI,aAAa,EACb,QAAO;AAEX,QAAO;AACV;AAED,SAAgB,mBACZC,WACAC,iBACAC,aACAC,gBACQ;AACR,KAAI,YAAY,QAAQ,KAAK,eAAe,QAAQ,EAChD,QAAO,CAAE;CAEb,MAAM,QAAQ,IAAI;CAClB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,iBAAiB,iBAAiB;EACzC,MAAM,EAAE,MAAM,SAAS,GAAG,UAAU;AACpC,QAAM,IAAI,KAAK;AACf,WAAS,IAAI,QAAQ;CACxB;CACD,MAAMC,kBAA4B,CAAE;CACpC,MAAM,YAAY,aAAa,OAAO,YAAY;CAClD,MAAM,eAAe,aAAa,UAAU,eAAe;AAC3D,KAAI,SACA,iBAAgB,KAAK,GAAG,mBAAmB,OAAO,YAAY,CAAC;AAEnE,KAAI,aAAa;AACb,MAAI,SACA,MAAK,MAAM,QAAQ,MACf,UAAS,OAAO,KAAK;AAG7B,kBAAgB,KAAK,GAAG,mBAAmB,UAAU,eAAe,CAAC;CACxE;AACD,QAAO;AACV;AAED,SAAgB,yBAAyBC,aAAgC;CACrE,IAAI,oBAAoB;CACxB,IAAI,aAAa;CACjB,IAAI,SAAS;AACb,MAAK,MAAM,aAAa,YACpB,SAAQ,WAAR;EACI,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;AACD,uBAAoB;AACpB;EACJ,KAAK;AACD,uBAAoB;AACpB,gBAAa;AACb;EACJ,KAAK;AACD,YAAS;AACT;CACP;AAEL,QAAO,uBAAuB,cAAc;AAC/C;AAED,SAAgB,iBAAiBC,SAAiBC,QAAyB;CACvE,MAAM,oBAAoB;AAC1B,MAAK,kBAAkB,KAAK,QAAQ,KAAK,kBAAkB,KAAK,OAAO,CACnE,QAAO,UAAU;CAErB,MAAM,eAAe,QAAQ,MAAM,IAAI,CAAC,IAAI,CAAC,SAAS,OAAO,SAAS,MAAM,GAAG,CAAC;CAChF,MAAM,cAAc,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,SAAS,OAAO,SAAS,MAAM,GAAG,CAAC;AAC9E,KAAI,aAAa,WAAW,YAAY,OACpC,QAAO,UAAU;AAErB,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACrC,KAAI,aAAa,OAAO,YAAY,GAChC,QAAO,aAAa,KAAK,YAAY;AAG7C,QAAO;AACV;AAID,SAAS,iCACLC,SACAC,eACAC,0BACmC;AACnC,QACI,yBAAyB,KACrB,CAAC,OAAO,GAAG,YAAY,WAAW,4BAA4B,IAAI,cAAc,CACnF,I
AAI;AAEZ;AAED,SAAS,4BACL,EAAE,eAAe,gBAA8C,EAC/DC,gBACO;AACP,KAAI,eAAe,SAAS,eAAe,SAAS,iBAAiB,eAAe,MAAM,eAAe,KAAK,CAC1G,QAAO;AAEX,SAAQ,eAAe,MAAvB;EACI,KAAK,gBAAgB;GACjB,MAAM,eAAe;AACrB,UACI,eAAe,cAAc,aAAa,aAC1C,qBAAqB,eAAe,gBAAgB,aAAa,eAAe,IAChF,qBAAqB,eAAe,kBAAkB,aAAa,iBAAiB;EAE3F;EACD,KAAK,0BAA0B;GAC3B,MAAM,yBAAyB;AAC/B,UAAO,eAAe,QAAQ,uBAAuB;EACxD;CACJ;AACD,QAAO;AACV;AAED,SAAS,qBAAwBC,QAAaC,QAAsB;CAChE,MAAM,KAAK,OAAO;AAClB,KAAI,OAAO,OAAO,OACd,QAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,EAAE,EACtB,KAAI,OAAO,OAAO,OAAO,GACrB,QAAO;AAGf,QAAO;AACV;AAED,SAAS,iBAAiBC,UAA4BC,UAAqC;CACvF,MAAM,KAAK,SAAS;AACpB,KAAI,SAAS,WAAW,GACpB,QAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,EAAE,GAAG;EACzB,MAAM,OAAO,SAAS;EACtB,MAAM,OAAO,SAAS;AACtB,MAAI,KAAK,SAAS,KAAK,SAAS,qBAAqB,KAAK,cAAc,KAAK,aAAa,CACtF,QAAO;CAEd;AACD,QAAO;AACV;AAED,SAAS,aAAgBC,MAAcC,MAAuB;AAC1D,KAAI,KAAK,SAAS,KAAK,KACnB,QAAO;AAEX,MAAK,MAAM,SAAS,KAChB,MAAK,KAAK,IAAI,MAAM,CAChB,QAAO;AAGf,QAAO;AACV;AAED,SAAS,mBAAsBD,MAAcC,MAAmB;CAC5D,MAAMC,SAAc,CAAE;AACtB,MAAK,MAAM,SAAS,KAChB,KAAI,KAAK,IAAI,MAAM,CACf,QAAO,KAAK,MAAM;AAG1B,QAAO;AACV;AAED,SAAS,aAAaC,OAA0B;AAC5C,QAAO,KAAK,UAAU,MAAM;AAC/B"}
|
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
//#region src/util/dictionary-data-util.ts
/**
 * Computes the harmonic mean of the frequency values attached to one headword
 * of a term dictionary entry.
 *
 * Non-positive frequencies are excluded from the reciprocal sum, yet the total
 * entry count still includes them when forming the mean.
 * NOTE(review): skipped (<= 0) entries therefore inflate the result relative to
 * a strict harmonic mean of the positive values — confirm this matches the
 * intended upstream behavior.
 *
 * @param {Object} dictionaryEntry - Entry whose `frequencies` array is scanned.
 * @param {number} headwordIndex - Index selecting which headword's frequencies to use.
 * @returns {number} Rounded harmonic mean, or 0 when no usable frequencies exist.
 */
function getFrequencyHarmonic(dictionaryEntry, headwordIndex) {
	// Collect the raw frequency values belonging to the requested headword.
	const matching = [];
	for (const entry of dictionaryEntry.frequencies) {
		if (entry.headwordIndex === headwordIndex) {
			matching.push(entry.frequency);
		}
	}
	if (matching.length === 0) {
		return 0;
	}
	// Sum reciprocals of the positive values only; zeros/negatives contribute nothing.
	const reciprocalSum = matching.reduce(
		(accumulator, value) => (value > 0 ? accumulator + 1 / value : accumulator),
		0,
	);
	if (reciprocalSum <= 0) {
		return 0;
	}
	return Math.round(matching.length / reciprocalSum);
}
|
|
15
|
+
function groupTermTags(dictionaryEntry) {
|
|
16
|
+
const { headwords } = dictionaryEntry;
|
|
17
|
+
const headwordCount = headwords.length;
|
|
18
|
+
const uniqueCheck = headwordCount > 1;
|
|
19
|
+
const resultsIndexMap = new Map();
|
|
20
|
+
const results = [];
|
|
21
|
+
for (let i = 0; i < headwordCount; ++i) {
|
|
22
|
+
const { tags } = headwords[i];
|
|
23
|
+
for (const tag of tags) {
|
|
24
|
+
if (uniqueCheck) {
|
|
25
|
+
const { name, category, content, dictionaries } = tag;
|
|
26
|
+
const key = createMapKey([
|
|
27
|
+
name,
|
|
28
|
+
category,
|
|
29
|
+
content,
|
|
30
|
+
dictionaries
|
|
31
|
+
]);
|
|
32
|
+
const index = resultsIndexMap.get(key);
|
|
33
|
+
if (typeof index !== "undefined") {
|
|
34
|
+
results[index].headwordIndices.push(i);
|
|
35
|
+
continue;
|
|
36
|
+
}
|
|
37
|
+
resultsIndexMap.set(key, results.length);
|
|
38
|
+
}
|
|
39
|
+
results.push({
|
|
40
|
+
tag,
|
|
41
|
+
headwordIndices: [i]
|
|
42
|
+
});
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
return results;
|
|
46
|
+
}
|
|
47
|
+
function groupTermFrequencies(dictionaryEntry, dictionaryInfo) {
|
|
48
|
+
const { headwords, frequencies: sourceFrequencies } = dictionaryEntry;
|
|
49
|
+
const map1 = new Map();
|
|
50
|
+
const aliasMap = new Map();
|
|
51
|
+
for (const { headwordIndex, dictionary, dictionaryAlias, hasReading, frequency, displayValue } of sourceFrequencies) {
|
|
52
|
+
const { term, reading } = headwords[headwordIndex];
|
|
53
|
+
let map2 = map1.get(dictionary);
|
|
54
|
+
if (typeof map2 === "undefined") {
|
|
55
|
+
map2 = new Map();
|
|
56
|
+
map1.set(dictionary, map2);
|
|
57
|
+
aliasMap.set(dictionary, dictionaryAlias);
|
|
58
|
+
}
|
|
59
|
+
const readingKey = hasReading ? reading : null;
|
|
60
|
+
const key = createMapKey([term, readingKey]);
|
|
61
|
+
let frequencyData = map2.get(key);
|
|
62
|
+
if (typeof frequencyData === "undefined") {
|
|
63
|
+
frequencyData = {
|
|
64
|
+
term,
|
|
65
|
+
reading: readingKey,
|
|
66
|
+
values: new Map()
|
|
67
|
+
};
|
|
68
|
+
map2.set(key, frequencyData);
|
|
69
|
+
}
|
|
70
|
+
frequencyData.values.set(createMapKey([frequency, displayValue]), {
|
|
71
|
+
frequency,
|
|
72
|
+
displayValue
|
|
73
|
+
});
|
|
74
|
+
}
|
|
75
|
+
const results = [];
|
|
76
|
+
for (const [dictionary, map2] of map1.entries()) {
|
|
77
|
+
const frequencies = [];
|
|
78
|
+
const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;
|
|
79
|
+
for (const { term, reading, values } of map2.values()) frequencies.push({
|
|
80
|
+
term,
|
|
81
|
+
reading,
|
|
82
|
+
values: [...values.values()]
|
|
83
|
+
});
|
|
84
|
+
const currentDictionaryInfo = dictionaryInfo.find(({ title }) => title === dictionary);
|
|
85
|
+
const freqCount = currentDictionaryInfo?.counts?.termMeta.freq ?? 0;
|
|
86
|
+
results.push({
|
|
87
|
+
dictionary,
|
|
88
|
+
frequencies,
|
|
89
|
+
dictionaryAlias,
|
|
90
|
+
freqCount
|
|
91
|
+
});
|
|
92
|
+
}
|
|
93
|
+
const averageFrequencies = [];
|
|
94
|
+
for (let i = 0; i < dictionaryEntry.headwords.length; i++) {
|
|
95
|
+
const averageFrequency = getFrequencyHarmonic(dictionaryEntry, i);
|
|
96
|
+
averageFrequencies.push({
|
|
97
|
+
term: dictionaryEntry.headwords[i].term,
|
|
98
|
+
reading: dictionaryEntry.headwords[i].reading,
|
|
99
|
+
values: [{
|
|
100
|
+
frequency: averageFrequency,
|
|
101
|
+
displayValue: averageFrequency.toString()
|
|
102
|
+
}]
|
|
103
|
+
});
|
|
104
|
+
}
|
|
105
|
+
results.push({
|
|
106
|
+
dictionary: "Average",
|
|
107
|
+
frequencies: averageFrequencies,
|
|
108
|
+
dictionaryAlias: "Average",
|
|
109
|
+
freqCount: averageFrequencies.length
|
|
110
|
+
});
|
|
111
|
+
return results;
|
|
112
|
+
}
|
|
113
|
+
function groupKanjiFrequencies(sourceFrequencies, dictionaryInfo) {
|
|
114
|
+
const map1 = new Map();
|
|
115
|
+
const aliasMap = new Map();
|
|
116
|
+
for (const { dictionary, dictionaryAlias, character, frequency, displayValue } of sourceFrequencies) {
|
|
117
|
+
let map2 = map1.get(dictionary);
|
|
118
|
+
if (typeof map2 === "undefined") {
|
|
119
|
+
map2 = new Map();
|
|
120
|
+
map1.set(dictionary, map2);
|
|
121
|
+
aliasMap.set(dictionary, dictionaryAlias);
|
|
122
|
+
}
|
|
123
|
+
let frequencyData = map2.get(character);
|
|
124
|
+
if (typeof frequencyData === "undefined") {
|
|
125
|
+
frequencyData = {
|
|
126
|
+
character,
|
|
127
|
+
values: new Map()
|
|
128
|
+
};
|
|
129
|
+
map2.set(character, frequencyData);
|
|
130
|
+
}
|
|
131
|
+
frequencyData.values.set(createMapKey([frequency, displayValue]), {
|
|
132
|
+
frequency,
|
|
133
|
+
displayValue
|
|
134
|
+
});
|
|
135
|
+
}
|
|
136
|
+
const results = [];
|
|
137
|
+
for (const [dictionary, map2] of map1.entries()) {
|
|
138
|
+
const frequencies = [];
|
|
139
|
+
const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;
|
|
140
|
+
for (const { character, values } of map2.values()) frequencies.push({
|
|
141
|
+
character,
|
|
142
|
+
values: [...values.values()]
|
|
143
|
+
});
|
|
144
|
+
const currentDictionaryInfo = dictionaryInfo.find(({ title }) => title === dictionary);
|
|
145
|
+
const freqCount = currentDictionaryInfo?.counts?.kanjiMeta.freq ?? 0;
|
|
146
|
+
results.push({
|
|
147
|
+
dictionary,
|
|
148
|
+
frequencies,
|
|
149
|
+
dictionaryAlias,
|
|
150
|
+
freqCount
|
|
151
|
+
});
|
|
152
|
+
}
|
|
153
|
+
return results;
|
|
154
|
+
}
|
|
155
|
+
function getGroupedPronunciations(dictionaryEntry) {
|
|
156
|
+
const { headwords, pronunciations: termPronunciations } = dictionaryEntry;
|
|
157
|
+
const allTerms = new Set();
|
|
158
|
+
const allReadings = new Set();
|
|
159
|
+
const aliasMap = new Map();
|
|
160
|
+
for (const { term, reading } of headwords) {
|
|
161
|
+
allTerms.add(term);
|
|
162
|
+
allReadings.add(reading);
|
|
163
|
+
}
|
|
164
|
+
const groupedPronunciationsMap = new Map();
|
|
165
|
+
for (const { headwordIndex, dictionary, dictionaryAlias, pronunciations } of termPronunciations) {
|
|
166
|
+
const { term, reading } = headwords[headwordIndex];
|
|
167
|
+
let dictionaryGroupedPronunciationList = groupedPronunciationsMap.get(dictionary);
|
|
168
|
+
if (typeof dictionaryGroupedPronunciationList === "undefined") {
|
|
169
|
+
dictionaryGroupedPronunciationList = [];
|
|
170
|
+
groupedPronunciationsMap.set(dictionary, dictionaryGroupedPronunciationList);
|
|
171
|
+
aliasMap.set(dictionary, dictionaryAlias);
|
|
172
|
+
}
|
|
173
|
+
for (const pronunciation of pronunciations) {
|
|
174
|
+
let groupedPronunciation = findExistingGroupedPronunciation(reading, pronunciation, dictionaryGroupedPronunciationList);
|
|
175
|
+
if (groupedPronunciation === null) {
|
|
176
|
+
groupedPronunciation = {
|
|
177
|
+
pronunciation,
|
|
178
|
+
terms: new Set(),
|
|
179
|
+
reading
|
|
180
|
+
};
|
|
181
|
+
dictionaryGroupedPronunciationList.push(groupedPronunciation);
|
|
182
|
+
}
|
|
183
|
+
groupedPronunciation.terms.add(term);
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
const results = [];
|
|
187
|
+
const multipleReadings = allReadings.size > 1;
|
|
188
|
+
for (const [dictionary, dictionaryGroupedPronunciationList] of groupedPronunciationsMap.entries()) {
|
|
189
|
+
const pronunciations2 = [];
|
|
190
|
+
const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;
|
|
191
|
+
for (const groupedPronunciation of dictionaryGroupedPronunciationList) {
|
|
192
|
+
const { pronunciation, terms, reading } = groupedPronunciation;
|
|
193
|
+
const exclusiveTerms = !areSetsEqual(terms, allTerms) ? getSetIntersection(terms, allTerms) : [];
|
|
194
|
+
const exclusiveReadings = [];
|
|
195
|
+
if (multipleReadings) exclusiveReadings.push(reading);
|
|
196
|
+
pronunciations2.push({
|
|
197
|
+
pronunciation,
|
|
198
|
+
terms: [...terms],
|
|
199
|
+
reading,
|
|
200
|
+
exclusiveTerms,
|
|
201
|
+
exclusiveReadings
|
|
202
|
+
});
|
|
203
|
+
}
|
|
204
|
+
results.push({
|
|
205
|
+
dictionary,
|
|
206
|
+
dictionaryAlias,
|
|
207
|
+
pronunciations: pronunciations2
|
|
208
|
+
});
|
|
209
|
+
}
|
|
210
|
+
return results;
|
|
211
|
+
}
|
|
212
|
+
function getPronunciationsOfType(pronunciations, type) {
|
|
213
|
+
const results = [];
|
|
214
|
+
for (const pronunciation of pronunciations) if (pronunciation.type === type) results.push(pronunciation);
|
|
215
|
+
return results;
|
|
216
|
+
}
|
|
217
|
+
function getTermFrequency(termTags) {
|
|
218
|
+
let totalScore = 0;
|
|
219
|
+
for (const { score } of termTags) totalScore += score;
|
|
220
|
+
if (totalScore > 0) return "popular";
|
|
221
|
+
if (totalScore < 0) return "rare";
|
|
222
|
+
return "normal";
|
|
223
|
+
}
|
|
224
|
+
function getDisambiguations(headwords, headwordIndices, allTermsSet, allReadingsSet) {
|
|
225
|
+
if (allTermsSet.size <= 1 && allReadingsSet.size <= 1) return [];
|
|
226
|
+
const terms = new Set();
|
|
227
|
+
const readings = new Set();
|
|
228
|
+
for (const headwordIndex of headwordIndices) {
|
|
229
|
+
const { term, reading } = headwords[headwordIndex];
|
|
230
|
+
terms.add(term);
|
|
231
|
+
readings.add(reading);
|
|
232
|
+
}
|
|
233
|
+
const disambiguations = [];
|
|
234
|
+
const addTerms = !areSetsEqual(terms, allTermsSet);
|
|
235
|
+
const addReadings = !areSetsEqual(readings, allReadingsSet);
|
|
236
|
+
if (addTerms) disambiguations.push(...getSetIntersection(terms, allTermsSet));
|
|
237
|
+
if (addReadings) {
|
|
238
|
+
if (addTerms) for (const term of terms) readings.delete(term);
|
|
239
|
+
disambiguations.push(...getSetIntersection(readings, allReadingsSet));
|
|
240
|
+
}
|
|
241
|
+
return disambiguations;
|
|
242
|
+
}
|
|
243
|
+
function isNonNounVerbOrAdjective(wordClasses) {
|
|
244
|
+
let isVerbOrAdjective = false;
|
|
245
|
+
let isSuruVerb = false;
|
|
246
|
+
let isNoun = false;
|
|
247
|
+
for (const wordClass of wordClasses) switch (wordClass) {
|
|
248
|
+
case "v1":
|
|
249
|
+
case "v5":
|
|
250
|
+
case "vk":
|
|
251
|
+
case "vz":
|
|
252
|
+
case "adj-i":
|
|
253
|
+
isVerbOrAdjective = true;
|
|
254
|
+
break;
|
|
255
|
+
case "vs":
|
|
256
|
+
isVerbOrAdjective = true;
|
|
257
|
+
isSuruVerb = true;
|
|
258
|
+
break;
|
|
259
|
+
case "n":
|
|
260
|
+
isNoun = true;
|
|
261
|
+
break;
|
|
262
|
+
}
|
|
263
|
+
return isVerbOrAdjective && !(isSuruVerb && isNoun);
|
|
264
|
+
}
|
|
265
|
+
function compareRevisions(current, latest) {
|
|
266
|
+
const simpleVersionTest = /^(\d+\.)*\d+$/;
|
|
267
|
+
if (!simpleVersionTest.test(current) || !simpleVersionTest.test(latest)) return current < latest;
|
|
268
|
+
const currentParts = current.split(".").map((part) => Number.parseInt(part, 10));
|
|
269
|
+
const latestParts = latest.split(".").map((part) => Number.parseInt(part, 10));
|
|
270
|
+
if (currentParts.length !== latestParts.length) return current < latest;
|
|
271
|
+
for (let i = 0; i < currentParts.length; i++) if (currentParts[i] !== latestParts[i]) return currentParts[i] < latestParts[i];
|
|
272
|
+
return false;
|
|
273
|
+
}
|
|
274
|
+
function findExistingGroupedPronunciation(reading, pronunciation, groupedPronunciationList) {
|
|
275
|
+
return groupedPronunciationList.find((gp) => gp.reading === reading && arePronunciationsEquivalent(gp, pronunciation)) ?? null;
|
|
276
|
+
}
|
|
277
|
+
function arePronunciationsEquivalent({ pronunciation: pronunciation1 }, pronunciation2) {
|
|
278
|
+
if (pronunciation1.type !== pronunciation2.type || !areTagListsEqual(pronunciation1.tags, pronunciation2.tags)) return false;
|
|
279
|
+
switch (pronunciation1.type) {
|
|
280
|
+
case "pitch-accent": {
|
|
281
|
+
const pitchAccent2 = pronunciation2;
|
|
282
|
+
return pronunciation1.positions === pitchAccent2.positions && areSimpleArraysEqual(pronunciation1.nasalPositions, pitchAccent2.nasalPositions) && areSimpleArraysEqual(pronunciation1.devoicePositions, pitchAccent2.devoicePositions);
|
|
283
|
+
}
|
|
284
|
+
case "phonetic-transcription": {
|
|
285
|
+
const phoneticTranscription2 = pronunciation2;
|
|
286
|
+
return pronunciation1.ipa === phoneticTranscription2.ipa;
|
|
287
|
+
}
|
|
288
|
+
}
|
|
289
|
+
return true;
|
|
290
|
+
}
|
|
291
|
+
function areSimpleArraysEqual(array1, array2) {
|
|
292
|
+
const ii = array1.length;
|
|
293
|
+
if (ii !== array2.length) return false;
|
|
294
|
+
for (let i = 0; i < ii; ++i) if (array1[i] !== array2[i]) return false;
|
|
295
|
+
return true;
|
|
296
|
+
}
|
|
297
|
+
function areTagListsEqual(tagList1, tagList2) {
|
|
298
|
+
const ii = tagList1.length;
|
|
299
|
+
if (tagList2.length !== ii) return false;
|
|
300
|
+
for (let i = 0; i < ii; ++i) {
|
|
301
|
+
const tag1 = tagList1[i];
|
|
302
|
+
const tag2 = tagList2[i];
|
|
303
|
+
if (tag1.name !== tag2.name || !areSimpleArraysEqual(tag1.dictionaries, tag2.dictionaries)) return false;
|
|
304
|
+
}
|
|
305
|
+
return true;
|
|
306
|
+
}
|
|
307
|
+
function areSetsEqual(set1, set2) {
|
|
308
|
+
if (set1.size !== set2.size) return false;
|
|
309
|
+
for (const value of set1) if (!set2.has(value)) return false;
|
|
310
|
+
return true;
|
|
311
|
+
}
|
|
312
|
+
function getSetIntersection(set1, set2) {
|
|
313
|
+
const result = [];
|
|
314
|
+
for (const value of set1) if (set2.has(value)) result.push(value);
|
|
315
|
+
return result;
|
|
316
|
+
}
|
|
317
|
+
function createMapKey(array) {
|
|
318
|
+
return JSON.stringify(array);
|
|
319
|
+
}
|
|
320
|
+
|
|
321
|
+
//#endregion
|
|
322
|
+
export { compareRevisions, getDisambiguations, getGroupedPronunciations, getPronunciationsOfType, getTermFrequency, groupKanjiFrequencies, groupTermFrequencies, groupTermTags, isNonNounVerbOrAdjective };
|
|
323
|
+
//# sourceMappingURL=dictionary-data-util-CfOLfEDE.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dictionary-data-util-CfOLfEDE.js","names":["dictionaryEntry: Dictionary.TermDictionaryEntry","headwordIndex: number","results: TagGroup[]","dictionaryInfo: DictionaryImporter.Summary[]","results: DictionaryFrequency<DDUTermFrequency>[]","frequencies: DDUTermFrequency[]","averageFrequencies: DDUTermFrequency[]","sourceFrequencies: Dictionary.KanjiFrequency[]","results: DictionaryFrequency<DDUKanjiFrequency>[]","frequencies: DDUKanjiFrequency[]","results: DictionaryGroupedPronunciations[]","pronunciations2: GroupedPronunciation[]","exclusiveReadings: string[]","pronunciations: Dictionary.Pronunciation[]","type: T","results: Dictionary.PronunciationGeneric<T>[]","termTags: { score: number }[]","headwords: Dictionary.TermHeadword[]","headwordIndices: number[]","allTermsSet: Set<string>","allReadingsSet: Set<string>","disambiguations: string[]","wordClasses: string[]","current: string","latest: string","reading: string","pronunciation: Dictionary.Pronunciation","groupedPronunciationList: GroupedPronunciationInternal[]","pronunciation2: Dictionary.Pronunciation","array1: T[]","array2: T[]","tagList1: Dictionary.Tag[]","tagList2: Dictionary.Tag[]","set1: Set<T>","set2: Set<T>","result: T[]","array: unknown[]"],"sources":["../src/util/dictionary-data-util.ts"],"sourcesContent":["import type * as Dictionary from '../types/dictionary';\nimport type {\n KanjiFrequency as DDUKanjiFrequency,\n TermFrequency as DDUTermFrequency,\n DictionaryFrequency,\n DictionaryGroupedPronunciations,\n FrequencyValue,\n GroupedPronunciation,\n GroupedPronunciationInternal,\n TagGroup,\n TermFrequencyType,\n} from '../types/dictionary-data-util';\nimport type * as DictionaryImporter from '../types/dictionary-importer';\n\n/**\n * Computes the harmonic mean of frequency values for a given headword in a dictionary entry.\n */\nexport function getFrequencyHarmonic(dictionaryEntry: Dictionary.TermDictionaryEntry, headwordIndex: number): number {\n const dominated = 
dictionaryEntry.frequencies\n .filter((f) => f.headwordIndex === headwordIndex)\n .map((f) => f.frequency);\n if (dominated.length === 0) {\n return 0;\n }\n let sum = 0;\n for (const freq of dominated) {\n if (freq <= 0) {\n continue;\n }\n sum += 1 / freq;\n }\n return sum > 0 ? Math.round(dominated.length / sum) : 0;\n}\n\nexport function groupTermTags(dictionaryEntry: Dictionary.TermDictionaryEntry): TagGroup[] {\n const { headwords } = dictionaryEntry;\n const headwordCount = headwords.length;\n const uniqueCheck = headwordCount > 1;\n const resultsIndexMap = new Map<string, number>();\n const results: TagGroup[] = [];\n for (let i = 0; i < headwordCount; ++i) {\n const { tags } = headwords[i];\n for (const tag of tags) {\n if (uniqueCheck) {\n const { name, category, content, dictionaries } = tag;\n const key = createMapKey([name, category, content, dictionaries]);\n const index = resultsIndexMap.get(key);\n if (typeof index !== 'undefined') {\n results[index].headwordIndices.push(i);\n continue;\n }\n resultsIndexMap.set(key, results.length);\n }\n results.push({ tag, headwordIndices: [i] });\n }\n }\n return results;\n}\n\nexport function groupTermFrequencies(\n dictionaryEntry: Dictionary.TermDictionaryEntry,\n dictionaryInfo: DictionaryImporter.Summary[],\n): DictionaryFrequency<DDUTermFrequency>[] {\n const { headwords, frequencies: sourceFrequencies } = dictionaryEntry;\n\n const map1 = new Map<\n string,\n Map<string, { term: string; reading: string | null; values: Map<string, FrequencyValue> }>\n >();\n const aliasMap = new Map<string, string>();\n for (const {\n headwordIndex,\n dictionary,\n dictionaryAlias,\n hasReading,\n frequency,\n displayValue,\n } of sourceFrequencies) {\n const { term, reading } = headwords[headwordIndex];\n\n let map2 = map1.get(dictionary);\n if (typeof map2 === 'undefined') {\n map2 = new Map();\n map1.set(dictionary, map2);\n aliasMap.set(dictionary, dictionaryAlias);\n }\n\n const readingKey = hasReading ? 
reading : null;\n const key = createMapKey([term, readingKey]);\n let frequencyData = map2.get(key);\n if (typeof frequencyData === 'undefined') {\n frequencyData = { term, reading: readingKey, values: new Map() };\n map2.set(key, frequencyData);\n }\n\n frequencyData.values.set(createMapKey([frequency, displayValue]), { frequency, displayValue });\n }\n\n const results: DictionaryFrequency<DDUTermFrequency>[] = [];\n\n for (const [dictionary, map2] of map1.entries()) {\n const frequencies: DDUTermFrequency[] = [];\n const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;\n for (const { term, reading, values } of map2.values()) {\n frequencies.push({ term, reading, values: [...values.values()] });\n }\n const currentDictionaryInfo = dictionaryInfo.find(({ title }) => title === dictionary);\n const freqCount = currentDictionaryInfo?.counts?.termMeta.freq ?? 0;\n results.push({ dictionary, frequencies, dictionaryAlias, freqCount });\n }\n\n const averageFrequencies: DDUTermFrequency[] = [];\n for (let i = 0; i < dictionaryEntry.headwords.length; i++) {\n const averageFrequency = getFrequencyHarmonic(dictionaryEntry, i);\n averageFrequencies.push({\n term: dictionaryEntry.headwords[i].term,\n reading: dictionaryEntry.headwords[i].reading,\n values: [{ frequency: averageFrequency, displayValue: averageFrequency.toString() }],\n });\n }\n\n results.push({\n dictionary: 'Average',\n frequencies: averageFrequencies,\n dictionaryAlias: 'Average',\n freqCount: averageFrequencies.length,\n });\n\n return results;\n}\n\nexport function groupKanjiFrequencies(\n sourceFrequencies: Dictionary.KanjiFrequency[],\n dictionaryInfo: DictionaryImporter.Summary[],\n): DictionaryFrequency<DDUKanjiFrequency>[] {\n const map1 = new Map<string, Map<string, { character: string; values: Map<string, FrequencyValue> }>>();\n const aliasMap = new Map<string, string>();\n for (const { dictionary, dictionaryAlias, character, frequency, displayValue } of sourceFrequencies) {\n let map2 = 
map1.get(dictionary);\n if (typeof map2 === 'undefined') {\n map2 = new Map();\n map1.set(dictionary, map2);\n aliasMap.set(dictionary, dictionaryAlias);\n }\n let frequencyData = map2.get(character);\n if (typeof frequencyData === 'undefined') {\n frequencyData = { character, values: new Map() };\n map2.set(character, frequencyData);\n }\n frequencyData.values.set(createMapKey([frequency, displayValue]), { frequency, displayValue });\n }\n\n const results: DictionaryFrequency<DDUKanjiFrequency>[] = [];\n for (const [dictionary, map2] of map1.entries()) {\n const frequencies: DDUKanjiFrequency[] = [];\n const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;\n for (const { character, values } of map2.values()) {\n frequencies.push({ character, values: [...values.values()] });\n }\n const currentDictionaryInfo = dictionaryInfo.find(({ title }) => title === dictionary);\n const freqCount = currentDictionaryInfo?.counts?.kanjiMeta.freq ?? 0;\n results.push({ dictionary, frequencies, dictionaryAlias, freqCount });\n }\n return results;\n}\n\nexport function getGroupedPronunciations(\n dictionaryEntry: Dictionary.TermDictionaryEntry,\n): DictionaryGroupedPronunciations[] {\n const { headwords, pronunciations: termPronunciations } = dictionaryEntry;\n\n const allTerms = new Set<string>();\n const allReadings = new Set<string>();\n const aliasMap = new Map<string, string>();\n for (const { term, reading } of headwords) {\n allTerms.add(term);\n allReadings.add(reading);\n }\n\n const groupedPronunciationsMap = new Map<string, GroupedPronunciationInternal[]>();\n for (const { headwordIndex, dictionary, dictionaryAlias, pronunciations } of termPronunciations) {\n const { term, reading } = headwords[headwordIndex];\n let dictionaryGroupedPronunciationList = groupedPronunciationsMap.get(dictionary);\n if (typeof dictionaryGroupedPronunciationList === 'undefined') {\n dictionaryGroupedPronunciationList = [];\n groupedPronunciationsMap.set(dictionary, 
dictionaryGroupedPronunciationList);\n aliasMap.set(dictionary, dictionaryAlias);\n }\n for (const pronunciation of pronunciations) {\n let groupedPronunciation = findExistingGroupedPronunciation(\n reading,\n pronunciation,\n dictionaryGroupedPronunciationList,\n );\n if (groupedPronunciation === null) {\n groupedPronunciation = { pronunciation, terms: new Set(), reading };\n dictionaryGroupedPronunciationList.push(groupedPronunciation);\n }\n groupedPronunciation.terms.add(term);\n }\n }\n\n const results: DictionaryGroupedPronunciations[] = [];\n const multipleReadings = allReadings.size > 1;\n for (const [dictionary, dictionaryGroupedPronunciationList] of groupedPronunciationsMap.entries()) {\n const pronunciations2: GroupedPronunciation[] = [];\n const dictionaryAlias = aliasMap.get(dictionary) ?? dictionary;\n for (const groupedPronunciation of dictionaryGroupedPronunciationList) {\n const { pronunciation, terms, reading } = groupedPronunciation;\n const exclusiveTerms = !areSetsEqual(terms, allTerms) ? 
getSetIntersection(terms, allTerms) : [];\n const exclusiveReadings: string[] = [];\n if (multipleReadings) {\n exclusiveReadings.push(reading);\n }\n pronunciations2.push({ pronunciation, terms: [...terms], reading, exclusiveTerms, exclusiveReadings });\n }\n results.push({ dictionary, dictionaryAlias, pronunciations: pronunciations2 });\n }\n return results;\n}\n\nexport function getPronunciationsOfType<T extends Dictionary.PronunciationType>(\n pronunciations: Dictionary.Pronunciation[],\n type: T,\n): Dictionary.PronunciationGeneric<T>[] {\n const results: Dictionary.PronunciationGeneric<T>[] = [];\n for (const pronunciation of pronunciations) {\n if (pronunciation.type === type) {\n results.push(pronunciation as Dictionary.PronunciationGeneric<T>);\n }\n }\n return results;\n}\n\nexport function getTermFrequency(termTags: { score: number }[]): TermFrequencyType {\n let totalScore = 0;\n for (const { score } of termTags) {\n totalScore += score;\n }\n if (totalScore > 0) {\n return 'popular';\n }\n if (totalScore < 0) {\n return 'rare';\n }\n return 'normal';\n}\n\nexport function getDisambiguations(\n headwords: Dictionary.TermHeadword[],\n headwordIndices: number[],\n allTermsSet: Set<string>,\n allReadingsSet: Set<string>,\n): string[] {\n if (allTermsSet.size <= 1 && allReadingsSet.size <= 1) {\n return [];\n }\n const terms = new Set<string>();\n const readings = new Set<string>();\n for (const headwordIndex of headwordIndices) {\n const { term, reading } = headwords[headwordIndex];\n terms.add(term);\n readings.add(reading);\n }\n const disambiguations: string[] = [];\n const addTerms = !areSetsEqual(terms, allTermsSet);\n const addReadings = !areSetsEqual(readings, allReadingsSet);\n if (addTerms) {\n disambiguations.push(...getSetIntersection(terms, allTermsSet));\n }\n if (addReadings) {\n if (addTerms) {\n for (const term of terms) {\n readings.delete(term);\n }\n }\n disambiguations.push(...getSetIntersection(readings, allReadingsSet));\n }\n return 
disambiguations;\n}\n\nexport function isNonNounVerbOrAdjective(wordClasses: string[]): boolean {\n let isVerbOrAdjective = false;\n let isSuruVerb = false;\n let isNoun = false;\n for (const wordClass of wordClasses) {\n switch (wordClass) {\n case 'v1':\n case 'v5':\n case 'vk':\n case 'vz':\n case 'adj-i':\n isVerbOrAdjective = true;\n break;\n case 'vs':\n isVerbOrAdjective = true;\n isSuruVerb = true;\n break;\n case 'n':\n isNoun = true;\n break;\n }\n }\n return isVerbOrAdjective && !(isSuruVerb && isNoun);\n}\n\nexport function compareRevisions(current: string, latest: string): boolean {\n const simpleVersionTest = /^(\\d+\\.)*\\d+$/;\n if (!simpleVersionTest.test(current) || !simpleVersionTest.test(latest)) {\n return current < latest;\n }\n const currentParts = current.split('.').map((part) => Number.parseInt(part, 10));\n const latestParts = latest.split('.').map((part) => Number.parseInt(part, 10));\n if (currentParts.length !== latestParts.length) {\n return current < latest;\n }\n for (let i = 0; i < currentParts.length; i++) {\n if (currentParts[i] !== latestParts[i]) {\n return currentParts[i] < latestParts[i];\n }\n }\n return false;\n}\n\n// Private helpers\n\nfunction findExistingGroupedPronunciation(\n reading: string,\n pronunciation: Dictionary.Pronunciation,\n groupedPronunciationList: GroupedPronunciationInternal[],\n): GroupedPronunciationInternal | null {\n return (\n groupedPronunciationList.find(\n (gp) => gp.reading === reading && arePronunciationsEquivalent(gp, pronunciation),\n ) ?? 
null\n );\n}\n\nfunction arePronunciationsEquivalent(\n { pronunciation: pronunciation1 }: GroupedPronunciationInternal,\n pronunciation2: Dictionary.Pronunciation,\n): boolean {\n if (pronunciation1.type !== pronunciation2.type || !areTagListsEqual(pronunciation1.tags, pronunciation2.tags)) {\n return false;\n }\n switch (pronunciation1.type) {\n case 'pitch-accent': {\n const pitchAccent2 = pronunciation2 as Dictionary.PitchAccent;\n return (\n pronunciation1.positions === pitchAccent2.positions &&\n areSimpleArraysEqual(pronunciation1.nasalPositions, pitchAccent2.nasalPositions) &&\n areSimpleArraysEqual(pronunciation1.devoicePositions, pitchAccent2.devoicePositions)\n );\n }\n case 'phonetic-transcription': {\n const phoneticTranscription2 = pronunciation2 as Dictionary.PhoneticTranscription;\n return pronunciation1.ipa === phoneticTranscription2.ipa;\n }\n }\n return true;\n}\n\nfunction areSimpleArraysEqual<T>(array1: T[], array2: T[]): boolean {\n const ii = array1.length;\n if (ii !== array2.length) {\n return false;\n }\n for (let i = 0; i < ii; ++i) {\n if (array1[i] !== array2[i]) {\n return false;\n }\n }\n return true;\n}\n\nfunction areTagListsEqual(tagList1: Dictionary.Tag[], tagList2: Dictionary.Tag[]): boolean {\n const ii = tagList1.length;\n if (tagList2.length !== ii) {\n return false;\n }\n for (let i = 0; i < ii; ++i) {\n const tag1 = tagList1[i];\n const tag2 = tagList2[i];\n if (tag1.name !== tag2.name || !areSimpleArraysEqual(tag1.dictionaries, tag2.dictionaries)) {\n return false;\n }\n }\n return true;\n}\n\nfunction areSetsEqual<T>(set1: Set<T>, set2: Set<T>): boolean {\n if (set1.size !== set2.size) {\n return false;\n }\n for (const value of set1) {\n if (!set2.has(value)) {\n return false;\n }\n }\n return true;\n}\n\nfunction getSetIntersection<T>(set1: Set<T>, set2: Set<T>): T[] {\n const result: T[] = [];\n for (const value of set1) {\n if (set2.has(value)) {\n result.push(value);\n }\n }\n return result;\n}\n\nfunction 
createMapKey(array: unknown[]): string {\n return JSON.stringify(array);\n}\n"],"mappings":";;;;AAiBA,SAAgB,qBAAqBA,iBAAiDC,eAA+B;CACjH,MAAM,YAAY,gBAAgB,YAC7B,OAAO,CAAC,MAAM,EAAE,kBAAkB,cAAc,CAChD,IAAI,CAAC,MAAM,EAAE,UAAU;AAC5B,KAAI,UAAU,WAAW,EACrB,QAAO;CAEX,IAAI,MAAM;AACV,MAAK,MAAM,QAAQ,WAAW;AAC1B,MAAI,QAAQ,EACR;AAEJ,SAAO,IAAI;CACd;AACD,QAAO,MAAM,IAAI,KAAK,MAAM,UAAU,SAAS,IAAI,GAAG;AACzD;AAED,SAAgB,cAAcD,iBAA6D;CACvF,MAAM,EAAE,WAAW,GAAG;CACtB,MAAM,gBAAgB,UAAU;CAChC,MAAM,cAAc,gBAAgB;CACpC,MAAM,kBAAkB,IAAI;CAC5B,MAAME,UAAsB,CAAE;AAC9B,MAAK,IAAI,IAAI,GAAG,IAAI,eAAe,EAAE,GAAG;EACpC,MAAM,EAAE,MAAM,GAAG,UAAU;AAC3B,OAAK,MAAM,OAAO,MAAM;AACpB,OAAI,aAAa;IACb,MAAM,EAAE,MAAM,UAAU,SAAS,cAAc,GAAG;IAClD,MAAM,MAAM,aAAa;KAAC;KAAM;KAAU;KAAS;IAAa,EAAC;IACjE,MAAM,QAAQ,gBAAgB,IAAI,IAAI;AACtC,eAAW,UAAU,aAAa;AAC9B,aAAQ,OAAO,gBAAgB,KAAK,EAAE;AACtC;IACH;AACD,oBAAgB,IAAI,KAAK,QAAQ,OAAO;GAC3C;AACD,WAAQ,KAAK;IAAE;IAAK,iBAAiB,CAAC,CAAE;GAAE,EAAC;EAC9C;CACJ;AACD,QAAO;AACV;AAED,SAAgB,qBACZF,iBACAG,gBACuC;CACvC,MAAM,EAAE,WAAW,aAAa,mBAAmB,GAAG;CAEtD,MAAM,OAAO,IAAI;CAIjB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,EACP,eACA,YACA,iBACA,YACA,WACA,cACH,IAAI,mBAAmB;EACpB,MAAM,EAAE,MAAM,SAAS,GAAG,UAAU;EAEpC,IAAI,OAAO,KAAK,IAAI,WAAW;AAC/B,aAAW,SAAS,aAAa;AAC7B,UAAO,IAAI;AACX,QAAK,IAAI,YAAY,KAAK;AAC1B,YAAS,IAAI,YAAY,gBAAgB;EAC5C;EAED,MAAM,aAAa,aAAa,UAAU;EAC1C,MAAM,MAAM,aAAa,CAAC,MAAM,UAAW,EAAC;EAC5C,IAAI,gBAAgB,KAAK,IAAI,IAAI;AACjC,aAAW,kBAAkB,aAAa;AACtC,mBAAgB;IAAE;IAAM,SAAS;IAAY,QAAQ,IAAI;GAAO;AAChE,QAAK,IAAI,KAAK,cAAc;EAC/B;AAED,gBAAc,OAAO,IAAI,aAAa,CAAC,WAAW,YAAa,EAAC,EAAE;GAAE;GAAW;EAAc,EAAC;CACjG;CAED,MAAMC,UAAmD,CAAE;AAE3D,MAAK,MAAM,CAAC,YAAY,KAAK,IAAI,KAAK,SAAS,EAAE;EAC7C,MAAMC,cAAkC,CAAE;EAC1C,MAAM,kBAAkB,SAAS,IAAI,WAAW,IAAI;AACpD,OAAK,MAAM,EAAE,MAAM,SAAS,QAAQ,IAAI,KAAK,QAAQ,CACjD,aAAY,KAAK;GAAE;GAAM;GAAS,QAAQ,CAAC,GAAG,OAAO,QAAQ,AAAC;EAAE,EAAC;EAErE,MAAM,wBAAwB,eAAe,KAAK,CAAC,EAAE,OAAO,KAAK,UAAU,WAAW;EACtF,MAAM,YAAY,uBAAuB,QAAQ,SAAS,QAAQ;AAClE,UAAQ,KAAK;GAAE;GAAY;GAAa;GAAiB;EAAW,EAAC;CACxE;CAED,MAAMC,qBAAyC,CA
AE;AACjD,MAAK,IAAI,IAAI,GAAG,IAAI,gBAAgB,UAAU,QAAQ,KAAK;EACvD,MAAM,mBAAmB,qBAAqB,iBAAiB,EAAE;AACjE,qBAAmB,KAAK;GACpB,MAAM,gBAAgB,UAAU,GAAG;GACnC,SAAS,gBAAgB,UAAU,GAAG;GACtC,QAAQ,CAAC;IAAE,WAAW;IAAkB,cAAc,iBAAiB,UAAU;GAAE,CAAC;EACvF,EAAC;CACL;AAED,SAAQ,KAAK;EACT,YAAY;EACZ,aAAa;EACb,iBAAiB;EACjB,WAAW,mBAAmB;CACjC,EAAC;AAEF,QAAO;AACV;AAED,SAAgB,sBACZC,mBACAJ,gBACwC;CACxC,MAAM,OAAO,IAAI;CACjB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,EAAE,YAAY,iBAAiB,WAAW,WAAW,cAAc,IAAI,mBAAmB;EACjG,IAAI,OAAO,KAAK,IAAI,WAAW;AAC/B,aAAW,SAAS,aAAa;AAC7B,UAAO,IAAI;AACX,QAAK,IAAI,YAAY,KAAK;AAC1B,YAAS,IAAI,YAAY,gBAAgB;EAC5C;EACD,IAAI,gBAAgB,KAAK,IAAI,UAAU;AACvC,aAAW,kBAAkB,aAAa;AACtC,mBAAgB;IAAE;IAAW,QAAQ,IAAI;GAAO;AAChD,QAAK,IAAI,WAAW,cAAc;EACrC;AACD,gBAAc,OAAO,IAAI,aAAa,CAAC,WAAW,YAAa,EAAC,EAAE;GAAE;GAAW;EAAc,EAAC;CACjG;CAED,MAAMK,UAAoD,CAAE;AAC5D,MAAK,MAAM,CAAC,YAAY,KAAK,IAAI,KAAK,SAAS,EAAE;EAC7C,MAAMC,cAAmC,CAAE;EAC3C,MAAM,kBAAkB,SAAS,IAAI,WAAW,IAAI;AACpD,OAAK,MAAM,EAAE,WAAW,QAAQ,IAAI,KAAK,QAAQ,CAC7C,aAAY,KAAK;GAAE;GAAW,QAAQ,CAAC,GAAG,OAAO,QAAQ,AAAC;EAAE,EAAC;EAEjE,MAAM,wBAAwB,eAAe,KAAK,CAAC,EAAE,OAAO,KAAK,UAAU,WAAW;EACtF,MAAM,YAAY,uBAAuB,QAAQ,UAAU,QAAQ;AACnE,UAAQ,KAAK;GAAE;GAAY;GAAa;GAAiB;EAAW,EAAC;CACxE;AACD,QAAO;AACV;AAED,SAAgB,yBACZT,iBACiC;CACjC,MAAM,EAAE,WAAW,gBAAgB,oBAAoB,GAAG;CAE1D,MAAM,WAAW,IAAI;CACrB,MAAM,cAAc,IAAI;CACxB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,EAAE,MAAM,SAAS,IAAI,WAAW;AACvC,WAAS,IAAI,KAAK;AAClB,cAAY,IAAI,QAAQ;CAC3B;CAED,MAAM,2BAA2B,IAAI;AACrC,MAAK,MAAM,EAAE,eAAe,YAAY,iBAAiB,gBAAgB,IAAI,oBAAoB;EAC7F,MAAM,EAAE,MAAM,SAAS,GAAG,UAAU;EACpC,IAAI,qCAAqC,yBAAyB,IAAI,WAAW;AACjF,aAAW,uCAAuC,aAAa;AAC3D,wCAAqC,CAAE;AACvC,4BAAyB,IAAI,YAAY,mCAAmC;AAC5E,YAAS,IAAI,YAAY,gBAAgB;EAC5C;AACD,OAAK,MAAM,iBAAiB,gBAAgB;GACxC,IAAI,uBAAuB,iCACvB,SACA,eACA,mCACH;AACD,OAAI,yBAAyB,MAAM;AAC/B,2BAAuB;KAAE;KAAe,OAAO,IAAI;KAAO;IAAS;AACnE,uCAAmC,KAAK,qBAAqB;GAChE;AACD,wBAAqB,MAAM,IAAI,KAAK;EACvC;CACJ;CAED,MAAMU,UAA6C,CAAE;CACrD,MAAM,mBAAmB,YAAY,OAAO;AAC5C,MAAK,MAAM,CAAC,YAAY,mCAAmC,IAAI,yBAAyB,SAAS,EAAE;EAC/F,MAAMC,k
BAA0C,CAAE;EAClD,MAAM,kBAAkB,SAAS,IAAI,WAAW,IAAI;AACpD,OAAK,MAAM,wBAAwB,oCAAoC;GACnE,MAAM,EAAE,eAAe,OAAO,SAAS,GAAG;GAC1C,MAAM,kBAAkB,aAAa,OAAO,SAAS,GAAG,mBAAmB,OAAO,SAAS,GAAG,CAAE;GAChG,MAAMC,oBAA8B,CAAE;AACtC,OAAI,iBACA,mBAAkB,KAAK,QAAQ;AAEnC,mBAAgB,KAAK;IAAE;IAAe,OAAO,CAAC,GAAG,KAAM;IAAE;IAAS;IAAgB;GAAmB,EAAC;EACzG;AACD,UAAQ,KAAK;GAAE;GAAY;GAAiB,gBAAgB;EAAiB,EAAC;CACjF;AACD,QAAO;AACV;AAED,SAAgB,wBACZC,gBACAC,MACoC;CACpC,MAAMC,UAAgD,CAAE;AACxD,MAAK,MAAM,iBAAiB,eACxB,KAAI,cAAc,SAAS,KACvB,SAAQ,KAAK,cAAoD;AAGzE,QAAO;AACV;AAED,SAAgB,iBAAiBC,UAAkD;CAC/E,IAAI,aAAa;AACjB,MAAK,MAAM,EAAE,OAAO,IAAI,SACpB,eAAc;AAElB,KAAI,aAAa,EACb,QAAO;AAEX,KAAI,aAAa,EACb,QAAO;AAEX,QAAO;AACV;AAED,SAAgB,mBACZC,WACAC,iBACAC,aACAC,gBACQ;AACR,KAAI,YAAY,QAAQ,KAAK,eAAe,QAAQ,EAChD,QAAO,CAAE;CAEb,MAAM,QAAQ,IAAI;CAClB,MAAM,WAAW,IAAI;AACrB,MAAK,MAAM,iBAAiB,iBAAiB;EACzC,MAAM,EAAE,MAAM,SAAS,GAAG,UAAU;AACpC,QAAM,IAAI,KAAK;AACf,WAAS,IAAI,QAAQ;CACxB;CACD,MAAMC,kBAA4B,CAAE;CACpC,MAAM,YAAY,aAAa,OAAO,YAAY;CAClD,MAAM,eAAe,aAAa,UAAU,eAAe;AAC3D,KAAI,SACA,iBAAgB,KAAK,GAAG,mBAAmB,OAAO,YAAY,CAAC;AAEnE,KAAI,aAAa;AACb,MAAI,SACA,MAAK,MAAM,QAAQ,MACf,UAAS,OAAO,KAAK;AAG7B,kBAAgB,KAAK,GAAG,mBAAmB,UAAU,eAAe,CAAC;CACxE;AACD,QAAO;AACV;AAED,SAAgB,yBAAyBC,aAAgC;CACrE,IAAI,oBAAoB;CACxB,IAAI,aAAa;CACjB,IAAI,SAAS;AACb,MAAK,MAAM,aAAa,YACpB,SAAQ,WAAR;EACI,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;AACD,uBAAoB;AACpB;EACJ,KAAK;AACD,uBAAoB;AACpB,gBAAa;AACb;EACJ,KAAK;AACD,YAAS;AACT;CACP;AAEL,QAAO,uBAAuB,cAAc;AAC/C;AAED,SAAgB,iBAAiBC,SAAiBC,QAAyB;CACvE,MAAM,oBAAoB;AAC1B,MAAK,kBAAkB,KAAK,QAAQ,KAAK,kBAAkB,KAAK,OAAO,CACnE,QAAO,UAAU;CAErB,MAAM,eAAe,QAAQ,MAAM,IAAI,CAAC,IAAI,CAAC,SAAS,OAAO,SAAS,MAAM,GAAG,CAAC;CAChF,MAAM,cAAc,OAAO,MAAM,IAAI,CAAC,IAAI,CAAC,SAAS,OAAO,SAAS,MAAM,GAAG,CAAC;AAC9E,KAAI,aAAa,WAAW,YAAY,OACpC,QAAO,UAAU;AAErB,MAAK,IAAI,IAAI,GAAG,IAAI,aAAa,QAAQ,IACrC,KAAI,aAAa,OAAO,YAAY,GAChC,QAAO,aAAa,KAAK,YAAY;AAG7C,QAAO;AACV;AAID,SAAS,iCACLC,SACAC,eACAC,0BACmC;AACnC,QACI,yBAAyB,KACrB,CAAC,OAAO,GAAG,YAAY,WAAW,4BAA4B,IAAI,cAAc,CACnF,IAA
I;AAEZ;AAED,SAAS,4BACL,EAAE,eAAe,gBAA8C,EAC/DC,gBACO;AACP,KAAI,eAAe,SAAS,eAAe,SAAS,iBAAiB,eAAe,MAAM,eAAe,KAAK,CAC1G,QAAO;AAEX,SAAQ,eAAe,MAAvB;EACI,KAAK,gBAAgB;GACjB,MAAM,eAAe;AACrB,UACI,eAAe,cAAc,aAAa,aAC1C,qBAAqB,eAAe,gBAAgB,aAAa,eAAe,IAChF,qBAAqB,eAAe,kBAAkB,aAAa,iBAAiB;EAE3F;EACD,KAAK,0BAA0B;GAC3B,MAAM,yBAAyB;AAC/B,UAAO,eAAe,QAAQ,uBAAuB;EACxD;CACJ;AACD,QAAO;AACV;AAED,SAAS,qBAAwBC,QAAaC,QAAsB;CAChE,MAAM,KAAK,OAAO;AAClB,KAAI,OAAO,OAAO,OACd,QAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,EAAE,EACtB,KAAI,OAAO,OAAO,OAAO,GACrB,QAAO;AAGf,QAAO;AACV;AAED,SAAS,iBAAiBC,UAA4BC,UAAqC;CACvF,MAAM,KAAK,SAAS;AACpB,KAAI,SAAS,WAAW,GACpB,QAAO;AAEX,MAAK,IAAI,IAAI,GAAG,IAAI,IAAI,EAAE,GAAG;EACzB,MAAM,OAAO,SAAS;EACtB,MAAM,OAAO,SAAS;AACtB,MAAI,KAAK,SAAS,KAAK,SAAS,qBAAqB,KAAK,cAAc,KAAK,aAAa,CACtF,QAAO;CAEd;AACD,QAAO;AACV;AAED,SAAS,aAAgBC,MAAcC,MAAuB;AAC1D,KAAI,KAAK,SAAS,KAAK,KACnB,QAAO;AAEX,MAAK,MAAM,SAAS,KAChB,MAAK,KAAK,IAAI,MAAM,CAChB,QAAO;AAGf,QAAO;AACV;AAED,SAAS,mBAAsBD,MAAcC,MAAmB;CAC5D,MAAMC,SAAc,CAAE;AACtB,MAAK,MAAM,SAAS,KAChB,KAAI,KAAK,IAAI,MAAM,CACf,QAAO,KAAK,MAAM;AAG1B,QAAO;AACV;AAED,SAAS,aAAaC,OAA0B;AAC5C,QAAO,KAAK,UAAU,MAAM;AAC/B"}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { DeleteDictionaryProgressCallback, DictionaryAndQueryRequest, DictionaryCounts, DictionarySet, KanjiEntry, KanjiMeta, MatchType, Media, MediaRequest, ObjectStoreName, Summary, Tag$1 as Tag, TermEntry, TermExactRequest, TermMeta } from "./dictionary-importer-BkQQSBhm.js";
|
|
2
|
+
import Dexie from "dexie";
|
|
3
|
+
|
|
4
|
+
//#region src/database/schema.d.ts
|
|
5
|
+
declare class YomitanDatabase extends Dexie {
|
|
6
|
+
terms: Dexie.Table;
|
|
7
|
+
termMeta: Dexie.Table;
|
|
8
|
+
kanji: Dexie.Table;
|
|
9
|
+
kanjiMeta: Dexie.Table;
|
|
10
|
+
tagMeta: Dexie.Table;
|
|
11
|
+
dictionaries: Dexie.Table;
|
|
12
|
+
media: Dexie.Table;
|
|
13
|
+
constructor(name?: string);
|
|
14
|
+
}
|
|
15
|
+
declare const OBJECT_STORE_NAMES: ObjectStoreName[];
|
|
16
|
+
|
|
17
|
+
//#endregion
|
|
18
|
+
//#region src/database/dictionary-database.d.ts
|
|
19
|
+
//# sourceMappingURL=schema.d.ts.map
|
|
20
|
+
declare class DictionaryDB {
|
|
21
|
+
private _db;
|
|
22
|
+
private _isOpen;
|
|
23
|
+
constructor(dbName?: string);
|
|
24
|
+
open(): Promise<void>;
|
|
25
|
+
close(): void;
|
|
26
|
+
get isOpen(): boolean;
|
|
27
|
+
get dexie(): YomitanDatabase;
|
|
28
|
+
purge(): Promise<boolean>;
|
|
29
|
+
deleteDictionary(dictionaryName: string, onProgress?: DeleteDictionaryProgressCallback): Promise<void>;
|
|
30
|
+
findTermsBulk(termList: string[], dictionaries: DictionarySet, matchType: MatchType): Promise<TermEntry[]>;
|
|
31
|
+
findTermsExactBulk(termList: TermExactRequest[], dictionaries: DictionarySet): Promise<TermEntry[]>;
|
|
32
|
+
findTermsBySequenceBulk(items: DictionaryAndQueryRequest[]): Promise<TermEntry[]>;
|
|
33
|
+
findTermMetaBulk(termList: string[], dictionaries: DictionarySet): Promise<TermMeta[]>;
|
|
34
|
+
findKanjiBulk(kanjiList: string[], dictionaries: DictionarySet): Promise<KanjiEntry[]>;
|
|
35
|
+
findKanjiMetaBulk(kanjiList: string[], dictionaries: DictionarySet): Promise<KanjiMeta[]>;
|
|
36
|
+
findTagMetaBulk(items: DictionaryAndQueryRequest[]): Promise<(Tag | undefined)[]>;
|
|
37
|
+
findTagForTitle(name: string, dictionary: string): Promise<Tag | undefined>;
|
|
38
|
+
getMedia(items: MediaRequest[]): Promise<Media[]>;
|
|
39
|
+
getDictionaryInfo(): Promise<Summary[]>;
|
|
40
|
+
getDictionaryCounts(dictionaryNames: string[], getTotal: boolean): Promise<DictionaryCounts>;
|
|
41
|
+
dictionaryExists(title: string): Promise<boolean>;
|
|
42
|
+
bulkAdd(objectStoreName: ObjectStoreName, items: unknown[], start: number, count: number): Promise<void>;
|
|
43
|
+
addWithResult(objectStoreName: ObjectStoreName, item: unknown): Promise<number>;
|
|
44
|
+
bulkUpdate(objectStoreName: ObjectStoreName, items: {
|
|
45
|
+
primaryKey: number;
|
|
46
|
+
data: unknown;
|
|
47
|
+
}[], start: number, count: number): Promise<void>;
|
|
48
|
+
private _createTerm;
|
|
49
|
+
private _createTermMeta;
|
|
50
|
+
private _createKanji;
|
|
51
|
+
private _splitField;
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
//#endregion
|
|
55
|
+
//# sourceMappingURL=dictionary-database.d.ts.map
|
|
56
|
+
|
|
57
|
+
export { DictionaryDB as DictionaryDB$1, OBJECT_STORE_NAMES as OBJECT_STORE_NAMES$1, YomitanDatabase as YomitanDatabase$1 };
|
|
58
|
+
//# sourceMappingURL=dictionary-database-BDC2f9zc.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"dictionary-database-BDC2f9zc.d.ts","names":[],"sources":["../src/database/schema.ts","../src/database/dictionary-database.ts"],"sourcesContent":null,"mappings":";;;;cAGa,eAAA,SAAwB,KAAA;SACzB,KAAA,CAAM;EADL,QAAA,EAEE,KAAA,CAAM,KAFQ;EAAA,KAAA,EAGjB,KAAA,CAAM,KAHW;EAAA,SACX,EAGF,KAAA,CAAM,KAHJ;EAAK,OACR,EAGD,KAAA,CAAM,KAHC;EAAK,YACR,EAGC,KAAA,CAAM,KAHP;EAAK,KACP,EAGJ,KAAA,CAAM,KAHI;EAAK,WACP,CAAA,IAAA,CAAA,EAAA,MAAA;;AAER,cAmDC,kBAnDK,EAmDe,eAnDf,EAAA;;;;;cCJL,YAAA;EDHA,QAAA,GAAA;EAAgB,QAAA,OAAA;EAAA,WACX,CAAA,MAAA,CAAA,EAAA,MAAA;EAAK,IACR,CAAA,CAAA,ECUG,ODVG,CAAA,IAAA,CAAA;EAAK,KACd,CAAA,CAAA,EAAM,IAAA;EAAK,IACP,MAAM,CAAA,CAAA,EAAA,OAAA;EAAK,IACb,KAAM,CAAA,CAAA,ECqBH,eDrBG;EAAK,KACN,CAAA,CAAA,ECwBA,ODxBM,CAAA,OAAA,CAAA;EAAK,gBACZ,CAAA,cAAA,EAAA,MAAA,EAAA,UAAA,CAAA,ECuBQ,gCDvBR,CAAA,ECyCX,ODzCW,CAAA,IAAA,CAAA;EAAK,aAPc,CAAA,QAAA,EAAA,MAAA,EAAA,EAAA,YAAA,ECgDvB,aDhDuB,EAAA,SAAA,ECyFiB,SDzFjB,CAAA,EC2F9B,OD3F8B,CC0FU,SD1FV,EAAA,CAAA;EAAK,kBAAA,CAAA,QAAA,EC2F5B,gBD3F4B,EAAA,EAAA,YAAA,ECoJW,aDpJX,CAAA,ECsJnC,ODtJmC,CCqJY,SDrJZ,EAAA,CAAA;EA0D7B,uBAQZ,CAAA,KAAA,ECoFa,yBD5FkC,EAAA,CAAA,ECoHzC,ODpHyC,CCmHW,SDnHX,EAAA,CAAA;qDCoHlC,gBAyBP,QAD+C;mDACxC,gBAyBP,QAD+C;uDACxC,gBAyBP,QAD+C;EArPzC,eAAY,CAAA,KAAA,EAsPX,yBAtPW,EAAA,CAAA,EAoRlB,OApRkB,CAAA,CAmRkC,GAnRlC,GAAA,SAAA,CAAA,EAAA,CAAA;EAAA,eAAA,CAAA,IAAA,EAAA,MAAA,EAAA,UAAA,EAAA,MAAA,CAAA,EAoSoC,OApSpC,CAoRX,GApRW,GAAA,SAAA,CAAA;EAAA,QASP,CAAA,KAAA,EA2RkD,YA3RlD,EAAA,CAAA,EAmS4C,OAnS5C,CAmSuC,KAnSvC,EAAA,CAAA;EAAO,iBAcR,CAAA,CAAA,EAmTc,OAnTd,CAqRoD,OArRpD,EAAA,CAAA;EAAe,mBAIb,CAAA,eAAA,EAAA,MAAA,EAAA,EAAA,QAAA,EAAA,OAAA,CAAA,EAsTZ,OAtTY,CA+SmB,gBA/SnB,CAAA;EAAO,gBAAA,CAAA,KAAA,EAAA,MAAA,CAAA,EA0ViB,OA1VjB,CAAA,OAAA,CAAA;EAiB8C,OACjE,CAAA,eAAA,EAwU2C,eAxU3C,EAAA,KAAA,EAAA,OAAA,EAAA,EAAA,KAAA,EAAA,MAAA,EAAA,KAAA,EAAA,MAAA,CAAA,EAkVA,OAlVA,CAAA,IAAA,CAAA;EAAO,aAAA,CAAA,eAAA,EAkVA,eAlVA,EAAA,IAAA,EAAA,OAAA,CAAA,EA+V+E,OA/V/E,CAAA,MAAA,CAAA;EAyCwC,UAAA,CAAA,eAAA,EAsT8C,eAtT9C,EAAA,KAAA,EAAA;IACP,UAAA,EAAA,MAAA;I
ACxC,IAAA,EAAA,OAAA;EAAO,CAAA,EAAA,EAAA,KAAA,EAAA,MAAA,EAAA,KAAA,EAAA,MAAA,CAAA,EA8TP,OA9TO,CAAA,IAAA,CAAA;EAyDuC,QAAA,WAAA;EACC,QAAA,eAAA;EACX,QAApC,YAAA;EAAO,QAAA,WAAA"}
|