kernpy 0.0.2__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kernpy/__init__.py +215 -0
- kernpy/__main__.py +217 -0
- kernpy/core/__init__.py +119 -0
- kernpy/core/_io.py +48 -0
- kernpy/core/base_antlr_importer.py +61 -0
- kernpy/core/base_antlr_spine_parser_listener.py +196 -0
- kernpy/core/basic_spine_importer.py +43 -0
- kernpy/core/document.py +965 -0
- kernpy/core/dyn_importer.py +30 -0
- kernpy/core/dynam_spine_importer.py +42 -0
- kernpy/core/error_listener.py +51 -0
- kernpy/core/exporter.py +535 -0
- kernpy/core/fing_spine_importer.py +42 -0
- kernpy/core/generated/kernSpineLexer.interp +444 -0
- kernpy/core/generated/kernSpineLexer.py +535 -0
- kernpy/core/generated/kernSpineLexer.tokens +236 -0
- kernpy/core/generated/kernSpineParser.interp +425 -0
- kernpy/core/generated/kernSpineParser.py +9954 -0
- kernpy/core/generated/kernSpineParser.tokens +236 -0
- kernpy/core/generated/kernSpineParserListener.py +1200 -0
- kernpy/core/generated/kernSpineParserVisitor.py +673 -0
- kernpy/core/generic.py +426 -0
- kernpy/core/gkern.py +526 -0
- kernpy/core/graphviz_exporter.py +89 -0
- kernpy/core/harm_spine_importer.py +41 -0
- kernpy/core/import_humdrum_old.py +853 -0
- kernpy/core/importer.py +285 -0
- kernpy/core/importer_factory.py +43 -0
- kernpy/core/kern_spine_importer.py +73 -0
- kernpy/core/mens_spine_importer.py +23 -0
- kernpy/core/mhxm_spine_importer.py +44 -0
- kernpy/core/pitch_models.py +338 -0
- kernpy/core/root_spine_importer.py +58 -0
- kernpy/core/spine_importer.py +45 -0
- kernpy/core/text_spine_importer.py +43 -0
- kernpy/core/tokenizers.py +239 -0
- kernpy/core/tokens.py +2011 -0
- kernpy/core/transposer.py +300 -0
- kernpy/io/__init__.py +14 -0
- kernpy/io/public.py +355 -0
- kernpy/polish_scores/__init__.py +13 -0
- kernpy/polish_scores/download_polish_dataset.py +357 -0
- kernpy/polish_scores/iiif.py +47 -0
- kernpy/test_grammar.sh +22 -0
- kernpy/util/__init__.py +14 -0
- kernpy/util/helpers.py +55 -0
- kernpy/util/store_cache.py +35 -0
- kernpy/visualize_analysis.sh +23 -0
- kernpy-1.0.0.dist-info/METADATA +501 -0
- kernpy-1.0.0.dist-info/RECORD +51 -0
- {kernpy-0.0.2.dist-info → kernpy-1.0.0.dist-info}/WHEEL +1 -2
- kernpy/example.py +0 -1
- kernpy-0.0.2.dist-info/LICENSE +0 -19
- kernpy-0.0.2.dist-info/METADATA +0 -19
- kernpy-0.0.2.dist-info/RECORD +0 -7
- kernpy-0.0.2.dist-info/top_level.txt +0 -1
kernpy/core/dyn_importer.py
ADDED
@@ -0,0 +1,30 @@
```python
from __future__ import annotations
from typing import Optional

from .base_antlr_spine_parser_listener import BaseANTLRSpineParserListener
from .spine_importer import SpineImporter
from .tokens import SimpleToken, TokenCategory, Token
from .kern_spine_importer import KernSpineImporter, KernSpineListener
from .dynam_spine_importer import DynamSpineImporter


class DynSpineImporter(SpineImporter):
    def __init__(self, verbose: Optional[bool] = False):
        """
        DynSpineImporter constructor.

        Args:
            verbose (Optional[bool]): Level of verbosity for error messages.
        """
        super().__init__(verbose=verbose)

    def import_listener(self) -> BaseANTLRSpineParserListener:
        return KernSpineListener()

    def import_token(self, encoding: str) -> Token:
        # TODO: Find out the differences between **dyn and **dynam and adapt this class. Using the same importer for both for now.
        dynam_importer = DynamSpineImporter()
        return dynam_importer.import_token(encoding)
```
kernpy/core/dynam_spine_importer.py
ADDED
@@ -0,0 +1,42 @@
```python
from __future__ import annotations
from typing import Optional

from .base_antlr_spine_parser_listener import BaseANTLRSpineParserListener
from .kern_spine_importer import KernSpineImporter, KernSpineListener
from .spine_importer import SpineImporter
from .tokens import Token, TokenCategory, SimpleToken


class DynamSpineImporter(SpineImporter):
    def __init__(self, verbose: Optional[bool] = False):
        """
        DynamSpineImporter constructor.

        Args:
            verbose (Optional[bool]): Level of verbosity for error messages.
        """
        super().__init__(verbose=verbose)

    def import_listener(self) -> BaseANTLRSpineParserListener:
        return KernSpineListener()  # TODO: Create a custom functional listener for DynamSpineImporter

    def import_token(self, encoding: str) -> Token:
        self._raise_error_if_wrong_input(encoding)

        kern_spine_importer = KernSpineImporter()
        token = kern_spine_importer.import_token(encoding)

        ACCEPTED_CATEGORIES = {
            TokenCategory.STRUCTURAL,
            TokenCategory.SIGNATURES,
            TokenCategory.EMPTY,
            TokenCategory.IMAGE_ANNOTATIONS,
            TokenCategory.BARLINES,
            TokenCategory.COMMENTS,
        }

        if not any(TokenCategory.is_child(child=token.category, parent=cat) for cat in ACCEPTED_CATEGORIES):
            return SimpleToken(encoding, TokenCategory.DYNAMICS)

        return token
```
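The fallback at the end of `import_token` is the point of this importer: structural content (barlines, signatures, comments, ...) keeps whatever category the kern parser assigned, and everything else is re-labelled as a dynamics token. A minimal usage sketch, assuming the package layout from the file list above and that the underlying kern importer degrades gracefully on non-kern input, as the category check suggests:

```python
from kernpy.core import TokenCategory
from kernpy.core.dynam_spine_importer import DynamSpineImporter

importer = DynamSpineImporter()

# '=1' is a barline: BARLINES is an accepted category, so the kern token is returned unchanged.
barline_token = importer.import_token('=1')
print(barline_token.category)

# 'pp' is a dynamics marking: it does not fall under ACCEPTED_CATEGORIES, so it should come
# back as a SimpleToken tagged TokenCategory.DYNAMICS.
dynamics_token = importer.import_token('pp')
print(dynamics_token.category)  # expected: TokenCategory.DYNAMICS
```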
kernpy/core/error_listener.py
ADDED
@@ -0,0 +1,51 @@
```python
from typing import Optional

from antlr4.error.ErrorListener import ConsoleErrorListener


class ParseError:
    def __init__(self, offendingSymbol, charPositionInLine, msg, exception):
        self.offendingSymbol = offendingSymbol
        self.charPositionInLine = charPositionInLine
        self.msg = msg
        self.exception = exception

    def __str__(self):
        return f"({self.charPositionInLine}): {self.msg}"

    def getOffendingSymbol(self):
        return self.offendingSymbol

    def getCharPositionInLine(self):
        return self.charPositionInLine

    def getMsg(self):
        return self.msg


class ErrorListener(ConsoleErrorListener):
    def __init__(self, *, verbose: Optional[bool] = False):
        """
        ErrorListener constructor.
        Args:
            verbose (bool): If True, the error messages will be printed to the console using \
                the `ConsoleErrorListener` interface.
        """
        super().__init__()
        self.errors = []
        self.verbose = verbose

    def syntaxError(self, recognizer, offendingSymbol, line, charPositionInLine, msg, e):
        if self.verbose:
            super().syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e)

        self.errors.append(ParseError(offendingSymbol, charPositionInLine, msg, e))

    def getNumberErrorsFound(self):
        return len(self.errors)

    def __str__(self):
        sb = ""
        for error in self.errors:
            sb += str(error) + "\n"
        return sb
```
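The listener collects `ParseError` objects instead of only printing them, so callers can inspect parse problems after the fact. A sketch of how it could be attached to the ANTLR-generated lexer and parser shipped under `kernpy/core/generated/` (the generated class names follow the usual ANTLR convention; the `start()` entry rule is an assumption, use the grammar's actual start rule):

```python
from antlr4 import InputStream, CommonTokenStream

from kernpy.core.error_listener import ErrorListener
from kernpy.core.generated.kernSpineLexer import kernSpineLexer
from kernpy.core.generated.kernSpineParser import kernSpineParser

listener = ErrorListener()  # verbose=False: collect errors silently

lexer = kernSpineLexer(InputStream('4cc#'))
lexer.removeErrorListeners()
lexer.addErrorListener(listener)

parser = kernSpineParser(CommonTokenStream(lexer))
parser.removeErrorListeners()
parser.addErrorListener(listener)
tree = parser.start()  # hypothetical entry rule name

if listener.getNumberErrorsFound() > 0:
    print(listener)  # one "(column): message" line per ParseError
```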
kernpy/core/exporter.py
ADDED
@@ -0,0 +1,535 @@
```python
from __future__ import annotations

from copy import deepcopy
from enum import Enum
from typing import Optional
from collections.abc import Sequence
from abc import ABC, abstractmethod

from kernpy.core import Document, SpineOperationToken, HeaderToken, Importer, TokenCategory, InstrumentToken, \
    TOKEN_SEPARATOR, DECORATION_SEPARATOR, Token, NoteRestToken, HEADERS, BEKERN_CATEGORIES, ComplexToken
from kernpy.core.tokenizers import Encoding, TokenizerFactory, Tokenizer


class ExportOptions:
    """
    `ExportOptions` class.

    Store the options to export a **kern file.
    """

    def __init__(
            self,
            spine_types: [] = None,
            token_categories: [] = None,
            from_measure: int = None,
            to_measure: int = None,
            kern_type: Encoding = Encoding.normalizedKern,
            instruments: [] = None,
            show_measure_numbers: bool = False,
            spine_ids: [int] = None
    ):
        """
        Create a new ExportOptions object.

        Args:
            spine_types (Iterable): **kern, **mens, etc...
            token_categories (Iterable): TokenCategory
            from_measure (int): The measure to start exporting from. When None, the exporter starts from the beginning of the file. The first measure is 1.
            to_measure (int): The measure to stop exporting at. When None, the exporter ends at the end of the file.
            kern_type (Encoding): The type of the kern file to export.
            instruments (Iterable): The instruments to export. When None, all the instruments are exported.
            show_measure_numbers (bool): Show the measure numbers in the exported file.
            spine_ids (Iterable): The ids of the spines to export. When None, all the spines are exported. Spine ids start from 0 and increase by 1.

        Example:
            >>> import kernpy

            Create the importer and read the file
            >>> hi = Importer()
            >>> document = hi.import_file('file.krn')
            >>> exporter = Exporter()

            Export the file with the specified options
            >>> options = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES)
            >>> exported_data = exporter.export_string(document, options)

            Export only the lyrics
            >>> options = ExportOptions(spine_types=['**kern'], token_categories=[TokenCategory.LYRICS])
            >>> exported_data = exporter.export_string(document, options)

            Export the comments
            >>> options = ExportOptions(spine_types=['**kern'], token_categories=[TokenCategory.LINE_COMMENTS, TokenCategory.FIELD_COMMENTS])
            >>> exported_data = exporter.export_string(document, options)

            Export using the eKern version
            >>> options = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES, kern_type=Encoding.eKern)
            >>> exported_data = exporter.export_string(document, options)

        """
        self.spine_types = spine_types if spine_types is not None else deepcopy(HEADERS)
        self.from_measure = from_measure
        self.to_measure = to_measure
        self.token_categories = token_categories if token_categories is not None else [c for c in TokenCategory]
        self.kern_type = kern_type
        self.instruments = instruments
        self.show_measure_numbers = show_measure_numbers
        self.spine_ids = spine_ids  # When exporting, if spine_ids=None all the spines will be exported.

    def __eq__(self, other: 'ExportOptions') -> bool:
        """
        Compare two ExportOptions objects.

        Args:
            other: The other ExportOptions object to compare.

        Returns (bool):
            True if the objects are equal, False otherwise.

        Examples:
            >>> options1 = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES)
            >>> options2 = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES)
            >>> options1 == options2
            True

            >>> options3 = ExportOptions(spine_types=['**kern', '**harm'], token_categories=BEKERN_CATEGORIES)
            >>> options1 == options3
            False
        """
        return self.spine_types == other.spine_types and \
            self.token_categories == other.token_categories and \
            self.from_measure == other.from_measure and \
            self.to_measure == other.to_measure and \
            self.kern_type == other.kern_type and \
            self.instruments == other.instruments and \
            self.show_measure_numbers == other.show_measure_numbers and \
            self.spine_ids == other.spine_ids

    def __ne__(self, other: 'ExportOptions') -> bool:
        """
        Compare two ExportOptions objects.

        Args:
            other (ExportOptions): The other ExportOptions object to compare.

        Returns (bool):
            True if the objects are not equal, False otherwise.

        Examples:
            >>> options1 = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES)
            >>> options2 = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES)
            >>> options1 != options2
            False

            >>> options3 = ExportOptions(spine_types=['**kern', '**harm'], token_categories=BEKERN_CATEGORIES)
            >>> options1 != options3
            True
        """
        return not self.__eq__(other)

    @classmethod
    def default(cls):
        return cls(
            spine_types=deepcopy(HEADERS),
            token_categories=[c for c in TokenCategory],
            from_measure=None,
            to_measure=None,
            kern_type=Encoding.normalizedKern,
            instruments=None,
            show_measure_numbers=False,
            spine_ids=None
        )


def empty_row(row):
    for col in row:
        if col != '.' and col != '' and col != '*':
            return False
    return True


class HeaderTokenGenerator:
    """
    HeaderTokenGenerator class.

    This class is used to translate HeaderTokens to the specific tokenizer format.
    """
    @classmethod
    def new(cls, *, token: HeaderToken, type: Encoding):
        """
        Create a new HeaderTokenGenerator object. Only accepts standardized Humdrum **kern encodings.

        Args:
            token (HeaderToken): The HeaderToken to be translated.
            type (Encoding): The tokenizer to be used.

        Examples:
            >>> header = HeaderToken('**kern', 0)
            >>> header.encoding
            '**kern'
            >>> new_header = HeaderTokenGenerator.new(token=header, type=Encoding.eKern)
            >>> new_header.encoding
            '**ekern'
        """
        new_encoding = f'**{type.prefix()}{token.encoding[2:]}'
        new_token = HeaderToken(new_encoding, token.spine_id)

        return new_token
```
```python
class Exporter:
    def export_string(self, document: Document, options: ExportOptions) -> str:
        self.export_options_validator(document, options)

        rows = []

        if options.to_measure is not None and options.to_measure < len(document.measure_start_tree_stages):
            if options.to_measure < len(document.measure_start_tree_stages) - 1:
                to_stage = document.measure_start_tree_stages[
                    options.to_measure]  # take the barlines from the next coming measure
            else:
                to_stage = len(document.tree.stages) - 1  # all stages
        else:
            to_stage = len(document.tree.stages) - 1  # all stages

        if options.from_measure:
            # In case of beginning not from the first measure, we recover the spine creation and the headers
            # Traversed in reverse order to only include the active spines at the given measure...
            from_stage = document.measure_start_tree_stages[options.from_measure - 1]
            next_nodes = document.tree.stages[from_stage]
            while next_nodes and len(next_nodes) > 0 and next_nodes[0] != document.tree.root:
                row = []
                new_next_nodes = []
                non_place_holder_in_row = False
                spine_operation_row = False
                for node in next_nodes:
                    if isinstance(node.token, SpineOperationToken):
                        spine_operation_row = True
                        break

                for node in next_nodes:
                    content = ''
                    if isinstance(node.token, HeaderToken) and node.token.encoding in options.spine_types:
                        content = self.export_token(node.token, options)
                        non_place_holder_in_row = True
                    elif spine_operation_row:
                        # either if it is the split operator that has been cancelled, or the join one
                        if isinstance(node.token, SpineOperationToken) and (node.token.is_cancelled_at(
                                from_stage) or node.last_spine_operator_node and node.last_spine_operator_node.token.cancelled_at_stage == node.stage):
                            content = '*'
                        else:
                            content = self.export_token(node.token, options)
                            non_place_holder_in_row = True
                    if content:
                        row.append(content)
                    new_next_nodes.append(node.parent)
                next_nodes = new_next_nodes
                if non_place_holder_in_row:  # if the row contains just placeholders due to an omitted placeholder, don't add it
                    rows.insert(0, row)

            # now, export the signatures
            node_signatures = None
            for node in document.tree.stages[from_stage]:
                node_signature_rows = []
                for signature_node in node.last_signature_nodes.nodes.values():
                    if not self.is_signature_cancelled(signature_node, node, from_stage, to_stage):
                        node_signature_rows.append(self.export_token(signature_node.token, options))
                if len(node_signature_rows) > 0:
                    if not node_signatures:
                        node_signatures = []  # an array for each spine
                    else:
                        if len(node_signatures[0]) != len(node_signature_rows):
                            raise Exception(f'Node signature mismatch: multiple spines with signatures at measure {len(rows)}')  # TODO better message
                    node_signatures.append(node_signature_rows)

            if node_signatures:
                for irow in range(len(node_signatures[0])):  # all spines have the same number of rows
                    row = []
                    for icol in range(len(node_signatures)):  # len(node_signatures) = number of spines
                        row.append(node_signatures[icol][irow])
                    rows.append(row)

        else:
            from_stage = 0
            rows = []

        # if not node.token.category == TokenCategory.LINE_COMMENTS and not node.token.category == TokenCategory.FIELD_COMMENTS:
        for stage in range(from_stage, to_stage + 1):  # to_stage included
            row = []
            for node in document.tree.stages[stage]:
                self.append_row(document=document, node=node, options=options, row=row)

            if len(row) > 0:
                rows.append(row)

        # now, add the spine terminate row
        if options.to_measure is not None and len(rows) > 0 and rows[len(rows) - 1][
                0] != '*-':  # if the terminate is not added yet
            spine_count = len(rows[len(rows) - 1])
            row = []
            for i in range(spine_count):
                row.append('*-')
            rows.append(row)

        result = ""
        for row in rows:
            if not empty_row(row):
                result += '\t'.join(row) + '\n'
        return result

    def compute_header_type(self, node) -> Optional[HeaderToken]:
        """
        Compute the header type of the node.

        Args:
            node (Node): The node to compute.

        Returns (Optional[Token]): The header type `Node` object. None if the current node is the header.

        """
        if isinstance(node.token, HeaderToken):
            header_type = node.token
        elif node.header_node:
            header_type = node.header_node.token
        else:
            header_type = None
        return header_type

    def export_token(self, token: Token, options: ExportOptions) -> str:
        if isinstance(token, HeaderToken):
            new_token = HeaderTokenGenerator.new(token=token, type=options.kern_type)
        else:
            new_token = token
        return (TokenizerFactory
                .create(options.kern_type.value, token_categories=options.token_categories)
                .tokenize(new_token))

    def append_row(self, document: Document, node, options: ExportOptions, row: list) -> bool:
        """
        Append a row to the row list if the node meets the requirements.
        Args:
            document (Document): The document with the spines.
            node (Node): The node to append.
            options (ExportOptions): The export options to filter the token.
            row (list): The row to append.

        Returns (bool): True if the row was appended, False otherwise.
        """
        header_type = self.compute_header_type(node)

        if (header_type is not None
                and header_type.encoding in options.spine_types
                and not node.token.hidden
                and (isinstance(node.token, ComplexToken) or node.token.category in options.token_categories)
                and (options.spine_ids is None or header_type.spine_id in options.spine_ids)
                # If None, all the spines will be exported. TODO: put all the spines as spine_ids = None
        ):
            row.append(self.export_token(node.token, options))
            return True

        return False

    def get_spine_types(self, document: Document, spine_types: list = None):
        """
        Get the spine types from the document.

        Args:
            document (Document): The document with the spines.
            spine_types (list): The spine types to export. If None, all the spine types will be exported.

        Returns: A list with the spine types.

        Examples:
            >>> exporter = Exporter()
            >>> exporter.get_spine_types(document)
            ['**kern', '**kern', '**kern', '**kern', '**root', '**harm']
            >>> exporter.get_spine_types(document, None)
            ['**kern', '**kern', '**kern', '**kern', '**root', '**harm']
            >>> exporter.get_spine_types(document, ['**kern'])
            ['**kern', '**kern', '**kern', '**kern']
            >>> exporter.get_spine_types(document, ['**kern', '**root'])
            ['**kern', '**kern', '**kern', '**kern', '**root']
            >>> exporter.get_spine_types(document, ['**kern', '**root', '**harm'])
            ['**kern', '**kern', '**kern', '**kern', '**root', '**harm']
            >>> exporter.get_spine_types(document, [])
            []
        """
        if spine_types is not None and len(spine_types) == 0:
            return []

        options = ExportOptions(spine_types=spine_types, token_categories=[TokenCategory.HEADER])
        content = self.export_string(document, options)

        # Remove all after the first line: **kern, **mens, etc... are always in the first row
        lines = content.split('\n')
        first_line = lines[0:1]
        tokens = first_line[0].split('\t')

        return tokens if tokens not in [[], ['']] else []

    @classmethod
    def export_options_validator(cls, document: Document, options: ExportOptions) -> None:
        """
        Validate the export options. Raise an exception if the options are invalid.

        Args:
            document: `Document` - The document to export.
            options: `ExportOptions` - The options to export the document.

        Returns: None

        Example:
            >>> export_options_validator(document, options)
            ValueError: option from_measure must be >=0 but -1 was found.
            >>> export_options_validator(document, options2)
            None
        """
        if options.from_measure is not None and options.from_measure < 0:
            raise ValueError(f'option from_measure must be >=0 but {options.from_measure} was found. ')
        if options.to_measure is not None and options.to_measure > len(document.measure_start_tree_stages):
            # TODO: DAVID, check options.to_measure bounds: len(document.measure_start_tree_stages) or len(document.measure_start_tree_stages) - 1
            raise ValueError(
                f'option to_measure must be <= {len(document.measure_start_tree_stages)} but {options.to_measure} was found. ')
        if options.to_measure is not None and options.from_measure is not None and options.to_measure < options.from_measure:
            raise ValueError(
                f'option to_measure must be >= from_measure but {options.to_measure} < {options.from_measure} was found. ')

    def is_signature_cancelled(self, signature_node, node, from_stage, to_stage) -> bool:
        if node.token.__class__ == signature_node.token.__class__:
            return True
        elif isinstance(node.token, NoteRestToken):
            return False
        elif from_stage < to_stage:
            for child in node.children:
                if self.is_signature_cancelled(signature_node, child, from_stage + 1, to_stage):
                    return True
        return False
```
````python
def get_kern_from_ekern(ekern_content: str) -> str:
    """
    Read the content of a **ekern file and return the **kern content.

    Args:
        ekern_content: The content of the **ekern file.
    Returns:
        The content of the **kern file.

    Example:
        ```python
        # Read **ekern file
        ekern_file = 'path/to/file.ekrn'
        with open(ekern_file, 'r') as file:
            ekern_content = file.read()

        # Get **kern content
        kern_content = get_kern_from_ekern(ekern_content)
        with open('path/to/file.krn', 'w') as file:
            file.write(kern_content)
        ```
    """
    content = ekern_content.replace("**ekern", "**kern")  # TODO: use a constant derived from the headers
    content = content.replace(TOKEN_SEPARATOR, "")
    content = content.replace(DECORATION_SEPARATOR, "")

    return content


def ekern_to_krn(
        input_file: str,
        output_file: str
) -> None:
    """
    Convert one .ekrn file to a .krn file.

    Args:
        input_file (str): Filepath to the input **ekern
        output_file (str): Filepath to the output **kern
    Returns:
        None

    Example:
        # Convert .ekrn to .krn
        >>> ekern_to_krn('path/to/file.ekrn', 'path/to/file.krn')

        # Convert a list of .ekrn files to .krn files
        ```python
        ekrn_files = your_module.get_files()

        # Use the wrapper to avoid stopping the process if an error occurs
        def ekern_to_krn_wrapper(ekern_file, kern_file):
            try:
                ekern_to_krn(ekern_file, kern_file)
            except Exception as e:
                print(f'Error:{e}')

        # Convert all the files
        for ekern_file in ekrn_files:
            output_file = ekern_file.replace('.ekrn', '.krn')
            ekern_to_krn_wrapper(ekern_file, output_file)
        ```
    """
    with open(input_file, 'r') as file:
        content = file.read()

    kern_content = get_kern_from_ekern(content)

    with open(output_file, 'w') as file:
        file.write(kern_content)


def kern_to_ekern(
        input_file: str,
        output_file: str
) -> None:
    """
    Convert one .krn file to a .ekrn file.

    Args:
        input_file (str): Filepath to the input **kern
        output_file (str): Filepath to the output **ekern

    Returns:
        None

    Example:
        # Convert .krn to .ekrn
        >>> kern_to_ekern('path/to/file.krn', 'path/to/file.ekrn')

        # Convert a list of .krn files to .ekrn files
        ```python
        krn_files = your_module.get_files()

        # Use the wrapper to avoid stopping the process if an error occurs
        def kern_to_ekern_wrapper(krn_file, ekern_file):
            try:
                kern_to_ekern(krn_file, ekern_file)
            except Exception as e:
                print(f'Error:{e}')

        # Convert all the files
        for krn_file in krn_files:
            output_file = krn_file.replace('.krn', '.ekrn')
            kern_to_ekern_wrapper(krn_file, output_file)
        ```

    """
    importer = Importer()
    document = importer.import_file(input_file)

    if len(importer.errors):
        raise Exception(f'ERROR: {input_file} has errors {importer.get_error_messages()}')

    export_options = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES,
                                   kern_type=Encoding.eKern)
    exporter = Exporter()
    exported_ekern = exporter.export_string(document, export_options)

    with open(output_file, 'w') as file:
        file.write(exported_ekern)
````
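Taken together with the docstrings above, the intended export flow is: import a **kern file into a `Document`, configure an `ExportOptions`, and render with `Exporter.export_string`. A sketch based on that documented API ('score.krn' and 'score.ekrn' are placeholder paths):

```python
from kernpy.core import Importer, BEKERN_CATEGORIES
from kernpy.core.exporter import Exporter, ExportOptions, ekern_to_krn
from kernpy.core.tokenizers import Encoding

importer = Importer()
document = importer.import_file('score.krn')  # placeholder path

exporter = Exporter()
print(exporter.get_spine_types(document))  # e.g. ['**kern', '**kern', '**harm']

# Export measures 1-10 of the **kern spines using the bekern token categories, as eKern.
options = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES,
                        from_measure=1, to_measure=10, kern_type=Encoding.eKern)
ekern_data = exporter.export_string(document, options)

# Round trip: the module-level helper rewrites an .ekrn file back to plain **kern.
ekern_to_krn('score.ekrn', 'score_roundtrip.krn')  # placeholder paths
```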