kernpy 0.0.2__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56) hide show
  1. kernpy/__init__.py +215 -0
  2. kernpy/__main__.py +217 -0
  3. kernpy/core/__init__.py +119 -0
  4. kernpy/core/_io.py +48 -0
  5. kernpy/core/base_antlr_importer.py +61 -0
  6. kernpy/core/base_antlr_spine_parser_listener.py +196 -0
  7. kernpy/core/basic_spine_importer.py +43 -0
  8. kernpy/core/document.py +965 -0
  9. kernpy/core/dyn_importer.py +30 -0
  10. kernpy/core/dynam_spine_importer.py +42 -0
  11. kernpy/core/error_listener.py +51 -0
  12. kernpy/core/exporter.py +535 -0
  13. kernpy/core/fing_spine_importer.py +42 -0
  14. kernpy/core/generated/kernSpineLexer.interp +444 -0
  15. kernpy/core/generated/kernSpineLexer.py +535 -0
  16. kernpy/core/generated/kernSpineLexer.tokens +236 -0
  17. kernpy/core/generated/kernSpineParser.interp +425 -0
  18. kernpy/core/generated/kernSpineParser.py +9954 -0
  19. kernpy/core/generated/kernSpineParser.tokens +236 -0
  20. kernpy/core/generated/kernSpineParserListener.py +1200 -0
  21. kernpy/core/generated/kernSpineParserVisitor.py +673 -0
  22. kernpy/core/generic.py +426 -0
  23. kernpy/core/gkern.py +526 -0
  24. kernpy/core/graphviz_exporter.py +89 -0
  25. kernpy/core/harm_spine_importer.py +41 -0
  26. kernpy/core/import_humdrum_old.py +853 -0
  27. kernpy/core/importer.py +285 -0
  28. kernpy/core/importer_factory.py +43 -0
  29. kernpy/core/kern_spine_importer.py +73 -0
  30. kernpy/core/mens_spine_importer.py +23 -0
  31. kernpy/core/mhxm_spine_importer.py +44 -0
  32. kernpy/core/pitch_models.py +338 -0
  33. kernpy/core/root_spine_importer.py +58 -0
  34. kernpy/core/spine_importer.py +45 -0
  35. kernpy/core/text_spine_importer.py +43 -0
  36. kernpy/core/tokenizers.py +239 -0
  37. kernpy/core/tokens.py +2011 -0
  38. kernpy/core/transposer.py +300 -0
  39. kernpy/io/__init__.py +14 -0
  40. kernpy/io/public.py +355 -0
  41. kernpy/polish_scores/__init__.py +13 -0
  42. kernpy/polish_scores/download_polish_dataset.py +357 -0
  43. kernpy/polish_scores/iiif.py +47 -0
  44. kernpy/test_grammar.sh +22 -0
  45. kernpy/util/__init__.py +14 -0
  46. kernpy/util/helpers.py +55 -0
  47. kernpy/util/store_cache.py +35 -0
  48. kernpy/visualize_analysis.sh +23 -0
  49. kernpy-1.0.0.dist-info/METADATA +501 -0
  50. kernpy-1.0.0.dist-info/RECORD +51 -0
  51. {kernpy-0.0.2.dist-info → kernpy-1.0.0.dist-info}/WHEEL +1 -2
  52. kernpy/example.py +0 -1
  53. kernpy-0.0.2.dist-info/LICENSE +0 -19
  54. kernpy-0.0.2.dist-info/METADATA +0 -19
  55. kernpy-0.0.2.dist-info/RECORD +0 -7
  56. kernpy-0.0.2.dist-info/top_level.txt +0 -1
@@ -0,0 +1,357 @@
1
+ import sys
2
+
3
+ import requests
4
+ from PIL import Image
5
+ from io import BytesIO
6
+ import os
7
+ import json
8
+ from typing import List, Optional
9
+
10
+ from kernpy import ExportOptions, BEKERN_CATEGORIES, Importer, Exporter, Document, Encoding, read
11
+
12
+
13
# This script creates the Polish dataset from the kern files.
# It downloads both the systems and full pages

# Suffix a IIIF image id carries when it points at the full-resolution JPEG;
# stripped off to recover the base image identifier.
DEFAULT_IIIF_ID = '/full/full/0/default.jpg'

# Name of the JSON-lines index file written alongside the exported scores.
LOG_FILENAME = 'polish_index.json'
18
+
19
+
20
def get_image_urls(_manifest_url):
    """Fetch a IIIF manifest and map page labels to image identifiers.

    Returns a dict keyed by page label whose values are the IIIF image ids
    (with the default '/full/full/0/default.jpg' suffix stripped when present).
    """
    manifest = requests.get(_manifest_url).json()
    labels_to_ids = {}

    # The corpus contains two kinds of IIIF manifests: ones exposing
    # 'sequences' (Presentation API v2 style) and ones exposing 'items'.
    if 'sequences' in manifest:
        for sequence in manifest['sequences']:
            for canvas in sequence['canvases']:
                service_id = canvas['images'][0]['resource']['service']['@id']
                labels_to_ids[canvas.get('label')] = service_id
    else:
        for item in manifest.get('items', []):
            polish_labels = item.get('label').get('pl')
            if not polish_labels:
                continue
            page_label = polish_labels[0]
            if page_label == '[]':  # placeholder label, not a tagged page
                continue
            image_id = item.get('items')[0].get('items')[0].get('id', '')
            if image_id.endswith(DEFAULT_IIIF_ID):
                image_id = image_id[:-len(DEFAULT_IIIF_ID)]
            labels_to_ids[page_label] = image_id

    return labels_to_ids
49
+
50
+
51
def download_and_save_image(url, save_path):
    """Download the image at *url* and store it at *save_path* as JPEG.

    Best effort: failures are reported to stdout instead of being raised.
    """
    try:
        http_response = requests.get(url)
        http_response.raise_for_status()  # surface bad HTTP statuses as exceptions

        # Decode with Pillow, then re-encode as JPEG at the target path.
        Image.open(BytesIO(http_response.content)).save(save_path, format='JPEG')

        print(f"Image downloaded and saved to: {save_path}")
    except Exception as e:
        print(f"An error occurred: {e}")
66
+
67
+
68
def factory_get_kern_type_exporter(kern_type: str) -> Encoding:
    """
    Factory method to get the Encoding for a kern export format.

    Args:
        kern_type (str): the type of kern exporter. It can be 'krn' or 'ekrn'

    Returns:
        Encoding: the Encoding instance

    Raises:
        ValueError: if kern_type is neither 'krn' nor 'ekrn'
    """
    if kern_type == 'krn':
        return Encoding.normalizedKern
    if kern_type == 'ekrn':
        return Encoding.eKern
    # ValueError (still an Exception subclass) is the precise error for a
    # bad argument value, so existing `except Exception` callers keep working.
    raise ValueError(f'Unknown export kern type: {kern_type}')
84
+
85
+
86
def extract_and_save_measures(document, from_measure, to_measure, krn_path, exporter_kern_type='ekrn'):
    """Export measures from_measure..to_measure of *document* and write them to *krn_path*."""
    encoding = factory_get_kern_type_exporter(exporter_kern_type)
    options = ExportOptions(spine_types=['**kern'], token_categories=BEKERN_CATEGORIES, kern_type=encoding)
    options.from_measure = from_measure
    options.to_measure = to_measure
    content = Exporter().export_string(document, options)
    with open(krn_path, "w") as f:
        f.write(content)
95
+
96
+
97
def download_and_save_page_images(document, _output_path, map_page_label_iiif_ids, page_bounding_boxes, log_filename, exporter_kern_type='ekrn'):
    """For each page with a bounding box: download the image crop, export its
    measures to a kern file, and append an index entry to the log.

    Raises:
        Exception: when a page label has no matching IIIF id in the manifest map.
    """
    print(f'Bounding boxes {page_bounding_boxes}')

    for page_label, bounding_box_measure in page_bounding_boxes.items():
        page_iiif_id = map_page_label_iiif_ids.get(page_label)
        if page_iiif_id is None and page_label.startswith('#'):  # sometimes it's wrongly tagged without the #
            page_iiif_id = map_page_label_iiif_ids.get(page_label[1:])

        if page_iiif_id is None:
            raise Exception(f'Cannot find IIIF id for page with label "{page_label}"')

        bounding_box = bounding_box_measure.bounding_box
        print(f"Page: {page_label}, "
              f"Bounding box: {bounding_box}, "
              f"ID: {page_iiif_id}, "
              f"from bar {bounding_box_measure.from_measure}, "
              f"to bar {bounding_box_measure.to_measure}")
        url = os.path.join(page_iiif_id, bounding_box.xywh(), 'full', '0', 'default.jpg')
        print(url)
        download_and_save_image(url, os.path.join(_output_path, page_label + ".jpg"))
        krn_path = os.path.join(_output_path, page_label + f'.{exporter_kern_type}')
        # NOTE(review): exports up to to_measure - 1 — presumably to_measure
        # is exclusive in the bounding-box annotation; confirm upstream.
        extract_and_save_measures(document, bounding_box_measure.from_measure,
                                  bounding_box_measure.to_measure - 1,
                                  krn_path, exporter_kern_type=exporter_kern_type)
        add_log(document, krn_path, log_filename=log_filename)
122
+
123
+
124
def findIIIFIds(document):
    """Locate the '!!!IIIF:' metacomment in *document* and fetch its page map.

    Returns the label->image-id dict produced by get_image_urls for the
    manifest URL found in the metacomment.
    Raises Exception when no IIIF metacomment exists.
    """
    tag = "!!!IIIF:"
    for comment in document.get_metacomments():
        if not comment.startswith(tag):
            continue
        manifest_url = comment[len(tag):].strip()
        print(f'Reading IIIF manifest from {manifest_url}')
        return get_image_urls(manifest_url)
    raise Exception('Cannot find any IIIF metacomment')
133
+
134
+
135
def is_valid_document(document, kern_spines_filter) -> bool:
    """Tell whether *document* has exactly *kern_spines_filter* **kern spines.

    A filter of None accepts every document.
    """
    if kern_spines_filter is None:
        return True

    spines = Exporter().get_spine_types(document, spine_types=['**kern'])
    return len(spines) == int(kern_spines_filter)
142
+
143
+
144
def convert_and_download_file(input_kern, _output_path, log_filename, kern_spines_filter: int = None, exporter_kern_type='ekrn') -> None:
    """Parse one kern file, then download its page images and per-page exports.

    Files that fail to parse raise; files rejected by the spine filter are
    skipped silently.
    """
    document, errors = read(input_kern)
    if errors:
        print(f'ERRORS when kernpy.read:{input_kern} has errors {errors}\nContinue...', file=sys.stderr)
        raise Exception(f'ERRORS when kernpy.read: {input_kern}. Has errors: {errors}')

    if not is_valid_document(document, kern_spines_filter):
        return  # wrong number of **kern spines: skip quietly

    page_map = findIIIFIds(document)
    download_and_save_page_images(document, _output_path, page_map, document.page_bounding_boxes,
                                  log_filename=log_filename, exporter_kern_type=exporter_kern_type)
156
+
157
+
158
def search_files_with_string(root_folder, target_string):
    """Return paths (relative to *root_folder*) of .krn files whose text
    contains *target_string*. Unreadable files are reported and skipped."""
    matches = []

    for dirpath, _dirnames, filenames in os.walk(root_folder):
        for name in filenames:
            if not name.endswith('.krn'):
                continue
            full_path = os.path.join(dirpath, name)
            try:
                with open(full_path, 'r', encoding='utf-8') as handle:
                    if target_string in handle.read():
                        matches.append(os.path.relpath(full_path, root_folder))
            except Exception as e:
                print(f"Error reading {full_path}: {e}")

    return matches
175
+
176
+
177
def remove_extension(file_name):
    """Strip the final extension from *file_name* (e.g. 'a/b.krn' -> 'a/b')."""
    return os.path.splitext(file_name)[0]
181
+
182
+
183
def add_log(document: Document, path, log_filename) -> None:
    """Append a JSON-lines index entry describing *document* to *log_filename*.

    The entry records the export path, publication date (with two fallback
    heuristics based on the composer's life dates), IIIF link, measure count,
    composer metadata and instrumentation. Any failure is printed and
    swallowed so one bad file does not abort a batch run.
    """
    try:
        def get_instruments(line):
            # Parse an AIN metacomment body: each numeric token N is followed
            # by an instrument name that is repeated N times.
            words = line.split(' ')
            instruments = []
            for i in range(len(words)):
                if words[i].isnumeric():
                    instruments.extend([words[i + 1]] * int(words[i]))
            return instruments

        def get_publish_date(line):
            # Keep only digits from the PDT metacomment; 0 means "no date".
            if line is None or line == '':
                return 0

            clean_line = [char for char in line if char.isnumeric()]
            return int(''.join(clean_line))

        def round_publication_year(original_composer_date):
            # Fallback 1: estimate from 'birth-death' life dates; -1 on failure.
            try:
                if original_composer_date is None:
                    return 0
                start_date, end_date = original_composer_date.split('-')

                start_year = int(start_date.split('/')[0])
                end_year = int(end_date.split('/')[0])

                RATIO = 0.7  # date where the composer was most active
                return int(start_year + (end_year - start_year) * RATIO)
            except Exception as e:
                return -1

        def round_publication_year_v2(original_composer_date):
            # Fallback 2: scan the date string for any 4-digit year;
            # -3 when none found, -2 on parse failure.
            def flatten(xss):
                return [x for xs in xss for x in xs]

            try:
                items_date = original_composer_date.split('/')
                clean_items = [item.replace(' ', '') for item in items_date]
                clean_items = [item.replace('~', '') for item in clean_items]
                split_again = [item.split('-') for item in clean_items]
                flatten_items = flatten(split_again)
                useful_items = [item for item in flatten_items if item.isnumeric()]
                year_items = [int(item) for item in useful_items if len(item) == 4]
                return int(year_items[0]) if len(year_items) > 0 else -3
            except Exception as e:
                return -2

        # Collect metadata from the document's metacomments; missing tags
        # become None / 0 / [] as appropriate.
        info = {
            'path': path,
            'publication_date': get_publish_date(document.get_metacomments('PDT')[0]) if document.get_metacomments(
                'PDT') else None,
            'original_publication_date_tag': True,
            'iiif': document.get_metacomments('IIIF')[0] if document.get_metacomments('IIIF') else None,
            'n_measures': len(document.tree.stages),
            'composer': document.get_metacomments('COM')[0] if document.get_metacomments('COM') else None,
            'composer_dates': document.get_metacomments('CDT')[0] if document.get_metacomments('CDT') else None,
            'tempo': document.get_metacomments('OTL')[0] if document.get_metacomments('OTL') else None,
            'piece_title': document.get_metacomments('OPR')[0] if document.get_metacomments('OPR') else None,
            'segment': document.get_metacomments('SEGMENT')[0] if document.get_metacomments('SEGMENT') else None,
            'n_voices': len(get_instruments(document.get_metacomments('AIN')[0])) if document.get_metacomments(
                'AIN') else 0,
            'instruments': get_instruments(document.get_metacomments('AIN')[0]) if document.get_metacomments(
                'AIN') else [],
            'unique_instruments': [
                *set(get_instruments(document.get_metacomments('AIN')[0]))] if document.get_metacomments('AIN') else [],
        }

        # Sentinel values (0/1/-1/-2) or None mean "no usable date yet":
        # try the two fallback estimators in turn and mark the date as derived.
        if info['publication_date'] in (0, 1, -1, -2) or info['publication_date'] is None:
            info['publication_date'] = round_publication_year(info['composer_dates'])
            info['original_publication_date_tag'] = False

        if info['publication_date'] in (0, 1, -1, -2) or info['publication_date'] is None:
            info['publication_date'] = round_publication_year_v2(info['composer_dates'])
            info['original_publication_date_tag'] = False

        # One JSON object per line (JSON-lines format).
        with open(log_filename, 'a') as f:
            json.dump(info, f)
            f.write('\n')
    except Exception as e:
        print(f"Error adding log:{path}:{e}")
263
+
264
+
265
def remove_empty_dirs(directory):
    """Remove every empty directory below *directory* (the root itself is kept).

    Walks bottom-up (topdown=False) so a directory whose only contents were
    empty subdirectories is removed as well once its children are gone; the
    original top-down walk left such parents behind.
    """
    for root, dirs, _files in os.walk(directory, topdown=False):
        for name in dirs:
            full_dir = os.path.join(root, name)
            if not os.listdir(full_dir):  # empty -> safe to delete
                os.rmdir(full_dir)
271
+
272
+
273
def store_error_log(filename, msg: dict):
    """Append *msg* as a one-line JSON record to *filename*."""
    with open(filename, encoding='utf-8', mode='a') as log_file:
        log_file.write(json.dumps(msg) + '\n')
276
+
277
+
278
def main(
        input_directory: str,
        output_directory: str,
        remove_empty_directories: Optional[bool] = True,
        kern_spines_filter: Optional[int] = 2,
        exporter_kern_type: Optional[str] = 'ekrn'
) -> None:
    """
    Process the files in the input_directory and save the results in the output_directory.
    http requests are made to download the images.

    Args:
        input_directory (str): directory where the input files are found
        output_directory (str): directory where the output files are saved
        remove_empty_directories (Optional[bool]): remove empty directories when finish processing the files
        kern_spines_filter (Optional[int]): Only process files with the number of **kern spines specified.\
            Use it to export 2-voice files. Default is 2.\
            Use None to process all files.
        exporter_kern_type (Optional[str]): the type of kern exporter. It can be 'krn' or 'ekrn'.
            Default is 'ekrn'. (The previous default 'ekern' was rejected by
            factory_get_kern_type_exporter, so every file failed by default.)

    Returns:
        None

    Examples:
        >>> main('/kern_files', '/output_ekern')
        None

        >>> main('/kern_files', '/output_ekern', remove_empty_directories=False)
        None

        >>> main('/kern_files', '/output_ekern', kern_spines_filter=2, remove_empty_directories=False)
        None

        >>> main('/kern_files', '/output_ekern', kern_spines_filter=None, remove_empty_directories=False)
        None

        >>> main('/kern_files', '/output_ekern', exporter_kern_type='krn', remove_empty_directories=True)
        None

        >>> main('/kern_files', '/output_ekern', exporter_kern_type='ekrn', remove_empty_directories=True, kern_spines_filter=2)
        None
    """
    print(f'Processing files in {input_directory} and saving to {output_directory}')
    # Only kern files with bounding-box annotations ('xywh') can be exported.
    kern_with_bboxes = search_files_with_string(input_directory, 'xywh')
    ok_files = []
    ko_files = []
    log_file = os.path.join(output_directory, LOG_FILENAME)
    print(f"{25*'='}"
          f"\nProcessing {len(kern_with_bboxes)} files."
          f"\nLog will be saved in {log_file}."
          f"\n{25*'='}")
    for kern in kern_with_bboxes:
        try:
            filename = remove_extension(kern)
            kern_path = os.path.join(input_directory, kern)
            output_kern_path = os.path.join(output_directory, filename)
            if not os.path.exists(output_kern_path):
                os.makedirs(output_kern_path)
            convert_and_download_file(kern_path, output_kern_path, log_filename=log_file, kern_spines_filter=kern_spines_filter, exporter_kern_type=exporter_kern_type)
            ok_files.append(kern)
        except Exception as error:
            # One bad file must not abort the batch: record it and continue.
            ko_files.append(kern)
            print(f'Errors in {kern}: {error}')
            store_error_log(os.path.join(output_directory, 'errors.json'), {'kern': kern, 'error': str(error)})

    if remove_empty_directories:
        remove_empty_dirs(output_directory)

    print(f'----> OK files #{len(ok_files)}')
    print(f'----> KO files #{len(ko_files)}')
    print(ko_files)
352
+
353
+
354
if __name__ == '__main__':
    # This module is not a standalone script: direct users to the CLI entry
    # point and exit with a non-zero status.
    print(f'Usage: python -m kernpy --polish --input_directory /path/to/input --output_directory /path/to/output')
    sys.exit(1)
357
+
@@ -0,0 +1,47 @@
1
+ import requests
2
+ from PIL import Image
3
+ from io import BytesIO
4
+ import os
5
+
6
+
7
def download_and_save_image(url, save_path):
    """Fetch the image at *url* and write it to *save_path* as a JPEG.

    Errors are printed rather than propagated.
    """
    try:
        reply = requests.get(url)
        reply.raise_for_status()  # turn HTTP-level failures into exceptions

        picture = Image.open(BytesIO(reply.content))
        picture.save(save_path, format='JPEG')

        print(f"Image downloaded and saved to: {save_path}")
    except Exception as e:
        print(f"An error occurred: {e}")
22
+
23
+
24
def get_image_urls(_manifest_url):
    """Print the label and image URL of every labelled page in a IIIF manifest.

    Diagnostic helper: output goes to stdout and nothing is returned.
    """
    manifest_data = requests.get(_manifest_url).json()

    items = manifest_data.get('items', [])
    print(f'Items: ', len(items))

    for item in items:
        polish_labels = item.get('label').get('pl')
        if not polish_labels:
            continue
        page_num = polish_labels[0]
        if page_num == '[]':  # placeholder label, not a real page number
            continue
        url = item.get('items')[0].get('items')[0].get('id', '')
        print(f'Page #{page_num}, {url}')
42
+
43
+
44
if __name__ == "__main__":
    # Manual smoke test: lists the pages of one known manifest.
    # Replace the manifest_url with the actual URL of your IIIF manifest
    manifest_url = "https://polona2.pl/iiif/item/MTk4NjI5Mw/manifest.json"
    get_image_urls(manifest_url)
kernpy/test_grammar.sh ADDED
@@ -0,0 +1,22 @@
1
#!/bin/bash
# Regenerate the kern ANTLR lexer/parser in a scratch directory and run the
# ANTLR TestRig over the given input files (console output, no GUI).
JAR=antlr-4.13.0-complete.jar
TMP=/tmp/kerntest
if [ $# -lt 1 ]
then
echo "Use: <input files>"
exit 1
fi

mkdir ${TMP} 2> /dev/null
cp kern/*g4 ${TMP}

# Copy the ANTLR jar into the scratch directory only once.
if [ ! -f ${TMP}/${JAR} ]; then
cp ${JAR} ${TMP}
fi

cd ${TMP}
# Generate lexer and parser sources, then compile them.
java -cp antlr-4.13.0-complete.jar org.antlr.v4.Tool kernLexer.g4
java -cp antlr-4.13.0-complete.jar org.antlr.v4.Tool kernParser.g4
javac -cp antlr-4.13.0-complete.jar *java
cd -
# Parse each input starting at rule 'start'.
java -cp ${TMP}:antlr-4.13.0-complete.jar org.antlr.v4.gui.TestRig kern start $*
@@ -0,0 +1,14 @@
1
"""
kernpy.util

=====

This module contains utility functions for the kernpy package.

"""
# Re-export the caching helpers; StoreCache is the public entry point.
# NOTE(review): kernpy/util/helpers.py (deprecation decorators) is not
# re-exported here — confirm whether that is intentional.
from .store_cache import *

__all__ = [
    'StoreCache'
]
14
+
kernpy/util/helpers.py ADDED
@@ -0,0 +1,55 @@
1
+ import warnings
2
+ import functools
3
+
4
+
5
def deprecated(reason: str):
    """
    Decorator to mark a function or class as deprecated.

    A DeprecationWarning is emitted every time the wrapped callable runs.

    Args:
        reason (str): The reason why the function/class is deprecated.

    Example:
        @deprecated("Use new_function instead.")
        def old_function():
            pass
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            message = f"'{func.__name__}' is deprecated: {reason}"
            # stacklevel=2 points the warning at the caller, not this wrapper.
            warnings.warn(message, category=DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
    return decorator
28
+
29
+
30
def deprecated_argument(arg_name: str, reason: str):
    """
    Decorator to mark a specific argument as deprecated.

    Warns only when the wrapped function is invoked with *arg_name* passed
    as a keyword argument.

    Args:
        arg_name (str): The deprecated argument name.
        reason (str): The reason why the argument is deprecated.

    Example:
        @deprecated_argument("old_param", "Use 'new_param' instead.")
        def my_function(new_param, old_param=None):
            pass
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if arg_name in kwargs:
                note = f"Argument '{arg_name}' in '{func.__name__}' is deprecated: {reason}"
                # stacklevel=2 attributes the warning to the caller.
                warnings.warn(note, category=DeprecationWarning, stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
    return decorator
55
+
@@ -0,0 +1,35 @@
1
class StoreCache:
    """
    A simple memoizing cache: each distinct request is computed once via a
    callback and the result is remembered for later lookups.
    """

    def __init__(self):
        """
        Constructor. Starts with an empty cache.
        """
        # request -> previously computed result
        self.memory = {}

    def request(self, callback, request):
        """
        Return the value for *request*, computing it with *callback* on the
        first miss and serving it from the cache afterwards.

        Args:
            callback (function): Called as callback(request) to produce the value
            request (any): The key passed to the callback; must be hashable

        Returns (any): The value that was requested

        Examples:
            >>> def add_five(x):
            ...     return x + 5
            >>> store_cache = StoreCache()
            >>> store_cache.request(add_five, 5)  # computed via the callback
            10
            >>> store_cache.request(add_five, 5)  # served from the cache
            10
        """
        try:
            return self.memory[request]
        except KeyError:
            value = callback(request)
            self.memory[request] = value
            return value
+
@@ -0,0 +1,23 @@
1
#!/bin/bash
# Regenerate the kern ANTLR lexer/parser in a scratch directory and open the
# ANTLR TestRig GUI on a single input file.
JAR=antlr-4.13.0-complete.jar
TMP=/tmp/kerntest
if [ $# -lt 1 ]
then
echo "Use: <input file>"
exit 1
fi


mkdir ${TMP} 2> /dev/null
cp kern/*g4 ${TMP}

# Copy the ANTLR jar into the scratch directory only once.
if [ ! -f ${TMP}/${JAR} ]; then
cp ${JAR} ${TMP}
fi

cd ${TMP}
# Generate lexer and parser sources, then compile them.
java -cp antlr-4.13.0-complete.jar org.antlr.v4.Tool kernLexer.g4
java -cp antlr-4.13.0-complete.jar org.antlr.v4.Tool kernParser.g4
javac -cp antlr-4.13.0-complete.jar *java
cd -
# Parse the first argument starting at rule 'start' and show the parse tree GUI.
java -cp ${TMP}:antlr-4.13.0-complete.jar org.antlr.v4.gui.TestRig kern start -gui $1