weirdo 2.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
weirdo/data_manager.py ADDED
@@ -0,0 +1,475 @@
1
+ """Data management for WEIRDO reference data.
2
+
3
+ Handles downloading and caching of reference data files.
4
+ Data is stored in ~/.weirdo/ by default.
5
+
6
+ Example
7
+ -------
8
+ >>> from weirdo.data_manager import DataManager
9
+ >>> dm = DataManager()
10
+ >>> dm.download('swissprot-8mers') # Download reference data
11
+ >>> dm.status() # Show what's downloaded
12
+ >>> dm.clear_all() # Remove all data
13
+ """
14
+
15
+ import json
16
+ import os
17
+ import shutil
18
+ import sys
19
+ import tempfile
20
+ from datetime import datetime
21
+ from pathlib import Path
22
+ from typing import Any, Dict, List, Optional
23
+ from urllib.request import urlretrieve
24
+ from urllib.error import URLError
25
+
26
+
27
+ # Default data directory
28
+ DEFAULT_DATA_DIR = Path.home() / '.weirdo'
29
+
30
+ # Available datasets with their metadata
31
+ DATASETS = {
32
+ 'swissprot-8mers': {
33
+ 'description': 'SwissProt 8-mer reference data (~100M k-mers)',
34
+ 'url': 'https://github.com/pirl-unc/weirdo-data/releases/download/v1.0/swissprot-8mers.csv.gz',
35
+ 'filename': 'swissprot-8mers.csv',
36
+ 'compressed': True,
37
+ 'size_mb': 2500, # Compressed size
38
+ 'uncompressed_size_mb': 7500,
39
+ 'sha256': None, # Will be verified if provided
40
+ },
41
+ }
42
+
43
+
44
+
45
+ def _progress_hook(block_num: int, block_size: int, total_size: int) -> None:
46
+ """Progress callback for urlretrieve."""
47
+ if total_size > 0:
48
+ downloaded = block_num * block_size
49
+ percent = min(100, downloaded * 100 / total_size)
50
+ mb_downloaded = downloaded / (1024 * 1024)
51
+ mb_total = total_size / (1024 * 1024)
52
+ sys.stdout.write(f'\r Downloading: {percent:.1f}% ({mb_downloaded:.1f}/{mb_total:.1f} MB)')
53
+ sys.stdout.flush()
54
+ else:
55
+ downloaded = block_num * block_size
56
+ mb_downloaded = downloaded / (1024 * 1024)
57
+ sys.stdout.write(f'\r Downloading: {mb_downloaded:.1f} MB')
58
+ sys.stdout.flush()
59
+
60
+
61
class DataManager:
    """Manages WEIRDO reference data.

    Parameters
    ----------
    data_dir : str or Path, optional
        Directory for storing data. Defaults to ~/.weirdo/
    auto_download : bool, default=False
        If True, automatically download missing data when needed.
    verbose : bool, default=True
        Print status messages.

    Example
    -------
    >>> dm = DataManager()
    >>> dm.download('swissprot-8mers')
    >>> print(dm.status())
    """

    def __init__(
        self,
        data_dir: Optional[Path] = None,
        auto_download: bool = False,
        verbose: bool = True,
    ):
        self.data_dir = Path(data_dir) if data_dir else DEFAULT_DATA_DIR
        self.auto_download = auto_download
        self.verbose = verbose

        # Subdirectories / files under the data directory.
        self.downloads_dir = self.data_dir / 'downloads'
        self.metadata_file = self.data_dir / 'metadata.json'

        # Ensure directories exist before anything tries to use them.
        self._ensure_dirs()

        # Load persisted download records (may be empty).
        self._metadata = self._load_metadata()

    def _ensure_dirs(self) -> None:
        """Create data directories if they don't exist."""
        self.data_dir.mkdir(parents=True, exist_ok=True)
        self.downloads_dir.mkdir(exist_ok=True)

    def _load_metadata(self) -> Dict[str, Any]:
        """Load metadata from disk.

        A corrupt or unreadable metadata file is treated as empty rather
        than crashing the constructor; it will be rewritten on the next
        successful download.
        """
        if self.metadata_file.exists():
            try:
                with open(self.metadata_file, 'r') as f:
                    data = json.load(f)
            except (json.JSONDecodeError, OSError):
                # Corrupt/unreadable metadata: start fresh instead of failing.
                return {'downloads': {}}
            if 'downloads' not in data:
                data['downloads'] = {}
            return data
        return {'downloads': {}}

    def _save_metadata(self) -> None:
        """Save metadata to disk."""
        with open(self.metadata_file, 'w') as f:
            json.dump(self._metadata, f, indent=2, default=str)

    def _log(self, msg: str) -> None:
        """Print message if verbose."""
        if self.verbose:
            print(msg)

    # -------------------------------------------------------------------------
    # Dataset queries
    # -------------------------------------------------------------------------

    def list_available_datasets(self) -> List[str]:
        """List available datasets that can be downloaded.

        Returns
        -------
        datasets : list of str
            Names of available datasets.
        """
        return list(DATASETS.keys())

    def get_dataset_info(self, name: str) -> Dict[str, Any]:
        """Get information about a dataset.

        Parameters
        ----------
        name : str
            Dataset name.

        Returns
        -------
        info : dict
            Dataset metadata (a copy; safe for callers to mutate).

        Raises
        ------
        ValueError
            If the dataset name is unknown.
        """
        if name not in DATASETS:
            raise ValueError(f"Unknown dataset: {name}. Available: {list(DATASETS.keys())}")
        return DATASETS[name].copy()

    # -------------------------------------------------------------------------
    # Download management
    # -------------------------------------------------------------------------

    def is_downloaded(self, name: str) -> bool:
        """Check if a dataset is downloaded.

        Parameters
        ----------
        name : str
            Dataset name.

        Returns
        -------
        downloaded : bool
            True if the file exists AND a metadata record is present.

        Raises
        ------
        ValueError
            If the dataset name is unknown.
        """
        if name not in DATASETS:
            raise ValueError(f"Unknown dataset: {name}")

        filepath = self.downloads_dir / DATASETS[name]['filename']
        return filepath.exists() and name in self._metadata.get('downloads', {})

    def get_data_path(self, name: str, auto_download: Optional[bool] = None) -> Path:
        """Get path to a dataset, optionally downloading if missing.

        Parameters
        ----------
        name : str
            Dataset name.
        auto_download : bool, optional
            Override instance auto_download setting.

        Returns
        -------
        path : Path
            Path to the dataset file.

        Raises
        ------
        FileNotFoundError
            If dataset is not downloaded and auto_download is False.
        ValueError
            If the dataset name is unknown.
        """
        if name not in DATASETS:
            raise ValueError(f"Unknown dataset: {name}")

        filepath = self.downloads_dir / DATASETS[name]['filename']

        if not filepath.exists():
            should_download = auto_download if auto_download is not None else self.auto_download
            if should_download:
                self.download(name)
            else:
                raise FileNotFoundError(
                    f"Dataset '{name}' not found. Run: weirdo data download {name}\n"
                    f"Or use DataManager(auto_download=True)"
                )

        return filepath

    def _record_download(self, name: str, filepath: Path) -> None:
        """Persist a metadata record for a completed download."""
        self._metadata.setdefault('downloads', {})[name] = {
            'path': str(filepath),
            'downloaded_at': datetime.now().isoformat(),
            'size_bytes': filepath.stat().st_size,
        }
        self._save_metadata()

    def download(self, name: str, force: bool = False) -> Path:
        """Download a dataset.

        Parameters
        ----------
        name : str
            Dataset name.
        force : bool, default=False
            Re-download even if already present.

        Returns
        -------
        path : Path
            Path to downloaded file.

        Raises
        ------
        ValueError
            If the dataset name is unknown.
        RuntimeError
            If the download or decompression fails; any partial output
            file is removed so a later call can retry cleanly.
        """
        if name not in DATASETS:
            raise ValueError(f"Unknown dataset: {name}. Available: {list(DATASETS.keys())}")

        info = DATASETS[name]
        filepath = self.downloads_dir / info['filename']

        if filepath.exists() and not force:
            # Repair metadata if the file exists but its record was lost,
            # so is_downloaded() agrees with the filesystem.
            if name not in self._metadata.get('downloads', {}):
                self._record_download(name, filepath)
            self._log(f"Dataset '{name}' already downloaded at {filepath}")
            return filepath

        self._log(f"Downloading '{name}'...")
        self._log(f" Source: {info['url']}")
        self._log(f" Size: ~{info['size_mb']} MB (compressed)")

        # Download compressed archives to a temp file first so a failed
        # transfer never leaves a partial archive where the data goes.
        temp_path = None
        try:
            if info.get('compressed', False):
                temp_fd, temp_path = tempfile.mkstemp(suffix='.gz')
                os.close(temp_fd)
                urlretrieve(info['url'], temp_path, _progress_hook)
                print()  # Newline after progress

                # Decompress the archive into the final location.
                self._log(" Decompressing...")
                import gzip
                with gzip.open(temp_path, 'rb') as f_in:
                    with open(filepath, 'wb') as f_out:
                        shutil.copyfileobj(f_in, f_out)
            else:
                urlretrieve(info['url'], filepath, _progress_hook)
                print()  # Newline after progress

            self._record_download(name, filepath)
            self._log(f" Downloaded to: {filepath}")
            return filepath

        except (URLError, OSError) as e:
            # OSError also covers gzip.BadGzipFile and disk-full errors.
            # Remove any partially written output so the fragment is never
            # mistaken for a complete dataset.
            if filepath.exists():
                filepath.unlink()
            raise RuntimeError(f"Failed to download '{name}': {e}") from e
        finally:
            # The temp archive is always discarded, success or failure.
            if temp_path and os.path.exists(temp_path):
                os.unlink(temp_path)

    def download_all(self, force: bool = False) -> List[Path]:
        """Download all available datasets.

        Parameters
        ----------
        force : bool, default=False
            Re-download even if already present.

        Returns
        -------
        paths : list of Path
            Paths to downloaded files.
        """
        paths = []
        for name in DATASETS:
            paths.append(self.download(name, force=force))
        return paths

    def delete_download(self, name: str) -> bool:
        """Delete a downloaded dataset.

        Parameters
        ----------
        name : str
            Dataset name.

        Returns
        -------
        deleted : bool
            True if file was deleted.

        Raises
        ------
        ValueError
            If the dataset name is unknown.
        """
        if name not in DATASETS:
            raise ValueError(f"Unknown dataset: {name}")

        filepath = self.downloads_dir / DATASETS[name]['filename']
        deleted = False

        if filepath.exists():
            filepath.unlink()
            deleted = True
            self._log(f"Deleted: {filepath}")

        # Drop the metadata record even if the file was already gone.
        if name in self._metadata.get('downloads', {}):
            del self._metadata['downloads'][name]
            self._save_metadata()

        return deleted

    def delete_all_downloads(self) -> int:
        """Delete all downloaded datasets.

        Returns
        -------
        count : int
            Number of files deleted.
        """
        count = 0
        for name in list(DATASETS.keys()):
            if self.delete_download(name):
                count += 1
        return count

    # -------------------------------------------------------------------------
    # Status and cleanup
    # -------------------------------------------------------------------------

    def status(self) -> Dict[str, Any]:
        """Get status of all downloaded data.

        Returns
        -------
        status : dict
            Keys: 'data_dir', per-dataset 'downloads' entries,
            'total_size_bytes', and 'total_size_mb'.
        """
        status = {
            'data_dir': str(self.data_dir),
            'downloads': {},
            'total_size_bytes': 0,
        }

        for name in DATASETS:
            filepath = self.downloads_dir / DATASETS[name]['filename']
            if filepath.exists():
                size = filepath.stat().st_size
                status['downloads'][name] = {
                    'path': str(filepath),
                    'size_bytes': size,
                    'size_mb': size / (1024 * 1024),
                    'metadata': self._metadata.get('downloads', {}).get(name, {}),
                }
                status['total_size_bytes'] += size
            else:
                status['downloads'][name] = {'downloaded': False}

        status['total_size_mb'] = status['total_size_bytes'] / (1024 * 1024)
        return status

    def print_status(self) -> None:
        """Print human-readable status."""
        status = self.status()

        print(f"\nWEIRDO Data Directory: {status['data_dir']}")
        print(f"Total Disk Usage: {status['total_size_mb']:.1f} MB")

        print("\nDatasets:")
        print("-" * 70)
        for name, info in status['downloads'].items():
            ds_info = DATASETS.get(name, {})
            if info.get('downloaded', True) and 'size_mb' in info:
                meta = info.get('metadata', {})
                downloaded_at = meta.get('downloaded_at', 'unknown')[:10]  # Just date
                print(f" [x] {name}")
                print(f" {ds_info.get('description', '')}")
                print(f" Status: Downloaded ({info['size_mb']:.0f} MB, {downloaded_at})")
            else:
                print(f" [ ] {name}")
                print(f" {ds_info.get('description', '')}")
                print(f" Status: Not downloaded (~{ds_info.get('size_mb', '?')} MB compressed)")

        print()

    def clear_all(self) -> int:
        """Delete all downloaded data.

        Returns
        -------
        count : int
            Number of downloads deleted.
        """
        return self.delete_all_downloads()

    def disk_usage(self) -> int:
        """Get total disk usage in bytes.

        Returns
        -------
        bytes : int
            Total disk usage.
        """
        return self.status()['total_size_bytes']
422
+
423
+
424
# Module-level singleton; created lazily by get_data_manager().
_default_manager: Optional[DataManager] = None


def get_data_manager(
    data_dir: Optional[Path] = None,
    auto_download: bool = False,
    verbose: bool = True,
) -> DataManager:
    """Get the default DataManager instance.

    The first call constructs the singleton; later calls return it
    unchanged unless an explicit ``data_dir`` forces a rebuild.

    Parameters
    ----------
    data_dir : Path, optional
        Override data directory.
    auto_download : bool, default=False
        Enable auto-download.
    verbose : bool, default=True
        Print status messages.

    Returns
    -------
    manager : DataManager
        Data manager instance.
    """
    global _default_manager

    needs_rebuild = _default_manager is None or data_dir is not None
    if needs_rebuild:
        _default_manager = DataManager(
            data_dir=data_dir,
            auto_download=auto_download,
            verbose=verbose,
        )
    return _default_manager
459
+
460
+
461
def ensure_data_available(auto_download: bool = False) -> Path:
    """Ensure reference data is available, optionally downloading.

    Parameters
    ----------
    auto_download : bool, default=False
        Download if not present.

    Returns
    -------
    path : Path
        Path to reference data.
    """
    # Delegate to the shared singleton so repeated calls reuse one manager.
    manager = get_data_manager(auto_download=auto_download)
    return manager.get_data_path('swissprot-8mers')
weirdo/distances.py ADDED
@@ -0,0 +1,16 @@
1
+ # Licensed under the Apache License, Version 2.0 (the "License");
2
+ # you may not use this file except in compliance with the License.
3
+ # You may obtain a copy of the License at
4
+ #
5
+ # http://www.apache.org/licenses/LICENSE-2.0
6
+ #
7
+ # Unless required by applicable law or agreed to in writing, software
8
+ # distributed under the License is distributed on an "AS IS" BASIS,
9
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10
+ # See the License for the specific language governing permissions and
11
+ # limitations under the License.
12
+
13
def hamming(p1, p2):
    """Return the number of positions at which *p1* and *p2* differ.

    Sequences of unequal length are compared only up to the shorter
    one; extra trailing elements are ignored (matching the original
    truncating behavior).
    """
    # zip stops at the shorter sequence, so no explicit min(len, len)
    # is needed; the generator avoids materializing a throwaway list.
    return sum(a != b for a, b in zip(p1, p2))
16
+
@@ -0,0 +1,25 @@
1
+ A B C D E F G H I K L M N P Q R S T V W X Y Z *
2
+ A 4 0 -3 0 0 -2 0 -2 0 0 -1 1 0 -1 1 -1 1 1 1 -5 0 -4 0 -7
3
+ B 0 5 -2 5 0 -3 0 -2 -2 0 -1 -2 4 -2 -1 -2 0 0 -2 -5 -1 -3 0 -7
4
+ C -3 -2 17 -3 1 -3 -4 -5 -2 -3 0 -2 -1 -3 -2 -2 -2 -2 -2 -2 -2 -6 0 -7
5
+ D 0 5 -3 9 1 -5 -1 -2 -4 0 -1 -3 1 -1 -1 -1 0 -1 -2 -4 -1 -1 0 -7
6
+ E 0 0 1 1 6 -4 -2 0 -3 2 -1 -1 -1 1 2 -1 0 -2 -3 -1 -1 -2 5 -7
7
+ F -2 -3 -3 -5 -4 10 -3 -3 0 -1 2 -2 -1 -4 -3 -1 -1 -2 1 1 -1 3 -4 -7
8
+ G 0 0 -4 -1 -2 -3 8 -3 -1 -1 -2 -2 0 -1 -2 -2 0 -2 -3 1 -1 -3 -2 -7
9
+ H -2 -2 -5 -2 0 -3 -3 14 -2 -2 -1 2 -1 1 0 -1 -1 -2 -3 -5 -1 0 0 -7
10
+ I 0 -2 -2 -4 -3 0 -1 -2 6 -2 2 1 0 -3 -2 -3 -1 0 4 -3 0 -1 -3 -7
11
+ K 0 0 -3 0 2 -1 -1 -2 -2 4 -2 2 0 1 0 1 0 -1 -2 -2 0 -1 1 -7
12
+ L -1 -1 0 -1 -1 2 -2 -1 2 -2 4 2 -2 -3 -2 -2 -2 0 1 -2 0 3 -1 -7
13
+ M 1 -2 -2 -3 -1 -2 -2 2 1 2 2 6 0 -4 -1 0 -2 0 0 -3 0 -1 -1 -7
14
+ N 0 4 -1 1 -1 -1 0 -1 0 0 -2 0 8 -3 -1 -2 0 1 -2 -7 0 -4 -1 -7
15
+ P -1 -2 -3 -1 1 -4 -1 1 -3 1 -3 -4 -3 11 0 -1 -1 0 -4 -3 -1 -2 0 -7
16
+ Q 1 -1 -2 -1 2 -3 -2 0 -2 0 -2 -1 -1 0 8 3 -1 0 -3 -1 0 -1 4 -7
17
+ R -1 -2 -2 -1 -1 -1 -2 -1 -3 1 -2 0 -2 -1 3 8 -1 -3 -1 0 -1 0 0 -7
18
+ S 1 0 -2 0 0 -1 0 -1 -1 0 -2 -2 0 -1 -1 -1 4 2 -1 -3 0 -2 -1 -7
19
+ T 1 0 -2 -1 -2 -2 -2 -2 0 -1 0 0 1 0 0 -3 2 5 1 -5 0 -1 -1 -7
20
+ V 1 -2 -2 -2 -3 1 -3 -3 4 -2 1 0 -2 -4 -3 -1 -1 1 5 -3 0 1 -3 -7
21
+ W -5 -5 -2 -4 -1 1 1 -5 -3 -2 -2 -3 -7 -3 -1 0 -3 -5 -3 20 -2 5 -1 -7
22
+ X 0 -1 -2 -1 -1 -1 -1 -1 0 0 0 0 0 -1 0 -1 0 0 0 -2 -1 -1 0 -7
23
+ Y -4 -3 -6 -1 -2 3 -3 0 -1 -1 3 -1 -4 -2 -1 0 -2 -1 1 5 -1 9 -2 -7
24
+ Z 0 0 0 0 5 -4 -2 0 -3 1 -1 -1 -1 0 4 0 -1 -1 -3 -1 0 -2 4 -7
25
+ * -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 1
@@ -0,0 +1,21 @@
1
+ A R N D C Q E G H I L K M F P S T W Y V
2
+ A 5 -2 -1 -2 -1 -1 -1 0 -2 -1 -2 -1 -1 -3 -1 1 0 -3 -2 0
3
+ R -2 7 -1 -2 -4 1 0 -3 0 -4 -3 3 -2 -3 -3 -1 -1 -3 -1 -3
4
+ N -1 -1 7 2 -2 0 0 0 1 -3 -4 0 -2 -4 -2 1 0 -4 -2 -3
5
+ D -2 -2 2 8 -4 0 2 -1 -1 -4 -4 -1 -4 -5 -1 0 -1 -5 -3 -4
6
+ C -1 -4 -2 -4 13 -3 -3 -3 -3 -2 -2 -3 -2 -2 -4 -1 -1 -5 -3 -1
7
+ Q -1 1 0 0 -3 7 2 -2 1 -3 -2 2 0 -4 -1 0 -1 -1 -1 -3
8
+ E -1 0 0 2 -3 2 6 -3 0 -4 -3 1 -2 -3 -1 -1 -1 -3 -2 -3
9
+ G 0 -3 0 -1 -3 -2 -3 8 -2 -4 -4 -2 -3 -4 -2 0 -2 -3 -3 -4
10
+ H -2 0 1 -1 -3 1 0 -2 10 -4 -3 0 -1 -1 -2 -1 -2 -3 2 -4
11
+ I -1 -4 -3 -4 -2 -3 -4 -4 -4 5 2 -3 2 0 -3 -3 -1 -3 -1 4
12
+ L -2 -3 -4 -4 -2 -2 -3 -4 -3 2 5 -3 3 1 -4 -3 -1 -2 -1 1
13
+ K -1 3 0 -1 -3 2 1 -2 0 -3 -3 6 -2 -4 -1 0 -1 -3 -2 -3
14
+ M -1 -2 -2 -4 -2 0 -2 -3 -1 2 3 -2 7 0 -3 -2 -1 -1 0 1
15
+ F -3 -3 -4 -5 -2 -4 -3 -4 -1 0 1 -4 0 8 -4 -3 -2 1 4 -1
16
+ P -1 -3 -2 -1 -4 -1 -1 -2 -2 -3 -4 -1 -3 -4 10 -1 -1 -4 -3 -3
17
+ S 1 -1 1 0 -1 0 -1 0 -1 -3 -3 0 -2 -3 -1 5 2 -4 -2 -2
18
+ T 0 -1 0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -1 2 5 -3 -2 0
19
+ W -3 -3 -4 -5 -5 -1 -3 -3 -3 -3 -2 -3 -1 1 -4 -4 -4 15 2 -3
20
+ Y -2 -1 -2 -3 -3 -1 -2 -3 2 -1 -1 -2 0 4 -3 -2 -2 2 8 -1
21
+ V 0 -3 -3 -4 -1 -3 -3 -4 -4 4 1 -3 1 -1 -3 -2 0 -3 -1 5
@@ -0,0 +1,27 @@
1
+ # Entries for the BLOSUM62 matrix at a scale of ln(2)/2.0.
2
+ A R N D C Q E G H I L K M F P S T W Y V B J Z X *
3
+ A 4 -1 -2 -2 0 -1 -1 0 -2 -1 -1 -1 -1 -2 -1 1 0 -3 -2 0 -2 -1 -1 -1 -4
4
+ R -1 5 0 -2 -3 1 0 -2 0 -3 -2 2 -1 -3 -2 -1 -1 -3 -2 -3 -1 -2 0 -1 -4
5
+ N -2 0 6 1 -3 0 0 0 1 -3 -3 0 -2 -3 -2 1 0 -4 -2 -3 4 -3 0 -1 -4
6
+ D -2 -2 1 6 -3 0 2 -1 -1 -3 -4 -1 -3 -3 -1 0 -1 -4 -3 -3 4 -3 1 -1 -4
7
+ C 0 -3 -3 -3 9 -3 -4 -3 -3 -1 -1 -3 -1 -2 -3 -1 -1 -2 -2 -1 -3 -1 -3 -1 -4
8
+ Q -1 1 0 0 -3 5 2 -2 0 -3 -2 1 0 -3 -1 0 -1 -2 -1 -2 0 -2 4 -1 -4
9
+ E -1 0 0 2 -4 2 5 -2 0 -3 -3 1 -2 -3 -1 0 -1 -3 -2 -2 1 -3 4 -1 -4
10
+ G 0 -2 0 -1 -3 -2 -2 6 -2 -4 -4 -2 -3 -3 -2 0 -2 -2 -3 -3 -1 -4 -2 -1 -4
11
+ H -2 0 1 -1 -3 0 0 -2 8 -3 -3 -1 -2 -1 -2 -1 -2 -2 2 -3 0 -3 0 -1 -4
12
+ I -1 -3 -3 -3 -1 -3 -3 -4 -3 4 2 -3 1 0 -3 -2 -1 -3 -1 3 -3 3 -3 -1 -4
13
+ L -1 -2 -3 -4 -1 -2 -3 -4 -3 2 4 -2 2 0 -3 -2 -1 -2 -1 1 -4 3 -3 -1 -4
14
+ K -1 2 0 -1 -3 1 1 -2 -1 -3 -2 5 -1 -3 -1 0 -1 -3 -2 -2 0 -3 1 -1 -4
15
+ M -1 -1 -2 -3 -1 0 -2 -3 -2 1 2 -1 5 0 -2 -1 -1 -1 -1 1 -3 2 -1 -1 -4
16
+ F -2 -3 -3 -3 -2 -3 -3 -3 -1 0 0 -3 0 6 -4 -2 -2 1 3 -1 -3 0 -3 -1 -4
17
+ P -1 -2 -2 -1 -3 -1 -1 -2 -2 -3 -3 -1 -2 -4 7 -1 -1 -4 -3 -2 -2 -3 -1 -1 -4
18
+ S 1 -1 1 0 -1 0 0 0 -1 -2 -2 0 -1 -2 -1 4 1 -3 -2 -2 0 -2 0 -1 -4
19
+ T 0 -1 0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -1 1 5 -2 -2 0 -1 -1 -1 -1 -4
20
+ W -3 -3 -4 -4 -2 -2 -3 -2 -2 -3 -2 -3 -1 1 -4 -3 -2 11 2 -3 -4 -2 -2 -1 -4
21
+ Y -2 -2 -2 -3 -2 -1 -2 -3 2 -1 -1 -2 -1 3 -3 -2 -2 2 7 -1 -3 -1 -2 -1 -4
22
+ V 0 -3 -3 -3 -1 -2 -2 -3 -3 3 1 -2 1 -1 -2 -2 0 -3 -1 4 -3 2 -2 -1 -4
23
+ B -2 -1 4 4 -3 0 1 -1 0 -3 -4 0 -3 -3 -2 0 -1 -4 -3 -3 4 -3 0 -1 -4
24
+ J -1 -2 -3 -3 -1 -2 -3 -4 -3 3 3 -3 2 0 -3 -2 -1 -2 -1 2 -3 3 -3 -1 -4
25
+ Z -1 0 0 1 -3 4 4 -2 0 -3 -3 1 -1 -3 -1 0 -1 -2 -2 -2 0 -3 4 -1 -4
26
+ X -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -4
27
+ * -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 1
File without changes