wolof-translate 0.0.2__tar.gz → 0.0.4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/PKG-INFO +1 -1
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/setup.py +1 -1
- wolof_translate-0.0.4/wolof_translate/utils/bucket_iterator.py +243 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate.egg-info/PKG-INFO +1 -1
- wolof_translate-0.0.2/wolof_translate/utils/bucket_iterator.py +0 -138
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/setup.cfg +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__pycache__/dataset_v1.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__pycache__/sent_transformers.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/dataset_v1.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/dataset_v2.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/dataset_v3.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/dataset_v3.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/dataset_v4.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/__pycache__/dataset_v4.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/dataset_v1.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/dataset_v2.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/dataset_v3.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/dataset_v3_2.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/dataset_v4.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/data/dataset_v5.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/french/tfidfaug_w2idf.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/french/tfidfaug_w2tfidf.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/main.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/main.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/optimization.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/optimization.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/position.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/position.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/size.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__pycache__/size.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/main.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/main_2.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/optimization.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/position.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/size.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/wolof/tfidfaug_w2idf.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/wolof/tfidfaug_w2tfidf.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/pipe/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/pipe/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/pipe/__pycache__/nlp_pipeline.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/pipe/nlp_pipeline.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/adverse_tokenizer.json +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/bart_tokenizers/tokenizer_v3.json +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/bart_tokenizers/tokenizer_v3_2.json +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/bart_tokenizers/tokenizer_v5.json +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v3.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v3.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v4.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v4.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v5.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v5.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v6.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v6.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v7.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v7.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v8.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v8.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v9.model +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers/tokenizer_v9.vocab +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers.zip +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/tokenizer_v1.json +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/tokenizer_v3_2.json +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/trax/sentencepiece_tokenizer_v4.subwords +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/trax/sentencepiece_tokenizer_v5.subwords +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/trax/sentencepiece_tokenizer_v6.subwords +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/trax/sentencepiece_tokenizer_v7.subwords +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/transformer_trainer.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/transformer_trainer.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/transformer_trainer_custom.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/transformer_trainer_ml.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/__pycache__/transformer_trainer_ml_.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer_custom.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer_ml.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer_ml_.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/bucket_iterator.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/database_manager.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/display_predictions.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/download_model.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/evaluate_custom.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/evaluation.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/evaluation.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/extract_new_sentences.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/recuperate_datasets.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/sent_corrections.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/sent_corrections.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/sent_transformers.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/sent_transformers.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/sent_unification.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/split_with_valid.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/split_with_valid.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/tokenize_text.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/__pycache__/training.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/database_manager.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/display_predictions.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/download_model.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/evaluate_custom.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/evaluation.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/extract_new_sentences.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/extract_poems.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/extract_sentences.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/__init__.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/__pycache__/__init__.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/__pycache__/__init__.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/__pycache__/end_marks.cpython-310.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/__pycache__/end_marks.cpython-311.pyc +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/end_marks.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/recuperate_datasets.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/recuperate_datasets_trunc.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/send_model.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/sent_corrections.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/sent_transformers.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/sent_unification.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/split_with_valid.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/tokenize_text.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/training.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/trunc_hg_training.py +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate.egg-info/SOURCES.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate.egg-info/dependency_links.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate.egg-info/requires.txt +0 -0
- {wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate.egg-info/top_level.txt +0 -0
|
@@ -2,7 +2,7 @@ from setuptools import setup
|
|
|
2
2
|
|
|
3
3
|
setup(
|
|
4
4
|
name="wolof_translate",
|
|
5
|
-
version="0.0.2",
|
|
5
|
+
version="0.0.4",
|
|
6
6
|
author="Oumar Kane",
|
|
7
7
|
author_email="oumar.kane@univ-thies.sn",
|
|
8
8
|
description="Contain function and classes to process corpora for making translation between wolof text and other languages.",
|
|
@@ -0,0 +1,243 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
from typing import Optional, List, Iterator, Union
|
|
3
|
+
from torch.utils.data import Sampler
|
|
4
|
+
from math import ceil
|
|
5
|
+
from tqdm import tqdm
|
|
6
|
+
import time
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class SequenceLengthBatchSampler(Sampler[List[int]]):
    """Batch sampler that groups sequences of similar length to reduce padding.

    Sequence lengths are bucketed using ``boundaries`` and each bucket is
    batched with its own batch size, so buckets of long sequences can use
    smaller batches than buckets of short ones.
    """

    def __init__(
        self,
        dataset,
        boundaries: List[int],
        batch_sizes: List[int],
        input_key: Optional[Union[int, str]] = None,
        label_key: Optional[Union[int, str]] = None,
        drop_unique: bool = True,
    ):
        """
        Args:
            dataset: Dataset to sample from.
            boundaries: Sorted list of length boundaries used to bucket sequences.
            batch_sizes: Batch size per bucket (length must be len(boundaries) + 1).
            input_key: Key or index to access the input sequence in a dataset item.
            label_key: Key or index to access the label sequence in a dataset item.
            drop_unique: Whether to drop leftover batches holding a single element.

        Raises:
            ValueError: If ``batch_sizes`` does not have exactly one more entry
                than ``boundaries``, or if sequence lengths cannot be accessed.
        """
        self.dataset = dataset
        self.boundaries = boundaries
        self.batch_sizes = batch_sizes
        self.drop_unique = drop_unique

        # Validate with an explicit exception instead of `assert`: asserts are
        # stripped when Python runs with -O, which would silently skip this check.
        if len(batch_sizes) != len(boundaries) + 1:
            raise ValueError(
                f"batch_sizes length ({len(batch_sizes)}) must be one more than "
                f"boundaries length ({len(boundaries)})"
            )

        start_time = time.time()
        tqdm.write("Computing sequence lengths...")

        self.lengths = np.array([
            self._get_length(data, input_key, label_key)
            for data in tqdm(self.dataset, desc="Lengths", unit="seq")
        ])

        tqdm.write(f"Sequence lengths computed in {time.time() - start_time:.2f} seconds.")

        start_time = time.time()
        tqdm.write("Assigning buckets...")

        # Assign 0-based bucket ids; `right=True` places a length equal to a
        # boundary into the lower bucket.
        self.bucket_ids = np.digitize(self.lengths, bins=self.boundaries, right=True)

        # One array of dataset indices per bucket.
        self.buckets = [np.where(self.bucket_ids == i)[0] for i in range(len(boundaries) + 1)]

        tqdm.write(f"Buckets assigned in {time.time() - start_time:.2f} seconds.")

        start_time = time.time()
        tqdm.write("Preparing batches...")

        self.batches = []
        for bucket, batch_size in zip(self.buckets, self.batch_sizes):
            # Shuffle within the bucket so batch composition is random.
            bucket = bucket.copy()
            np.random.shuffle(bucket)

            n_full_batches = len(bucket) // batch_size
            leftover = len(bucket) % batch_size

            for i in range(n_full_batches):
                batch = bucket[i * batch_size : (i + 1) * batch_size].tolist()
                self.batches.append(batch)

            # Keep the partial leftover batch unless it is a single element
            # and `drop_unique` asks us to drop those.
            if leftover > 0 and (leftover != 1 or not self.drop_unique):
                batch = bucket[-leftover:].tolist()
                self.batches.append(batch)

        self.length = len(self.batches)
        tqdm.write(f"Batches prepared in {time.time() - start_time:.2f} seconds.")

    def _get_length(self, data, input_key, label_key) -> int:
        """Return the max length of the input and label sequences of one item.

        Supports dict-like or tuple/list-like dataset items. When either key is
        None, assumes a tuple/list item with the input at index 0 and the label
        at index 2.
        """
        try:
            if input_key is None or label_key is None:
                # Assume tuple/list with input at 0, label at 2
                input_seq = data[0]
                label_seq = data[2]
            else:
                input_seq = data[input_key]
                label_seq = data[label_key]
            return max(len(input_seq), len(label_seq))
        except Exception as e:
            # Chain the original exception so the root cause stays visible.
            raise ValueError(
                f"Error accessing lengths with input_key={input_key}, label_key={label_key}: {e}"
            ) from e

    def __iter__(self) -> Iterator[List[int]]:
        # Shuffle batch ORDER globally each epoch; batch composition is fixed
        # at construction time.
        np.random.shuffle(self.batches)
        for batch in self.batches:
            yield batch

    def __len__(self) -> int:
        return self.length
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
# class SequenceLengthBatchSampler(Sampler[List[int]]):
|
|
113
|
+
# def __init__(
|
|
114
|
+
# self,
|
|
115
|
+
# dataset,
|
|
116
|
+
# boundaries: List[int],
|
|
117
|
+
# batch_sizes: List[int],
|
|
118
|
+
# input_key: Optional[int] = None,
|
|
119
|
+
# label_key: Optional[int] = None,
|
|
120
|
+
# drop_unique: bool = True,
|
|
121
|
+
# ):
|
|
122
|
+
# self.dataset = dataset
|
|
123
|
+
# self.boundaries = boundaries
|
|
124
|
+
# self.batch_sizes = batch_sizes
|
|
125
|
+
# self.drop_unique = drop_unique
|
|
126
|
+
# self.data_info = {}
|
|
127
|
+
|
|
128
|
+
# # Extract lengths
|
|
129
|
+
# for i in range(len(dataset)):
|
|
130
|
+
# data = dataset[i]
|
|
131
|
+
# if input_key is None or label_key is None:
|
|
132
|
+
# length = max(len(data[0]), len(data[2]))
|
|
133
|
+
# else:
|
|
134
|
+
# length = max(len(data[input_key]), len(data[label_key]))
|
|
135
|
+
# self.data_info[i] = {"index": i, "length": length}
|
|
136
|
+
|
|
137
|
+
# self.calculate_length()
|
|
138
|
+
|
|
139
|
+
# def calculate_length(self):
|
|
140
|
+
# self.batches = []
|
|
141
|
+
# sorted_indices = sorted(self.data_info.keys(), key=lambda i: self.data_info[i]["length"])
|
|
142
|
+
|
|
143
|
+
# prev_boundary = 0
|
|
144
|
+
# for boundary in self.boundaries:
|
|
145
|
+
# batch = [i for i in sorted_indices if prev_boundary < self.data_info[i]["length"] <= boundary]
|
|
146
|
+
# self.batches.append(batch)
|
|
147
|
+
# sorted_indices = [i for i in sorted_indices if i not in batch]
|
|
148
|
+
# prev_boundary = boundary
|
|
149
|
+
|
|
150
|
+
# # Remaining sequences > last boundary
|
|
151
|
+
# self.batches.append(sorted_indices)
|
|
152
|
+
|
|
153
|
+
# total_batches = 0
|
|
154
|
+
# for batch, batch_size in zip(self.batches, self.batch_sizes):
|
|
155
|
+
# n_full_batches = len(batch) // batch_size
|
|
156
|
+
# leftover = len(batch) % batch_size
|
|
157
|
+
# total_batches += n_full_batches
|
|
158
|
+
# if leftover > 0 and (leftover != 1 or not self.drop_unique):
|
|
159
|
+
# total_batches += 1
|
|
160
|
+
# self.length = total_batches
|
|
161
|
+
|
|
162
|
+
# def __iter__(self) -> Iterator[List[int]]:
|
|
163
|
+
# for batch_indices, batch_size in zip(self.batches, self.batch_sizes):
|
|
164
|
+
# num_batches = len(batch_indices) // batch_size
|
|
165
|
+
|
|
166
|
+
# for i in range(num_batches):
|
|
167
|
+
# current_bucket = batch_indices[i * batch_size: (i + 1) * batch_size]
|
|
168
|
+
# np.random.shuffle(current_bucket)
|
|
169
|
+
# yield [self.data_info[idx]["index"] for idx in current_bucket]
|
|
170
|
+
|
|
171
|
+
# remaining = len(batch_indices) % batch_size
|
|
172
|
+
# if remaining > 0 and (remaining != 1 or not self.drop_unique):
|
|
173
|
+
# current_bucket = batch_indices[-remaining:]
|
|
174
|
+
# np.random.shuffle(current_bucket)
|
|
175
|
+
# yield [self.data_info[idx]["index"] for idx in current_bucket]
|
|
176
|
+
|
|
177
|
+
# def __len__(self) -> int:
|
|
178
|
+
# return self.length
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
class BucketSampler(Sampler):
    """Sampler yielding batches of indices grouped by sequence length.

    Items are ordered by ``sort_key`` (by default the max of the input and
    label lengths) and that ordering is sliced into consecutive fixed-size
    batches, so each batch holds sequences of similar length. Batch order is
    reshuffled on every iteration when ``batch_size`` is greater than one.
    """

    def __init__(self, dataset, batch_size, sort_key=lambda x, index_1, index_2: max(len(x[index_1]), len(x[index_2])), input_key: Union[str, int] = 0, label_key: Union[str, int] = 1):
        self.dataset = dataset
        self.batch_size = batch_size
        self.sort_key = sort_key
        self.index_1 = input_key
        self.index_2 = label_key
        # Rank every item by its sort key, then cut the ranking into
        # consecutive slices of `batch_size` indices.
        keys = [self.sort_key(self.dataset[idx], self.index_1, self.index_2) for idx in range(len(self.dataset))]
        order = np.argsort(keys)
        self.batches = [order[start:start + self.batch_size] for start in range(0, len(order), self.batch_size)]

    def __iter__(self):
        # A batch size of 1 keeps the deterministic length ordering.
        if self.batch_size > 1:
            np.random.shuffle(self.batches)
        for indices in self.batches:
            yield indices.tolist()

    def __len__(self):
        return ceil(len(self.dataset) / self.batch_size)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def collate_fn(batch):
    """Collate (input, input_mask, target, target_mask) items into padded tensors.

    Each of the four groups is right-padded (zero fill from ``pad_sequence``)
    to the longest sequence of that group in the batch.

    Returns:
        Tuple (padded_inputs, padded_input_masks, padded_targets,
        padded_target_masks).
    """
    from torch.nn.utils.rnn import pad_sequence

    # Transpose the batch: one group per tuple position of the dataset items.
    input_seqs, input_masks, target_seqs, target_masks = zip(*batch)

    def _pad(group):
        # Pad every sequence of the group to the group's max length.
        return pad_sequence(group, batch_first=True)

    return _pad(input_seqs), _pad(input_masks), _pad(target_seqs), _pad(target_masks)
|
|
220
|
+
|
|
221
|
+
def collate_fn_trunc(batch, max_len, eos_token_id, pad_token_id):
    """Collate like :func:`collate_fn` but truncate every column to ``max_len``.

    After truncation, the last kept position of the input and target sequences
    is forced to ``eos_token_id`` whenever it is neither EOS nor padding, so a
    truncated sequence still ends with an end-of-sequence token.

    Args:
        batch: Iterable of (input, input_mask, target, target_mask) tuples.
        max_len: Maximum sequence length kept after padding.
        eos_token_id: Id of the end-of-sequence token.
        pad_token_id: Id of the padding token.

    Returns:
        Tuple (padded_inputs, padded_input_masks, padded_targets,
        padded_target_masks), each truncated to ``max_len`` columns.
    """
    from torch.nn.utils.rnn import pad_sequence

    def _pad_trunc(group):
        # Pad to the group's max length, then keep only the first max_len steps.
        return pad_sequence(group, batch_first=True)[:, :max_len]

    def _force_eos(seqs):
        # In-place: overwrite the final kept position with EOS unless it
        # already holds EOS or padding.
        tail = seqs[:, -1:]
        tail[(tail != eos_token_id) & (tail != pad_token_id)] = eos_token_id

    input_seqs, input_masks, target_seqs, target_masks = zip(*batch)

    padded_inputs = _pad_trunc(input_seqs)
    padded_targets = _pad_trunc(target_seqs)
    _force_eos(padded_inputs)
    _force_eos(padded_targets)

    return padded_inputs, _pad_trunc(input_masks), padded_targets, _pad_trunc(target_masks)
|
|
@@ -1,138 +0,0 @@
|
|
|
1
|
-
import torch
|
|
2
|
-
import numpy as np
|
|
3
|
-
from typing import Optional, List, Iterator
|
|
4
|
-
from torch.utils.data import Sampler
|
|
5
|
-
from math import ceil
|
|
6
|
-
|
|
7
|
-
class SequenceLengthBatchSampler(Sampler[List[int]]):
|
|
8
|
-
def __init__(
|
|
9
|
-
self,
|
|
10
|
-
dataset,
|
|
11
|
-
boundaries: List[int],
|
|
12
|
-
batch_sizes: List[int],
|
|
13
|
-
input_key: Optional[int] = None,
|
|
14
|
-
label_key: Optional[int] = None,
|
|
15
|
-
drop_unique: bool = True,
|
|
16
|
-
):
|
|
17
|
-
self.dataset = dataset
|
|
18
|
-
self.boundaries = boundaries
|
|
19
|
-
self.batch_sizes = batch_sizes
|
|
20
|
-
self.drop_unique = drop_unique
|
|
21
|
-
self.data_info = {}
|
|
22
|
-
|
|
23
|
-
# Extract lengths
|
|
24
|
-
for i in range(len(dataset)):
|
|
25
|
-
data = dataset[i]
|
|
26
|
-
if input_key is None or label_key is None:
|
|
27
|
-
length = max(len(data[0]), len(data[2]))
|
|
28
|
-
else:
|
|
29
|
-
length = max(len(data[input_key]), len(data[label_key]))
|
|
30
|
-
self.data_info[i] = {"index": i, "length": length}
|
|
31
|
-
|
|
32
|
-
self.calculate_length()
|
|
33
|
-
|
|
34
|
-
def calculate_length(self):
|
|
35
|
-
self.batches = []
|
|
36
|
-
sorted_indices = sorted(self.data_info.keys(), key=lambda i: self.data_info[i]["length"])
|
|
37
|
-
|
|
38
|
-
prev_boundary = 0
|
|
39
|
-
for boundary in self.boundaries:
|
|
40
|
-
batch = [i for i in sorted_indices if prev_boundary < self.data_info[i]["length"] <= boundary]
|
|
41
|
-
self.batches.append(batch)
|
|
42
|
-
sorted_indices = [i for i in sorted_indices if i not in batch]
|
|
43
|
-
prev_boundary = boundary
|
|
44
|
-
|
|
45
|
-
# Remaining sequences > last boundary
|
|
46
|
-
self.batches.append(sorted_indices)
|
|
47
|
-
|
|
48
|
-
total_batches = 0
|
|
49
|
-
for batch, batch_size in zip(self.batches, self.batch_sizes):
|
|
50
|
-
n_full_batches = len(batch) // batch_size
|
|
51
|
-
leftover = len(batch) % batch_size
|
|
52
|
-
total_batches += n_full_batches
|
|
53
|
-
if leftover > 0 and (leftover != 1 or not self.drop_unique):
|
|
54
|
-
total_batches += 1
|
|
55
|
-
self.length = total_batches
|
|
56
|
-
|
|
57
|
-
def __iter__(self) -> Iterator[List[int]]:
|
|
58
|
-
for batch_indices, batch_size in zip(self.batches, self.batch_sizes):
|
|
59
|
-
num_batches = len(batch_indices) // batch_size
|
|
60
|
-
|
|
61
|
-
for i in range(num_batches):
|
|
62
|
-
current_bucket = batch_indices[i * batch_size: (i + 1) * batch_size]
|
|
63
|
-
np.random.shuffle(current_bucket)
|
|
64
|
-
yield [self.data_info[idx]["index"] for idx in current_bucket]
|
|
65
|
-
|
|
66
|
-
remaining = len(batch_indices) % batch_size
|
|
67
|
-
if remaining > 0 and (remaining != 1 or not self.drop_unique):
|
|
68
|
-
current_bucket = batch_indices[-remaining:]
|
|
69
|
-
np.random.shuffle(current_bucket)
|
|
70
|
-
yield [self.data_info[idx]["index"] for idx in current_bucket]
|
|
71
|
-
|
|
72
|
-
def __len__(self) -> int:
|
|
73
|
-
return self.length
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
class BucketSampler(Sampler):
|
|
78
|
-
def __init__(self, dataset, batch_size, sort_key=lambda x, index_1, index_2: max(len(x[index_1]), len(x[index_2])), input_key: Union[str, int] = 0, label_key: Union[str, int] = 1):
|
|
79
|
-
self.dataset = dataset
|
|
80
|
-
self.batch_size = batch_size
|
|
81
|
-
self.sort_key = sort_key
|
|
82
|
-
self.index_1 = input_key
|
|
83
|
-
self.index_2 = label_key
|
|
84
|
-
indices = np.argsort([self.sort_key(self.dataset[i], self.index_1, self.index_2) for i in range(len(self.dataset))])
|
|
85
|
-
self.batches = [indices[i:i + self.batch_size] for i in range(0, len(indices), self.batch_size)]
|
|
86
|
-
|
|
87
|
-
def __iter__(self):
|
|
88
|
-
if self.batch_size > 1:
|
|
89
|
-
np.random.shuffle(self.batches)
|
|
90
|
-
for batch in self.batches:
|
|
91
|
-
yield batch.tolist()
|
|
92
|
-
|
|
93
|
-
def __len__(self):
|
|
94
|
-
return ceil(len(self.dataset) / self.batch_size)
|
|
95
|
-
|
|
96
|
-
|
|
97
|
-
def collate_fn(batch):
|
|
98
|
-
from torch.nn.utils.rnn import pad_sequence
|
|
99
|
-
# Separate the input sequences, target sequences, and attention masks
|
|
100
|
-
input_seqs, input_masks, target_seqs, target_masks = zip(*batch)
|
|
101
|
-
|
|
102
|
-
# Pad the input sequences to have the same length
|
|
103
|
-
padded_input_seqs = pad_sequence(input_seqs, batch_first=True)
|
|
104
|
-
|
|
105
|
-
# Pad the target sequences to have the same length
|
|
106
|
-
padded_target_seqs = pad_sequence(target_seqs, batch_first=True)
|
|
107
|
-
|
|
108
|
-
# Pad the input masks to have the same length
|
|
109
|
-
padded_input_masks = pad_sequence(input_masks, batch_first=True)
|
|
110
|
-
|
|
111
|
-
# Pad the labels masks to have the same length
|
|
112
|
-
padded_target_masks = pad_sequence(target_masks, batch_first=True)
|
|
113
|
-
|
|
114
|
-
return padded_input_seqs, padded_input_masks, padded_target_seqs, padded_target_masks
|
|
115
|
-
|
|
116
|
-
def collate_fn_trunc(batch, max_len, eos_token_id, pad_token_id):
    """Collate examples into padded tensors truncated to ``max_len`` positions.

    Like ``collate_fn`` but every field is clipped to at most ``max_len``
    columns. If truncation cut a sequence mid-way, its final position holds
    a real token; that position is overwritten with ``eos_token_id`` so the
    sequence still terminates properly. Positions already holding EOS or
    padding are left untouched.

    Args:
        batch: iterable of (input_seq, input_mask, target_seq, target_mask).
        max_len: maximum sequence length kept after padding.
        eos_token_id: id written over a truncated final token.
        pad_token_id: id recognized as padding (never overwritten).

    Returns:
        Tuple of (inputs, input_masks, targets, target_masks).
    """
    from torch.nn.utils.rnn import pad_sequence

    input_seqs, input_masks, target_seqs, target_masks = zip(*batch)

    # Pad then clip the token sequences to at most `max_len` positions.
    inputs = pad_sequence(input_seqs, batch_first=True)[:, :max_len]
    targets = pad_sequence(target_seqs, batch_first=True)[:, :max_len]

    # Force EOS onto any final position that is neither EOS nor padding
    # (i.e. a token left dangling by the truncation above).
    for seqs in (inputs, targets):
        tail = seqs[:, -1:]
        tail[(tail != eos_token_id) & (tail != pad_token_id)] = eos_token_id

    # Pad and clip the attention masks the same way.
    padded_input_masks = pad_sequence(input_masks, batch_first=True)[:, :max_len]
    padded_target_masks = pad_sequence(target_masks, batch_first=True)[:, :max_len]

    return inputs, padded_input_masks, targets, padded_target_masks
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__pycache__/__init__.cpython-310.pyc
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/__pycache__/__init__.cpython-311.pyc
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/french/tfidfaug_w2idf.txt
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/french/tfidfaug_w2tfidf.txt
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/main_2.py
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/optimization.py
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/transformers/position.py
RENAMED
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/wolof/tfidfaug_w2idf.txt
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/models/wolof/tfidfaug_w2tfidf.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/adverse_tokenizer.json
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/t5_tokenizers.zip
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/tokenizer_v1.json
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/tokenizers/tokenizer_v3_2.json
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer.py
RENAMED
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer_ml.py
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/trainers/transformer_trainer_ml_.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/display_predictions.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/extract_new_sentences.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/__init__.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/improvements/end_marks.py
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/recuperate_datasets.py
RENAMED
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate/utils/recuperate_datasets_trunc.py
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
{wolof_translate-0.0.2 → wolof_translate-0.0.4}/wolof_translate.egg-info/dependency_links.txt
RENAMED
|
File without changes
|
|
File without changes
|
|
File without changes
|