rxnn 0.1.73__tar.gz → 0.1.75__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. {rxnn-0.1.73 → rxnn-0.1.75}/PKG-INFO +1 -1
  2. {rxnn-0.1.73 → rxnn-0.1.75}/pyproject.toml +1 -1
  3. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/dataset.py +17 -9
  4. {rxnn-0.1.73 → rxnn-0.1.75}/LICENSE +0 -0
  5. {rxnn-0.1.73 → rxnn-0.1.75}/README.md +0 -0
  6. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/__init__.py +0 -0
  7. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/experimental/__init__.py +0 -0
  8. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/experimental/attention.py +0 -0
  9. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/experimental/models.py +0 -0
  10. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/experimental/moe.py +0 -0
  11. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/memory/__init__.py +0 -0
  12. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/memory/norm.py +0 -0
  13. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/memory/stm.py +0 -0
  14. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/rxt/__init__.py +0 -0
  15. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/rxt/models.py +0 -0
  16. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/__init__.py +0 -0
  17. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/base.py +0 -0
  18. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/bml.py +0 -0
  19. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/callbacks.py +0 -0
  20. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/scheduler.py +0 -0
  21. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/training/tokenizer.py +0 -0
  22. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/__init__.py +0 -0
  23. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/attention.py +0 -0
  24. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/ff.py +0 -0
  25. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/layers.py +0 -0
  26. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/mask.py +0 -0
  27. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/models.py +0 -0
  28. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/moe.py +0 -0
  29. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/positional.py +0 -0
  30. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/transformers/sampler.py +0 -0
  31. {rxnn-0.1.73 → rxnn-0.1.75}/src/rxnn/utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: rxnn
3
- Version: 0.1.73
3
+ Version: 0.1.75
4
4
  Summary: RxNN: Reactive Neural Networks Platform
5
5
  License: Apache-2.0
6
6
  Keywords: deep-learning,ai,machine-learning
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
4
4
 
5
5
  [tool.poetry]
6
6
  name = "rxnn"
7
- version = "0.1.73"
7
+ version = "0.1.75"
8
8
  description = "RxNN: Reactive Neural Networks Platform"
9
9
 
10
10
  license = "Apache-2.0"
@@ -76,7 +76,7 @@ class BaseDataset(Dataset):
76
76
  self.texts = self.texts[0:split_point] if not from_start else self.texts[split_point:-1]
77
77
  return self.__class__(subset, self.tokenizer, max_seq_len=self.max_seq_len, hf_field=self.hf_field, **kwargs)
78
78
 
79
- def pre_tokenize(self, verbose: bool = False, log_interval: int = 10_000):
79
+ def pre_tokenize(self, verbose: bool = False, log_interval: int = 10_000, map_hf_ds_to_list: bool = True):
80
80
  """
81
81
  Pre-tokenizes all items in the dataset for faster training. Training with a pre-tokenized
82
82
  dataset can be up to 2x faster.
@@ -92,14 +92,22 @@ class BaseDataset(Dataset):
92
92
  """
93
93
  if not self.is_pre_tokenized:
94
94
  num_texts = len(self.texts)
95
- txts = self.texts if isinstance(self.texts, list) else self.texts.to_list()
96
- del self.texts
97
- self.texts = None
98
- for index in range(num_texts):
99
- item = txts.pop() if isinstance(txts, list) else txts.pop()[self.hf_field]
100
- self.inputs.append(self.get_tokenized_text(index, txt=item))
101
- if verbose and index % log_interval == 0:
102
- print(f'Processed {index + 1}/{num_texts}')
95
+ is_txt_list = isinstance(self.texts, list)
96
+ if is_txt_list or map_hf_ds_to_list:
97
+ txts = self.texts if is_txt_list else self.texts.to_list()
98
+ del self.texts
99
+ self.texts = None
100
+ for index in range(num_texts):
101
+ item = txts.pop() if is_txt_list else txts.pop()[self.hf_field]
102
+ self.inputs.append(self.get_tokenized_text(index, txt=item))
103
+ if verbose and index % log_interval == 0:
104
+ print(f'Processed {index + 1}/{num_texts}')
105
+ else:
106
+ for index in range(num_texts):
107
+ self.inputs.append(self.get_tokenized_text(index))
108
+ del self.texts[index]
109
+ if verbose and index % log_interval == 0:
110
+ print(f'Processed {index + 1}/{num_texts}')
103
111
  self.is_pre_tokenized = True
104
112
 
105
113
 
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes