rxnn 0.1.72__tar.gz → 0.1.74__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31):
  1. {rxnn-0.1.72 → rxnn-0.1.74}/PKG-INFO +1 -1
  2. {rxnn-0.1.72 → rxnn-0.1.74}/pyproject.toml +1 -1
  3. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/dataset.py +8 -4
  4. {rxnn-0.1.72 → rxnn-0.1.74}/LICENSE +0 -0
  5. {rxnn-0.1.72 → rxnn-0.1.74}/README.md +0 -0
  6. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/__init__.py +0 -0
  7. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/experimental/__init__.py +0 -0
  8. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/experimental/attention.py +0 -0
  9. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/experimental/models.py +0 -0
  10. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/experimental/moe.py +0 -0
  11. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/memory/__init__.py +0 -0
  12. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/memory/norm.py +0 -0
  13. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/memory/stm.py +0 -0
  14. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/rxt/__init__.py +0 -0
  15. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/rxt/models.py +0 -0
  16. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/__init__.py +0 -0
  17. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/base.py +0 -0
  18. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/bml.py +0 -0
  19. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/callbacks.py +0 -0
  20. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/scheduler.py +0 -0
  21. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/training/tokenizer.py +0 -0
  22. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/__init__.py +0 -0
  23. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/attention.py +0 -0
  24. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/ff.py +0 -0
  25. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/layers.py +0 -0
  26. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/mask.py +0 -0
  27. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/models.py +0 -0
  28. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/moe.py +0 -0
  29. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/positional.py +0 -0
  30. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/transformers/sampler.py +0 -0
  31. {rxnn-0.1.72 → rxnn-0.1.74}/src/rxnn/utils.py +0 -0
PKG-INFO:
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: rxnn
-Version: 0.1.72
+Version: 0.1.74
 Summary: RxNN: Reactive Neural Networks Platform
 License: Apache-2.0
 Keywords: deep-learning,ai,machine-learning
pyproject.toml:
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "rxnn"
-version = "0.1.72"
+version = "0.1.74"
 description = "RxNN: Reactive Neural Networks Platform"
 
 license = "Apache-2.0"
src/rxnn/training/dataset.py:
@@ -32,11 +32,13 @@ class BaseDataset(Dataset):
     def __len__(self):
         return len(self.texts if not self.is_pre_tokenized else self.inputs)
 
-    def get_tokenized_text(self, idx: int):
+    def get_tokenized_text(self, idx: int, txt: str = None):
         if self.is_pre_tokenized:
             return self.inputs[idx]
         else:
-            if isinstance(self.texts, list):
+            if txt:
+                text = txt
+            elif isinstance(self.texts, list):
                 text = self.texts[idx]
             else:
                 text = self.texts[idx][self.hf_field]
src/rxnn/training/dataset.py:
@@ -90,11 +92,13 @@ class BaseDataset(Dataset):
         """
         if not self.is_pre_tokenized:
             num_texts = len(self.texts)
-            txts = self.texts if isinstance(self.texts, list) else self.texts.to_list()
+            is_txt_list = isinstance(self.texts, list)
+            txts = self.texts if is_txt_list else self.texts.to_list()
             del self.texts
             self.texts = None
             for index in range(num_texts):
-                self.inputs.append(txts.pop() if isinstance(txts, list) else txts.pop()[self.hf_field])
+                item = txts.pop() if is_txt_list else txts.pop()[self.hf_field]
+                self.inputs.append(self.get_tokenized_text(index, txt=item))
                 if verbose and index % log_interval == 0:
                     print(f'Processed {index + 1}/{num_texts}')
             self.is_pre_tokenized = True
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes