deeplotx 0.4.6.tar.gz → 0.4.7.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {deeplotx-0.4.6 → deeplotx-0.4.7}/PKG-INFO +1 -1
  2. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/encoder/long_text_encoder.py +12 -8
  3. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx.egg-info/PKG-INFO +1 -1
  4. {deeplotx-0.4.6 → deeplotx-0.4.7}/pyproject.toml +1 -1
  5. {deeplotx-0.4.6 → deeplotx-0.4.7}/LICENSE +0 -0
  6. {deeplotx-0.4.6 → deeplotx-0.4.7}/README.md +0 -0
  7. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/__init__.py +0 -0
  8. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/encoder/__init__.py +0 -0
  9. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/encoder/bert_encoder.py +0 -0
  10. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/encoder/longformer_encoder.py +0 -0
  11. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/__init__.py +0 -0
  12. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/auto_regression.py +0 -0
  13. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/base_neural_network.py +0 -0
  14. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/linear_regression.py +0 -0
  15. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/logistic_regression.py +0 -0
  16. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/recursive_sequential.py +0 -0
  17. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/nn/softmax_regression.py +0 -0
  18. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/similarity/__init__.py +0 -0
  19. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/similarity/distribution.py +0 -0
  20. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/similarity/set.py +0 -0
  21. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/similarity/vector.py +0 -0
  22. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/trainer/__init__.py +0 -0
  23. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/trainer/base_trainer.py +0 -0
  24. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/trainer/text_binary_classification_trainer.py +0 -0
  25. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/util/__init__.py +0 -0
  26. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/util/hash.py +0 -0
  27. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/util/read_file.py +0 -0
  28. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx.egg-info/SOURCES.txt +0 -0
  29. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx.egg-info/dependency_links.txt +0 -0
  30. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx.egg-info/requires.txt +0 -0
  31. {deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx.egg-info/top_level.txt +0 -0
  32. {deeplotx-0.4.6 → deeplotx-0.4.7}/setup.cfg +0 -0
{deeplotx-0.4.6 → deeplotx-0.4.7}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: deeplotx
- Version: 0.4.6
+ Version: 0.4.7
  Summary: Easy-2-use long text NLP toolkit.
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
{deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx/encoder/long_text_encoder.py
@@ -24,13 +24,21 @@ class LongTextEncoder(BertEncoder):
          return input_tup[0], super().forward(input_tup[1], attention_mask=input_tup[2])

      @override
-     def encode(self, text: str, use_cache: bool = True) -> torch.Tensor:
+     def encode(self, text: str, flatten: bool = True, use_cache: bool = True) -> torch.Tensor:
+         def postprocess(tensors: list[torch.Tensor], _flatten: bool) -> torch.Tensor:
+             if not _flatten:
+                 return torch.stack(tensors, dim=0).squeeze()
+             _fin_emb_tensor = torch.tensor([], dtype=torch.float32)
+             for _emb in fin_embedding:
+                 _fin_emb_tensor = torch.cat((_fin_emb_tensor.detach().clone(), _emb.detach().clone()), dim=-1)
+             return _fin_emb_tensor.squeeze()
+
          _text_to_show = text.replace("\n", str())
          logger.debug(f'Embedding \"{_text_to_show if len(_text_to_show) < 128 else _text_to_show[:128] + "..."}\".')
          # read cache
          _text_hash = md5(text)
          if _text_hash in self._cache.keys():
-             return self._cache[_text_hash]
+             return postprocess(self._cache[_text_hash], flatten)
          _text_to_input_ids = self.tokenizer.encode(text.strip())[:self._max_length]
          _text_to_input_ids_att_mask = []
          # padding
@@ -53,11 +61,7 @@ class LongTextEncoder(BertEncoder):
              embeddings = list(executor.map(self.__chunk_embedding, chunks))
          embeddings.sort(key=lambda x: x[0])
          fin_embedding = [x[1] for x in embeddings]
-         fin_emb_tensor = torch.tensor([], dtype=torch.float32)
-         for emb in fin_embedding:
-             fin_emb_tensor = torch.cat((fin_emb_tensor.detach().clone(), emb.detach().clone()), dim=-1)
-         fin_emb_tensor = fin_emb_tensor.squeeze()
          # write cache
          if use_cache:
-             self._cache[_text_hash] = fin_emb_tensor
-         return fin_emb_tensor
+             self._cache[_text_hash] = fin_embedding
+         return postprocess(fin_embedding, flatten)
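For context, the hunk above adds a flatten argument to LongTextEncoder.encode: with flatten=True (the default) the per-chunk embeddings are concatenated along the last dimension, reproducing the 0.4.6 return value, while flatten=False stacks them into one row per chunk. The following standalone sketch only illustrates that postprocess logic; the tensor shapes (four chunks of size 768) are assumptions for illustration, not values taken from the package.

# Standalone sketch of the postprocess step introduced in 0.4.7 (shapes are assumed).
import torch

chunk_embeddings = [torch.randn(768) for _ in range(4)]  # hypothetical per-chunk embeddings

def postprocess(tensors: list[torch.Tensor], flatten: bool) -> torch.Tensor:
    if not flatten:
        # one row per chunk: shape (num_chunks, hidden_size)
        return torch.stack(tensors, dim=0).squeeze()
    # flatten=True mirrors the old behaviour: concatenate chunks along the last dim
    out = torch.tensor([], dtype=torch.float32)
    for emb in tensors:
        out = torch.cat((out, emb.detach().clone()), dim=-1)
    return out.squeeze()

print(postprocess(chunk_embeddings, flatten=True).shape)   # torch.Size([3072])
print(postprocess(chunk_embeddings, flatten=False).shape)  # torch.Size([4, 768])

Note also that the cache now stores the list of per-chunk embeddings rather than the pre-flattened tensor, so a cached text can be returned in either shape on later calls.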
{deeplotx-0.4.6 → deeplotx-0.4.7}/deeplotx.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: deeplotx
- Version: 0.4.6
+ Version: 0.4.7
  Summary: Easy-2-use long text NLP toolkit.
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
{deeplotx-0.4.6 → deeplotx-0.4.7}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "deeplotx"
- version = "0.4.6"
+ version = "0.4.7"
  description = "Easy-2-use long text NLP toolkit."
  readme = "README.md"
  requires-python = ">=3.10"