deeplotx-0.4.5-py3-none-any.whl → deeplotx-0.4.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,13 @@
+import os
 import math
+
 import torch
 from torch import nn
 from transformers import BertTokenizer, BertModel
 
 from deeplotx import __ROOT__
 
-CACHE_PATH = f'{__ROOT__}\\.cache'
+CACHE_PATH = os.path.join(__ROOT__, '.cache')
 DEFAULT_BERT = 'bert-base-uncased'
 
 
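The only functional change in this hunk is how the cache directory path is built: the hard-coded Windows-style `f'{__ROOT__}\\.cache'` becomes `os.path.join(__ROOT__, '.cache')`, so the separator is chosen by the operating system. A minimal sketch of the difference, using a hypothetical `__ROOT__` value purely for illustration:

```python
import os

__ROOT__ = '/opt/deeplotx'  # hypothetical install root, for illustration only

# Old form bakes a literal backslash into the path, so on Linux/macOS the cache
# ends up as a file-name suffix ('/opt/deeplotx\.cache') rather than a directory.
old_cache_path = f'{__ROOT__}\\.cache'

# New form lets the OS pick the separator: '/opt/deeplotx/.cache' on POSIX,
# a backslash-separated path on Windows.
new_cache_path = os.path.join(__ROOT__, '.cache')

print(old_cache_path)
print(new_cache_path)
```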
@@ -24,13 +24,21 @@ class LongTextEncoder(BertEncoder):
         return input_tup[0], super().forward(input_tup[1], attention_mask=input_tup[2])
 
     @override
-    def encode(self, text: str, use_cache: bool = True) -> torch.Tensor:
+    def encode(self, text: str, flatten: bool = True, use_cache: bool = True) -> torch.Tensor:
+        def postprocess(tensors: list[torch.Tensor], _flatten: bool) -> torch.Tensor:
+            if not _flatten:
+                return torch.stack(tensors, dim=0).squeeze()
+            _fin_emb_tensor = torch.tensor([], dtype=torch.float32)
+            for _emb in fin_embedding:
+                _fin_emb_tensor = torch.cat((_fin_emb_tensor.detach().clone(), _emb.detach().clone()), dim=-1)
+            return _fin_emb_tensor.squeeze()
+
         _text_to_show = text.replace("\n", str())
         logger.debug(f'Embedding \"{_text_to_show if len(_text_to_show) < 128 else _text_to_show[:128] + "..."}\".')
         # read cache
         _text_hash = md5(text)
         if _text_hash in self._cache.keys():
-            return self._cache[_text_hash]
+            return postprocess(self._cache[_text_hash], flatten)
         _text_to_input_ids = self.tokenizer.encode(text.strip())[:self._max_length]
         _text_to_input_ids_att_mask = []
         # padding
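The new nested `postprocess` helper decides how the per-chunk BERT embeddings are combined: with `flatten=False` they are stacked into a `(num_chunks, hidden_size)` matrix, while `flatten=True` keeps the old behaviour of concatenating them into one long vector. A stand-alone sketch of the two modes with dummy tensors (names and sizes here are illustrative, not from the package):

```python
import torch

# Pretend we have three chunk embeddings of hidden size 4 (real BERT outputs are 768-dim).
chunk_embeddings = [torch.randn(1, 4) for _ in range(3)]

# flatten=False: stack into a (num_chunks, hidden_size) matrix.
stacked = torch.stack(chunk_embeddings, dim=0).squeeze()  # shape: (3, 4)

# flatten=True: concatenate along the last dimension into a single flat vector,
# matching the pre-0.4.7 return value.
flat = torch.cat([t.detach().clone() for t in chunk_embeddings], dim=-1).squeeze()  # shape: (12,)

print(stacked.shape, flat.shape)
```

Note that the flatten branch as committed loops over `fin_embedding` from the enclosing `encode` scope rather than over its own `tensors` parameter, so it appears to rely on `fin_embedding` already being bound; on the cache-hit path above it would be consulted before that assignment happens.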
@@ -53,11 +61,7 @@ class LongTextEncoder(BertEncoder):
             embeddings = list(executor.map(self.__chunk_embedding, chunks))
         embeddings.sort(key=lambda x: x[0])
         fin_embedding = [x[1] for x in embeddings]
-        fin_emb_tensor = torch.tensor([], dtype=torch.float32)
-        for emb in fin_embedding:
-            fin_emb_tensor = torch.cat((fin_emb_tensor.detach().clone(), emb.detach().clone()), dim=-1)
-        fin_emb_tensor = fin_emb_tensor.squeeze()
         # write cache
         if use_cache:
-            self._cache[_text_hash] = fin_emb_tensor
-        return fin_emb_tensor
+            self._cache[_text_hash] = fin_embedding
+        return postprocess(fin_embedding, flatten)
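With this last hunk the per-text cache stores the list of per-chunk tensors instead of a pre-flattened vector, and `postprocess` is applied on every return path, so a single cached entry can serve both `flatten` modes. A hedged sketch of the resulting call pattern (the constructor arguments are assumptions for illustration; the diff only shows that the encoder keeps a `_max_length` and an embedding cache):

```python
from deeplotx.encoder.long_text_encoder import LongTextEncoder

# Constructor arguments are placeholders, not taken from this diff.
encoder = LongTextEncoder(max_length=2048)

text = "some very long document " * 200

flat_vec = encoder.encode(text, flatten=True)    # 1-D tensor, chunk embeddings concatenated end to end
per_chunk = encoder.encode(text, flatten=False)  # 2-D tensor, one row per chunk, served from the same cache entry
```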
@@ -1,10 +1,12 @@
+import os
+
 import torch
 from torch import nn
 from transformers import LongformerTokenizer, LongformerModel
 
 from deeplotx import __ROOT__
 
-CACHE_PATH = f'{__ROOT__}\\.cache'
+CACHE_PATH = os.path.join(__ROOT__, '.cache')
 DEFAULT_LONGFORMER = 'allenai/longformer-base-4096'
 
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: deeplotx
-Version: 0.4.5
+Version: 0.4.7
 Summary: Easy-2-use long text NLP toolkit.
 Requires-Python: >=3.10
 Description-Content-Type: text/markdown
@@ -1,8 +1,8 @@
 deeplotx/__init__.py,sha256=wMN_AI14V-0BPbQghYpvd2y7eUGfhr7jKTTuur-5Upg,1002
 deeplotx/encoder/__init__.py,sha256=EM-xrTsHoGaiiFpj-iFAxilMHXC_sQKWYrcq1qCnI3U,138
-deeplotx/encoder/bert_encoder.py,sha256=A-B7Gj94xv6UhvsFTBH7tnkAdGHRhfUZA2QjSnTKB6c,1970
-deeplotx/encoder/long_text_encoder.py,sha256=V6VxaHW6bMMaZHgU1UZ8n19UfSIV2f2sarWXquiFffQ,3018
-deeplotx/encoder/longformer_encoder.py,sha256=mZpC5TrGHQo98-ydGtVQQ9KRHgCGl1sRoxcQs7r4SSo,1409
+deeplotx/encoder/bert_encoder.py,sha256=6QY2pOvayWNz4w749JAGndvQ-jeKJgy3BalQl2JCkgk,1994
+deeplotx/encoder/long_text_encoder.py,sha256=KEld6LRHuUc2nP_t_KTVfb3F3TYkIieS4jSzkR0tEVs,3275
+deeplotx/encoder/longformer_encoder.py,sha256=4avKYsLN6TTpPoky8BQ0nIhQm8lVxMvvzqkrdKCWj3Q,1433
 deeplotx/nn/__init__.py,sha256=oQ-vYXyuaGelfCOs2im_gZXAiiBlCCVXh1uw9yjvRMs,253
 deeplotx/nn/auto_regression.py,sha256=o82C9TREZbhGdj2knSVGTXhjJne0LGEqc7BllByJJWE,449
 deeplotx/nn/base_neural_network.py,sha256=xWKG4FX6Jzdlrfc1HOW1aO9uh0Af3D-dB5Jl7eCxsAk,1635
@@ -20,8 +20,8 @@ deeplotx/trainer/text_binary_classification_trainer.py,sha256=5O-5dwVMCj5EDX9gjJ
 deeplotx/util/__init__.py,sha256=JxqAK_WOOHcYVSTHBT1-WuBwWrPEVDTV3titeVWvNUM,74
 deeplotx/util/hash.py,sha256=wwsC6kOQvbpuvwKsNQOARd78_wePmW9i3oaUuXRUnpc,352
 deeplotx/util/read_file.py,sha256=ptzouvEQeeW8KU5BrWNJlXw-vFXVrpS9SkAUxsu6A8A,612
-deeplotx-0.4.5.dist-info/licenses/LICENSE,sha256=IwGE9guuL-ryRPEKi6wFPI_zOhg7zDZbTYuHbSt_SAk,35823
-deeplotx-0.4.5.dist-info/METADATA,sha256=DpSM3W-OH0ayM1AOt8OYfbpRHqZLkUpPf5qc0PEROzs,1656
-deeplotx-0.4.5.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
-deeplotx-0.4.5.dist-info/top_level.txt,sha256=hKg4pVDXZ-WWxkRfJFczRIll1Sv7VyfKCmzHLXbuh1U,9
-deeplotx-0.4.5.dist-info/RECORD,,
+deeplotx-0.4.7.dist-info/licenses/LICENSE,sha256=IwGE9guuL-ryRPEKi6wFPI_zOhg7zDZbTYuHbSt_SAk,35823
+deeplotx-0.4.7.dist-info/METADATA,sha256=oXBTSPd2wYHvbcM7A_-fTiln6CjxVkLQLF9nApv5B7g,1656
+deeplotx-0.4.7.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+deeplotx-0.4.7.dist-info/top_level.txt,sha256=hKg4pVDXZ-WWxkRfJFczRIll1Sv7VyfKCmzHLXbuh1U,9
+deeplotx-0.4.7.dist-info/RECORD,,