deeplotx 0.4.5__py3-none-any.whl → 0.4.7__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- deeplotx/encoder/bert_encoder.py +3 -1
- deeplotx/encoder/long_text_encoder.py +12 -8
- deeplotx/encoder/longformer_encoder.py +3 -1
- {deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/METADATA +1 -1
- {deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/RECORD +8 -8
- {deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/WHEEL +0 -0
- {deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/licenses/LICENSE +0 -0
- {deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/top_level.txt +0 -0
deeplotx/encoder/bert_encoder.py
CHANGED
```diff
@@ -1,11 +1,13 @@
+import os
 import math
+
 import torch
 from torch import nn
 from transformers import BertTokenizer, BertModel
 
 from deeplotx import __ROOT__
 
-CACHE_PATH =
+CACHE_PATH = os.path.join(__ROOT__, '.cache')
 DEFAULT_BERT = 'bert-base-uncased'
 
 
```
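Both the BERT and Longformer encoders now derive their cache directory from the package root. A minimal standalone sketch of the same pattern, assuming `__ROOT__` resolves to the installed package directory (its actual definition lives in `deeplotx/__init__.py`, which this diff does not show):

```python
import os

# Assumption: __ROOT__ points at the package directory; deeplotx exports it
# from deeplotx/__init__.py, which is not part of this diff.
__ROOT__ = os.path.dirname(os.path.abspath(__file__))

# The 0.4.7 pattern: keep the embedding cache alongside the package code.
CACHE_PATH = os.path.join(__ROOT__, '.cache')
os.makedirs(CACHE_PATH, exist_ok=True)  # hypothetical first-use setup
print(CACHE_PATH)
```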
deeplotx/encoder/long_text_encoder.py
CHANGED

```diff
@@ -24,13 +24,21 @@ class LongTextEncoder(BertEncoder):
         return input_tup[0], super().forward(input_tup[1], attention_mask=input_tup[2])
 
     @override
-    def encode(self, text: str, use_cache: bool = True) -> torch.Tensor:
+    def encode(self, text: str, flatten: bool = True, use_cache: bool = True) -> torch.Tensor:
+        def postprocess(tensors: list[torch.Tensor], _flatten: bool) -> torch.Tensor:
+            if not _flatten:
+                return torch.stack(tensors, dim=0).squeeze()
+            _fin_emb_tensor = torch.tensor([], dtype=torch.float32)
+            for _emb in fin_embedding:
+                _fin_emb_tensor = torch.cat((_fin_emb_tensor.detach().clone(), _emb.detach().clone()), dim=-1)
+            return _fin_emb_tensor.squeeze()
+
         _text_to_show = text.replace("\n", str())
         logger.debug(f'Embedding \"{_text_to_show if len(_text_to_show) < 128 else _text_to_show[:128] + "..."}\".')
         # read cache
         _text_hash = md5(text)
         if _text_hash in self._cache.keys():
-            return self._cache[_text_hash]
+            return postprocess(self._cache[_text_hash], flatten)
         _text_to_input_ids = self.tokenizer.encode(text.strip())[:self._max_length]
         _text_to_input_ids_att_mask = []
         # padding
@@ -53,11 +61,7 @@ class LongTextEncoder(BertEncoder):
             embeddings = list(executor.map(self.__chunk_embedding, chunks))
         embeddings.sort(key=lambda x: x[0])
         fin_embedding = [x[1] for x in embeddings]
-        fin_emb_tensor = torch.tensor([], dtype=torch.float32)
-        for emb in fin_embedding:
-            fin_emb_tensor = torch.cat((fin_emb_tensor.detach().clone(), emb.detach().clone()), dim=-1)
-        fin_emb_tensor = fin_emb_tensor.squeeze()
         # write cache
         if use_cache:
-            self._cache[_text_hash] =
-            return
+            self._cache[_text_hash] = fin_embedding
+        return postprocess(fin_embedding, flatten)
```
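Taken together, the two hunks change `encode` in three ways: it gains a `flatten` flag, the cache now stores the list of per-chunk embeddings instead of a pre-concatenated tensor, and a local `postprocess` helper produces the final shape on every return path. (Note that the helper's loop reads `fin_embedding` from the enclosing scope rather than its `tensors` argument, so on the cache-hit path with `flatten=True` that name is unbound.) A standalone sketch of the two output shapes, using illustrative sizes (3 chunks, 768-dimensional embeddings) that are not taken from the package:

```python
import torch

# Illustrative stand-ins: 3 chunk embeddings from a hypothetical 768-d model.
chunk_embeddings = [torch.randn(1, 768) for _ in range(3)]

# flatten=True: concatenate chunks along the last dim (what the postprocess
# loop builds incrementally), yielding one long 1-D vector.
flat = torch.cat(chunk_embeddings, dim=-1).squeeze()
print(flat.shape)     # torch.Size([2304])

# flatten=False: stack chunks into a (num_chunks, hidden_size) matrix.
stacked = torch.stack(chunk_embeddings, dim=0).squeeze()
print(stacked.shape)  # torch.Size([3, 768])
```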
deeplotx/encoder/longformer_encoder.py
CHANGED

```diff
@@ -1,10 +1,12 @@
+import os
+
 import torch
 from torch import nn
 from transformers import LongformerTokenizer, LongformerModel
 
 from deeplotx import __ROOT__
 
-CACHE_PATH =
+CACHE_PATH = os.path.join(__ROOT__, '.cache')
 DEFAULT_LONGFORMER = 'allenai/longformer-base-4096'
 
 
```
{deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/RECORD
CHANGED

```diff
@@ -1,8 +1,8 @@
 deeplotx/__init__.py,sha256=wMN_AI14V-0BPbQghYpvd2y7eUGfhr7jKTTuur-5Upg,1002
 deeplotx/encoder/__init__.py,sha256=EM-xrTsHoGaiiFpj-iFAxilMHXC_sQKWYrcq1qCnI3U,138
-deeplotx/encoder/bert_encoder.py,sha256=
-deeplotx/encoder/long_text_encoder.py,sha256=
-deeplotx/encoder/longformer_encoder.py,sha256=
+deeplotx/encoder/bert_encoder.py,sha256=6QY2pOvayWNz4w749JAGndvQ-jeKJgy3BalQl2JCkgk,1994
+deeplotx/encoder/long_text_encoder.py,sha256=KEld6LRHuUc2nP_t_KTVfb3F3TYkIieS4jSzkR0tEVs,3275
+deeplotx/encoder/longformer_encoder.py,sha256=4avKYsLN6TTpPoky8BQ0nIhQm8lVxMvvzqkrdKCWj3Q,1433
 deeplotx/nn/__init__.py,sha256=oQ-vYXyuaGelfCOs2im_gZXAiiBlCCVXh1uw9yjvRMs,253
 deeplotx/nn/auto_regression.py,sha256=o82C9TREZbhGdj2knSVGTXhjJne0LGEqc7BllByJJWE,449
 deeplotx/nn/base_neural_network.py,sha256=xWKG4FX6Jzdlrfc1HOW1aO9uh0Af3D-dB5Jl7eCxsAk,1635
@@ -20,8 +20,8 @@ deeplotx/trainer/text_binary_classification_trainer.py,sha256=5O-5dwVMCj5EDX9gjJ
 deeplotx/util/__init__.py,sha256=JxqAK_WOOHcYVSTHBT1-WuBwWrPEVDTV3titeVWvNUM,74
 deeplotx/util/hash.py,sha256=wwsC6kOQvbpuvwKsNQOARd78_wePmW9i3oaUuXRUnpc,352
 deeplotx/util/read_file.py,sha256=ptzouvEQeeW8KU5BrWNJlXw-vFXVrpS9SkAUxsu6A8A,612
-deeplotx-0.4.
-deeplotx-0.4.
-deeplotx-0.4.
-deeplotx-0.4.
-deeplotx-0.4.
+deeplotx-0.4.7.dist-info/licenses/LICENSE,sha256=IwGE9guuL-ryRPEKi6wFPI_zOhg7zDZbTYuHbSt_SAk,35823
+deeplotx-0.4.7.dist-info/METADATA,sha256=oXBTSPd2wYHvbcM7A_-fTiln6CjxVkLQLF9nApv5B7g,1656
+deeplotx-0.4.7.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+deeplotx-0.4.7.dist-info/top_level.txt,sha256=hKg4pVDXZ-WWxkRfJFczRIll1Sv7VyfKCmzHLXbuh1U,9
+deeplotx-0.4.7.dist-info/RECORD,,
```
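Each RECORD row has the form `path,sha256=<digest>,<size>`, where the digest is the file's SHA-256 hash encoded as unpadded urlsafe base64 (per the wheel spec). A sketch for recomputing a row from an extracted wheel, so the new hashes above can be checked; the `record_entry` helper and the assumption that the wheel was unpacked into the current directory are mine:

```python
import base64
import hashlib
from pathlib import Path

def record_entry(path: str) -> str:
    """Recompute a wheel RECORD row (path,sha256=<digest>,<size>) for a file."""
    data = Path(path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b'=')
    return f"{path},sha256={digest.decode('ascii')},{len(data)}"

# Example, assuming the 0.4.7 wheel has been unpacked into the current directory:
print(record_entry('deeplotx/encoder/bert_encoder.py'))
# Expected to match the RECORD line ending in ...,1994 shown above.
```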
{deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/WHEEL
File without changes

{deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/licenses/LICENSE
File without changes

{deeplotx-0.4.5.dist-info → deeplotx-0.4.7.dist-info}/top_level.txt
File without changes