deeplotx 0.4.6__py3-none-any.whl → 0.4.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deeplotx/encoder/long_text_encoder.py +13 -9
- {deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/METADATA +1 -1
- {deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/RECORD +6 -6
- {deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/WHEEL +0 -0
- {deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/licenses/LICENSE +0 -0
- {deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/top_level.txt +0 -0
@@ -24,13 +24,21 @@ class LongTextEncoder(BertEncoder):
         return input_tup[0], super().forward(input_tup[1], attention_mask=input_tup[2])
 
     @override
-    def encode(self, text: str, use_cache: bool = True) -> torch.Tensor:
+    def encode(self, text: str, flatten: bool = True, use_cache: bool = True) -> torch.Tensor:
+        def postprocess(tensors: list[torch.Tensor], _flatten: bool) -> torch.Tensor:
+            if not _flatten:
+                return torch.stack(tensors, dim=0).squeeze()
+            _fin_emb_tensor = torch.tensor([], dtype=tensors[0].dtype)
+            for _emb in tensors:
+                _fin_emb_tensor = torch.cat((_fin_emb_tensor.detach().clone(), _emb.detach().clone()), dim=-1)
+            return _fin_emb_tensor.squeeze()
+
         _text_to_show = text.replace("\n", str())
         logger.debug(f'Embedding \"{_text_to_show if len(_text_to_show) < 128 else _text_to_show[:128] + "..."}\".')
         # read cache
         _text_hash = md5(text)
         if _text_hash in self._cache.keys():
-            return self._cache[_text_hash]
+            return postprocess(self._cache[_text_hash], flatten)
         _text_to_input_ids = self.tokenizer.encode(text.strip())[:self._max_length]
         _text_to_input_ids_att_mask = []
         # padding
@@ -47,17 +55,13 @@ class LongTextEncoder(BertEncoder):
         for i in range(num_chunks):
             _tmp_left = max(i * self._chunk_size - self._overlapping, 0)
             _tmp_right = (i + 1) * self._chunk_size + self._overlapping
-            chunks.append((i, torch.tensor([_text_to_input_ids[_tmp_left: _tmp_right]], dtype=torch.
+            chunks.append((i, torch.tensor([_text_to_input_ids[_tmp_left: _tmp_right]], dtype=torch.int),
                            torch.tensor([_text_to_input_ids_att_mask[_tmp_left: _tmp_right]], dtype=torch.int)))
         with ThreadPoolExecutor(max_workers=min(num_chunks + 1, 3)) as executor:
             embeddings = list(executor.map(self.__chunk_embedding, chunks))
         embeddings.sort(key=lambda x: x[0])
         fin_embedding = [x[1] for x in embeddings]
-        fin_emb_tensor = torch.tensor([], dtype=torch.float32)
-        for emb in fin_embedding:
-            fin_emb_tensor = torch.cat((fin_emb_tensor.detach().clone(), emb.detach().clone()), dim=-1)
-        fin_emb_tensor = fin_emb_tensor.squeeze()
         # write cache
         if use_cache:
-            self._cache[_text_hash] =
-        return
+            self._cache[_text_hash] = fin_embedding
+        return postprocess(fin_embedding, flatten)
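The net effect of this change is a new `flatten` flag on `LongTextEncoder.encode`: chunk embeddings are now post-processed on the way out, either concatenated into one long vector (`flatten=True`, the previous behaviour) or stacked into a per-chunk matrix (`flatten=False`). The cache now stores the raw per-chunk embeddings rather than the flattened tensor, so a cached entry can serve either mode. A minimal usage sketch follows; the import path and constructor arguments are illustrative assumptions, not taken from this diff.

```python
# Hedged sketch of the 0.4.8 behaviour shown above.
# Assumptions: LongTextEncoder is importable from the package root and
# the constructor accepts max_length / chunk_size / overlapping keywords.
from deeplotx import LongTextEncoder

encoder = LongTextEncoder(max_length=2048, chunk_size=512, overlapping=64)  # hypothetical args
text = "a long document ... " * 200

flat = encoder.encode(text, flatten=True)        # chunks concatenated along dim=-1 -> one 1-D vector
per_chunk = encoder.encode(text, flatten=False)  # chunks stacked along dim=0 -> roughly (num_chunks, hidden_size)

print(flat.shape, per_chunk.shape)
```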
{deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/RECORD
@@ -1,7 +1,7 @@
 deeplotx/__init__.py,sha256=wMN_AI14V-0BPbQghYpvd2y7eUGfhr7jKTTuur-5Upg,1002
 deeplotx/encoder/__init__.py,sha256=EM-xrTsHoGaiiFpj-iFAxilMHXC_sQKWYrcq1qCnI3U,138
 deeplotx/encoder/bert_encoder.py,sha256=6QY2pOvayWNz4w749JAGndvQ-jeKJgy3BalQl2JCkgk,1994
-deeplotx/encoder/long_text_encoder.py,sha256=
+deeplotx/encoder/long_text_encoder.py,sha256=FP0ACiOaOCjK2buRSWqBs-peg3IWQKuIdP2S00LNvSs,3271
 deeplotx/encoder/longformer_encoder.py,sha256=4avKYsLN6TTpPoky8BQ0nIhQm8lVxMvvzqkrdKCWj3Q,1433
 deeplotx/nn/__init__.py,sha256=oQ-vYXyuaGelfCOs2im_gZXAiiBlCCVXh1uw9yjvRMs,253
 deeplotx/nn/auto_regression.py,sha256=o82C9TREZbhGdj2knSVGTXhjJne0LGEqc7BllByJJWE,449
@@ -20,8 +20,8 @@ deeplotx/trainer/text_binary_classification_trainer.py,sha256=5O-5dwVMCj5EDX9gjJ
 deeplotx/util/__init__.py,sha256=JxqAK_WOOHcYVSTHBT1-WuBwWrPEVDTV3titeVWvNUM,74
 deeplotx/util/hash.py,sha256=wwsC6kOQvbpuvwKsNQOARd78_wePmW9i3oaUuXRUnpc,352
 deeplotx/util/read_file.py,sha256=ptzouvEQeeW8KU5BrWNJlXw-vFXVrpS9SkAUxsu6A8A,612
-deeplotx-0.4.
-deeplotx-0.4.
-deeplotx-0.4.
-deeplotx-0.4.
-deeplotx-0.4.
+deeplotx-0.4.8.dist-info/licenses/LICENSE,sha256=IwGE9guuL-ryRPEKi6wFPI_zOhg7zDZbTYuHbSt_SAk,35823
+deeplotx-0.4.8.dist-info/METADATA,sha256=KMyt-My-d5261MGBfC1_HsyqbJ_KVEvZ--kZNFq8B2A,1656
+deeplotx-0.4.8.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+deeplotx-0.4.8.dist-info/top_level.txt,sha256=hKg4pVDXZ-WWxkRfJFczRIll1Sv7VyfKCmzHLXbuh1U,9
+deeplotx-0.4.8.dist-info/RECORD,,
{deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/WHEEL: File without changes
{deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/licenses/LICENSE: File without changes
{deeplotx-0.4.6.dist-info → deeplotx-0.4.8.dist-info}/top_level.txt: File without changes