hamtaa-texttools 1.0.8__py3-none-any.whl → 1.0.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of hamtaa-texttools might be problematic.

--- hamtaa_texttools-1.0.8.dist-info/METADATA
+++ hamtaa_texttools-1.0.9.dist-info/METADATA
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: hamtaa-texttools
-Version: 1.0.8
-Summary: TextTools is a high-level NLP toolkit built on top of modern LLMs.
+Version: 1.0.9
+Summary: A high-level NLP toolkit built on top of modern LLMs.
 Author-email: Tohidi <the.mohammad.tohidi@gmail.com>, Montazer <montazerh82@gmail.com>, Givechi <mohamad.m.givechi@gmail.com>, MoosaviNejad <erfanmoosavi84@gmail.com>
 License: MIT License
 

--- hamtaa_texttools-1.0.8.dist-info/RECORD
+++ hamtaa_texttools-1.0.9.dist-info/RECORD
@@ -1,4 +1,4 @@
-hamtaa_texttools-1.0.8.dist-info/licenses/LICENSE,sha256=Hb2YOBKy2MJQLnyLrX37B4ZVuac8eaIcE71SvVIMOLg,1082
+hamtaa_texttools-1.0.9.dist-info/licenses/LICENSE,sha256=Hb2YOBKy2MJQLnyLrX37B4ZVuac8eaIcE71SvVIMOLg,1082
 texttools/__init__.py,sha256=v3tQCH_Cjj47fCpuhK6sKSVAqEjNkc-cZbY4OJa4IZw,202
 texttools/batch/__init__.py,sha256=q50JsQsmQGp_8RW0KNasYeYWVV0R4FUNZ-ujXwEJemY,143
 texttools/batch/batch_manager.py,sha256=leVIFkR-3HpDkQi_MK3TgFNnHYsCN-wbS4mTWoPmO3c,8828
@@ -19,12 +19,12 @@ texttools/tools/__init__.py,sha256=hG1I28Q7BJ1Dbs95x6QMKXdsAlC5Eh_tqC-EbAibwiU,1
 texttools/tools/async_the_tool.py,sha256=9VY6ym7SvQqlokt0mwAwnytmu1CUIehDmnAvx74Z78o,12480
 texttools/tools/the_tool.py,sha256=JtQolr6i_6xogtuhX6IhyudFyAsitQy5NfSxUJAS-iA,12246
 texttools/tools/internals/async_operator.py,sha256=GPTHsjldQlmNYXkGCyypc9ENIxjeHUIHWWJ_ltUyNfs,6006
-texttools/tools/internals/base_operator.py,sha256=5tZy6QEGWJI9fnVIYwOecqv8teOFBIpWFOxQav11VRM,2941
+texttools/tools/internals/base_operator.py,sha256=OWJe8ybA6qmmoc7ysYeB8ccHPneDlEtmFGH1jLWQCeY,3135
 texttools/tools/internals/formatters.py,sha256=tACNLP6PeoqaRpNudVxBaHA25zyWqWYPZQuYysIu88g,941
 texttools/tools/internals/operator.py,sha256=gl_vzcLueYi6cbxjIHhOCBtyhM6UXwJJ9Mstj8bA-Mg,5896
 texttools/tools/internals/output_models.py,sha256=Rf2x-UuGlmQHrvYIqnD11YuzMH_mPuir62HoMJQa2uk,1528
 texttools/tools/internals/prompt_loader.py,sha256=rbitJD3e8vAdcooP1Yx6KnSI83g28ho-FegfZ1cJ4j4,1979
-hamtaa_texttools-1.0.8.dist-info/METADATA,sha256=9yLqF7h34BREgmpbVxrEH2sXfglyJSNiddewvoeSy1c,7148
-hamtaa_texttools-1.0.8.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-hamtaa_texttools-1.0.8.dist-info/top_level.txt,sha256=5Mh0jIxxZ5rOXHGJ6Mp-JPKviywwN0MYuH0xk5bEWqE,10
-hamtaa_texttools-1.0.8.dist-info/RECORD,,
+hamtaa_texttools-1.0.9.dist-info/METADATA,sha256=mehOxCOj3f4MHTMwNEEHlSVMYVXgUuKZBysR0jcG2OU,7135
+hamtaa_texttools-1.0.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+hamtaa_texttools-1.0.9.dist-info/top_level.txt,sha256=5Mh0jIxxZ5rOXHGJ6Mp-JPKviywwN0MYuH0xk5bEWqE,10
+hamtaa_texttools-1.0.9.dist-info/RECORD,,

--- texttools/tools/internals/base_operator.py
+++ texttools/tools/internals/base_operator.py
@@ -2,6 +2,7 @@ from typing import TypeVar, Type, Any
 import json
 import re
 import math
+import logging
 
 from pydantic import BaseModel
 from openai import OpenAI, AsyncOpenAI
@@ -9,6 +10,10 @@ from openai import OpenAI, AsyncOpenAI
 # Base Model type for output models
 T = TypeVar("T", bound=BaseModel)
 
+# Configure logger
+logger = logging.getLogger("base_operator")
+logger.setLevel(logging.INFO)
+
 
 class BaseOperator:
     def __init__(self, client: OpenAI | AsyncOpenAI, model: str):
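
Note on the hunk above: the added logger.setLevel(logging.INFO) only sets a threshold on the package's "base_operator" logger; records still need a handler somewhere to become visible. A minimal sketch of how a consuming application might surface them (the handler configuration below is an assumption about typical usage, not part of the package):

import logging

# Assumption: the application owns logging setup. basicConfig attaches a
# stream handler to the root logger, so records from the package's
# "base_operator" logger propagate up and appear on stderr.
logging.basicConfig(level=logging.INFO, format="%(name)s %(levelname)s: %(message)s")

# Records emitted by texttools' base_operator module now reach the console.
logging.getLogger("base_operator").info("logging configured")
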
@@ -61,7 +66,8 @@ class BaseOperator:
 
         for choice in completion.choices:
             if not getattr(choice, "logprobs", None):
-                continue
+                logger.error("logprobs is not available in the chosen model.")
+                return []
 
             for logprob_item in choice.logprobs.content:
                 if ignore_pattern.match(logprob_item.token):
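
Taken together, the base_operator.py hunks change how a missing logprobs payload is handled: 1.0.8 silently skipped the affected choice with continue, while 1.0.9 logs an error and returns an empty list. The sketch below is a hypothetical reconstruction of the surrounding method for illustration only; the function name, signature, and result shape are assumptions, and only the lines shown in the diff are confirmed.

import logging
import re

logger = logging.getLogger("base_operator")

def extract_token_logprobs(completion, ignore_pattern: re.Pattern) -> list[tuple[str, float]]:
    """Illustrative sketch: collect (token, logprob) pairs from a chat completion."""
    results: list[tuple[str, float]] = []
    for choice in completion.choices:
        if not getattr(choice, "logprobs", None):
            # 1.0.8 behavior: `continue` (skip this choice silently).
            # 1.0.9 behavior: report the problem and give up entirely.
            logger.error("logprobs is not available in the chosen model.")
            return []
        for logprob_item in choice.logprobs.content:
            if ignore_pattern.match(logprob_item.token):
                continue  # confirmed in the diff: tokens matching the pattern are skipped
            # Assumption: remaining tokens and their logprobs are collected.
            results.append((logprob_item.token, logprob_item.logprob))
    return results

One consequence of the new behavior worth noting: a single choice without logprobs now discards anything already collected from earlier choices, a stricter failure mode than the previous skip-and-continue.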