hamtaa-texttools 0.1.55__py3-none-any.whl → 0.1.57__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.

Potentially problematic release.


This version of hamtaa-texttools might be problematic.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: hamtaa-texttools
- Version: 0.1.55
+ Version: 0.1.57
  Summary: A set of high-level NLP tools
  Author: Tohidi, Montazer, Givechi, Mousavinezhad
  Requires-Python: >=3.8
@@ -50,12 +50,12 @@ texttools/tools/summarizer/__init__.py,sha256=phrR7qO20CNhO3hjXQBzhTRVumdVdGSufm
  texttools/tools/summarizer/gemma_summarizer.py,sha256=ikhsBv7AiZD1dT_d12AyjXxojzSW92e2y5WjchI_3bE,4474
  texttools/tools/summarizer/llm_summerizer.py,sha256=-0rUKbSnl1aDeBfJ5DCSbIlwd2k-9qIaCKgoQJa0hWc,3412
  texttools/tools/translator/__init__.py,sha256=KO1m08J2BZwRqBGO9ICB4l4cnH1jfHLHL5HbgYFUWM8,72
- texttools/tools/translator/gemma_translator.py,sha256=KsKbD_hzbOmFt-K0pciZ1IXz66JGm480FdBqWL2mYL0,7272
+ texttools/tools/translator/gemma_translator.py,sha256=4tlaBoFCYWuMTfSnMfGppALZHCmsveTaAdlNxFGExUY,7451
  texttools/utils/flex_processor.py,sha256=C-lMwMjpIM6uAPFxXdgajxcFV1ccngEfJqq6xe5S1J8,3123
  texttools/utils/batch_manager/__init__.py,sha256=3ZkxA395lRD4gNxJ1vp0fNuz_XuBr50GoP51rrwQ0Ks,87
  texttools/utils/batch_manager/batch_manager.py,sha256=jAmKskL3OTYwwsO1mWsWAB3VxMlOF07c2GW1Ev83ZhY,9283
  texttools/utils/batch_manager/batch_runner.py,sha256=DE6TFz3i_jR-ZiUYbgIdLgjqr3aitw-JM_tKnSvzGL0,7424
- hamtaa_texttools-0.1.55.dist-info/METADATA,sha256=-WVDAY_TTcDZwiM8YkCsrA_qy8dlO669LM2oEPtYiA4,1481
- hamtaa_texttools-0.1.55.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- hamtaa_texttools-0.1.55.dist-info/top_level.txt,sha256=5Mh0jIxxZ5rOXHGJ6Mp-JPKviywwN0MYuH0xk5bEWqE,10
- hamtaa_texttools-0.1.55.dist-info/RECORD,,
+ hamtaa_texttools-0.1.57.dist-info/METADATA,sha256=-lkXJeEX1jV8sNmrX5tggb0uDHE-V_9vvuAS9bdGwWU,1481
+ hamtaa_texttools-0.1.57.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ hamtaa_texttools-0.1.57.dist-info/top_level.txt,sha256=5Mh0jIxxZ5rOXHGJ6Mp-JPKviywwN0MYuH0xk5bEWqE,10
+ hamtaa_texttools-0.1.57.dist-info/RECORD,,
@@ -78,7 +78,7 @@ class GemmaTranslator(BaseTranslator):
  }

  # The entire set of instructions is formatted into a single JSON string
- content = json.dumps(prompt_data, indent=2)
+ content = json.dumps(prompt_data, ensure_ascii=False)
  messages = [{"role": "user", "content": content}]

  # Optional additional JSON template for more complex rules
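Note: the recurring change in gemma_translator.py is replacing json.dumps(..., indent=2) with json.dumps(..., ensure_ascii=False). By default json.dumps escapes every non-ASCII character as a \uXXXX sequence, which inflates the prompt and obscures the source text the translator is asked to handle; ensure_ascii=False keeps the characters verbatim, at the cost of no longer pretty-printing. A minimal sketch of the difference, using a hypothetical Persian sample string that is not taken from the package:

    import json

    prompt_data = {"text": "سلام دنیا"}  # hypothetical non-ASCII payload

    # Default: every non-ASCII character is escaped to a \uXXXX sequence
    print(json.dumps(prompt_data, indent=2))
    # {
    #   "text": "\u0633\u0644\u0627\u0645 \u062f\u0646\u06cc\u0627"
    # }

    # ensure_ascii=False keeps the original characters verbatim
    print(json.dumps(prompt_data, ensure_ascii=False))
    # {"text": "سلام دنیا"}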
@@ -107,7 +107,12 @@ class GemmaTranslator(BaseTranslator):
  },
  }

- messages = [{"role": "user", "content": json.dumps(prompt_data, indent=2)}]
+ messages = [
+     {
+         "role": "user",
+         "content": json.dumps(prompt_data, ensure_ascii=False),
+     }
+ ]

  restructured = self.chat_formatter.format(messages=messages)
  completion = self.client.chat.completions.create(
@@ -131,14 +136,19 @@ class GemmaTranslator(BaseTranslator):
  },
  }

- messages = [{"role": "user", "content": json.dumps(prompt_data, indent=2)}]
+ messages = [
+     {
+         "role": "user",
+         "content": json.dumps(prompt_data, ensure_ascii=False),
+     }
+ ]

  restructured = self.chat_formatter.format(messages=messages)

  completion = self.client.chat.completions.parse(
      model=self.model,
      messages=restructured,
-     response_model=PreprocessorOutput,
+     response_format=PreprocessorOutput,
      temperature=self.temperature,
      extra_body={
          "guided_decoding_backend": "auto",
@@ -177,7 +187,7 @@ class GemmaTranslator(BaseTranslator):
  print(f"Reasoning Analysis:\n{reason_summary}")
  print("--- Final JSON Prompt Sent to Model ---")
  # Pretty-print the JSON content from the message
- print(json.dumps(json.loads(messages[0]["content"]), indent=2))
+ print(json.dumps(json.loads(messages[0]["content"]), ensure_ascii=False))
  print("---------------------------")

  completion = self.client.chat.completions.create(