hamtaa-texttools 1.0.4__tar.gz → 1.0.6__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of hamtaa-texttools might be problematic.
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/LICENSE +20 -20
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/MANIFEST.in +2 -2
- {hamtaa_texttools-1.0.4/hamtaa_texttools.egg-info → hamtaa_texttools-1.0.6}/PKG-INFO +192 -141
- hamtaa_texttools-1.0.6/README.md +158 -0
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6/hamtaa_texttools.egg-info}/PKG-INFO +192 -141
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/hamtaa_texttools.egg-info/SOURCES.txt +6 -5
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/hamtaa_texttools.egg-info/dependency_links.txt +0 -0
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/hamtaa_texttools.egg-info/requires.txt +0 -0
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/hamtaa_texttools.egg-info/top_level.txt +0 -0
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/pyproject.toml +32 -32
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/setup.cfg +4 -4
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/__init__.py +9 -9
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/batch/__init__.py +4 -4
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/batch/batch_manager.py +229 -240
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/batch/batch_runner.py +263 -212
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/formatters/base_formatter.py +33 -33
- {hamtaa_texttools-1.0.4/texttools/formatters/user_merge_formatter → hamtaa_texttools-1.0.6/texttools/formatters}/user_merge_formatter.py +30 -30
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/prompts/README.md +35 -31
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/prompts/categorizer.yaml +28 -31
- hamtaa_texttools-1.0.4/texttools/prompts/question_detector.yaml → hamtaa_texttools-1.0.6/texttools/prompts/is_question.yaml +13 -14
- hamtaa_texttools-1.0.6/texttools/prompts/keyword_extractor.yaml +18 -0
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/prompts/ner_extractor.yaml +20 -21
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/prompts/question_merger.yaml +45 -48
- hamtaa_texttools-1.0.6/texttools/prompts/rewriter.yaml +111 -0
- hamtaa_texttools-1.0.6/texttools/prompts/run_custom.yaml +7 -0
- hamtaa_texttools-1.0.4/texttools/prompts/subject_question_generator.yaml → hamtaa_texttools-1.0.6/texttools/prompts/subject_to_question.yaml +22 -26
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/prompts/summarizer.yaml +13 -11
- hamtaa_texttools-1.0.4/texttools/prompts/question_generator.yaml → hamtaa_texttools-1.0.6/texttools/prompts/text_to_question.yaml +19 -22
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/prompts/translator.yaml +14 -14
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/tools/__init__.py +4 -4
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/tools/async_the_tool.py +277 -263
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/tools/internals/async_operator.py +308 -288
- {hamtaa_texttools-1.0.4 → hamtaa_texttools-1.0.6}/texttools/tools/internals/operator.py +295 -306
- hamtaa_texttools-1.0.6/texttools/tools/internals/output_models.py +52 -0
- hamtaa_texttools-1.0.6/texttools/tools/internals/prompt_loader.py +66 -0
- hamtaa_texttools-1.0.6/texttools/tools/the_tool.py +501 -0
- hamtaa_texttools-1.0.4/README.md +0 -107
- hamtaa_texttools-1.0.4/texttools/prompts/keyword_extractor.yaml +0 -14
- hamtaa_texttools-1.0.4/texttools/prompts/question_rewriter.yaml +0 -46
- hamtaa_texttools-1.0.4/texttools/tools/internals/output_models.py +0 -62
- hamtaa_texttools-1.0.4/texttools/tools/internals/prompt_loader.py +0 -82
- hamtaa_texttools-1.0.4/texttools/tools/the_tool.py +0 -400
LICENSE
@@ -1,21 +1,21 @@
-MIT License
-
-Copyright (c) 2025 Hamtaa
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+MIT License
+
+Copyright (c) 2025 Hamtaa
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.
MANIFEST.in
@@ -1,2 +1,2 @@
-graft texttools/prompts
-global-exclude *.pyc
+graft texttools/prompts
+global-exclude *.pyc
PKG-INFO
@@ -1,141 +1,192 @@
-Metadata-Version: 2.4
-Name: hamtaa-texttools
-Version: 1.0.4
-Summary: TextTools is a high-level NLP toolkit built on top of modern LLMs.
-Author-email: Tohidi <the.mohammad.tohidi@gmail.com>, Montazer <montazerh82@gmail.com>, Givechi <mohamad.m.givechi@gmail.com>, MoosaviNejad <erfanmoosavi84@gmail.com>
-License: MIT License
-
-Copyright (c) 2025 Hamtaa
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-Keywords: nlp,llm,text-processing,openai
-Requires-Python: >=3.8
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: openai==1.97.1
-Requires-Dist: PyYAML>=6.0
-Dynamic: license-file
-
-# TextTools
-
-## 📌 Overview
-
-**TextTools** is a high-level **NLP toolkit** built on top of modern **LLMs**.
-
-It provides
[… remainder of the 1.0.4 long description not recoverable from this view …]
+Metadata-Version: 2.4
+Name: hamtaa-texttools
+Version: 1.0.6
+Summary: TextTools is a high-level NLP toolkit built on top of modern LLMs.
+Author-email: Tohidi <the.mohammad.tohidi@gmail.com>, Montazer <montazerh82@gmail.com>, Givechi <mohamad.m.givechi@gmail.com>, MoosaviNejad <erfanmoosavi84@gmail.com>
+License: MIT License
+
+Copyright (c) 2025 Hamtaa
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+Keywords: nlp,llm,text-processing,openai
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: openai==1.97.1
+Requires-Dist: PyYAML>=6.0
+Dynamic: license-file
+
+# TextTools
+
+## 📌 Overview
+
+**TextTools** is a high-level **NLP toolkit** built on top of modern **LLMs**.
+
+It provides both **sync (`TheTool`)** and **async (`AsyncTheTool`)** APIs for maximum flexibility.
+
+It provides ready-to-use utilities for **translation, question detection, keyword extraction, categorization, NER extraction, and more** — designed to help you integrate AI-powered text processing into your applications with minimal effort.
+
+**Thread Safety:** All methods in AsyncTheTool are thread-safe, allowing concurrent usage across multiple threads without conflicts.
+
+---
+
+## ✨ Features
+
+TextTools provides a rich collection of high-level NLP utilities built on top of LLMs.
+Each tool is designed to work out-of-the-box with structured outputs (JSON / Pydantic).
+
+- **`categorize()`** - Classifies text into Islamic studies categories
+- **`is_question()`** - Binary detection of whether input is a question
+- **`extract_keywords()`** - Extracts keywords from text
+- **`extract_entities()`** - Named Entity Recognition (NER) system
+- **`summarize()`** - Text summarization
+- **`text_to_question()`** - Generates questions from text
+- **`merge_questions()`** - Merges multiple questions with different modes
+- **`rewrite()`** - Rewrites text with different wording/meaning
+- **`subject_to_question()`** - Generates questions about a specific subject
+- **`translate()`** - Text translation between languages
+- **`run_custom()`** - Allows users to define a custom tool with an arbitrary BaseModel
+
+---
+
+## ⚙️ `with_analysis`, `logprobs`, `output_lang`, and `user_prompt` parameters
+
+TextTools provides several optional flags to customize LLM behavior:
+
+- **`with_analysis=True`** → Adds a reasoning step before generating the final output. Useful for debugging, improving prompts, or understanding model behavior.
+  Note: This doubles token usage per call because it triggers an additional LLM request.
+
+- **`logprobs=True`** → Returns token-level probabilities for the generated output. You can also specify `top_logprobs=<N>` to get the top N alternative tokens and their probabilities.
+
+- **`output_lang="en"`** → Forces the model to respond in a specific language. The model will ignore other instructions about language and respond strictly in the requested language.
+
+- **`user_prompt="..."`** → Allows you to inject a custom instruction or prompt into the model alongside the main template. This gives you fine-grained control over how the model interprets or modifies the input text.
+
+All these flags can be used individually or together to tailor the behavior of any tool in **TextTools**.
+
+---
+
+## 🚀 Installation
+
+Install the latest release via PyPI:
+
+```bash
+pip install -U hamtaa-texttools
+```
+
+---
+
+## Sync vs Async
+| Tool | Style | Use case |
+|--------------|---------|---------------------------------------------|
+| `TheTool` | Sync | Simple scripts, sequential workflows |
+| `AsyncTheTool` | Async | High-throughput apps, APIs, concurrent tasks |
+
+---
+
+## ⚡ Quick Start (Sync)
+
+```python
+from openai import OpenAI
+from pydantic import BaseModel
+from texttools import TheTool
+
+# Create your OpenAI client
+client = OpenAI(base_url="your_url", api_key="your_api_key")
+
+# Specify the model
+model = "gpt-4o-mini"
+
+# Create an instance of TheTool
+# Note: You can give parameters to TheTool so that you don't need to give them to each tool
+the_tool = TheTool(client=client, model=model, with_analysis=True, output_lang="English")
+
+# Example: Question Detection
+detection = the_tool.is_question("Is this project open source?", logprobs=True, top_logprobs=2)
+print(detection["result"])
+print(detection["logprobs"])
+# Output: True
+
+# Example: Translation
+# Note: You can override with_analysis if it was set on TheTool
+print(the_tool.translate("سلام، حالت چطوره؟", target_language="English", with_analysis=False)["result"])
+# Output: "Hi! How are you?"
+
+# Example: Custom Tool
+# Note: Output model should only contain a result key
+# Everything else will be ignored
+class Custom(BaseModel):
+    result: list[list[dict[str, int]]]
+
+custom_prompt = "Something"
+custom_result = the_tool.run_custom(custom_prompt, Custom)
+print(custom_result)
+```
+
+---
+
+## ⚡ Quick Start (Async)
+
+```python
+import asyncio
+from openai import AsyncOpenAI
+from texttools import AsyncTheTool
+
+async def main():
+    # Create your async OpenAI client
+    async_client = AsyncOpenAI(base_url="your_url", api_key="your_api_key")
+
+    # Specify the model
+    model = "gpt-4o-mini"
+
+    # Create an instance of AsyncTheTool
+    the_tool = AsyncTheTool(client=async_client, model=model)
+
+    # Example: Async Translation
+    result = await the_tool.translate("سلام، حالت چطوره؟", target_language="English")
+    print(result["result"])
+    # Output: "Hi! How are you?"
+
+asyncio.run(main())
+```
+
+---
+
+## 📚 Use Cases
+
+Use **TextTools** when you need to:
+
+- 🔍 **Classify** large datasets quickly without model training
+- 🌍 **Translate** and process multilingual corpora with ease
+- 🧩 **Integrate** LLMs into production pipelines (structured outputs)
+- 📊 **Analyze** large text collections using embeddings and categorization
+- 👍 **Automate** common text-processing tasks without reinventing the wheel
+
+---
+
+## 🤝 Contributing
+
+Contributions are welcome!
+Feel free to **open issues, suggest new features, or submit pull requests**.
+
+---
+
+## License
+
+This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
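
The optional flags documented in the description above can be combined on a single call. The sketch below is illustrative and is not part of the packaged files: it assumes the `TheTool` interface from the Quick Start, that `summarize()` accepts the same per-call flags shown for `is_question()` and `translate()`, and the endpoint, API key, model, and input text are placeholders.

```python
from openai import OpenAI
from texttools import TheTool

# Placeholders — substitute a real endpoint, key, and model.
client = OpenAI(base_url="your_url", api_key="your_api_key")
the_tool = TheTool(client=client, model="gpt-4o-mini")

# Combine the optional flags on one call (per the README, they can be used together):
#   with_analysis=True    -> extra reasoning pass (an additional LLM request, roughly doubling tokens)
#   logprobs/top_logprobs -> token-level probabilities for the generated output
#   output_lang           -> force the response language
#   user_prompt           -> inject an extra instruction alongside the built-in template
summary = the_tool.summarize(
    "TextTools is a high-level NLP toolkit built on top of modern LLMs.",
    with_analysis=True,
    logprobs=True,
    top_logprobs=3,
    output_lang="en",
    user_prompt="Keep the summary under 20 words.",
)

print(summary["result"])    # the summarized text
print(summary["logprobs"])  # token-level probabilities
```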
README.md
@@ -0,0 +1,158 @@
+# TextTools
+
+## 📌 Overview
+
+**TextTools** is a high-level **NLP toolkit** built on top of modern **LLMs**.
+
+It provides both **sync (`TheTool`)** and **async (`AsyncTheTool`)** APIs for maximum flexibility.
+
+It provides ready-to-use utilities for **translation, question detection, keyword extraction, categorization, NER extraction, and more** — designed to help you integrate AI-powered text processing into your applications with minimal effort.
+
+**Thread Safety:** All methods in AsyncTheTool are thread-safe, allowing concurrent usage across multiple threads without conflicts.
+
+---
+
+## ✨ Features
+
+TextTools provides a rich collection of high-level NLP utilities built on top of LLMs.
+Each tool is designed to work out-of-the-box with structured outputs (JSON / Pydantic).
+
+- **`categorize()`** - Classifies text into Islamic studies categories
+- **`is_question()`** - Binary detection of whether input is a question
+- **`extract_keywords()`** - Extracts keywords from text
+- **`extract_entities()`** - Named Entity Recognition (NER) system
+- **`summarize()`** - Text summarization
+- **`text_to_question()`** - Generates questions from text
+- **`merge_questions()`** - Merges multiple questions with different modes
+- **`rewrite()`** - Rewrites text with different wording/meaning
+- **`subject_to_question()`** - Generates questions about a specific subject
+- **`translate()`** - Text translation between languages
+- **`run_custom()`** - Allows users to define a custom tool with an arbitrary BaseModel
+
+---
+
+## ⚙️ `with_analysis`, `logprobs`, `output_lang`, and `user_prompt` parameters
+
+TextTools provides several optional flags to customize LLM behavior:
+
+- **`with_analysis=True`** → Adds a reasoning step before generating the final output. Useful for debugging, improving prompts, or understanding model behavior.
+  Note: This doubles token usage per call because it triggers an additional LLM request.
+
+- **`logprobs=True`** → Returns token-level probabilities for the generated output. You can also specify `top_logprobs=<N>` to get the top N alternative tokens and their probabilities.
+
+- **`output_lang="en"`** → Forces the model to respond in a specific language. The model will ignore other instructions about language and respond strictly in the requested language.
+
+- **`user_prompt="..."`** → Allows you to inject a custom instruction or prompt into the model alongside the main template. This gives you fine-grained control over how the model interprets or modifies the input text.
+
+All these flags can be used individually or together to tailor the behavior of any tool in **TextTools**.
+
+---
+
+## 🚀 Installation
+
+Install the latest release via PyPI:
+
+```bash
+pip install -U hamtaa-texttools
+```
+
+---
+
+## Sync vs Async
+| Tool | Style | Use case |
+|--------------|---------|---------------------------------------------|
+| `TheTool` | Sync | Simple scripts, sequential workflows |
+| `AsyncTheTool` | Async | High-throughput apps, APIs, concurrent tasks |
+
+---
+
+## ⚡ Quick Start (Sync)
+
+```python
+from openai import OpenAI
+from pydantic import BaseModel
+from texttools import TheTool
+
+# Create your OpenAI client
+client = OpenAI(base_url="your_url", api_key="your_api_key")
+
+# Specify the model
+model = "gpt-4o-mini"
+
+# Create an instance of TheTool
+# Note: You can give parameters to TheTool so that you don't need to give them to each tool
+the_tool = TheTool(client=client, model=model, with_analysis=True, output_lang="English")
+
+# Example: Question Detection
+detection = the_tool.is_question("Is this project open source?", logprobs=True, top_logprobs=2)
+print(detection["result"])
+print(detection["logprobs"])
+# Output: True
+
+# Example: Translation
+# Note: You can override with_analysis if it was set on TheTool
+print(the_tool.translate("سلام، حالت چطوره؟", target_language="English", with_analysis=False)["result"])
+# Output: "Hi! How are you?"
+
+# Example: Custom Tool
+# Note: Output model should only contain a result key
+# Everything else will be ignored
+class Custom(BaseModel):
+    result: list[list[dict[str, int]]]
+
+custom_prompt = "Something"
+custom_result = the_tool.run_custom(custom_prompt, Custom)
+print(custom_result)
+```
+
+---
+
+## ⚡ Quick Start (Async)
+
+```python
+import asyncio
+from openai import AsyncOpenAI
+from texttools import AsyncTheTool
+
+async def main():
+    # Create your async OpenAI client
+    async_client = AsyncOpenAI(base_url="your_url", api_key="your_api_key")
+
+    # Specify the model
+    model = "gpt-4o-mini"
+
+    # Create an instance of AsyncTheTool
+    the_tool = AsyncTheTool(client=async_client, model=model)
+
+    # Example: Async Translation
+    result = await the_tool.translate("سلام، حالت چطوره؟", target_language="English")
+    print(result["result"])
+    # Output: "Hi! How are you?"
+
+asyncio.run(main())
+```
+
+---
+
+## 📚 Use Cases
+
+Use **TextTools** when you need to:
+
+- 🔍 **Classify** large datasets quickly without model training
+- 🌍 **Translate** and process multilingual corpora with ease
+- 🧩 **Integrate** LLMs into production pipelines (structured outputs)
+- 📊 **Analyze** large text collections using embeddings and categorization
+- 👍 **Automate** common text-processing tasks without reinventing the wheel
+
+---
+
+## 🤝 Contributing
+
+Contributions are welcome!
+Feel free to **open issues, suggest new features, or submit pull requests**.
+
+---
+
+## License
+
+This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
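
Because `AsyncTheTool` is aimed at high-throughput, concurrent workloads (see the Sync vs Async table in the README above), independent calls can be fanned out with `asyncio.gather`. The following is a hedged sketch rather than packaged documentation: it assumes the `AsyncTheTool` interface from the async Quick Start, the call shapes for `is_question()` and `extract_keywords()` are inferred from the feature list, and the endpoint, key, model, and inputs are placeholders.

```python
import asyncio

from openai import AsyncOpenAI
from texttools import AsyncTheTool


async def main():
    # Placeholders — substitute a real endpoint, key, and model.
    client = AsyncOpenAI(base_url="your_url", api_key="your_api_key")
    the_tool = AsyncTheTool(client=client, model="gpt-4o-mini")

    # Three independent tool calls running concurrently on one event loop.
    detection, keywords, translation = await asyncio.gather(
        the_tool.is_question("Is this project open source?"),
        the_tool.extract_keywords("TextTools provides sync and async APIs built on LLMs."),
        the_tool.translate("سلام، حالت چطوره؟", target_language="English"),
    )

    print(detection["result"])    # e.g. True
    print(keywords["result"])     # e.g. a list of keywords
    print(translation["result"])  # e.g. "Hi! How are you?"


asyncio.run(main())
```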