neogram 9.3.2__py3-none-any.whl → 9.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
neogram/ii.py CHANGED
@@ -4,12 +4,16 @@ from curl_cffi.requests import Session
4
4
 
5
5
  #Блок - Нейросети
6
6
  class OnlySQ:
7
- def get_models(self, modality: str | list = None, can_tools: bool = None, can_stream: bool = None, status: str = None, max_cost: float = None, return_names: bool = False) -> list:
7
+ def __init__(self, key: str):
8
+ self.key = key
9
+
10
+ def get_models(self, modality: str | list = None, can_tools: bool = None, can_think: bool = None, can_stream: bool = None, status: str = None, max_cost: float = None, return_names: bool = False) -> list:
8
11
  """
9
12
  Фильтрует модели по заданным параметрам
10
13
  Args:
11
14
  modality: Модальность ('text', 'image', 'sound') или список модальностей
12
15
  can_tools: Фильтр по поддержке инструментов
16
+ can_think: Фильтр по возможности "думать" (reasoning)
13
17
  can_stream: Фильтр по возможности потоковой передачи
14
18
  status: Статус модели (например, 'work')
15
19
  max_cost: Максимальная стоимость (включительно)
@@ -35,6 +39,10 @@ class OnlySQ:
35
39
  model_tools = model_data.get("can-tools", False)
36
40
  if model_tools != can_tools:
37
41
  matches = False
42
+ if matches and can_think is not None:
43
+ model_can_think = model_data.get("can-think", False)
44
+ if model_can_think != can_think:
45
+ matches = False
38
46
  if matches and can_stream is not None:
39
47
  model_can_stream = model_data.get("can-stream", False)
40
48
  if model_can_stream != can_stream:
@@ -57,28 +65,28 @@ class OnlySQ:
57
65
  print(f"OnlySQ(get_models): {e}")
58
66
  return []
59
67
 
60
- def generate_answer(self, model: str = "gpt-5.2-chat", messages: dict = None, key: str = "openai") -> str:
68
+ def generate_answer(self, model: str = "gpt-5.2-chat", messages: dict = None) -> str:
61
69
  """Генерация ответа с использованием onlysq"""
62
70
  try:
63
71
  if messages is None:
64
72
  raise ValueError("Забыли указать messages")
65
73
  else:
66
74
  payload = {"model": model, "request": {"messages": messages}}
67
- response = requests.post("http://api.onlysq.ru/ai/v2", json=payload, headers={"Authorization": f"Bearer {key}"})
75
+ response = requests.post("http://api.onlysq.ru/ai/v2", json=payload, headers={"Authorization": f"Bearer {self.key}"})
68
76
  response.raise_for_status()
69
77
  return response.json()["choices"][0]["message"]["content"]
70
78
  except Exception as e:
71
79
  print(f"OnlySQ(generate_answer): {e}")
72
80
  return "Error"
73
81
 
74
- def generate_image(self, model: str = "flux", prompt: str = None, ratio: str = "16:9", filename: str = 'image.png', key: str = "openai") -> bool:
82
+ def generate_image(self, model: str = "flux", prompt: str = None, ratio: str = "16:9", filename: str = 'image.png') -> bool:
75
83
  """Генерация фотографии с использованием onlysq"""
76
84
  try:
77
85
  if prompt is None:
78
86
  raise ValueError("Забыли указать prompt")
79
87
  else:
80
88
  payload = {"model": model, "prompt": prompt, "ratio": ratio}
81
- response = requests.post("https://api.onlysq.ru/ai/imagen", json=payload, headers={"Authorization": f"Bearer {key}"})
89
+ response = requests.post("https://api.onlysq.ru/ai/imagen", json=payload, headers={"Authorization": f"Bearer {self.key}"})
82
90
  if response.status_code == 200:
83
91
  img_bytes = base64.b64decode(response.json()["files"][0])
84
92
  with open(filename, 'wb') as f:
@@ -133,60 +141,6 @@ class Deef:
133
141
  except FileNotFoundError:
134
142
  return None
135
143
 
136
- def gen_ai_response(self, model: str = "Qwen3 235B", messages: list = None) -> dict[str]:
137
- """
138
- Отправляет запрос к API и возвращает словарь с полной информацией
139
- Args:
140
- model: Модель нейросети (Qwen3 235B или GPT OSS 120B)
141
- messages: Список сообщений в формате [{"role": "...", "content": "..."}]
142
- Returns:
143
- dict[str]: Словарь с ключами:
144
- - reasoning: Размышления модели
145
- - answer: Финальный ответ модели
146
- - status: Статус выполнения
147
- - cluster_info: Информация о кластере (если есть)
148
- """
149
- try:
150
- if messages is None:
151
- raise ValueError("Забыли указать messages")
152
- else:
153
- model_to_cluster = {"Qwen3 235B": "hybrid", "GPT OSS 120B": "nvidia"}
154
- cluster_mode = model_to_cluster.get(model)
155
- if cluster_mode is None:
156
- raise ValueError(f"Неизвестная модель: {model}, Доступные модели: {list(model_to_cluster.keys())}")
157
- data = {"model": model, "clusterMode": cluster_mode, "messages": messages, "enableThinking": True}
158
- url = "https://chat.gradient.network/api/generate"
159
- response = requests.post(url, json=data, stream=True)
160
- result = {"reasoning": "", "answer": "", "status": "unknown", "cluster_info": None}
161
- for line in response.iter_lines():
162
- if line:
163
- try:
164
- json_obj = json.loads(line.decode('utf-8'))
165
- message_type = json_obj.get("type")
166
- if message_type == "reply":
167
- data_content = json_obj.get("data", {})
168
- if "reasoningContent" in data_content:
169
- result["reasoning"] += data_content.get("reasoningContent", "")
170
- if "content" in data_content:
171
- result["answer"] += data_content.get("content", "")
172
- elif message_type == "jobInfo":
173
- status = json_obj.get("data", {}).get("status")
174
- result["status"] = status
175
- if status == "completed":
176
- break
177
- elif message_type == "clusterInfo":
178
- result["cluster_info"] = json_obj.get("data", {})
179
- except json.JSONDecodeError as e:
180
- print(f"Ошибка декодирования JSON: {e}")
181
- continue
182
- except Exception as e:
183
- print(f"Неожиданная ошибка: {e}")
184
- continue
185
- return result
186
- except Exception as e:
187
- print(f"Deef(gen_ai_response): {e}")
188
- return {"reasoning": "Error", "answer": "Error", "status": "unknown", "cluster_info": None}
189
-
190
144
  def perplexity_ask(self, model: str, query: str) -> dict:
191
145
  """
192
146
  Вывод: словари с ключом 'type' и данными:
@@ -204,9 +158,7 @@ class Deef:
204
158
  "grok4", "gemini2flash", "pplx_pro", "pplx_pro_upgraded", "pplx_alpha",
205
159
  "pplx_beta", "comet_max_assistant", "o3_research", "o3pro_research", "claude40sonnet_research",
206
160
  "claude40sonnetthinking_research", "claude40opus_research", "claude40opusthinking_research", "o3_labs", "o3pro_labs",
207
- "claude40sonnetthinking_labs", "claude40opusthinking_labs", "o4mini", "o1", "gpt4o",
208
- "gpt45", "gpt4", "o3mini", "claude35haiku", "llama_x_large",
209
- "mistral", "claude3opus", "gemini", "pplx_reasoning", "r1"]
161
+ "claude40sonnetthinking_labs", "claude40opusthinking_labs", "o4mini", "claude3opus", "pplx_reasoning", "r1"]
210
162
  BASE_URL = "https://www.perplexity.ai"
211
163
  if model not in MODELS:
212
164
  model = MODELS[0]
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: neogram
3
- Version: 9.3.2
3
+ Version: 9.3.5
4
4
  Summary: neogram is a lightweight Python module for working with the Telegram Bot API and AI. It combines simple Telegram workflows with powerful features like text and image generation, translation, and more.
5
5
  Author: SiriLV
6
6
  Author-email: siriteamrs@gmail.com
@@ -26,7 +26,7 @@ Dynamic: requires-dist
26
26
  Dynamic: requires-python
27
27
  Dynamic: summary
28
28
 
29
- # 📚 Документация neogram v9.3.01
29
+ # 📚 Документация neogram v9.3.5
30
30
 
31
31
  **Установка:**
32
32
 
@@ -79,7 +79,7 @@ bot = Bot(token="YOUR_TOKEN", timeout=60)
79
79
  Классы для интеграции с внешними AI-сервисами.
80
80
 
81
81
  ### Класс `OnlySQ`
82
- Интерфейс к сервису OnlySQ.
82
+ Интерфейс к сервису OnlySQ. Для пользования потребуется API ключ: https://my.onlysq.ru/
83
83
  * `get_models(...)`: Получить список доступных моделей.
84
84
  * `generate_answer(model, messages)`: Генерация текста (чат).
85
85
  * `generate_image(model, prompt, ...)`: Генерация изображений.
@@ -88,7 +88,6 @@ bot = Bot(token="YOUR_TOKEN", timeout=60)
88
88
  Набор утилит и альтернативных API.
89
89
  * `translate(text, lang)`: Перевод текста (через Google Translate).
90
90
  * `short_url(long_url)`: Сокращение ссылок (clck.ru).
91
- * `gen_ai_response(model, messages)`: Генерация ответа (Qwen/GPT OSS).
92
91
  * `perplexity_ask(model, query)`: Генерация через PerplexityAI
93
92
  * `encode_base64(path)`: Кодирование файла в base64.
94
93
  * `run_in_bg(func, ...)`: Запуск функции в отдельном потоке.
@@ -0,0 +1,8 @@
1
+ neogram/__init__.py,sha256=C47Ofb8xvEqQlB-6Vt8hzOPpWcjASjSV8YID_bxbkW4,7307
2
+ neogram/fgram.py,sha256=zO3_MhJyCtJUzbU6FYY9FO747GSQLkdWqr45zYehNMs,257655
3
+ neogram/ii.py,sha256=YJaCu-rnwQJefeymwOQe_XS5oEDpHPV5WqlJVqxNCHY,18549
4
+ neogram-9.3.5.dist-info/licenses/LICENSE,sha256=FAb9EYIqo8kpOGEwL_lH45SL_SLJ9wDxbSSRFpsSzvs,1112
5
+ neogram-9.3.5.dist-info/METADATA,sha256=0CsSzA6ExQwOg4MDrg_M22dty2LGVJF3aVMQd20Wy2A,50188
6
+ neogram-9.3.5.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
7
+ neogram-9.3.5.dist-info/top_level.txt,sha256=NqTlzfnaxZNIo9TCYSbG_LtE05Kx6JfrSSgzVPMAawA,8
8
+ neogram-9.3.5.dist-info/RECORD,,
@@ -1,8 +0,0 @@
1
- neogram/__init__.py,sha256=C47Ofb8xvEqQlB-6Vt8hzOPpWcjASjSV8YID_bxbkW4,7307
2
- neogram/fgram.py,sha256=zO3_MhJyCtJUzbU6FYY9FO747GSQLkdWqr45zYehNMs,257655
3
- neogram/ii.py,sha256=RYBUPxHzP9YsYFvW-_FupUQMKrP4iNJ7tT-nqKliFxk,21651
4
- neogram-9.3.2.dist-info/licenses/LICENSE,sha256=FAb9EYIqo8kpOGEwL_lH45SL_SLJ9wDxbSSRFpsSzvs,1112
5
- neogram-9.3.2.dist-info/METADATA,sha256=txa--NPpzxW4js2ZVk8qNC4N6WYZ7JfptgkhUEQTcsE,50190
6
- neogram-9.3.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
7
- neogram-9.3.2.dist-info/top_level.txt,sha256=NqTlzfnaxZNIo9TCYSbG_LtE05Kx6JfrSSgzVPMAawA,8
8
- neogram-9.3.2.dist-info/RECORD,,