revengelibrary 0.1.0__tar.gz → 0.1.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,15 @@
1
+ include LICENSE
2
+ include README.md
3
+ include pyproject.toml
4
+
5
+ recursive-include revengelibrary *.py *.json
6
+
7
+ exclude .env
8
+ exclude .revengelibrary_memory.json
9
+
10
+ prune venv
11
+ prune build
12
+ prune dist
13
+
14
+ global-exclude __pycache__
15
+ global-exclude *.py[cod]
@@ -0,0 +1,36 @@
1
+ Metadata-Version: 2.4
2
+ Name: revengelibrary
3
+ Version: 0.1.6
4
+ Summary: Не нейросеть
5
+ Author: revengebibliotek contributors
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/example/revengelibrary
8
+ Keywords: chat,llm,openrouter,api,cli
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Requires-Python: >=3.9
13
+ Description-Content-Type: text/markdown
14
+ License-File: LICENSE
15
+ Requires-Dist: requests>=2.31.0
16
+ Dynamic: license-file
17
+
18
+ # revengelibrary
19
+
20
+ Не нейросеть
21
+
22
+ ## Установка
23
+
24
+ Из PyPI (после публикации):
25
+
26
+ ```bash
27
+ pip install revengelibrary
28
+ ```
29
+
30
+ ## Запуск из терминала
31
+
32
+ После установки доступна команда:
33
+
34
+ ```bash
35
+ revengelibrary "text"
36
+ ```
@@ -0,0 +1,19 @@
1
+ # revengelibrary
2
+
3
+ Не нейросеть
4
+
5
+ ## Установка
6
+
7
+ Из PyPI (после публикации):
8
+
9
+ ```bash
10
+ pip install revengelibrary
11
+ ```
12
+
13
+ ## Запуск из терминала
14
+
15
+ После установки доступна команда:
16
+
17
+ ```bash
18
+ revengelibrary "text"
19
+ ```
@@ -4,8 +4,8 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "revengelibrary"
7
- version = "0.1.0"
8
- description = "Python chat library and CLI for free LLM models via OpenRouter."
7
+ version = "0.1.6"
8
+ description = "Не нейросеть"
9
9
  readme = "README.md"
10
10
  requires-python = ">=3.9"
11
11
  license = { text = "MIT" }
@@ -29,7 +29,10 @@ Homepage = "https://github.com/example/revengelibrary"
29
29
  revengelibrary = "revengelibrary.cli:main"
30
30
 
31
31
  [tool.setuptools]
32
- include-package-data = false
32
+ include-package-data = true
33
33
 
34
34
  [tool.setuptools.packages.find]
35
35
  include = ["revengelibrary*"]
36
+
37
+ [tool.setuptools.package-data]
38
+ revengelibrary = ["memory_store.json"]
@@ -0,0 +1,21 @@
1
+ from .agents import DEFAULT_AGENT, AgentProfile, get_agent, list_agents
2
+ from .chat import (
3
+ APIError,
4
+ DEFAULT_MEMORY_FILE,
5
+ DEFAULT_MODEL,
6
+ DEFAULT_OPENROUTER_API_KEY,
7
+ FreeNeuroChatClient,
8
+ )
9
+
10
# Public API of the package: names re-exported from .agents and .chat.
__all__ = [
    "FreeNeuroChatClient",
    "APIError",
    "DEFAULT_OPENROUTER_API_KEY",
    "DEFAULT_MODEL",
    "DEFAULT_MEMORY_FILE",
    "DEFAULT_AGENT",
    "AgentProfile",
    "list_agents",
    "get_agent",
]
# NOTE(review): keep in sync with [project] version in pyproject.toml (0.1.6).
__version__ = "0.1.6"
@@ -0,0 +1,60 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+
5
# Agent profile used when the caller does not pick one explicitly.
DEFAULT_AGENT = "general"


@dataclass(frozen=True)
class AgentProfile:
    """Immutable description of a chat agent role.

    Attributes:
        name: machine-readable identifier used for registry lookups
        title: human-readable role title shown in listings
        system_prompt: system message injected at the start of a conversation
    """

    name: str
    title: str
    system_prompt: str


def _profile(name: str, title: str, system_prompt: str) -> AgentProfile:
    # Tiny factory keeping the registry below table-like and readable.
    return AgentProfile(name=name, title=title, system_prompt=system_prompt)


# Registry of built-in agent roles, keyed by profile name.
# Insertion order is the order reported by list_agents().
AGENT_PROFILES: dict[str, AgentProfile] = {
    profile.name: profile
    for profile in (
        _profile(
            "general",
            "Универсальный ассистент",
            "You are a helpful assistant.",
        ),
        _profile(
            "frontend",
            "Senior Frontend + Mobile Design",
            "Ты senior frontend-разработчик с сильной экспертизой в дизайне "
            "мобильных приложений. Пиши ответы как практический инженер: "
            "mobile-first, доступность, понятная архитектура компонентов, "
            "чистый UI/UX, адаптив под iOS/Android, хорошая типографика и "
            "согласованная дизайн-система.",
        ),
        _profile(
            "backend",
            "Backend Engineer",
            "Ты senior backend-разработчик. Предлагай надежные API-контракты, "
            "чистую архитектуру, контроль ошибок, безопасность и масштабируемость.",
        ),
        _profile(
            "qa",
            "QA Engineer",
            "Ты senior QA-инженер. Фокус: тест-кейсы, граничные условия, "
            "регрессия, воспроизводимость багов и риски релиза.",
        ),
    )
}


def list_agents() -> list[AgentProfile]:
    """Return every built-in agent profile, in registration order."""
    return [*AGENT_PROFILES.values()]


def get_agent(agent_name: str) -> AgentProfile:
    """Look up an agent profile by name (case-insensitive, whitespace-tolerant).

    Falls back to DEFAULT_AGENT when *agent_name* is empty/falsy.

    Raises:
        ValueError: if the normalized name is not a registered profile.
    """
    key = (agent_name or DEFAULT_AGENT).strip().lower()
    profile = AGENT_PROFILES.get(key)
    if profile is None:
        available = ", ".join(sorted(AGENT_PROFILES))
        raise ValueError(f"Unknown agent '{agent_name}'. Available: {available}")
    return profile
@@ -0,0 +1,214 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ from dataclasses import dataclass, field
5
+ from pathlib import Path
6
+ from typing import Any
7
+
8
+ import requests
9
+
10
# Built-in fallback credentials/configuration for the OpenRouter client.
#
# SECURITY WARNING (review): this is a hard-coded OpenRouter API key shipped
# inside a published package. A shared secret embedded in source is a
# credential leak — anyone installing the package can harvest and abuse it,
# and usage is billed/rate-limited against this key. It should be revoked
# and removed; require the OPENROUTER_API_KEY environment variable instead.
DEFAULT_OPENROUTER_API_KEY = (
    "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"
)
# Default model: a free-tier model on OpenRouter.
DEFAULT_MODEL = "meta-llama/llama-3.1-8b-instruct:free"
# Default persistence location: chat history is written next to this module,
# i.e. inside the installed package directory — TODO confirm this is intended
# (a per-user path such as ~/.config would be more conventional).
DEFAULT_MEMORY_FILE = str(Path(__file__).resolve().with_name("memory_store.json"))
# Substring of OpenRouter's 404 error message meaning the requested model has
# no available endpoints; used to trigger the fallback-model search.
_MODEL_UNAVAILABLE_MARKER = "No endpoints found"
16
+
17
+
18
class APIError(RuntimeError):
    """Error raised when the chat API responds with an unusable payload.

    Covers HTTP error statuses as well as structurally invalid bodies.
    """
20
+
21
+
22
@dataclass
class FreeNeuroChatClient:
    """Small wrapper around OpenRouter's OpenAI-compatible chat endpoint.

    Keeps the running conversation in ``messages`` and (optionally) persists
    it as JSON to ``memory_file`` after every change.

    SECURITY NOTE (review): ``api_key`` falls back to a key hard-coded in this
    package (DEFAULT_OPENROUTER_API_KEY). A shared secret shipped in source
    should be revoked; callers should always supply their own key.
    """

    api_key: str = DEFAULT_OPENROUTER_API_KEY
    model: str = DEFAULT_MODEL
    base_url: str = "https://openrouter.ai/api/v1"
    timeout: int = 60  # seconds, applied to every HTTP request
    system_prompt: str | None = "You are a helpful assistant."
    memory_file: str | None = DEFAULT_MEMORY_FILE
    autosave_memory: bool = True
    messages: list[dict[str, str]] = field(default_factory=list)

    def __post_init__(self) -> None:
        # An explicitly empty api_key still falls back to the built-in key.
        if not self.api_key:
            self.api_key = DEFAULT_OPENROUTER_API_KEY

        # Restore any previously persisted conversation first ...
        if self.memory_file:
            self.load_memory()

        # ... then make sure the system prompt is present exactly once,
        # always at the front of the history.
        if not self.messages and self.system_prompt:
            self.messages.append({"role": "system", "content": self.system_prompt})
        elif self.system_prompt and not self._has_system_message(self.messages):
            self.messages.insert(0, {"role": "system", "content": self.system_prompt})

        self._save_memory_if_needed()

    def send(self, user_message: str) -> str:
        """Send user text and return the assistant reply.

        Appends both the user message and the assistant answer to the
        conversation history (persisted when autosave is on).

        Raises:
            ValueError: if *user_message* is blank.
            APIError: on HTTP error statuses or malformed API responses.
        """
        user_message = user_message.strip()
        if not user_message:
            raise ValueError("user_message must not be empty")

        self.messages.append({"role": "user", "content": user_message})
        payload = {"model": self.model, "messages": self.messages}

        response = self._chat_completion(payload)
        # If the chosen model has no live endpoints (specific 404), retry once
        # with another free model discovered from the /models listing.
        if self._is_model_unavailable(response):
            fallback_model = self._discover_fallback_free_model(exclude={self.model})
            if fallback_model:
                self.model = fallback_model
                payload["model"] = fallback_model
                response = self._chat_completion(payload)

        if response.status_code >= 400:
            raise APIError(
                f"API returned {response.status_code}: {response.text[:500]}"
            )

        # FIX: a 2xx response with a non-JSON body used to escape as a raw
        # json.JSONDecodeError; surface it as APIError like other API faults.
        try:
            data = response.json()
        except ValueError as exc:
            raise APIError(
                f"API returned invalid JSON: {response.text[:500]}"
            ) from exc
        content = self._extract_content(data)
        self.messages.append({"role": "assistant", "content": content})
        self._save_memory_if_needed()
        return content

    def reset(self) -> None:
        """Clear dialog history but keep initial system prompt."""
        self.messages = []
        if self.system_prompt:
            self.messages.append({"role": "system", "content": self.system_prompt})
        self._save_memory_if_needed()

    def save_memory(self, file_path: str | None = None) -> None:
        """Persist full dialog history to a JSON file.

        Uses *file_path* when given, otherwise ``self.memory_file``; a no-op
        when neither is set. Parent directories are created as needed.
        """
        path = self._resolve_memory_path(file_path)
        if path is None:
            return
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(
            json.dumps(self.messages, ensure_ascii=False, indent=2),
            encoding="utf-8",
        )

    def load_memory(self, file_path: str | None = None) -> None:
        """Load dialog history from a JSON file if it exists.

        Unreadable or invalid files are silently ignored (best-effort load).
        """
        path = self._resolve_memory_path(file_path)
        if path is None or not path.exists():
            return
        try:
            data = json.loads(path.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError):
            # Best-effort: a corrupt/unreadable memory file must not break
            # client construction.
            return
        self.messages = self._normalize_messages(data)

    def clear_memory_file(self, file_path: str | None = None) -> None:
        """Delete the persisted history file (no-op if absent)."""
        path = self._resolve_memory_path(file_path)
        if path is None:
            return
        if path.exists():
            path.unlink()

    def _headers(self) -> dict[str, str]:
        # Bearer-token auth per OpenRouter's OpenAI-compatible API.
        return {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }

    def _chat_completion(self, payload: dict[str, Any]) -> requests.Response:
        # Single POST to the chat-completions endpoint; caller inspects status.
        return requests.post(
            f"{self.base_url}/chat/completions",
            headers=self._headers(),
            json=payload,
            timeout=self.timeout,
        )

    def _is_model_unavailable(self, response: requests.Response) -> bool:
        """Return True for OpenRouter's 'model has no endpoints' 404 error."""
        if response.status_code != 404:
            return False
        try:
            data = response.json()
        except ValueError:
            return False
        # FIX: guard against non-dict JSON bodies and non-dict "error" values
        # (previously raised AttributeError on .get).
        if not isinstance(data, dict):
            return False
        error = data.get("error")
        if not isinstance(error, dict):
            return False
        message = str(error.get("message", ""))
        return _MODEL_UNAVAILABLE_MARKER in message

    def _discover_fallback_free_model(self, exclude: set[str]) -> str | None:
        """Return the first ':free' model id not in *exclude*, or None.

        Any network/JSON failure is swallowed — fallback discovery is
        strictly best-effort.
        """
        try:
            response = requests.get(
                f"{self.base_url}/models",
                headers=self._headers(),
                timeout=self.timeout,
            )
            if response.status_code >= 400:
                return None
            data = response.json()
        except Exception:  # noqa: BLE001
            return None

        # FIX: guard against a non-dict top-level JSON payload.
        if not isinstance(data, dict):
            return None
        model_ids = self._extract_free_model_ids(data.get("data"))
        for model_id in model_ids:
            if model_id not in exclude:
                return model_id
        return None

    @staticmethod
    def _extract_free_model_ids(data: Any) -> list[str]:
        """Pull string model ids ending in ':free' out of a /models listing."""
        if not isinstance(data, list):
            return []

        result: list[str] = []
        for item in data:
            if not isinstance(item, dict):
                continue
            model_id = item.get("id")
            if isinstance(model_id, str) and model_id.endswith(":free"):
                result.append(model_id)
        return result

    def _save_memory_if_needed(self) -> None:
        # Persist after every history mutation when autosave is enabled.
        if self.autosave_memory:
            self.save_memory()

    def _resolve_memory_path(self, file_path: str | None = None) -> Path | None:
        """Resolve the effective memory path; None disables persistence."""
        value = file_path if file_path is not None else self.memory_file
        if not value:
            return None
        return _normalize_memory_path(value)

    @staticmethod
    def _normalize_messages(data: Any) -> list[dict[str, str]]:
        """Keep only well-formed {'role': str, 'content': str} entries."""
        if not isinstance(data, list):
            return []
        normalized: list[dict[str, str]] = []
        for item in data:
            if not isinstance(item, dict):
                continue
            role = item.get("role")
            content = item.get("content")
            if isinstance(role, str) and isinstance(content, str):
                normalized.append({"role": role, "content": content})
        return normalized

    @staticmethod
    def _has_system_message(messages: list[dict[str, str]]) -> bool:
        # True when any entry carries the "system" role.
        return any(item.get("role") == "system" for item in messages)

    @staticmethod
    def _extract_content(data: dict[str, Any]) -> str:
        """Extract choices[0].message.content; raise APIError if malformed."""
        try:
            content = data["choices"][0]["message"]["content"]
            if not isinstance(content, str) or not content.strip():
                # Treat empty/non-string content the same as a missing key.
                raise KeyError
            return content
        except (KeyError, TypeError, IndexError):
            raise APIError(f"Unexpected API response: {data}") from None
208
+
209
+
210
+ def _normalize_memory_path(value: str) -> Path:
211
+ path = Path(value).expanduser()
212
+ if path.suffix:
213
+ return path
214
+ return path.with_suffix(".json")
@@ -0,0 +1,130 @@
1
+ from __future__ import annotations
2
+
3
+ import argparse
4
+ import os
5
+ from pathlib import Path
6
+
7
+ from .agents import DEFAULT_AGENT, get_agent, list_agents
8
+ from .chat import (
9
+ APIError,
10
+ DEFAULT_MEMORY_FILE,
11
+ DEFAULT_MODEL,
12
+ DEFAULT_OPENROUTER_API_KEY,
13
+ FreeNeuroChatClient,
14
+ )
15
+
16
+
17
def _build_parser() -> argparse.ArgumentParser:
    """Build the argument parser for the interactive chat command."""
    parser = argparse.ArgumentParser(
        prog="revengelibrary",
        description="Interactive terminal chat with free OpenRouter models.",
    )
    # One (flag, add_argument-kwargs) entry per CLI option.
    # NOTE(review): --api-key silently falls back to a key embedded in the
    # package — see DEFAULT_OPENROUTER_API_KEY in chat.py.
    option_specs = [
        (
            "--api-key",
            {
                "default": os.getenv("OPENROUTER_API_KEY", DEFAULT_OPENROUTER_API_KEY),
                "help": "OpenRouter API key. Fallback: OPENROUTER_API_KEY, then built-in key.",
            },
        ),
        (
            "--model",
            {
                "default": DEFAULT_MODEL,
                "help": "Model name on OpenRouter (default uses a free model).",
            },
        ),
        (
            "--system",
            {
                "default": None,
                "help": "System prompt override. If omitted, selected agent prompt is used.",
            },
        ),
        (
            "--agent",
            {
                "default": os.getenv("REVENGELIBRARY_AGENT", DEFAULT_AGENT),
                "help": "Agent role. Use --list-agents to view available roles.",
            },
        ),
        (
            "--list-agents",
            {
                "action": "store_true",
                "help": "Show available agent roles and exit.",
            },
        ),
        (
            "--memory-file",
            {
                "default": os.getenv("REVENGELIBRARY_MEMORY_FILE", DEFAULT_MEMORY_FILE),
                "help": (
                    "JSON file for chat memory. "
                    "Fallback: REVENGELIBRARY_MEMORY_FILE, then built-in package memory file."
                ),
            },
        ),
    ]
    for flag, kwargs in option_specs:
        parser.add_argument(flag, **kwargs)
    return parser
56
+
57
+
58
def main() -> int:
    """CLI entry point: run an interactive chat loop in the terminal.

    Returns:
        Process exit code (0 on normal exit).
    """
    parser = _build_parser()
    args = parser.parse_args()

    # --list-agents short-circuits: print roles and exit successfully.
    if args.list_agents:
        for profile in list_agents():
            print(f"{profile.name}: {profile.title}")
        return 0

    try:
        agent = get_agent(args.agent)
    except ValueError as exc:
        # parser.error() prints the message and raises SystemExit(2),
        # so control never falls through with `agent` unbound.
        parser.error(str(exc))

    # Explicit --system wins over the agent's built-in prompt.
    system_prompt = args.system if args.system else agent.system_prompt
    # Non-default agents get a dedicated memory file (suffix "__<agent>").
    memory_file = _agent_memory_file(args.memory_file, agent.name)

    client = FreeNeuroChatClient(
        api_key=args.api_key or DEFAULT_OPENROUTER_API_KEY,
        model=args.model,
        system_prompt=system_prompt,
        memory_file=memory_file,
    )

    print(
        "Interactive chat started. "
        "Type 'exit' to quit, 'reset' to clear history, 'forget' to delete memory file."
    )
    # Read-eval loop: a few in-band commands; everything else goes to the API.
    while True:
        try:
            text = input("you> ").strip()
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C ends the session cleanly.
            print("\nbye")
            return 0

        if not text:
            continue
        if text.lower() in {"exit", "quit"}:
            print("bye")
            return 0
        if text.lower() == "reset":
            # Clears in-memory history (keeps the system prompt).
            client.reset()
            print("history reset")
            continue
        if text.lower() == "forget":
            # Reset history and also remove the persisted memory file.
            client.reset()
            client.clear_memory_file()
            print("history reset and memory file deleted")
            continue

        try:
            answer = client.send(text)
            print(f"ai> {answer}")
        except APIError as exc:
            # Expected failure mode: report and keep the session alive.
            print(f"api error: {exc}")
        except Exception as exc:  # noqa: BLE001
            # Last-resort guard so one bad turn doesn't kill the REPL.
            print(f"unexpected error: {exc}")
115
+
116
+
117
def _agent_memory_file(memory_file: str | None, agent_name: str) -> str | None:
    """Derive a per-agent memory file path.

    The default agent keeps *memory_file* untouched; every other agent gets a
    sibling file named ``<stem>__<agent><suffix>`` (``.json`` when the
    original path has no extension). Returns None when *memory_file* is falsy.
    """
    if not memory_file:
        return None
    if agent_name == DEFAULT_AGENT:
        return memory_file

    base = Path(memory_file).expanduser()
    if base.suffix:
        stem, ext = base.stem, base.suffix
    else:
        stem, ext = base.name, ".json"
    return str(base.with_name(f"{stem}__{agent_name}{ext}"))
127
+
128
+
129
if __name__ == "__main__":
    # Allow `python -m revengelibrary.cli`; exit code comes from main().
    raise SystemExit(main())
@@ -0,0 +1,36 @@
1
+ Metadata-Version: 2.4
2
+ Name: revengelibrary
3
+ Version: 0.1.6
4
+ Summary: Не нейросеть
5
+ Author: revengebibliotek contributors
6
+ License: MIT
7
+ Project-URL: Homepage, https://github.com/example/revengelibrary
8
+ Keywords: chat,llm,openrouter,api,cli
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: License :: OSI Approved :: MIT License
11
+ Classifier: Operating System :: OS Independent
12
+ Requires-Python: >=3.9
13
+ Description-Content-Type: text/markdown
14
+ License-File: LICENSE
15
+ Requires-Dist: requests>=2.31.0
16
+ Dynamic: license-file
17
+
18
+ # revengelibrary
19
+
20
+ Не нейросеть
21
+
22
+ ## Установка
23
+
24
+ Из PyPI (после публикации):
25
+
26
+ ```bash
27
+ pip install revengelibrary
28
+ ```
29
+
30
+ ## Запуск из терминала
31
+
32
+ После установки доступна команда:
33
+
34
+ ```bash
35
+ revengelibrary "text"
36
+ ```
@@ -1,9 +1,12 @@
1
1
  LICENSE
2
+ MANIFEST.in
2
3
  README.md
3
4
  pyproject.toml
4
5
  revengelibrary/__init__.py
6
+ revengelibrary/agents.py
5
7
  revengelibrary/chat.py
6
8
  revengelibrary/cli.py
9
+ revengelibrary/memory_store.json
7
10
  revengelibrary.egg-info/PKG-INFO
8
11
  revengelibrary.egg-info/SOURCES.txt
9
12
  revengelibrary.egg-info/dependency_links.txt
@@ -1,88 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: revengelibrary
3
- Version: 0.1.0
4
- Summary: Python chat library and CLI for free LLM models via OpenRouter.
5
- Author: revengebibliotek contributors
6
- License: MIT
7
- Project-URL: Homepage, https://github.com/example/revengelibrary
8
- Keywords: chat,llm,openrouter,api,cli
9
- Classifier: Programming Language :: Python :: 3
10
- Classifier: License :: OSI Approved :: MIT License
11
- Classifier: Operating System :: OS Independent
12
- Requires-Python: >=3.9
13
- Description-Content-Type: text/markdown
14
- License-File: LICENSE
15
- Requires-Dist: requests>=2.31.0
16
- Dynamic: license-file
17
-
18
- # revengelibrary
19
-
20
- Небольшая Python-библиотека и CLI для чата с нейросетью через бесплатные модели OpenRouter.
21
-
22
- ## Установка
23
-
24
- Из PyPI (после публикации):
25
-
26
- ```bash
27
- pip install revengelibrary
28
- ```
29
-
30
- Локально из папки проекта:
31
-
32
- ```bash
33
- pip install .
34
- ```
35
-
36
- Для разработки:
37
-
38
- ```bash
39
- pip install -e .
40
- ```
41
-
42
- ## Подготовка API ключа
43
-
44
- 1. Создай бесплатный API key в OpenRouter.
45
- 2. Экспортируй ключ:
46
-
47
- ```bash
48
- export OPENROUTER_API_KEY="your_key"
49
- ```
50
-
51
- ## Использование в Python
52
-
53
- ```python
54
- from revengelibrary import FreeNeuroChatClient
55
-
56
- client = FreeNeuroChatClient(api_key="your_key")
57
- reply = client.send("Привет! Расскажи коротко анекдот.")
58
- print(reply)
59
- ```
60
-
61
- ## Запуск из терминала
62
-
63
- После установки доступна команда:
64
-
65
- ```bash
66
- revengelibrary --model meta-llama/llama-3.1-8b-instruct:free
67
- ```
68
-
69
- Если ключ лежит в `OPENROUTER_API_KEY`, то `--api-key` можно не передавать.
70
-
71
- ## Минимальный API библиотеки
72
-
73
- - `FreeNeuroChatClient.send(text: str) -> str`
74
- - `FreeNeuroChatClient.reset() -> None`
75
-
76
- ## Публикация в PyPI
77
-
78
- ```bash
79
- python3 -m pip install --upgrade build twine
80
- python3 -m build
81
- python3 -m twine upload dist/*
82
- ```
83
-
84
- После этого любой человек сможет установить библиотеку:
85
-
86
- ```bash
87
- pip install revengelibrary
88
- ```
@@ -1,71 +0,0 @@
1
- # revengelibrary
2
-
3
- Небольшая Python-библиотека и CLI для чата с нейросетью через бесплатные модели OpenRouter.
4
-
5
- ## Установка
6
-
7
- Из PyPI (после публикации):
8
-
9
- ```bash
10
- pip install revengelibrary
11
- ```
12
-
13
- Локально из папки проекта:
14
-
15
- ```bash
16
- pip install .
17
- ```
18
-
19
- Для разработки:
20
-
21
- ```bash
22
- pip install -e .
23
- ```
24
-
25
- ## Подготовка API ключа
26
-
27
- 1. Создай бесплатный API key в OpenRouter.
28
- 2. Экспортируй ключ:
29
-
30
- ```bash
31
- export OPENROUTER_API_KEY="your_key"
32
- ```
33
-
34
- ## Использование в Python
35
-
36
- ```python
37
- from revengelibrary import FreeNeuroChatClient
38
-
39
- client = FreeNeuroChatClient(api_key="your_key")
40
- reply = client.send("Привет! Расскажи коротко анекдот.")
41
- print(reply)
42
- ```
43
-
44
- ## Запуск из терминала
45
-
46
- После установки доступна команда:
47
-
48
- ```bash
49
- revengelibrary --model meta-llama/llama-3.1-8b-instruct:free
50
- ```
51
-
52
- Если ключ лежит в `OPENROUTER_API_KEY`, то `--api-key` можно не передавать.
53
-
54
- ## Минимальный API библиотеки
55
-
56
- - `FreeNeuroChatClient.send(text: str) -> str`
57
- - `FreeNeuroChatClient.reset() -> None`
58
-
59
- ## Публикация в PyPI
60
-
61
- ```bash
62
- python3 -m pip install --upgrade build twine
63
- python3 -m build
64
- python3 -m twine upload dist/*
65
- ```
66
-
67
- После этого любой человек сможет установить библиотеку:
68
-
69
- ```bash
70
- pip install revengelibrary
71
- ```
@@ -1,4 +0,0 @@
1
- from .chat import FreeNeuroChatClient, APIError
2
-
3
- __all__ = ["FreeNeuroChatClient", "APIError"]
4
- __version__ = "0.1.0"
@@ -1,76 +0,0 @@
1
- from __future__ import annotations
2
-
3
- from dataclasses import dataclass, field
4
- from typing import Any
5
-
6
- import requests
7
-
8
-
9
- class APIError(RuntimeError):
10
- """Raised when the upstream API returns an invalid response."""
11
-
12
-
13
- @dataclass
14
- class FreeNeuroChatClient:
15
- """Small wrapper around OpenRouter's OpenAI-compatible chat endpoint."""
16
-
17
- api_key: str
18
- model: str = "meta-llama/llama-3.1-8b-instruct:free"
19
- base_url: str = "https://openrouter.ai/api/v1"
20
- timeout: int = 60
21
- system_prompt: str | None = "You are a helpful assistant."
22
- messages: list[dict[str, str]] = field(default_factory=list)
23
-
24
- def __post_init__(self) -> None:
25
- if not self.api_key:
26
- raise ValueError("api_key is required")
27
- if self.system_prompt and not self.messages:
28
- self.messages.append({"role": "system", "content": self.system_prompt})
29
-
30
- def send(self, user_message: str) -> str:
31
- """Send user text and return assistant reply."""
32
- user_message = user_message.strip()
33
- if not user_message:
34
- raise ValueError("user_message must not be empty")
35
-
36
- self.messages.append({"role": "user", "content": user_message})
37
- payload = {"model": self.model, "messages": self.messages}
38
-
39
- response = requests.post(
40
- f"{self.base_url}/chat/completions",
41
- headers=self._headers(),
42
- json=payload,
43
- timeout=self.timeout,
44
- )
45
-
46
- if response.status_code >= 400:
47
- raise APIError(
48
- f"API returned {response.status_code}: {response.text[:500]}"
49
- )
50
-
51
- data = response.json()
52
- content = self._extract_content(data)
53
- self.messages.append({"role": "assistant", "content": content})
54
- return content
55
-
56
- def reset(self) -> None:
57
- """Clear dialog history but keep initial system prompt."""
58
- self.messages = []
59
- if self.system_prompt:
60
- self.messages.append({"role": "system", "content": self.system_prompt})
61
-
62
- def _headers(self) -> dict[str, str]:
63
- return {
64
- "Authorization": f"Bearer {self.api_key}",
65
- "Content-Type": "application/json",
66
- }
67
-
68
- @staticmethod
69
- def _extract_content(data: dict[str, Any]) -> str:
70
- try:
71
- content = data["choices"][0]["message"]["content"]
72
- if not isinstance(content, str) or not content.strip():
73
- raise KeyError
74
- return content
75
- except (KeyError, TypeError, IndexError):
76
- raise APIError(f"Unexpected API response: {data}") from None
@@ -1,73 +0,0 @@
1
- from __future__ import annotations
2
-
3
- import argparse
4
- import os
5
-
6
- from .chat import APIError, FreeNeuroChatClient
7
-
8
-
9
- def _build_parser() -> argparse.ArgumentParser:
10
- parser = argparse.ArgumentParser(
11
- prog="revengelibrary",
12
- description="Interactive terminal chat with free OpenRouter models.",
13
- )
14
- parser.add_argument(
15
- "--api-key",
16
- default=os.getenv("OPENROUTER_API_KEY", ""),
17
- help="OpenRouter API key. Fallback: OPENROUTER_API_KEY env var.",
18
- )
19
- parser.add_argument(
20
- "--model",
21
- default="meta-llama/llama-3.1-8b-instruct:free",
22
- help="Model name on OpenRouter (default uses a free model).",
23
- )
24
- parser.add_argument(
25
- "--system",
26
- default="You are a helpful assistant.",
27
- help="System prompt.",
28
- )
29
- return parser
30
-
31
-
32
- def main() -> int:
33
- parser = _build_parser()
34
- args = parser.parse_args()
35
-
36
- if not args.api_key:
37
- parser.error("API key is required: pass --api-key or set OPENROUTER_API_KEY.")
38
-
39
- client = FreeNeuroChatClient(
40
- api_key=args.api_key,
41
- model=args.model,
42
- system_prompt=args.system,
43
- )
44
-
45
- print("Interactive chat started. Type 'exit' to quit, 'reset' to clear history.")
46
- while True:
47
- try:
48
- text = input("you> ").strip()
49
- except (EOFError, KeyboardInterrupt):
50
- print("\nbye")
51
- return 0
52
-
53
- if not text:
54
- continue
55
- if text.lower() in {"exit", "quit"}:
56
- print("bye")
57
- return 0
58
- if text.lower() == "reset":
59
- client.reset()
60
- print("history reset")
61
- continue
62
-
63
- try:
64
- answer = client.send(text)
65
- print(f"ai> {answer}")
66
- except APIError as exc:
67
- print(f"api error: {exc}")
68
- except Exception as exc: # noqa: BLE001
69
- print(f"unexpected error: {exc}")
70
-
71
-
72
- if __name__ == "__main__":
73
- raise SystemExit(main())
@@ -1,88 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: revengelibrary
3
- Version: 0.1.0
4
- Summary: Python chat library and CLI for free LLM models via OpenRouter.
5
- Author: revengebibliotek contributors
6
- License: MIT
7
- Project-URL: Homepage, https://github.com/example/revengelibrary
8
- Keywords: chat,llm,openrouter,api,cli
9
- Classifier: Programming Language :: Python :: 3
10
- Classifier: License :: OSI Approved :: MIT License
11
- Classifier: Operating System :: OS Independent
12
- Requires-Python: >=3.9
13
- Description-Content-Type: text/markdown
14
- License-File: LICENSE
15
- Requires-Dist: requests>=2.31.0
16
- Dynamic: license-file
17
-
18
- # revengelibrary
19
-
20
- Небольшая Python-библиотека и CLI для чата с нейросетью через бесплатные модели OpenRouter.
21
-
22
- ## Установка
23
-
24
- Из PyPI (после публикации):
25
-
26
- ```bash
27
- pip install revengelibrary
28
- ```
29
-
30
- Локально из папки проекта:
31
-
32
- ```bash
33
- pip install .
34
- ```
35
-
36
- Для разработки:
37
-
38
- ```bash
39
- pip install -e .
40
- ```
41
-
42
- ## Подготовка API ключа
43
-
44
- 1. Создай бесплатный API key в OpenRouter.
45
- 2. Экспортируй ключ:
46
-
47
- ```bash
48
- export OPENROUTER_API_KEY="your_key"
49
- ```
50
-
51
- ## Использование в Python
52
-
53
- ```python
54
- from revengelibrary import FreeNeuroChatClient
55
-
56
- client = FreeNeuroChatClient(api_key="your_key")
57
- reply = client.send("Привет! Расскажи коротко анекдот.")
58
- print(reply)
59
- ```
60
-
61
- ## Запуск из терминала
62
-
63
- После установки доступна команда:
64
-
65
- ```bash
66
- revengelibrary --model meta-llama/llama-3.1-8b-instruct:free
67
- ```
68
-
69
- Если ключ лежит в `OPENROUTER_API_KEY`, то `--api-key` можно не передавать.
70
-
71
- ## Минимальный API библиотеки
72
-
73
- - `FreeNeuroChatClient.send(text: str) -> str`
74
- - `FreeNeuroChatClient.reset() -> None`
75
-
76
- ## Публикация в PyPI
77
-
78
- ```bash
79
- python3 -m pip install --upgrade build twine
80
- python3 -m build
81
- python3 -m twine upload dist/*
82
- ```
83
-
84
- После этого любой человек сможет установить библиотеку:
85
-
86
- ```bash
87
- pip install revengelibrary
88
- ```
File without changes
File without changes