euriai 0.3.4__tar.gz → 0.3.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {euriai-0.3.4 → euriai-0.3.5}/PKG-INFO +59 -11
- euriai-0.3.5/README.md +108 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/PKG-INFO +59 -11
- {euriai-0.3.4 → euriai-0.3.5}/setup.py +1 -1
- euriai-0.3.4/README.md +0 -60
- {euriai-0.3.4 → euriai-0.3.5}/euriai/__init__.py +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai/cli.py +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai/client.py +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai/embedding.py +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai/langchain_embed.py +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai/langchain_llm.py +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/SOURCES.txt +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/dependency_links.txt +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/entry_points.txt +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/requires.txt +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/top_level.txt +0 -0
- {euriai-0.3.4 → euriai-0.3.5}/setup.cfg +0 -0
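To reproduce a diff like this locally, one option (a sketch using standard pip and diff tooling, not anything provided by the registry or by euriai itself) is to download both sdists and compare the unpacked trees; the archive names below assume the conventional `euriai-<version>.tar.gz` sdist naming:

```bash
# Download both source distributions, without dependencies or wheels
pip download euriai==0.3.4 --no-deps --no-binary :all: -d ./old
pip download euriai==0.3.5 --no-deps --no-binary :all: -d ./new

# Unpack and compare the two source trees
tar -xzf old/euriai-0.3.4.tar.gz -C old
tar -xzf new/euriai-0.3.5.tar.gz -C new
diff -ru old/euriai-0.3.4 new/euriai-0.3.5
```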
{euriai-0.3.4 → euriai-0.3.5}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.4
+Version: 0.3.5
 Summary: Python client for EURI LLM API (euron.one) with CLI and interactive wizard
 Author: euron.one
 Author-email: sudhanshu@euron.one
@@ -23,20 +23,23 @@ Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary

-# euriai 🧠
+# euriai 🧠

-**EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://api.euron.one).
-Supports completions, streaming responses, CLI interaction, and an interactive guided wizard!
+**EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://api.euron.one). Supports completions, streaming responses, embeddings, CLI interaction, and an interactive guided wizard!

 ---

-## 🔧 Installation
+## 🔧 Installation

 ```bash
 pip install euriai
+```

-##
+## 🚀 Python Usage

+### Text Generation
+
+```python
 from euriai import EuriaiClient

 client = EuriaiClient(
@@ -51,25 +54,42 @@ response = client.generate_completion(
 )

 print(response)
+```

+### Embeddings
+
+```python
+from euriai.embedding import EuriaiEmbeddingClient
+
+client = EuriaiEmbeddingClient(api_key="your_key")
+embedding = client.embed("Hello world")
+print(embedding[:5]) # Print first 5 dimensions of the embedding vector
+```

 ## 💻 Command-Line Interface (CLI) Usage
+
 Run prompts directly from the terminal:

+```bash
 euriai --api_key YOUR_API_KEY --prompt "Tell me a joke"
+```

+Enable streaming output (if supported by the model):

-
-
+```bash
 euriai --api_key YOUR_API_KEY --prompt "Stream a fun fact" --stream
+```

+List all supported model IDs with recommended use-cases and temperature/token advice:

-
-
+```bash
 euriai --models
+```

 ## 🤖 LangChain Integration

+### Text Generation
+
 Use Euriai with LangChain directly:

 ```python
@@ -77,9 +97,37 @@ from euriai import EuriaiLangChainLLM

 llm = EuriaiLangChainLLM(
     api_key="your_api_key",
-    model="gpt-4.1-nano",
+    model="gpt-4.1-nano",
     temperature=0.7,
     max_tokens=300
 )

 print(llm.invoke("Write a poem about time travel."))
+```
+
+### Embeddings
+
+Use Euriai embeddings with LangChain:
+
+```python
+from euriai.langchain_embed import EuriaiEmbeddings
+
+embedding_model = EuriaiEmbeddings(api_key="your_key")
+print(embedding_model.embed_query("What's AI?")[:5]) # Print first 5 dimensions
+```
+
+## 📘 Documentation
+
+For full documentation, visit our [official docs site](https://docs.euron.one).
+
+## 🔑 Getting an API Key
+
+Sign up for an API key at [Euron AI Platform](https://app.euron.one).
+
+## 🤝 Contributing
+
+Contributions are welcome! Please feel free to submit a Pull Request.
+
+## 📄 License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
euriai-0.3.5/README.md (ADDED)

@@ -0,0 +1,108 @@
+# euriai 🧠
+
+**EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://api.euron.one). Supports completions, streaming responses, embeddings, CLI interaction, and an interactive guided wizard!
+
+---
+
+## 🔧 Installation
+
+```bash
+pip install euriai
+```
+
+## 🚀 Python Usage
+
+### Text Generation
+
+```python
+from euriai import EuriaiClient
+
+client = EuriaiClient(
+    api_key="your_api_key_here",
+    model="gpt-4.1-nano" # You can also try: "gemini-2.0-flash-001", "llama-4-maverick", etc.
+)
+
+response = client.generate_completion(
+    prompt="Write a short poem about artificial intelligence.",
+    temperature=0.7,
+    max_tokens=300
+)
+
+print(response)
+```
+
+### Embeddings
+
+```python
+from euriai.embedding import EuriaiEmbeddingClient
+
+client = EuriaiEmbeddingClient(api_key="your_key")
+embedding = client.embed("Hello world")
+print(embedding[:5]) # Print first 5 dimensions of the embedding vector
+```
+
+## 💻 Command-Line Interface (CLI) Usage
+
+Run prompts directly from the terminal:
+
+```bash
+euriai --api_key YOUR_API_KEY --prompt "Tell me a joke"
+```
+
+Enable streaming output (if supported by the model):
+
+```bash
+euriai --api_key YOUR_API_KEY --prompt "Stream a fun fact" --stream
+```
+
+List all supported model IDs with recommended use-cases and temperature/token advice:
+
+```bash
+euriai --models
+```
+
+## 🤖 LangChain Integration
+
+### Text Generation
+
+Use Euriai with LangChain directly:
+
+```python
+from euriai import EuriaiLangChainLLM
+
+llm = EuriaiLangChainLLM(
+    api_key="your_api_key",
+    model="gpt-4.1-nano",
+    temperature=0.7,
+    max_tokens=300
+)
+
+print(llm.invoke("Write a poem about time travel."))
+```
+
+### Embeddings
+
+Use Euriai embeddings with LangChain:
+
+```python
+from euriai.langchain_embed import EuriaiEmbeddings
+
+embedding_model = EuriaiEmbeddings(api_key="your_key")
+print(embedding_model.embed_query("What's AI?")[:5]) # Print first 5 dimensions
+```
+
+## 📘 Documentation
+
+For full documentation, visit our [official docs site](https://docs.euron.one).
+
+## 🔑 Getting an API Key
+
+Sign up for an API key at [Euron AI Platform](https://app.euron.one).
+
+## 🤝 Contributing
+
+Contributions are welcome! Please feel free to submit a Pull Request.
+
+## 📄 License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
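The headline addition in the 0.3.5 README is the embeddings documentation. As a quick illustration of what the newly documented `EuriaiEmbeddingClient` enables, here is a minimal sketch that compares two embeddings with cosine similarity; it assumes `embed()` returns a numeric vector (as the README's slicing example suggests) and that `numpy` is installed, neither of which the package documentation states explicitly:

```python
import numpy as np
from euriai.embedding import EuriaiEmbeddingClient

client = EuriaiEmbeddingClient(api_key="your_key")

# Embed two related sentences; embed() is assumed to return a sequence of floats.
a = np.asarray(client.embed("EURI AI provides an LLM API."))
b = np.asarray(client.embed("Euron offers large language model endpoints."))

# Cosine similarity: close to 1.0 means the sentences point in a similar direction.
similarity = float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b)))
print(f"cosine similarity: {similarity:.3f}")
```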
{euriai-0.3.4 → euriai-0.3.5}/euriai.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: euriai
-Version: 0.3.4
+Version: 0.3.5
 Summary: Python client for EURI LLM API (euron.one) with CLI and interactive wizard
 Author: euron.one
 Author-email: sudhanshu@euron.one
@@ -23,20 +23,23 @@ Dynamic: requires-dist
 Dynamic: requires-python
 Dynamic: summary

-# euriai 🧠
+# euriai 🧠

-**EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://api.euron.one).
-Supports completions, streaming responses, CLI interaction, and an interactive guided wizard!
+**EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://api.euron.one). Supports completions, streaming responses, embeddings, CLI interaction, and an interactive guided wizard!

 ---

-## 🔧 Installation
+## 🔧 Installation

 ```bash
 pip install euriai
+```

-##
+## 🚀 Python Usage

+### Text Generation
+
+```python
 from euriai import EuriaiClient

 client = EuriaiClient(
@@ -51,25 +54,42 @@ response = client.generate_completion(
 )

 print(response)
+```

+### Embeddings
+
+```python
+from euriai.embedding import EuriaiEmbeddingClient
+
+client = EuriaiEmbeddingClient(api_key="your_key")
+embedding = client.embed("Hello world")
+print(embedding[:5]) # Print first 5 dimensions of the embedding vector
+```

 ## 💻 Command-Line Interface (CLI) Usage
+
 Run prompts directly from the terminal:

+```bash
 euriai --api_key YOUR_API_KEY --prompt "Tell me a joke"
+```

+Enable streaming output (if supported by the model):

-
-
+```bash
 euriai --api_key YOUR_API_KEY --prompt "Stream a fun fact" --stream
+```

+List all supported model IDs with recommended use-cases and temperature/token advice:

-
-
+```bash
 euriai --models
+```

 ## 🤖 LangChain Integration

+### Text Generation
+
 Use Euriai with LangChain directly:

 ```python
@@ -77,9 +97,37 @@ from euriai import EuriaiLangChainLLM

 llm = EuriaiLangChainLLM(
     api_key="your_api_key",
-    model="gpt-4.1-nano",
+    model="gpt-4.1-nano",
     temperature=0.7,
     max_tokens=300
 )

 print(llm.invoke("Write a poem about time travel."))
+```
+
+### Embeddings
+
+Use Euriai embeddings with LangChain:
+
+```python
+from euriai.langchain_embed import EuriaiEmbeddings
+
+embedding_model = EuriaiEmbeddings(api_key="your_key")
+print(embedding_model.embed_query("What's AI?")[:5]) # Print first 5 dimensions
+```
+
+## 📘 Documentation
+
+For full documentation, visit our [official docs site](https://docs.euron.one).
+
+## 🔑 Getting an API Key
+
+Sign up for an API key at [Euron AI Platform](https://app.euron.one).
+
+## 🤝 Contributing
+
+Contributions are welcome! Please feel free to submit a Pull Request.
+
+## 📄 License
+
+This project is licensed under the MIT License - see the LICENSE file for details.
{euriai-0.3.4 → euriai-0.3.5}/setup.py

@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

 setup(
     name="euriai",
-    version="0.3.4",
+    version="0.3.5",
     description="Python client for EURI LLM API (euron.one) with CLI and interactive wizard",
     long_description=open("README.md", encoding="utf-8").read(),
     long_description_content_type="text/markdown",
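Only the version string changes in setup.py, and the package modules themselves are untouched in this release (see the +0 -0 entries in the file list above), so picking up the new metadata and README is just a pip upgrade; the commands below assume you install from PyPI:

```bash
# Upgrade to the latest release, or pin the exact version shown in this diff
pip install --upgrade euriai
pip install "euriai==0.3.5"
```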
euriai-0.3.4/README.md (DELETED)

@@ -1,60 +0,0 @@
-# euriai 🧠
-
-**EURI AI Python Client** – A simple wrapper and CLI tool for the [Euron LLM API](https://api.euron.one).
-Supports completions, streaming responses, CLI interaction, and an interactive guided wizard!
-
----
-
-## 🔧 Installation
-
-```bash
-pip install euriai
-
-## python sample Usage
-
-from euriai import EuriaiClient
-
-client = EuriaiClient(
-    api_key="your_api_key_here",
-    model="gpt-4.1-nano" # You can also try: "gemini-2.0-flash-001", "llama-4-maverick", etc.
-)
-
-response = client.generate_completion(
-    prompt="Write a short poem about artificial intelligence.",
-    temperature=0.7,
-    max_tokens=300
-)
-
-print(response)
-
-
-## 💻 Command-Line Interface (CLI) Usage
-Run prompts directly from the terminal:
-
-euriai --api_key YOUR_API_KEY --prompt "Tell me a joke"
-
-
-## Enable streaming output (if supported by the model):
-
-euriai --api_key YOUR_API_KEY --prompt "Stream a fun fact" --stream
-
-
-##List all supported model IDs with recommended use-cases and temperature/token advice:
-
-euriai --models
-
-## 🤖 LangChain Integration
-
-Use Euriai with LangChain directly:
-
-```python
-from euriai import EuriaiLangChainLLM
-
-llm = EuriaiLangChainLLM(
-    api_key="your_api_key",
-    model="gpt-4.1-nano",
-    temperature=0.7,
-    max_tokens=300
-)
-
-print(llm.invoke("Write a poem about time travel."))