mira-network 0.1.1__tar.gz → 0.1.3__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mira-network
-Version: 0.1.1
+Version: 0.1.3
 Summary: Python SDK for Mira Network API
 Author-Email: sarim2000 <sarimbleedblue@gmail.com>
 License: MIT
@@ -9,16 +9,17 @@ Requires-Dist: httpx>=0.28.1
 Requires-Dist: pydantic>=2.10.4
 Requires-Dist: typing-extensions>=4.8.0
 Requires-Dist: requests>=2.32.3
+Requires-Dist: pytest-cov>=6.0.0
 Description-Content-Type: text/markdown
 
-# Mira SDK
+# Mira Network SDK
 
 A Python SDK for interacting with the Mira Network API. This SDK provides a simple interface to access all Mira API endpoints including model inference, flow management, and credit system.
 
 ## Installation
 
 ```bash
-pip install mira-sdk
+pip install mira-network
 ```
 
 ## Quick Start
@@ -40,7 +41,7 @@ async def main():
 
     # Generate text
     request = AiRequest(
-        model="mira/llama3.1",
+        model="gpt-4o",
         messages=[
            Message(role="system", content="You are a helpful assistant."),
            Message(role="user", content="Hello!")
@@ -1,11 +1,11 @@
-# Mira SDK
+# Mira Network SDK
 
 A Python SDK for interacting with the Mira Network API. This SDK provides a simple interface to access all Mira API endpoints including model inference, flow management, and credit system.
 
 ## Installation
 
 ```bash
-pip install mira-sdk
+pip install mira-network
 ```
 
 ## Quick Start
@@ -27,7 +27,7 @@ async def main():
 
     # Generate text
     request = AiRequest(
-        model="mira/llama3.1",
+        model="gpt-4o",
         messages=[
            Message(role="system", content="You are a helpful assistant."),
            Message(role="user", content="Hello!")
@@ -1,7 +1,4 @@
 [project]
-name = "mira-network"
-version = "0.1.1"
-description = "Python SDK for Mira Network API"
 authors = [
     { name = "sarim2000", email = "sarimbleedblue@gmail.com" },
 ]
@@ -10,18 +7,22 @@ dependencies = [
     "pydantic>=2.10.4",
     "typing-extensions>=4.8.0",
     "requests>=2.32.3",
+    "pytest-cov>=6.0.0",
 ]
-requires-python = "==3.10.*"
+description = "Python SDK for Mira Network API"
+name = "mira-network"
 readme = "README.md"
+requires-python = "==3.10.*"
+version = "0.1.3"
 
 [project.license]
 text = "MIT"
 
 [build-system]
+build-backend = "pdm.backend"
 requires = [
     "pdm-backend",
 ]
-build-backend = "pdm.backend"
 
 [tool.pdm]
 distribution = true
@@ -1,6 +1,6 @@
-from typing import Optional, List, Dict, AsyncGenerator, Union
+from typing import AsyncIterator, Optional, List, Dict, AsyncGenerator, Union
 import httpx
-from src.mira_sdk.models import (
+from .models import (
     Message,
     ModelProvider,
     AiRequest,
@@ -12,7 +12,12 @@ from src.mira_sdk.models import (
 
 
 class MiraClient:
-    def __init__(self, base_url: str, api_token: Optional[str] = None):
+
+    def __init__(
+        self,
+        base_url: str = "https://mira-network.alts.dev/",
+        api_token: Optional[str] = None,
+    ):
         """Initialize Mira client.
 
         Args:
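Since base_url now has a default, a 0.1.3 client can be constructed from the API token alone. A minimal sketch, assuming the import path mirrors the package name (the diff itself does not show the module layout):

```python
# Hypothetical import path; only the MiraClient class name is confirmed by this diff.
from mira_network.client import MiraClient

# base_url falls back to "https://mira-network.alts.dev/" per the new signature above.
client = MiraClient(api_token="sk-mira-...")
```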
@@ -44,9 +49,7 @@ class MiraClient:
         response.raise_for_status()
         return response.json()
 
-    async def generate(
-        self, request: AiRequest
-    ) -> Union[str, AsyncGenerator[str, None]]:
+    async def generate(self, request: AiRequest) -> Union[str, AsyncIterator[str]]:
         """Generate text using the specified model."""
         response = await self._client.post(
             f"{self.base_url}/v1/chat/completions",
@@ -57,12 +60,7 @@ class MiraClient:
         response.raise_for_status()
 
         if request.stream:
-
-            async def stream_response():
-                async for chunk in response.aiter_text():
-                    yield chunk
-
-            return stream_response()
+            return response.aiter_lines()
         else:
             return response.json()
 
@@ -127,7 +125,7 @@ class MiraClient:
     async def create_api_token(self, request: ApiTokenRequest) -> Dict:
         """Create a new API token."""
         response = await self._client.post(
-            f"{self.base_url}/tokens",
+            f"{self.base_url}/api-tokens",
             headers=self._get_headers(),
             json=request.model_dump(),
         )
@@ -154,7 +152,7 @@ class MiraClient:
     async def get_user_credits(self) -> Dict:
         """Get user credits information."""
         response = await self._client.get(
-            f"{self.base_url}/credits",
+            f"{self.base_url}/user-credits",
             headers=self._get_headers(),
         )
         response.raise_for_status()
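Taken together with the streaming change above (generate() now returns response.aiter_lines() when request.stream is true) and the renamed /api-tokens and /user-credits endpoints, calling code for 0.1.3 could look roughly like the sketch below. The import paths are assumed from the package name, and the exact framing of each streamed line is not specified by this diff:

```python
import asyncio

# Hypothetical import paths; the diff confirms the class and model names but not the module layout.
from mira_network.client import MiraClient
from mira_network.models import AiRequest, ApiTokenRequest, Message


async def main() -> None:
    client = MiraClient(api_token="sk-mira-...")

    # Streaming: in 0.1.3, generate() hands back the httpx line iterator directly.
    request = AiRequest(
        model="gpt-4o",
        messages=[Message(role="user", content="Hi!")],
        stream=True,
        model_provider=None,
    )
    stream = await client.generate(request)
    async for line in stream:  # each item is one raw line of the response body
        print(line)

    # Token and credit helpers keep their names; only the URLs they call changed.
    token = await client.create_api_token(ApiTokenRequest(description="example token"))
    credits = await client.get_user_credits()
    print(token, credits)


asyncio.run(main())
```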
@@ -13,16 +13,20 @@ from src.mira_sdk.models import (
 @pytest.fixture
 def client():
     return MiraClient(
-        base_url="https://mira-client-balancer.alts.dev",
-        api_token="sk-mira-8ac810228d32ff68fc93266fb9a0ba612724119ffab16dcc"
+        base_url="https://mira-network.alts.dev",
+        api_token="sk-mira-b9ecd5f43ef0363e691322df3295c2b98bebd1c1edb0b6d8",
     )
 
 
 @pytest.mark.asyncio
 async def test_list_models(client):
     result = await client.list_models()
-    assert isinstance(result, list)
-    # assert len(result) > 0
+    assert isinstance(result, dict)
+    assert result["object"] == "list"
+    assert isinstance(result["data"], list)
+    assert len(result["data"]) > 0
+    assert all(isinstance(model, dict) for model in result["data"])
+    assert all("id" in model and "object" in model for model in result["data"])
 
 
 @pytest.mark.asyncio
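The rewritten test_list_models above reads as if list_models() now returns an OpenAI-style envelope ({"object": "list", "data": [...]}) rather than a bare list. A small sketch of consuming that shape, under the same import-path assumption as the earlier sketches:

```python
import asyncio

from mira_network.client import MiraClient  # hypothetical module path


async def main() -> None:
    client = MiraClient(api_token="sk-mira-...")
    models = await client.list_models()
    # Shape asserted by the 0.1.3 tests: a dict with "object" == "list" and a "data" list.
    for model in models["data"]:
        print(model["id"], model["object"])


asyncio.run(main())
```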
@@ -31,9 +35,9 @@ async def test_generate(client):
         model="mira/llama3.1",
         messages=[Message(role="user", content="Hi Who are you!")],
         stream=False,
-        model_provider=None
+        model_provider=None,
     )
-
+
     result = await client.generate(request)
     assert isinstance(result, str)
     assert len(result) > 0
@@ -42,16 +46,21 @@ async def test_generate(client):
 @pytest.mark.asyncio
 async def test_generate_stream(client):
     request = AiRequest(
-        model="mira/llama3.1",
+        model="gpt-4o",
         messages=[Message(role="user", content="Hi!")],
         stream=True,
-        model_provider=None
+        model_provider=None,
     )
-
-    stream = await client.generate(request)
+    print("Making generate request with streaming...")
+    response = await client.generate(request=request)
     chunks = []
-    async for chunk in stream:
+    print("Starting to receive stream chunks...")
+    async for chunk in response:
+        print(f"Received chunk: {chunk}")
+        assert isinstance(chunk, str)
+        assert len(chunk) > 0
         chunks.append(chunk)
+    print(f"Received {len(chunks)} total chunks")
     assert len(chunks) > 0
 
 
@@ -64,14 +73,11 @@ async def test_list_flows(client):
 @pytest.mark.asyncio
 async def test_create_and_delete_flow(client):
     # Create flow
-    request = FlowRequest(
-        system_prompt="You are a helpful assistant",
-        name="test_flow"
-    )
-
+    request = FlowRequest(system_prompt="You are a helpful assistant", name="test_flow")
+
     flow = await client.create_flow(request)
     assert flow.get("name") == "test_flow"
-
+
     # Delete the created flow
     flow_id = flow.get("id")
     await client.delete_flow(flow_id)
@@ -98,7 +104,7 @@ async def test_error_handling(client):
             model="invalid_model",
             messages=[Message(role="user", content="Hi!")],
             stream=False,
-            model_provider=None
+            model_provider=None,
         )
         await client.generate(request)
 
@@ -1,48 +0,0 @@
-import pytest
-from src.mira_sdk.client import MiraClient
-from src.mira_sdk.models import Message, AiRequest, ModelProvider
-
-
-@pytest.mark.asyncio
-async def test_real_generate():
-    """This test makes a real API call to generate text."""
-    client = MiraClient(
-        base_url="https://mira-client-balancer.alts.dev",
-        api_token="sk-mira-8ac810228d32ff68fc93266fb9a0ba612724119ffab16dcc"
-    )
-
-    request = AiRequest(
-        model="mira/llama3.1",
-        messages=[Message(role="user", content="Hi Who are you!")],
-        stream=False,
-        model_provider=None
-    )
-
-    async with client:
-        result = await client.generate(request)
-        print("Real API Response:", result)
-        # assert len(result) > 0
-
-
-# @pytest.mark.asyncio
-# async def test_real_generate_stream():
-#     """This test makes a real API call with streaming enabled."""
-#     client = MiraClient(
-#         base_url="https://mira-client-balancer.alts.dev",
-#         api_token="sk-mira-8ac810228d32ff68fc93266fb9a0ba612724119ffab16dcc"
-#     )
-
-#     request = AiRequest(
-#         messages=[Message(role="user", content="Count from 1 to 5")],
-#         stream=True
-#     )
-
-#     async with client:
-#         stream = await client.generate(request)
-#         response = ""
-#         async for chunk in stream:
-#             print("Chunk:", chunk)
-#             response += chunk
-#         print("Final Response:", response)
-#         assert isinstance(response, str)
-#         assert len(response) > 0
@@ -1,109 +0,0 @@
-import pytest
-from src.mira_sdk.models import (
-    Message,
-    ModelProvider,
-    AiRequest,
-    FlowChatCompletion,
-    FlowRequest,
-    ApiTokenRequest,
-    AddCreditRequest,
-)
-
-
-def test_message_model():
-    message = Message(role="user", content="Hello!")
-    assert message.role == "user"
-    assert message.content == "Hello!"
-
-
-def test_model_provider():
-    provider = ModelProvider(base_url="https://mira-client-balancer.alts.dev", api_key="sk-mira-8ac810228d32ff68fc93266fb9a0ba612724119ffab16dcc")
-    assert provider.base_url == "https://mira-client-balancer.alts.dev"
-    assert provider.api_key == "sk-mira-8ac810228d32ff68fc93266fb9a0ba612724119ffab16dcc"
-
-
-def test_ai_request():
-    messages = [Message(role="user", content="Hello!")]
-    provider = ModelProvider(base_url="https://mira-client-balancer.alts.dev", api_key="sk-mira-8ac810228d32ff68fc93266fb9a0ba612724119ffab16dcc")
-
-    # Test with default values
-    request = AiRequest(messages=messages)
-    assert request.model == "mira/llama3.1"
-    assert request.stream is False
-    assert request.model_provider is None
-    assert request.messages == messages
-
-    # Test with custom values
-    request = AiRequest(
-        model="custom/model",
-        messages=messages,
-        model_provider=provider,
-        stream=True
-    )
-    assert request.model == "custom/model"
-    assert request.stream is True
-    assert request.model_provider == provider
-    assert request.messages == messages
-
-
-def test_flow_chat_completion():
-    # Test with no variables
-    completion = FlowChatCompletion()
-    assert completion.variables is None
-
-    # Test with variables
-    variables = {"key": "value"}
-    completion = FlowChatCompletion(variables=variables)
-    assert completion.variables == variables
-
-
-def test_flow_request():
-    request = FlowRequest(
-        system_prompt="You are a helpful assistant",
-        name="test-flow"
-    )
-    assert request.system_prompt == "You are a helpful assistant"
-    assert request.name == "test-flow"
-
-
-def test_api_token_request():
-    # Test with no description
-    request = ApiTokenRequest()
-    assert request.description is None
-
-    # Test with description
-    request = ApiTokenRequest(description="Test token")
-    assert request.description == "Test token"
-
-
-def test_add_credit_request():
-    # Test required fields
-    request = AddCreditRequest(user_id="user123", amount=100.0)
-    assert request.user_id == "user123"
-    assert request.amount == 100.0
-    assert request.description is None
-
-    # Test with description
-    request = AddCreditRequest(
-        user_id="user123",
-        amount=100.0,
-        description="Test credit"
-    )
-    assert request.user_id == "user123"
-    assert request.amount == 100.0
-    assert request.description == "Test credit"
-
-
-def test_invalid_message():
-    with pytest.raises(ValueError):
-        Message(role="invalid", content="")  # Invalid role
-
-
-def test_invalid_ai_request():
-    with pytest.raises(ValueError):
-        AiRequest(messages=[])  # Empty messages list
-
-
-def test_invalid_add_credit_request():
-    with pytest.raises(ValueError):
-        AddCreditRequest(user_id="", amount=-100)  # Negative amount