casual-mcp 0.2.2__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- casual_mcp/__init__.py +2 -1
- casual_mcp/cli.py +19 -3
- casual_mcp/main.py +48 -44
- casual_mcp/mcp_tool_chat.py +57 -33
- {casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/METADATA +73 -29
- {casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/RECORD +10 -10
- {casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/WHEEL +0 -0
- {casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/entry_points.txt +0 -0
- {casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/top_level.txt +0 -0
casual_mcp/__init__.py CHANGED

@@ -1,12 +1,13 @@
 from . import models
 from .mcp_tool_chat import McpToolChat
 from .providers.provider_factory import ProviderFactory
-from .utils import load_config, load_mcp_client
+from .utils import load_config, load_mcp_client, render_system_prompt

 __all__ = [
     "McpToolChat",
     "ProviderFactory",
     "load_config",
     "load_mcp_client",
+    "render_system_prompt",
     "models",
 ]
casual_mcp/cli.py CHANGED

@@ -1,10 +1,11 @@
+import asyncio
 import typer
 import uvicorn
 from rich.console import Console
 from rich.table import Table

 from casual_mcp.models.mcp_server_config import RemoteServerConfig
-from casual_mcp.utils import load_config
+from casual_mcp.utils import load_config, load_mcp_client

 app = typer.Typer()
 console = Console()

@@ -31,7 +32,7 @@ def servers():
     table = Table("Name", "Type", "Command / Url", "Env")

     for name, server in config.servers.items():
-        type = '
+        type = 'stdio'
         if isinstance(server, RemoteServerConfig):
             type = 'remote'

@@ -39,7 +40,7 @@ def servers():
         if isinstance(server, RemoteServerConfig):
             path = server.url
         else:
-            path = f"{server.command} {
+            path = f"{server.command} {' '.join(server.args)}"
         env = ''

         table.add_row(name, type, path, env)

@@ -63,6 +64,21 @@ def models():

     console.print(table)

+@app.command()
+def tools():
+    config = load_config('casual_mcp_config.json')
+    mcp_client = load_mcp_client(config)
+    table = Table("Name", "Description")
+    # async with mcp_client:
+    tools = asyncio.run(get_tools(mcp_client))
+    for tool in tools:
+        table.add_row(tool.name, tool.description)
+    console.print(table)
+
+
+async def get_tools(client):
+    async with client:
+        return await client.list_tools()

 if __name__ == "__main__":
     app()
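The new `tools` command is the notable addition here: Typer command bodies are synchronous, so the async FastMCP client has to be driven through `asyncio.run`. Below is a minimal standalone sketch of the same pattern, assuming a `casual_mcp_config.json` in the working directory; the script and function names are illustrative, not part of the package.

```python
# Sketch of the sync-CLI-to-async-client pattern used by the new `tools` command.
import asyncio

from casual_mcp import load_config, load_mcp_client


async def list_tool_names(client) -> list[str]:
    # The client is an async context manager: connections to the
    # configured MCP servers open on enter and close on exit.
    async with client:
        return [tool.name for tool in await client.list_tools()]


def main() -> None:
    config = load_config("casual_mcp_config.json")
    client = load_mcp_client(config)
    # asyncio.run bridges the synchronous command body and the async client.
    for name in asyncio.run(list_tool_names(client)):
        print(name)


if __name__ == "__main__":
    main()
```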
casual_mcp/main.py CHANGED

@@ -32,6 +32,7 @@ You must not speculate or guess about dates — if a date is given to you by a t
 Always present information as current and factual.
 """

+
 class GenerateRequest(BaseModel):
     session_id: str | None = Field(
         default=None, title="Session to use"

@@ -42,11 +43,20 @@ class GenerateRequest(BaseModel):
     system_prompt: str | None = Field(
         default=None, title="System Prompt to use"
     )
-
+    prompt: str = Field(
         title="User Prompt"
     )
-
-
+
+
+class ChatRequest(BaseModel):
+    model: str = Field(
+        title="Model to user"
+    )
+    system_prompt: str | None = Field(
+        default=None, title="System Prompt to use"
+    )
+    messages: list[ChatMessage] = Field(
+        title="Previous messages to supply to the LLM"
     )

 sys.path.append(str(Path(__file__).parent.resolve()))

@@ -55,44 +65,11 @@ sys.path.append(str(Path(__file__).parent.resolve()))
 configure_logging(os.getenv("LOG_LEVEL", 'INFO'))
 logger = get_logger("main")

-async def perform_chat(
-    model,
-    user,
-    system: str | None = None,
-    messages: list[ChatMessage] = None,
-    session_id: str | None = None
-) -> list[ChatMessage]:
-    # Get Provider from Model Config
-    model_config = config.models[model]
-    provider = await provider_factory.get_provider(model, model_config)
-
-    if not system:
-        if (model_config.template):
-            async with mcp_client:
-                system = render_system_prompt(
-                    f"{model_config.template}.j2",
-                    await mcp_client.list_tools()
-                )
-        else:
-            system = default_system_prompt
-
-    chat = McpToolChat(mcp_client, provider, system)
-    return await chat.chat(
-        prompt=user,
-        messages=messages,
-        session_id=session_id
-    )
-

 @app.post("/chat")
-async def chat(req: GenerateRequest):
-    messages = await perform_chat(
-        model=req.model,
-        system=req.system_prompt,
-        user=req.user_prompt,
-        messages=req.messages,
-        session_id=req.session_id
-    )
+async def chat(req: ChatRequest):
+    chat = await get_chat(req.model, req.system_prompt)
+    messages = await chat.chat(req.messages)

     return {
         "messages": messages,

@@ -100,16 +77,43 @@ async def chat(req: GenerateRequest):
     }


-# This endpoint will either go away or be used for something else, don't use it
 @app.post("/generate")
-async def
-
+async def generate(req: GenerateRequest):
+    chat = await get_chat(req.model, req.system_prompt)
+    messages = await chat.generate(
+        req.prompt,
+        req.session_id
+    )
+
+    return {
+        "messages": messages,
+        "response": messages[len(messages) - 1].content
+    }


-@app.get("/
-async def
+@app.get("/generate/session/{session_id}")
+async def get_generate_session(session_id):
     session = McpToolChat.get_session(session_id)
     if not session:
         raise HTTPException(status_code=404, detail="Session not found")

     return session
+
+
+async def get_chat(model: str, system: str | None = None) -> McpToolChat:
+    # Get Provider from Model Config
+    model_config = config.models[model]
+    provider = await provider_factory.get_provider(model, model_config)
+
+    # Get the system prompt
+    if not system:
+        if (model_config.template):
+            async with mcp_client:
+                system = render_system_prompt(
+                    f"{model_config.template}.j2",
+                    await mcp_client.list_tools()
+                )
+        else:
+            system = default_system_prompt
+
+    return McpToolChat(mcp_client, provider, system)
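For orientation, this is how a client might exercise the reworked endpoints. It is an illustrative sketch, not package code: it assumes the API is running locally via `casual-mcp serve --host 0.0.0.0 --port 8000`, and it uses `httpx`, which is not a casual-mcp dependency.

```python
import httpx

BASE = "http://localhost:8000"  # assumes a locally running `casual-mcp serve`

# POST /chat now takes a ChatRequest: a model name plus a full message list.
r = httpx.post(f"{BASE}/chat", json={
    "model": "gpt-4o-mini",  # any model key from your config
    "messages": [{"role": "user", "content": "What time is it in London?"}],
}, timeout=60)
print(r.json()["messages"])

# POST /generate takes a plain prompt; session_id opts into the in-memory
# history that McpToolChat.generate maintains.
r = httpx.post(f"{BASE}/generate", json={
    "model": "gpt-4o-mini",
    "prompt": "And what about Tokyo?",
    "session_id": "my-session",
}, timeout=60)
print(r.json()["response"])

# GET /generate/session/{session_id} returns the stored history, or 404.
r = httpx.get(f"{BASE}/generate/session/my-session")
print(r.json() if r.status_code == 200 else "Session not found")
```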
casual_mcp/mcp_tool_chat.py CHANGED

@@ -18,8 +18,26 @@ logger = get_logger("mcp_tool_chat")
 sessions: dict[str, list[ChatMessage]] = {}


+def get_session_messages(session_id: str | None):
+    global sessions
+
+    if not sessions.get(session_id):
+        logger.info(f"Starting new session {session_id}")
+        sessions[session_id] = []
+    else:
+        logger.info(
+            f"Retrieving session {session_id} of length {len(sessions[session_id])}"
+        )
+    return sessions[session_id].copy()
+
+
+def add_messages_to_session(session_id: str, messages: list[ChatMessage]):
+    global sessions
+    sessions[session_id].extend(messages.copy())
+
+
 class McpToolChat:
-    def __init__(self, mcp_client: Client, provider: LLMProvider, system: str):
+    def __init__(self, mcp_client: Client, provider: LLMProvider, system: str = None):
         self.provider = provider
         self.mcp_client = mcp_client
         self.system = system

@@ -29,47 +47,57 @@ class McpToolChat:
         global sessions
         return sessions.get(session_id)

-    async def
+    async def generate(
         self,
-        prompt: str
-        messages: list[ChatMessage] = None,
+        prompt: str,
         session_id: str | None = None
     ) -> list[ChatMessage]:
-
+        # Fetch the session if we have a session ID
+        if session_id:
+            messages = get_session_messages(session_id)
+        else:
+            messages: list[ChatMessage] = []

-        #
+        # Add the prompt as a user message
+        user_message = UserMessage(content=prompt)
+        messages.append(user_message)

-        #
+        # Add the user message to the session
         if session_id:
-
-
-
-
-
-
-
+            add_messages_to_session(session_id, [user_message])
+
+        # Perform Chat
+        response = await self.chat(messages=messages)
+
+        # Add responses to session
+        if session_id:
+            add_messages_to_session(session_id, response)
+
+        return response
+
+
+    async def chat(
+        self,
+        messages: list[ChatMessage]
+    ) -> list[ChatMessage]:
+        # Add a system message if required
+        has_system_message = any(message.role == 'system' for message in messages)
+        if self.system and not has_system_message:
+            # Insert the system message at the start of the messages
+            logger.debug(f"Adding System Message")
+            messages.insert(0, SystemMessage(content=self.system))

         logger.info("Start Chat")
         async with self.mcp_client:
             tools = await self.mcp_client.list_tools()

-
-            message_history = []
-            messages = [SystemMessage(content=self.system)]
-        else:
-            message_history = messages.copy()
-
-        if prompt:
-            messages.append(UserMessage(content=prompt))
-
-        response: str | None = None
+            response_messages: list[ChatMessage] = []
            while True:
                logger.info("Calling the LLM")
                ai_message = await self.provider.generate(messages, tools)
-               response = ai_message.content

                # Add the assistant's message
+               response_messages.append(ai_message)
                messages.append(ai_message)

                if not ai_message.tool_calls:

@@ -86,18 +114,14 @@ class McpToolChat:
                    return messages
                if result:
                    messages.append(result)
+                   response_messages.append(result)
                result_count = result_count + 1

                logger.info(f"Added {result_count} tool results")

-       logger.debug(f"
-           {response} """)
-
-       new_messages = [item for item in messages if item not in message_history]
-       if session_id:
-           sessions[session_id].extend(new_messages)
+       logger.debug(f"Final Response: {response_messages[-1].content}")

-       return
+       return response_messages


    async def execute(self, tool_call: AssistantToolCall):
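The session store added above is plain module-level state, and both helpers copy the message lists they touch, so callers cannot mutate a stored session by accident. A small behavioural sketch follows; the session id is arbitrary, while the helper and model names come from the diff.

```python
from casual_mcp.mcp_tool_chat import add_messages_to_session, get_session_messages
from casual_mcp.models import UserMessage

# An unknown session id creates an empty session and returns a copy of it.
history = get_session_messages("demo")
assert history == []

# Appending to the returned copy does not touch the stored session.
history.append(UserMessage(content="local only"))
assert get_session_messages("demo") == []

# Writes go through the helper, which copies its input; it assumes the
# session already exists (get_session_messages creates it).
add_messages_to_session("demo", [UserMessage(content="hello")])
assert len(get_session_messages("demo")) == 1
```

Note that sessions live in process memory only, so they are lost on restart; the README change below documents the same caveat for the `/generate` endpoint.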
{casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: casual-mcp
-Version: 0.2.2
+Version: 0.3.1
 Summary: Multi-server MCP client for LLM tool orchestration
 Author: Alex Stansfield
 License: MIT

@@ -12,7 +12,7 @@ Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: dateparser>=1.2.1
 Requires-Dist: fastapi>=0.115.12
-Requires-Dist: fastmcp>=2.5.
+Requires-Dist: fastmcp>=2.5.2
 Requires-Dist: jinja2>=3.1.6
 Requires-Dist: ollama>=0.4.8
 Requires-Dist: openai>=1.78.0

@@ -36,7 +36,7 @@ Dynamic: license-file
 **Casual MCP** is a Python framework for building, evaluating, and serving LLMs with tool-calling capabilities using [Model Context Protocol (MCP)](https://modelcontextprotocol.io).
 It includes:

-- ✅ A multi-server MCP client
+- ✅ A multi-server MCP client using [FastMCP](https://github.com/jlowin/fastmcp)
 - ✅ Provider support for OpenAI (and OpenAI compatible APIs)
 - ✅ A recursive tool-calling chat loop
 - ✅ System prompt templating with Jinja2

@@ -151,12 +151,12 @@ Each model has:

 Servers can either be local (over stdio) or remote.

-Local Config:
+#### Local Config:
 - `command`: the command to run the server, e.g `python`, `npm`
 - `args`: the arguments to pass to the server as a list, e.g `["time/server.py"]`
 - Optional: `env`: for subprocess environments, `system_prompt` to override server prompt

-Remote Config:
+#### Remote Config:
 - `url`: the url of the mcp server
 - Optional: `transport`: the type of transport, `http`, `sse`, `streamable-http`. Defaults to `http`

@@ -184,12 +184,12 @@ Loads the config and outputs the list of MCP servers you have configured.
 ```
 $ casual-mcp servers
 ┏━━━━━━━━━┳━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━┓
-┃ Name    ┃ Type   ┃
+┃ Name    ┃ Type   ┃ Command / Url                 ┃ Env ┃
 ┡━━━━━━━━━╇━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━┩
-│ math    │
-│ time    │
-│ weather │
-│ words   │
+│ math    │ local  │ mcp-servers/math/server.py    │     │
+│ time    │ local  │ mcp-servers/time-v2/server.py │     │
+│ weather │ local  │ mcp-servers/weather/server.py │     │
+│ words   │ remote │ https://localhost:3000/mcp    │     │
 └─────────┴────────┴───────────────────────────────┴─────┘
 ```

@@ -223,9 +223,24 @@ Orchestrates LLM interaction with tools using a recursive loop.

 ```python
 from casual_mcp import McpToolChat
+from casual_mcp.models import SystemMessage, UserMessage

 chat = McpToolChat(mcp_client, provider, system_prompt)
-
+
+# Generate method to take user prompt
+response = await chat.generate("What time is it in London?")
+
+# Generate method with session
+response = await chat.generate("What time is it in London?", "my-session-id")
+
+# Chat method that takes list of chat messages
+# note: system prompt ignored if sent in messages so no need to set
+chat = McpToolChat(mcp_client, provider)
+messages = [
+    SystemMessage(content="You are a cool dude who likes to help the user"),
+    UserMessage(content="What time is it in London?")
+]
+response = await chat.chat(messages)
 ```

#### `ProviderFactory`

@@ -234,8 +249,8 @@ Instantiates LLM providers based on the selected model config.
 ```python
 from casual_mcp import ProviderFactory

-provider_factory = ProviderFactory()
-provider = provider_factory.get_provider("lm-qwen-3", model_config)
+provider_factory = ProviderFactory(mcp_client)
+provider = await provider_factory.get_provider("lm-qwen-3", model_config)
 ```

#### `load_config`

@@ -308,18 +323,16 @@ Respond naturally and confidently, as if you already know all the facts."""),
 # Load the Config from the File
 config = load_config("casual_mcp_config.json")

-# Setup the
+# Setup the MCP Client
 mcp_client = load_mcp_client(config)
-await mcp_client.load_config(config.servers)

 # Get the Provider for the Model
-provider_factory
-
-provider = provider_factory.get_provider(model, config.models[model])
+provider_factory = ProviderFactory(mcp_client)
+provider = await provider_factory.get_provider(model, config.models[model])

 # Perform the Chat and Tool calling
-chat = McpToolChat(mcp_client, provider
-response_messages = await chat.chat(messages=messages)
+chat = McpToolChat(mcp_client, provider)
+response_messages = await chat.chat(messages)
 ```

## 🚀 API Usage

@@ -330,25 +343,56 @@ response_messages = await chat.chat(messages=messages)
 casual-mcp serve --host 0.0.0.0 --port 8000
 ```

-
+### Chat
+
+#### Endpoint: `POST /chat`

-
+#### Request Body:
 - `model`: the LLM model to use
-- `
-
-
+- `messages`: list of chat messages (system, assistant, user, etc) that you can pass to the api, allowing you to keep your own chat session in the client calling the api
+
+#### Example:
+```
+{
+  "model": "gpt-4.1-nano",
+  "messages": [
+    {
+      "role": "user",
+      "content": "can you explain what the word consistent means?"
+    }
+  ]
+}
+```
+
+### Generate
+
+The generate endpoint allows you to send a user prompt as a string.
+
+It also support sessions that keep a record of all messages in the session and feeds them back into the LLM for context. Sessions are stored in memory so are cleared when the server is restarted
+
+#### Endpoint: `POST /generate`

-
+#### Request Body:
+- `model`: the LLM model to use
+- `prompt`: the user prompt
+- `session_id`: an optional ID that stores all the messages from the session and provides them back to the LLM for context

-Example:
+#### Example:
 ```
 {
-  "session_id": "my-
+  "session_id": "my-session",
   "model": "gpt-4o-mini",
-  "
+  "prompt": "can you explain what the word consistent means?"
 }
 ```

+### Get Session
+
+Get all the messages from a session
+
+#### Endpoint: `GET /generate/session/{session_id}`
+
+
 ## License

 This software is released under the [MIT License](LICENSE)
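Pulling the documented keys together: the Local Config and Remote Config sections above describe per-server entries, and the CLI reads them from `casual_mcp_config.json` via `load_config`. A hypothetical config assembled only from keys named in the README; the top-level `servers` and `models` keys mirror `config.servers` and `config.models` in the code, but the server names, paths, and model entry are made up and the exact schema may differ.

```
{
  "servers": {
    "time": {
      "command": "python",
      "args": ["time/server.py"]
    },
    "words": {
      "url": "https://localhost:3000/mcp",
      "transport": "http"
    }
  },
  "models": {
    "gpt-4o-mini": { ... }
  }
}
```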
{casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/RECORD CHANGED

@@ -1,8 +1,8 @@
-casual_mcp/__init__.py,sha256=
-casual_mcp/cli.py,sha256=
+casual_mcp/__init__.py,sha256=pInJdGkFqSH8RwbQq-9mc96GWIQjLrtExeXnTYGtNHw,327
+casual_mcp/cli.py,sha256=6P_d77qPbY43AW1Ix6FfbHyy6Qc6sFeFqGvXxJCW2_M,2090
 casual_mcp/logging.py,sha256=o3rvT8GLJKGlu0ieeC9TY_SRSEUY-VO8jRQZjx-sSvY,863
-casual_mcp/main.py,sha256=
-casual_mcp/mcp_tool_chat.py,sha256=
+casual_mcp/main.py,sha256=AzqQ6SUJsyKyMaqd3HIxLDozoftMd27KQAQNsfM9e2I,3385
+casual_mcp/mcp_tool_chat.py,sha256=6MMRAEBDMRyw7-n1VGvIGdrh1ed2szZx8sC0MlR1g7I,4948
 casual_mcp/utils.py,sha256=Nea0aRbPyjqm7mIjffJtGP2NssE7BsdPleO-yiuAWPE,2964
 casual_mcp/models/__init__.py,sha256=qlKylcCyRJOSIVteU2feiLOigZoY-m-soVGp4NALM_c,538
 casual_mcp/models/config.py,sha256=ITu3WAPMad7i2CS3ljkHapjT8lLm7k6HFUF6N73U1oo,294

@@ -16,9 +16,9 @@ casual_mcp/providers/abstract_provider.py,sha256=TTEP3FeTxOtbD0By_k17UxS8cqxYCOG
 casual_mcp/providers/ollama_provider.py,sha256=IUSJFBtEYmza_-_7bk5YZKqed3N67l8A2lZEmHPiyHo,2581
 casual_mcp/providers/openai_provider.py,sha256=uSjoqM-X9bVp_RVM8Ip6lqjZ7q3DdN0-p7o2HKrWxMI,6138
 casual_mcp/providers/provider_factory.py,sha256=CyFHJ0mU2tjHqj04btF0SL0B3pf12LAJ52Msqsbnv_g,1766
-casual_mcp-0.
-casual_mcp-0.
-casual_mcp-0.
-casual_mcp-0.
-casual_mcp-0.
-casual_mcp-0.
+casual_mcp-0.3.1.dist-info/licenses/LICENSE,sha256=U3Zu2tkrh5vXdy7gIdE8WJGM9D4gGp3hohAAWdre-yo,1058
+casual_mcp-0.3.1.dist-info/METADATA,sha256=uqtEAq3-YfRInCxU79bwfBhrsGxFKbvUWAJ7D0XTA0g,12902
+casual_mcp-0.3.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+casual_mcp-0.3.1.dist-info/entry_points.txt,sha256=X48Np2cwl-SlRQdV26y2vPZ-2tJaODgZeVtfpHho-zg,50
+casual_mcp-0.3.1.dist-info/top_level.txt,sha256=K4CiI0Jf8PHICjuQVm32HuNMB44kp8Lb02bbbdiH5bo,11
+casual_mcp-0.3.1.dist-info/RECORD,,

{casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/WHEEL
File without changes

{casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/entry_points.txt
File without changes

{casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/licenses/LICENSE
File without changes

{casual_mcp-0.2.2.dist-info → casual_mcp-0.3.1.dist-info}/top_level.txt
File without changes