clarifai 11.4.3rc1__py3-none-any.whl → 11.4.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (152)
  1. clarifai/__init__.py +1 -1
  2. clarifai/cli/base.py +1 -2
  3. clarifai/cli/model.py +0 -2
  4. clarifai/client/app.py +2 -1
  5. clarifai/client/auth/helper.py +6 -4
  6. clarifai/client/compute_cluster.py +2 -1
  7. clarifai/client/dataset.py +2 -1
  8. clarifai/client/deployment.py +2 -1
  9. clarifai/client/input.py +2 -1
  10. clarifai/client/model.py +2 -1
  11. clarifai/client/model_client.py +2 -2
  12. clarifai/client/module.py +2 -1
  13. clarifai/client/nodepool.py +2 -1
  14. clarifai/client/runner.py +2 -1
  15. clarifai/client/search.py +2 -1
  16. clarifai/client/user.py +2 -1
  17. clarifai/client/workflow.py +2 -1
  18. clarifai/runners/models/mcp_class.py +6 -35
  19. clarifai/runners/models/model_builder.py +46 -42
  20. clarifai/runners/utils/code_script.py +40 -11
  21. clarifai/runners/utils/data_types/data_types.py +48 -0
  22. clarifai/runners/utils/data_utils.py +67 -43
  23. clarifai/runners/utils/method_signatures.py +0 -20
  24. clarifai/runners/utils/openai_convertor.py +103 -0
  25. clarifai/urls/helper.py +80 -12
  26. clarifai/utils/config.py +1 -1
  27. clarifai/utils/constants.py +4 -0
  28. {clarifai-11.4.3rc1.dist-info → clarifai-11.4.4.dist-info}/METADATA +13 -2
  29. clarifai-11.4.4.dist-info/RECORD +112 -0
  30. {clarifai-11.4.3rc1.dist-info → clarifai-11.4.4.dist-info}/WHEEL +1 -1
  31. clarifai/__pycache__/__init__.cpython-312.pyc +0 -0
  32. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  33. clarifai/__pycache__/errors.cpython-312.pyc +0 -0
  34. clarifai/__pycache__/errors.cpython-39.pyc +0 -0
  35. clarifai/__pycache__/versions.cpython-312.pyc +0 -0
  36. clarifai/__pycache__/versions.cpython-39.pyc +0 -0
  37. clarifai/cli/__pycache__/__init__.cpython-312.pyc +0 -0
  38. clarifai/cli/__pycache__/base.cpython-312.pyc +0 -0
  39. clarifai/cli/__pycache__/compute_cluster.cpython-312.pyc +0 -0
  40. clarifai/cli/__pycache__/deployment.cpython-312.pyc +0 -0
  41. clarifai/cli/__pycache__/model.cpython-312.pyc +0 -0
  42. clarifai/cli/__pycache__/nodepool.cpython-312.pyc +0 -0
  43. clarifai/client/__pycache__/__init__.cpython-312.pyc +0 -0
  44. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  45. clarifai/client/__pycache__/app.cpython-312.pyc +0 -0
  46. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  47. clarifai/client/__pycache__/base.cpython-312.pyc +0 -0
  48. clarifai/client/__pycache__/base.cpython-39.pyc +0 -0
  49. clarifai/client/__pycache__/compute_cluster.cpython-312.pyc +0 -0
  50. clarifai/client/__pycache__/dataset.cpython-312.pyc +0 -0
  51. clarifai/client/__pycache__/deployment.cpython-312.pyc +0 -0
  52. clarifai/client/__pycache__/input.cpython-312.pyc +0 -0
  53. clarifai/client/__pycache__/lister.cpython-312.pyc +0 -0
  54. clarifai/client/__pycache__/model.cpython-312.pyc +0 -0
  55. clarifai/client/__pycache__/model_client.cpython-312.pyc +0 -0
  56. clarifai/client/__pycache__/module.cpython-312.pyc +0 -0
  57. clarifai/client/__pycache__/nodepool.cpython-312.pyc +0 -0
  58. clarifai/client/__pycache__/runner.cpython-312.pyc +0 -0
  59. clarifai/client/__pycache__/search.cpython-312.pyc +0 -0
  60. clarifai/client/__pycache__/user.cpython-312.pyc +0 -0
  61. clarifai/client/__pycache__/workflow.cpython-312.pyc +0 -0
  62. clarifai/client/auth/__pycache__/__init__.cpython-312.pyc +0 -0
  63. clarifai/client/auth/__pycache__/__init__.cpython-39.pyc +0 -0
  64. clarifai/client/auth/__pycache__/helper.cpython-312.pyc +0 -0
  65. clarifai/client/auth/__pycache__/helper.cpython-39.pyc +0 -0
  66. clarifai/client/auth/__pycache__/register.cpython-312.pyc +0 -0
  67. clarifai/client/auth/__pycache__/register.cpython-39.pyc +0 -0
  68. clarifai/client/auth/__pycache__/stub.cpython-312.pyc +0 -0
  69. clarifai/client/auth/__pycache__/stub.cpython-39.pyc +0 -0
  70. clarifai/constants/__pycache__/base.cpython-312.pyc +0 -0
  71. clarifai/constants/__pycache__/base.cpython-39.pyc +0 -0
  72. clarifai/constants/__pycache__/dataset.cpython-312.pyc +0 -0
  73. clarifai/constants/__pycache__/input.cpython-312.pyc +0 -0
  74. clarifai/constants/__pycache__/model.cpython-312.pyc +0 -0
  75. clarifai/constants/__pycache__/rag.cpython-312.pyc +0 -0
  76. clarifai/constants/__pycache__/search.cpython-312.pyc +0 -0
  77. clarifai/constants/__pycache__/workflow.cpython-312.pyc +0 -0
  78. clarifai/datasets/__pycache__/__init__.cpython-312.pyc +0 -0
  79. clarifai/datasets/export/__pycache__/__init__.cpython-312.pyc +0 -0
  80. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-312.pyc +0 -0
  81. clarifai/datasets/upload/__pycache__/__init__.cpython-312.pyc +0 -0
  82. clarifai/datasets/upload/__pycache__/base.cpython-312.pyc +0 -0
  83. clarifai/datasets/upload/__pycache__/features.cpython-312.pyc +0 -0
  84. clarifai/datasets/upload/__pycache__/image.cpython-312.pyc +0 -0
  85. clarifai/datasets/upload/__pycache__/multimodal.cpython-312.pyc +0 -0
  86. clarifai/datasets/upload/__pycache__/text.cpython-312.pyc +0 -0
  87. clarifai/datasets/upload/__pycache__/utils.cpython-312.pyc +0 -0
  88. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-312.pyc +0 -0
  89. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-312.pyc +0 -0
  90. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-312.pyc +0 -0
  91. clarifai/modules/__pycache__/__init__.cpython-312.pyc +0 -0
  92. clarifai/modules/__pycache__/css.cpython-312.pyc +0 -0
  93. clarifai/rag/__pycache__/__init__.cpython-312.pyc +0 -0
  94. clarifai/rag/__pycache__/rag.cpython-312.pyc +0 -0
  95. clarifai/rag/__pycache__/utils.cpython-312.pyc +0 -0
  96. clarifai/runners/__pycache__/__init__.cpython-312.pyc +0 -0
  97. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  98. clarifai/runners/__pycache__/server.cpython-312.pyc +0 -0
  99. clarifai/runners/models/__pycache__/__init__.cpython-312.pyc +0 -0
  100. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  101. clarifai/runners/models/__pycache__/base_typed_model.cpython-312.pyc +0 -0
  102. clarifai/runners/models/__pycache__/mcp_class.cpython-312.pyc +0 -0
  103. clarifai/runners/models/__pycache__/model_builder.cpython-312.pyc +0 -0
  104. clarifai/runners/models/__pycache__/model_builder.cpython-39.pyc +0 -0
  105. clarifai/runners/models/__pycache__/model_class.cpython-312.pyc +0 -0
  106. clarifai/runners/models/__pycache__/model_run_locally.cpython-312.pyc +0 -0
  107. clarifai/runners/models/__pycache__/model_runner.cpython-312.pyc +0 -0
  108. clarifai/runners/models/__pycache__/model_servicer.cpython-312.pyc +0 -0
  109. clarifai/runners/models/__pycache__/test_model_builder.cpython-312-pytest-8.3.5.pyc +0 -0
  110. clarifai/runners/models/base_typed_model.py +0 -238
  111. clarifai/runners/models/example_mcp_server.py +0 -44
  112. clarifai/runners/models/mcp_class.py~ +0 -149
  113. clarifai/runners/models/test_model_builder.py +0 -89
  114. clarifai/runners/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  115. clarifai/runners/utils/__pycache__/code_script.cpython-312.pyc +0 -0
  116. clarifai/runners/utils/__pycache__/const.cpython-312.pyc +0 -0
  117. clarifai/runners/utils/__pycache__/data_handler.cpython-312.pyc +0 -0
  118. clarifai/runners/utils/__pycache__/data_types.cpython-312.pyc +0 -0
  119. clarifai/runners/utils/__pycache__/data_utils.cpython-312.pyc +0 -0
  120. clarifai/runners/utils/__pycache__/loader.cpython-312.pyc +0 -0
  121. clarifai/runners/utils/__pycache__/method_signatures.cpython-312.pyc +0 -0
  122. clarifai/runners/utils/__pycache__/serializers.cpython-312.pyc +0 -0
  123. clarifai/runners/utils/__pycache__/url_fetcher.cpython-312.pyc +0 -0
  124. clarifai/runners/utils/data_handler.py +0 -231
  125. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-312.pyc +0 -0
  126. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-312.pyc +0 -0
  127. clarifai/schema/__pycache__/search.cpython-312.pyc +0 -0
  128. clarifai/urls/__pycache__/helper.cpython-312.pyc +0 -0
  129. clarifai/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  130. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  131. clarifai/utils/__pycache__/cli.cpython-312.pyc +0 -0
  132. clarifai/utils/__pycache__/config.cpython-312.pyc +0 -0
  133. clarifai/utils/__pycache__/constants.cpython-312.pyc +0 -0
  134. clarifai/utils/__pycache__/constants.cpython-39.pyc +0 -0
  135. clarifai/utils/__pycache__/logging.cpython-312.pyc +0 -0
  136. clarifai/utils/__pycache__/logging.cpython-39.pyc +0 -0
  137. clarifai/utils/__pycache__/misc.cpython-312.pyc +0 -0
  138. clarifai/utils/__pycache__/misc.cpython-39.pyc +0 -0
  139. clarifai/utils/__pycache__/model_train.cpython-312.pyc +0 -0
  140. clarifai/utils/__pycache__/protobuf.cpython-312.pyc +0 -0
  141. clarifai/utils/config.py~ +0 -145
  142. clarifai/utils/evaluation/__pycache__/__init__.cpython-312.pyc +0 -0
  143. clarifai/utils/evaluation/__pycache__/helpers.cpython-312.pyc +0 -0
  144. clarifai/utils/evaluation/__pycache__/main.cpython-312.pyc +0 -0
  145. clarifai/workflows/__pycache__/__init__.cpython-312.pyc +0 -0
  146. clarifai/workflows/__pycache__/export.cpython-312.pyc +0 -0
  147. clarifai/workflows/__pycache__/utils.cpython-312.pyc +0 -0
  148. clarifai/workflows/__pycache__/validate.cpython-312.pyc +0 -0
  149. clarifai-11.4.3rc1.dist-info/RECORD +0 -230
  150. {clarifai-11.4.3rc1.dist-info → clarifai-11.4.4.dist-info}/entry_points.txt +0 -0
  151. {clarifai-11.4.3rc1.dist-info → clarifai-11.4.4.dist-info/licenses}/LICENSE +0 -0
  152. {clarifai-11.4.3rc1.dist-info → clarifai-11.4.4.dist-info}/top_level.txt +0 -0
{clarifai-11.4.3rc1.dist-info → clarifai-11.4.4.dist-info}/WHEEL +1 -1
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.1.0)
+ Generator: setuptools (80.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
clarifai/runners/models/base_typed_model.py +0 -238
@@ -1,238 +0,0 @@
- import itertools
- from typing import Any, Dict, Iterator, List, Tuple
-
- import numpy as np
- from clarifai_grpc.grpc.api import resources_pb2, service_pb2
- from clarifai_grpc.grpc.api.service_pb2 import PostModelOutputsRequest
- from google.protobuf import json_format
-
- from ..utils.data_handler import InputDataHandler, OutputDataHandler
- from .model_class import ModelClass
-
-
- class AnyAnyModel(ModelClass):
-
-   def load_model(self):
-     """
-     Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-     in this method so they are loaded only once for faster inference.
-     """
-     raise NotImplementedError
-
-   def parse_input_request(
-       self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
-     list_input_dict = [
-         InputDataHandler.from_proto(input).to_python() for input in input_request.inputs
-     ]
-     inference_params = json_format.MessageToDict(
-         input_request.model.model_version.output_info.params)
-
-     return list_input_dict, inference_params
-
-   def convert_output_to_proto(self, outputs: list):
-     assert (isinstance(outputs, Iterator) or isinstance(outputs, list) or
-             isinstance(outputs, tuple)), "outputs must be an Iterator"
-     output_protos = []
-     for output in outputs:
-       if isinstance(output, OutputDataHandler):
-         output = output.proto
-       elif isinstance(output, resources_pb2.Output):
-         pass
-       else:
-         raise NotImplementedError
-       output_protos.append(output)
-
-     return service_pb2.MultiOutputResponse(outputs=output_protos)
-
-   def predict_wrapper(
-       self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
-     list_dict_input, inference_params = self.parse_input_request(request)
-     outputs = self.predict(list_dict_input, inference_parameters=inference_params)
-     return self.convert_output_to_proto(outputs)
-
-   def generate_wrapper(
-       self, request: PostModelOutputsRequest) -> Iterator[service_pb2.MultiOutputResponse]:
-     list_dict_input, inference_params = self.parse_input_request(request)
-     outputs = self.generate(list_dict_input, inference_parameters=inference_params)
-     for output in outputs:
-       yield self.convert_output_to_proto(output)
-
-   def _preprocess_stream(
-       self, request: Iterator[PostModelOutputsRequest]) -> Iterator[Tuple[List[Dict], List[Dict]]]:
-     """Return generator of processed data (from proto to python) and inference parameters like predict and generate"""
-     for i, req in enumerate(request):
-       input_data, _ = self.parse_input_request(req)
-       yield input_data
-
-   def stream_wrapper(self, request: Iterator[PostModelOutputsRequest]
-                     ) -> Iterator[service_pb2.MultiOutputResponse]:
-     first_request = next(request)
-     _, inference_params = self.parse_input_request(first_request)
-     request_iterator = itertools.chain([first_request], request)
-     outputs = self.stream(self._preprocess_stream(request_iterator), inference_params)
-     for output in outputs:
-       yield self.convert_output_to_proto(output)
-
-   def predict(self, input_data: List[Dict],
-               inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
-     """
-     Prediction method.
-
-     Args:
-     -----
-     - input_data: is list of dict where key is input type name.
-       * image: np.ndarray
-       * text: str
-       * audio: bytes
-
-     - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-     Returns:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
-
-   def generate(self, input_data: List[Dict],
-                inference_parameters: Dict[str, Any] = {}) -> Iterator[List[OutputDataHandler]]:
-     """
-     Generate method.
-
-     Args:
-     -----
-     - input_data: is list of dict where key is input type name.
-       * image: np.ndarray
-       * text: str
-       * audio: bytes
-
-     - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-     Yield:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
-
-   def stream(self, inputs: Iterator[List[Dict[str, Any]]],
-              inference_params: Dict[str, Any]) -> Iterator[List[OutputDataHandler]]:
-     """
-     Stream method.
-
-     Args:
-     -----
-     input_request: is an Iterator of Tuple which
-     - First element (List[Dict[str, Union[np.ndarray, str, bytes]]]) is list of dict input data type which keys and values are:
-       * image: np.ndarray
-       * text: str
-       * audio: bytes
-
-     - Second element (Dict[str, Union[bool, str, float, int]]): is a dict of inference_parameters
-
-     Yield:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
-
-
- class VisualInputModel(AnyAnyModel):
-
-   def parse_input_request(
-       self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
-     list_input_dict = [
-         InputDataHandler.from_proto(input).image(format="np") for input in input_request.inputs
-     ]
-     inference_params = json_format.MessageToDict(
-         input_request.model.model_version.output_info.params)
-
-     return list_input_dict, inference_params
-
-   def load_model(self):
-     """
-     Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-     in this method so they are loaded only once for faster inference.
-     """
-     raise NotImplementedError
-
-   def predict(self, input_data: List[np.ndarray],
-               inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
-     """
-     Prediction method.
-
-     Args:
-     -----
-     - input_data(List[np.ndarray]): is list of image as np.ndarray type
-     - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-     Returns:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
-
-
- class TextInputModel(AnyAnyModel):
-
-   def load_model(self):
-     """
-     Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-     in this method so they are loaded only once for faster inference.
-     """
-     raise NotImplementedError
-
-   def parse_input_request(
-       self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
-     list_input_text = [InputDataHandler.from_proto(input).text for input in input_request.inputs]
-     inference_params = json_format.MessageToDict(
-         input_request.model.model_version.output_info.params)
-
-     return list_input_text, inference_params
-
-   def predict(self, input_data: List[str],
-               inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
-     """
-     Prediction method.
-
-     Args:
-     -----
-     - input_data(List[str]): is list of text as str type
-     - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-     Returns:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
-
-   def generate(self, input_data: List[str],
-                inference_parameters: Dict[str, Any] = {}) -> Iterator[List[OutputDataHandler]]:
-     """
-     Prediction method.
-
-     Args:
-     -----
-     - input_data(List[str]): is list of text as str type
-     - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-     Yield:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
-
-   def stream(self, inputs: Iterator[List[str]],
-              inference_params: Dict[str, Any]) -> Iterator[List[OutputDataHandler]]:
-     """
-     Stream method.
-
-     Args:
-     -----
-     input_request: is an Iterator of Tuple which
-     - First element (List[str]) is list of input text:
-     - Second element (Dict[str, Union[bool, str, float, int]]): is a dict of inference_parameters
-
-     Yield:
-     --------
-       List of OutputDataHandler
-     """
-     raise NotImplementedError
clarifai/runners/models/example_mcp_server.py +0 -44
@@ -1,44 +0,0 @@
- """Example of how to create an MCP server using MCPClass."""
-
- from typing import Dict
- from fastmcp import Tool
-
- from clarifai.runners.models.mcp_class import MCPClass
-
-
- class ExampleMCPServer(MCPClass):
-     """Example MCP server that provides a simple calculator tool."""
-
-     def __init__(self):
-         super().__init__()
-
-         # Define and register a calculator tool
-         calculator_tool = Tool(
-             name="calculator",
-             description="A simple calculator that can add two numbers",
-             parameters={
-                 "type": "object",
-                 "properties": {
-                     "a": {"type": "number", "description": "First number"},
-                     "b": {"type": "number", "description": "Second number"}
-                 },
-                 "required": ["a", "b"]
-             }
-         )
-
-         @calculator_tool
-         async def add(params: Dict[str, float]) -> float:
-             """Add two numbers together."""
-             return params["a"] + params["b"]
-
-         # Register the tool with the MCP server
-         self.add_tool(calculator_tool)
-
-
- # Usage example:
- if __name__ == "__main__":
-     # Create and run the server
-     server = ExampleMCPServer()
-
-     # The server is now ready to handle MCP requests through the mcp_transport method
-     # For example, a client could send a request to list tools or call the calculator
clarifai/runners/models/mcp_class.py~ +0 -149
@@ -1,149 +0,0 @@
- """Base class for creating Model Context Protocol (MCP) servers."""
-
- import asyncio
- import json
- from typing import Any
-
- from fastmcp import Client, FastMCP  # use fastmcp v2 not the built in mcp
- from mcp import types
- from mcp.shared.exceptions import McpError
-
- from clarifai.runners.models.model_class import ModelClass
-
- # class MCPServerProvider(abc.ABC):
- # """
- # Base class for creating Model Context Protocol (MCP) servers.
-
- # This class provides a base implementation of the MCP server, including
- # methods for handling requests and responses, as well as error handling and
- # logging.
-
- # Attributes:
- # _server: The FastMCP server instance.
- # _tools: List of tools available in the server.
- # _resources: List of resources available in the server.
- # _prompts: List of prompts available in the server.
-
- # Methods:
- # get_server(): Returns the FastMCP server instance.
- # mcp_transport(msg): Handles incoming messages and sends them to the FastMCP server.
- # """
-
- # @abc.abstractmethod
- # def get_server(self) -> FastMCP:
- # """Required method for each subclass to implement to return the FastMCP server to use."""
- # if self._server is None:
- # raise ValueError("Server not initialized")
- # return self._server
-
-
- class MCPModelClass(ModelClass, MCPServerProvider):
-     """Base class for wrapping FastMCP servers as a model running in Clarfai. This handles
-     all the transport between the API and the MCP server here. Simply subclass this and implement
-     the get_server() method to return the FastMCP server instance. The server is then used to
-     handle all the requests and responses.
-     """
-
-     def load_model(self):
-         # in memory transport provided in fastmcp v2 so we can easily use the client functions.
-         self.client = Client(self.get_server())
-
-     def get_server(self) -> FastMCP:
-         """Required method for each subclass to implement to return the FastMCP server to use."""
-         if self._server is None:
-             raise ValueError("Server not initialized")
-         return self._server
-
-     @ModelClass.method
-     def mcp_transport(self, msg: str) -> str:
-         """The single model method to get the jsonrpc message and send it to the FastMCP server then
-         return it's response.
-
-         Arguments:
-           msg: The incoming message to be handled in serialized JSONRPC format from an MCP client.
-         Returns:
-           str: The response to the incoming message in serialized JSONRPC format
-         """
-
-         async def send_notification(client_message: types.ClientNotification) -> None:
-             async with self.client:
-                 # Strip the jsonrpc field since send_notification will also pass it in for some reason.
-                 client_message = types.ClientNotification.model_validate(
-                     client_message.model_dump(
-                         by_alias=True, mode="json", exclude_none=True, exclude={"jsonrpc"}
-                     )
-                 )
-                 try:
-                     return await self.client.session.send_notification(client_message)
-                 except McpError as e:
-                     return types.JSONRPCError(jsonrpc="2.0", error=e.error)
-
-         async def send_request(client_message: types.ClientRequest, id: str) -> Any:
-             async with self.client:
-                 # Strip the jsonrpc and id fields as send_request sets them again too.
-                 client_message = types.ClientRequest.model_validate(
-                     client_message.model_dump(
-                         by_alias=True, mode="json", exclude_none=True, exclude={"jsonrpc", "id"}
-                     )
-                 )
-
-                 result_type = None
-                 print("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA")
-                 print(types.PingRequest)
-                 if isinstance(client_message.root, types.PingRequest):
-                     result_type = types.EmptyResult
-                 elif isinstance(client_message.root, types.InitializeRequest):
-                     return await self.client.session.initialize()
-                 elif isinstance(client_message.root, types.SetLevelRequest):
-                     result_type = types.EmptyResult
-                 elif isinstance(client_message.root, types.ListResourcesRequest):
-                     result_type = types.ListResourcesResult
-                 elif isinstance(client_message.root, types.ListResourceTemplatesRequest):
-                     result_type = types.ListResourceTemplatesResult
-                 elif isinstance(client_message.root, types.ReadResourceRequest):
-                     result_type = types.ReadResourceResult
-                 elif isinstance(client_message.root, types.SubscribeRequest):
-                     result_type = types.EmptyResult
-                 elif isinstance(client_message.root, types.UnsubscribeRequest):
-                     result_type = types.EmptyResult
-                 elif isinstance(client_message.root, types.ListPromptsRequest):
-                     result_type = types.ListPromptsResult
-                 elif isinstance(client_message.root, types.GetPromptRequest):
-                     result_type = types.GetPromptResult
-                 elif isinstance(client_message.root, types.CompleteRequest):
-                     result_type = types.CompleteResult
-                 elif isinstance(client_message.root, types.ListToolsRequest):
-                     result_type = types.ListToolsResult
-                 elif isinstance(client_message.root, types.CallToolRequest):
-                     result_type = types.CallToolResult
-                 else:
-                     # this is a special case where we need to return the list of tools.
-                     raise NotImplementedError(f"Method {client_message.method} not implemented")
-                 # Call the mcp server using send_request() or send_notification() depending on the method.
-                 try:
-                     return await self.client.session.send_request(client_message, result_type)
-                 except McpError as e:
-                     return types.JSONRPCError(jsonrpc="2.0", id=id, error=e.error)
-
-         # The message coming here is the generic request. We look at it's .method
-         # to determine which client function to call and to further subparse the params.
-         # Note(zeiler): unfortunately the pydantic types in mcp/types.py are not consistent.
-         # The JSONRPCRequest are supposed to have an id but the InitializeRequest
-         # does not have it.
-         d = json.loads(msg)
-
-         # If we have an id it's a JSONRPCRequest
-         if not d.get('method', None).startswith("notifications/"):
-             # rpc_message2 = types.JSONRPCRequest.model_validate(rpc_message)
-             # underlying: types.JSONRPCRequest = rpc_message.root
-             client_message = types.ClientRequest.model_validate(d)
-             response = asyncio.run(
-                 send_request(client_message, id=d.get('id', ""))
-             )  # underlying.id))
-         else:  # JSONRPCRequest
-             client_message = types.ClientNotification.model_validate(d)
-             response = asyncio.run(send_notification(client_message))
-         if response is None:
-             return ""
-         # return as a serialized json string
-         return response.model_dump_json(by_alias=True, exclude_none=True)