frogml-core 0.0.113__py3-none-any.whl → 0.0.114__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. frogml_core/__init__.py +1 -1
  2. frogml_core/inner/di_configuration/__init__.py +0 -6
  3. {frogml_core-0.0.113.dist-info → frogml_core-0.0.114.dist-info}/METADATA +1 -1
  4. {frogml_core-0.0.113.dist-info → frogml_core-0.0.114.dist-info}/RECORD +8 -115
  5. frogml_services_mock/mocks/frogml_mocks.py +0 -11
  6. frogml_services_mock/services_mock.py +0 -48
  7. frogml_storage/__init__.py +1 -1
  8. frogml_core/clients/prompt_manager/__init__.py +0 -0
  9. frogml_core/clients/prompt_manager/model_descriptor_mapper.py +0 -196
  10. frogml_core/clients/prompt_manager/prompt_manager_client.py +0 -190
  11. frogml_core/clients/prompt_manager/prompt_proto_mapper.py +0 -264
  12. frogml_core/clients/vector_store/__init__.py +0 -2
  13. frogml_core/clients/vector_store/management_client.py +0 -127
  14. frogml_core/clients/vector_store/serving_client.py +0 -157
  15. frogml_core/clients/workspace_manager/__init__.py +0 -1
  16. frogml_core/clients/workspace_manager/client.py +0 -224
  17. frogml_core/llmops/__init__.py +0 -0
  18. frogml_core/llmops/generation/__init__.py +0 -0
  19. frogml_core/llmops/generation/_steaming.py +0 -78
  20. frogml_core/llmops/generation/base.py +0 -5
  21. frogml_core/llmops/generation/chat/__init__.py +0 -0
  22. frogml_core/llmops/generation/chat/openai/LICENSE.txt +0 -201
  23. frogml_core/llmops/generation/chat/openai/types/__init__.py +0 -0
  24. frogml_core/llmops/generation/chat/openai/types/chat/__init__.py +0 -0
  25. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion.py +0 -88
  26. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_assistant_message_param.py +0 -65
  27. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_chunk.py +0 -153
  28. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_content_part_text_param.py +0 -28
  29. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_call_option_param.py +0 -25
  30. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_message_param.py +0 -33
  31. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message.py +0 -56
  32. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_param.py +0 -34
  33. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call.py +0 -46
  34. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call_param.py +0 -44
  35. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_named_tool_choice_param.py +0 -32
  36. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_role.py +0 -20
  37. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_system_message_param.py +0 -35
  38. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_token_logprob.py +0 -71
  39. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_choice_option_param.py +0 -28
  40. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_message_param.py +0 -31
  41. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_param.py +0 -29
  42. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_user_message_param.py +0 -35
  43. frogml_core/llmops/generation/chat/openai/types/chat/completion_create_params.py +0 -279
  44. frogml_core/llmops/generation/chat/openai/types/completion_choice.py +0 -47
  45. frogml_core/llmops/generation/chat/openai/types/completion_create_params.py +0 -209
  46. frogml_core/llmops/generation/chat/openai/types/completion_usage.py +0 -30
  47. frogml_core/llmops/generation/chat/openai/types/model.py +0 -35
  48. frogml_core/llmops/generation/chat/openai/types/shared/__init__.py +0 -3
  49. frogml_core/llmops/generation/chat/openai/types/shared/error_object.py +0 -27
  50. frogml_core/llmops/generation/chat/openai/types/shared/function_definition.py +0 -49
  51. frogml_core/llmops/generation/chat/openai/types/shared/function_parameters.py +0 -20
  52. frogml_core/llmops/generation/chat/openai/types/shared_params/__init__.py +0 -2
  53. frogml_core/llmops/generation/chat/openai/types/shared_params/function_definition.py +0 -49
  54. frogml_core/llmops/generation/chat/openai/types/shared_params/function_parameters.py +0 -22
  55. frogml_core/llmops/generation/streaming.py +0 -26
  56. frogml_core/llmops/model/__init__.py +0 -0
  57. frogml_core/llmops/model/descriptor.py +0 -40
  58. frogml_core/llmops/prompt/__init__.py +0 -0
  59. frogml_core/llmops/prompt/base.py +0 -136
  60. frogml_core/llmops/prompt/chat/__init__.py +0 -0
  61. frogml_core/llmops/prompt/chat/message.py +0 -24
  62. frogml_core/llmops/prompt/chat/template.py +0 -113
  63. frogml_core/llmops/prompt/chat/value.py +0 -10
  64. frogml_core/llmops/prompt/manager.py +0 -138
  65. frogml_core/llmops/prompt/template.py +0 -24
  66. frogml_core/llmops/prompt/value.py +0 -14
  67. frogml_core/llmops/provider/__init__.py +0 -0
  68. frogml_core/llmops/provider/chat.py +0 -44
  69. frogml_core/llmops/provider/openai/__init__.py +0 -0
  70. frogml_core/llmops/provider/openai/client.py +0 -126
  71. frogml_core/llmops/provider/openai/provider.py +0 -93
  72. frogml_core/vector_store/__init__.py +0 -4
  73. frogml_core/vector_store/client.py +0 -151
  74. frogml_core/vector_store/collection.py +0 -429
  75. frogml_core/vector_store/filters.py +0 -359
  76. frogml_core/vector_store/inference_client.py +0 -105
  77. frogml_core/vector_store/rest_helpers.py +0 -81
  78. frogml_core/vector_store/utils/__init__.py +0 -0
  79. frogml_core/vector_store/utils/filter_utils.py +0 -23
  80. frogml_core/vector_store/utils/upsert_utils.py +0 -218
  81. frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.py +0 -77
  82. frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.pyi +0 -417
  83. frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2_grpc.py +0 -441
  84. frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.py +0 -69
  85. frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.pyi +0 -415
  86. frogml_proto/qwak/prompt/v1/prompt/prompt_pb2_grpc.py +0 -4
  87. frogml_proto/qwak/vectors/v1/collection/collection_pb2.py +0 -46
  88. frogml_proto/qwak/vectors/v1/collection/collection_pb2.pyi +0 -287
  89. frogml_proto/qwak/vectors/v1/collection/collection_pb2_grpc.py +0 -4
  90. frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.py +0 -60
  91. frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.pyi +0 -258
  92. frogml_proto/qwak/vectors/v1/collection/collection_service_pb2_grpc.py +0 -304
  93. frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.py +0 -28
  94. frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.pyi +0 -41
  95. frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2_grpc.py +0 -4
  96. frogml_proto/qwak/vectors/v1/filters_pb2.py +0 -52
  97. frogml_proto/qwak/vectors/v1/filters_pb2.pyi +0 -297
  98. frogml_proto/qwak/vectors/v1/filters_pb2_grpc.py +0 -4
  99. frogml_proto/qwak/vectors/v1/vector_pb2.py +0 -38
  100. frogml_proto/qwak/vectors/v1/vector_pb2.pyi +0 -142
  101. frogml_proto/qwak/vectors/v1/vector_pb2_grpc.py +0 -4
  102. frogml_proto/qwak/vectors/v1/vector_service_pb2.py +0 -53
  103. frogml_proto/qwak/vectors/v1/vector_service_pb2.pyi +0 -243
  104. frogml_proto/qwak/vectors/v1/vector_service_pb2_grpc.py +0 -201
  105. frogml_proto/qwak/workspace/workspace_pb2.py +0 -50
  106. frogml_proto/qwak/workspace/workspace_pb2.pyi +0 -331
  107. frogml_proto/qwak/workspace/workspace_pb2_grpc.py +0 -4
  108. frogml_proto/qwak/workspace/workspace_service_pb2.py +0 -84
  109. frogml_proto/qwak/workspace/workspace_service_pb2.pyi +0 -393
  110. frogml_proto/qwak/workspace/workspace_service_pb2_grpc.py +0 -507
  111. frogml_services_mock/mocks/prompt_manager_service.py +0 -281
  112. frogml_services_mock/mocks/vector_serving_api.py +0 -159
  113. frogml_services_mock/mocks/vectors_management_api.py +0 -97
  114. frogml_services_mock/mocks/workspace_manager_service_mock.py +0 -202
  115. {frogml_core-0.0.113.dist-info → frogml_core-0.0.114.dist-info}/WHEEL +0 -0
@@ -1,190 +0,0 @@
-import grpc
-from dependency_injector.wiring import Provide, inject
-
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    CreatePromptRequest as ProtoCreatePromptRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    CreatePromptResponse as ProtoCreatePromptResponse,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    CreatePromptVersionRequest as ProtoCreatePromptVersionRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    CreatePromptVersionResponse as ProtoCreatePromptVersionResponse,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    DeletePromptRequest as ProtoDeletePromptRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    DeletePromptVersionRequest as ProtoDeletePromptVersionRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    GetPromptByNameRequest as ProtoGetPromptByNameRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    GetPromptByNameResponse as ProtoGetPromptByNameResponse,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    GetPromptVersionByPromptNameRequest as ProtoGetPromptVersionByPromptNameRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    GetPromptVersionByPromptNameResponse as ProtoGetPromptVersionByPromptNameResponse,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
-    SetDefaultPromptVersionRequest as ProtoSetDefaultPromptVersionRequest,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2_grpc import (
-    PromptManagerServiceStub,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import Prompt as ProtoPrompt
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import PromptSpec as ProtoPromptSpec
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptVersion as ProtoPromptVersion,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptVersionSpec as ProtoPromptVersionSpec,
-)
-from frogml_core.exceptions import FrogmlException
-from frogml_core.inner.di_configuration import FrogmlContainer
-
-
-class PromptManagerClient:
-    @inject
-    def __init__(self, grpc_channel=Provide[FrogmlContainer.core_grpc_channel]):
-        self._grpc_client: PromptManagerServiceStub = PromptManagerServiceStub(
-            grpc_channel
-        )
-
-    def create_prompt(
-        self,
-        name: str,
-        prompt_description: str,
-        version_spec: ProtoPromptVersionSpec,
-    ) -> ProtoPrompt:
-        request = ProtoCreatePromptRequest(
-            prompt_name=name,
-            prompt_spec=ProtoPromptSpec(description=prompt_description),
-            prompt_version_spec=version_spec,
-        )
-        try:
-            response: ProtoCreatePromptResponse = self._grpc_client.CreatePrompt(
-                request
-            )
-            return response.prompt
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.ALREADY_EXISTS:
-                raise FrogmlException(f"Prompt with name: {name} already exists")
-            elif call.code() == grpc.StatusCode.INVALID_ARGUMENT:
-                raise FrogmlException(
-                    f"Got an illegal prompt specification: {call.details()}"
-                )
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
-
-    def create_prompt_version(
-        self,
-        name: str,
-        version_spec: ProtoPromptVersionSpec,
-        set_default: bool,
-    ) -> ProtoPromptVersion:
-        request = ProtoCreatePromptVersionRequest(
-            prompt_name=name, prompt_version_spec=version_spec, set_default=set_default
-        )
-        try:
-            response: ProtoCreatePromptVersionResponse = (
-                self._grpc_client.CreatePromptVersion(request)
-            )
-            return response.prompt_version
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.NOT_FOUND:
-                raise FrogmlException(
-                    f"Can not update prompt: '{name}', prompt was not found"
-                )
-            elif call.code() == grpc.StatusCode.INVALID_ARGUMENT:
-                raise FrogmlException(
-                    f"Got an illegal prompt specification: {call.details()}"
-                )
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
-
-    def delete_prompt(self, name: str):
-        try:
-            self._grpc_client.DeletePrompt(ProtoDeletePromptRequest(prompt_name=name))
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.NOT_FOUND:
-                raise FrogmlException(f"Prompt named '{name}' was not found")
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
-
-    def delete_prompt_version(self, name: str, version: int):
-        try:
-            self._grpc_client.DeletePromptVersion(
-                ProtoDeletePromptVersionRequest(
-                    prompt_name=name, version_number=version
-                )
-            )
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.NOT_FOUND:
-                raise FrogmlException(str(call.details()))
-            elif call.code() == grpc.StatusCode.FAILED_PRECONDITION:
-                raise FrogmlException(
-                    f"Cannot delete the default version '{version}' of a prompt '{name}',"
-                    f" please set another version as the default to delete this version."
-                )
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
-
-    def get_prompt_by_name(self, name: str) -> ProtoPrompt:
-        """
-        Get prompt's default version
-        """
-        try:
-            response: ProtoGetPromptByNameResponse = self._grpc_client.GetPromptByName(
-                ProtoGetPromptByNameRequest(prompt_name=name)
-            )
-            return response.prompt
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.NOT_FOUND:
-                raise FrogmlException(str(call.details()))
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
-
-    def get_prompt_version_by_name(self, name: str, version: int) -> ProtoPromptVersion:
-        """
-        Get prompt specific version
-        """
-        try:
-            response: ProtoGetPromptVersionByPromptNameResponse = (
-                self._grpc_client.GetPromptVersionByPromptName(
-                    ProtoGetPromptVersionByPromptNameRequest(
-                        prompt_name=name, version_number=version
-                    )
-                )
-            )
-            return response.prompt_version
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.NOT_FOUND:
-                raise FrogmlException(str(call.details()))
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
-
-    def set_default_prompt_version(self, name: str, version: int):
-        try:
-            self._grpc_client.SetDefaultPromptVersion(
-                ProtoSetDefaultPromptVersionRequest(
-                    prompt_name=name, version_number=version
-                )
-            )
-        except grpc.RpcError as error:
-            call: grpc.Call = error  # noqa
-            if call.code() == grpc.StatusCode.NOT_FOUND:
-                raise FrogmlException(str(call.details()))
-            else:
-                raise FrogmlException(f"Internal Error: {call.details()}")
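For context, the removed PromptManagerClient wrapped the prompt-manager gRPC stub behind FrogML's dependency-injected channel and translated gRPC status codes into FrogmlException. A minimal sketch of how its lookup and version-management calls were driven in 0.0.113 (the prompt name is a hypothetical example; this API no longer exists in 0.0.114):

from frogml_core.clients.prompt_manager.prompt_manager_client import PromptManagerClient

client = PromptManagerClient()  # gRPC channel injected from FrogmlContainer.core_grpc_channel

prompt = client.get_prompt_by_name(name="support-triage")            # default version
version = client.get_prompt_version_by_name(name="support-triage", version=2)
client.set_default_prompt_version(name="support-triage", version=2)
client.delete_prompt_version(name="support-triage", version=1)       # non-default versions only
client.delete_prompt(name="support-triage")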
@@ -1,264 +0,0 @@
-from typing import List, Optional
-
-from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
-    OpenAIModelDescriptor as ProtoOpenAIModelDescriptor,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    AIPromptMessageRole as ProtoAIPromptMessageRole,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    ChatMessage as ProtoChatMessage,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    ChatMessageTemplate as ProtoChatMessageTemplate,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    ChatPromptTemplate as ProtoChatPromptTemplate,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    HumanPromptMessageRole as ProtoHumanPromptMessageRole,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptMessageRole as ProtoPromptMessageRole,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptModelDescriptor as ProtoPromptModelDescriptor,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptOpenAIProvider as ProtoPromptOpenAIProvider,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptTemplate as ProtoPromptTemplate,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptVersionDefinition as ProtoPromptVersionDefinition,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    PromptVersionSpec as ProtoPromptVersionSpec,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    SystemPromptMessageRole as ProtoSystemPromptMessageRole,
-)
-from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-    TextTemplate as ProtoTextTemplate,
-)
-from frogml_core.clients.prompt_manager.model_descriptor_mapper import (
-    ModelDescriptorMapper,
-)
-from frogml_core.exceptions import FrogmlException
-from frogml_core.llmops.model.descriptor import ChatModelDescriptor, ModelDescriptor
-from frogml_core.llmops.prompt.base import BasePrompt, ChatPrompt, RegisteredPrompt
-from frogml_core.llmops.prompt.chat.message import (
-    AIMessage,
-    BaseMessage,
-    HumanMessage,
-    SystemMessage,
-)
-from frogml_core.llmops.prompt.chat.template import (
-    AIMessagePromptTemplate,
-    BaseStringMessagePromptTemplate,
-    ChatPromptTemplate,
-    HumanMessagePromptTemplate,
-    SystemMessagePromptTemplate,
-)
-from frogml_core.llmops.prompt.template import BasePromptTemplate
-
-
-class PromptProtoMapper:
-    @staticmethod
-    def from_chat_prompt_template(
-        chat_prompt_template: ProtoChatPromptTemplate,
-    ) -> ChatPromptTemplate:
-        messages: List[BaseStringMessagePromptTemplate] = []
-        for chat_message in chat_prompt_template.chat_messages:
-            template: str = chat_message.template.text_template.template
-            role_type: str = chat_message.role.WhichOneof("role")
-            if role_type == "human_role":
-                messages.append(HumanMessagePromptTemplate(template=template))
-            elif role_type == "system_role":
-                messages.append(SystemMessagePromptTemplate(template=template))
-            elif role_type == "ai_role":
-                messages.append(AIMessagePromptTemplate(template=template))
-            else:
-                raise FrogmlException(
-                    f"Got unsupported chat message type: {repr(chat_message)}"
-                )
-
-        return ChatPromptTemplate(messages=messages)
-
-    @staticmethod
-    def from_prompt_template(
-        prompt_template: ProtoPromptTemplate,
-    ) -> BasePromptTemplate:
-        if prompt_template.WhichOneof("type") == "chat_prompt_template":
-            return PromptProtoMapper.from_chat_prompt_template(
-                chat_prompt_template=prompt_template.chat_prompt_template
-            )
-        else:
-            raise FrogmlException(
-                f"Got unsupported prompt template: {repr(prompt_template)}"
-            )
-
-    @staticmethod
-    def to_prompt_model_descriptor(
-        model_descriptor: ModelDescriptor,
-    ) -> ProtoPromptModelDescriptor:
-        proto_model_descriptor = ModelDescriptorMapper.to_model_descriptor(
-            model_descriptor=model_descriptor
-        )
-        if isinstance(proto_model_descriptor, ProtoOpenAIModelDescriptor):
-            return ProtoPromptModelDescriptor(
-                open_ai_provider=ProtoPromptOpenAIProvider(
-                    open_ai_model_descriptor=proto_model_descriptor
-                )
-            )
-
-        raise FrogmlException(
-            f"Got unsupported model descriptor: {repr(model_descriptor)}"
-        )
-
-    @staticmethod
-    def _prompt_template_to_chat_message(
-        msg: BaseStringMessagePromptTemplate,
-    ) -> ProtoChatMessage:
-        role: ProtoPromptMessageRole
-        text_template: str = msg.template.template
-
-        if isinstance(msg, SystemMessagePromptTemplate):
-            role = ProtoPromptMessageRole(system_role=ProtoSystemPromptMessageRole())
-        elif isinstance(msg, HumanMessagePromptTemplate):
-            role = ProtoPromptMessageRole(human_role=ProtoHumanPromptMessageRole())
-        elif isinstance(msg, AIMessagePromptTemplate):
-            role = ProtoPromptMessageRole(ai_role=ProtoAIPromptMessageRole())
-        else:
-            raise FrogmlException(f"Got unsupported prompt template role: {repr(msg)}")
-
-        return ProtoChatMessage(
-            role=role,
-            template=ProtoChatMessageTemplate(
-                text_template=ProtoTextTemplate(template=text_template)
-            ),
-        )
-
-    @staticmethod
-    def _base_message_to_chat_message(msg: BaseMessage) -> ProtoChatMessage:
-        role: ProtoPromptMessageRole
-        text_template: str = msg.content
-
-        if isinstance(msg, AIMessage):
-            role = ProtoPromptMessageRole(ai_role=ProtoAIPromptMessageRole())
-        elif isinstance(msg, HumanMessage):
-            role = ProtoPromptMessageRole(human_role=ProtoHumanPromptMessageRole())
-        elif isinstance(msg, SystemMessage):
-            role = ProtoPromptMessageRole(system_role=ProtoSystemPromptMessageRole())
-        else:
-            raise FrogmlException(f"Got unsupported prompt template role: {repr(msg)}")
-
-        return ProtoChatMessage(
-            role=role,
-            template=ProtoChatMessageTemplate(
-                text_template=ProtoTextTemplate(template=text_template)
-            ),
-        )
-
-    @staticmethod
-    def to_proto_chat_prompt_template(
-        prompt_template: ChatPromptTemplate,
-    ) -> ProtoPromptTemplate:
-        chat_messages: List[ProtoChatMessage] = []
-
-        for msg in prompt_template.messages:
-            if isinstance(msg, BaseStringMessagePromptTemplate):
-                chat_messages.append(
-                    PromptProtoMapper._prompt_template_to_chat_message(msg=msg)
-                )
-            elif isinstance(msg, BaseMessage):
-                chat_messages.append(
-                    PromptProtoMapper._base_message_to_chat_message(msg=msg)
-                )
-
-        return ProtoPromptTemplate(
-            chat_prompt_template=ProtoChatPromptTemplate(chat_messages=chat_messages)
-        )
-
-    @staticmethod
-    def to_proto_prompt_template(
-        prompt_template: BasePromptTemplate,
-    ) -> ProtoPromptTemplate:
-        if isinstance(prompt_template, ChatPromptTemplate):
-            return PromptProtoMapper.to_proto_chat_prompt_template(
-                prompt_template=prompt_template
-            )
-
-        raise FrogmlException(
-            f"Got unsupported prompt template: {repr(prompt_template)}"
-        )
-
-    @staticmethod
-    def to_prompt_version_spec(
-        version_description: str,
-        prompt_template: BasePromptTemplate,
-        model_descriptor: Optional[ModelDescriptor],
-    ) -> ProtoPromptVersionSpec:
-        prompt_model_descriptor: Optional[ProtoPromptModelDescriptor] = None
-
-        if model_descriptor:
-            prompt_model_descriptor: ProtoPromptModelDescriptor = (
-                PromptProtoMapper.to_prompt_model_descriptor(
-                    model_descriptor=model_descriptor
-                )
-            )
-
-        proto_prompt_template: ProtoPromptTemplate = (
-            PromptProtoMapper.to_proto_prompt_template(prompt_template=prompt_template)
-        )
-
-        return ProtoPromptVersionSpec(
-            description=version_description,
-            prompt_template=proto_prompt_template,
-            model_descriptor=prompt_model_descriptor,
-        )
-
-    @staticmethod
-    def from_prompt_version_definition(
-        prompt_version_def: ProtoPromptVersionDefinition,
-    ) -> BasePrompt:
-        model_descriptor: Optional[ModelDescriptor] = None
-        prompt_template: ProtoPromptTemplate = (
-            prompt_version_def.version_spec.prompt_template
-        )
-        base_prompt_template: BasePromptTemplate = (
-            PromptProtoMapper.from_prompt_template(prompt_template)
-        )
-
-        if prompt_version_def.version_spec.HasField("model_descriptor"):
-            model_descriptor: ModelDescriptor = (
-                ModelDescriptorMapper.from_prompt_model_descriptor(
-                    model_descriptor=prompt_version_def.version_spec.model_descriptor
-                )
-            )
-
-        if isinstance(base_prompt_template, ChatPromptTemplate) and (
-            not model_descriptor or isinstance(model_descriptor, ChatModelDescriptor)
-        ):
-            return ChatPrompt(template=base_prompt_template, model=model_descriptor)
-
-    @staticmethod
-    def from_prompt(
-        name: str,
-        prompt_description: str,
-        version_description: str,
-        version: int,
-        target_default_version: bool,
-        prompt_version_definition: ProtoPromptVersionDefinition,
-    ) -> RegisteredPrompt:
-        return RegisteredPrompt(
-            name=name,
-            prompt_description=prompt_description,
-            version_description=version_description,
-            version=version,
-            _target_default_version=target_default_version,
-            prompt=PromptProtoMapper.from_prompt_version_definition(
-                prompt_version_def=prompt_version_definition
-            ),
-        )
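PromptProtoMapper was the bridge between the SDK-level template classes (also removed in this release, under frogml_core/llmops/prompt/chat/template.py) and the prompt protos consumed by PromptManagerClient. A minimal sketch of the removed create flow; the prompt name, description, and message text are illustrative, not taken from the package:

from frogml_core.clients.prompt_manager.prompt_manager_client import PromptManagerClient
from frogml_core.clients.prompt_manager.prompt_proto_mapper import PromptProtoMapper
from frogml_core.llmops.prompt.chat.template import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    SystemMessagePromptTemplate,
)

# Build an SDK-level chat template from per-role message templates
template = ChatPromptTemplate(
    messages=[
        SystemMessagePromptTemplate(template="You are a helpful support agent."),
        HumanMessagePromptTemplate(template="Summarize the ticket for triage."),
    ]
)

# SDK template -> ProtoPromptVersionSpec, as expected by create_prompt / create_prompt_version
spec = PromptProtoMapper.to_prompt_version_spec(
    version_description="first version",
    prompt_template=template,
    model_descriptor=None,  # only OpenAI descriptors were mapped; None leaves the field unset
)

PromptManagerClient().create_prompt(
    name="support-triage",
    prompt_description="Routes support tickets",
    version_spec=spec,
)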
@@ -1,2 +0,0 @@
-from .management_client import VectorManagementClient
-from .serving_client import VectorServingClient
@@ -1,127 +0,0 @@
-from typing import List, Optional
-
-import grpc
-from dependency_injector.wiring import Provide, inject
-
-from frogml_proto.qwak.vectors.v1.collection.collection_pb2 import (
-    VectorCollection,
-    VectorCollectionMetric,
-    VectorCollectionSpec,
-    VectorCollectionVectorizer,
-)
-from frogml_proto.qwak.vectors.v1.collection.collection_service_pb2 import (
-    CreateCollectionRequest,
-    DeleteCollectionByIdRequest,
-    DeleteCollectionByNameRequest,
-    GetCollectionByIdRequest,
-    GetCollectionByNameRequest,
-    ListCollectionsRequest,
-)
-from frogml_proto.qwak.vectors.v1.collection.collection_service_pb2_grpc import (
-    VectorCollectionServiceStub,
-)
-from frogml_core.exceptions import FrogmlException
-from frogml_core.inner.di_configuration import FrogmlContainer
-
-
-class VectorManagementClient:
-    @inject
-    def __init__(self, grpc_channel=Provide[FrogmlContainer.core_grpc_channel]):
-        self._vector_management_service: VectorCollectionServiceStub = (
-            VectorCollectionServiceStub(grpc_channel)
-        )
-
-    def create_collection(
-        self,
-        name: str,
-        dimension: int,
-        description: str = None,
-        metric: VectorCollectionMetric = VectorCollectionMetric.COLLECTION_METRIC_L2_SQUARED,
-        vectorizer: Optional[str] = None,
-        multi_tenant: bool = False,
-    ) -> VectorCollection:
-        """
-        Create a collection
-        """
-        try:
-            return self._vector_management_service.CreateCollection(
-                CreateCollectionRequest(
-                    collection_spec=VectorCollectionSpec(
-                        name=name,
-                        description=description,
-                        vectorizer=VectorCollectionVectorizer(
-                            qwak_model_name=vectorizer
-                        ),
-                        metric=metric,
-                        dimension=dimension,
-                        multi_tenancy_enabled=multi_tenant,
-                    )
-                )
-            ).vector_collection
-
-        except grpc.RpcError as e:
-            raise FrogmlException(f"Failed to create collection: {e.details()}")
-
-    def list_collections(self) -> List[VectorCollection]:
-        """
-        List all vector collections
-        """
-        try:
-            return self._vector_management_service.ListCollections(
-                ListCollectionsRequest()
-            ).vector_collections
-
-        except grpc.RpcError as e:
-            raise FrogmlException(f"Failed to list collections: {e.details()}")
-
-    def get_collection_by_name(self, name: str) -> VectorCollection:
-        """
-        Get vector collection by name
-        """
-        try:
-            return self._vector_management_service.GetCollectionByName(
-                GetCollectionByNameRequest(name=name)
-            ).vector_collection
-        except grpc.RpcError as e:
-            raise FrogmlException(
-                f"Failed to get collection by name '{name}': {e.details()}"
-            )
-
-    def get_collection_by_id(self, id: str) -> VectorCollection:
-        """
-        Get vector collection by id
-        """
-        try:
-            return self._vector_management_service.GetCollectionById(
-                GetCollectionByIdRequest(id=id)
-            ).vector_collection
-        except grpc.RpcError as e:
-            raise FrogmlException(
-                f"Failed to get collection by id '{id}': {e.details()}"
-            )
-
-    def delete_collection_by_id(self, id: str) -> None:
-        """
-        Delete vector collection by id
-        """
-        try:
-            self._vector_management_service.DeleteCollectionById(
-                DeleteCollectionByIdRequest(id=id)
-            )
-        except grpc.RpcError as e:
-            raise FrogmlException(
-                f"Failed to delete collection by id '{id}': {e.details()}"
-            )
-
-    def delete_collection_by_name(self, name: str) -> None:
-        """
-        Delete vector collection by name
-        """
-        try:
-            self._vector_management_service.DeleteCollectionByName(
-                DeleteCollectionByNameRequest(name=name)
-            )
-        except grpc.RpcError as e:
-            raise FrogmlException(
-                f"Failed to delete collection by name '{name}': {e.details()}"
-            )
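For reference, the removed collection-management flow in 0.0.113 looked roughly like the sketch below. The collection name, dimension, and description are illustrative values only; the client, its methods, and the metric enum come from the removed files shown above.

from frogml_core.clients.vector_store import VectorManagementClient
from frogml_proto.qwak.vectors.v1.collection.collection_pb2 import VectorCollectionMetric

client = VectorManagementClient()  # gRPC channel injected via FrogmlContainer

# Create a collection; vectorizer may name a deployed Qwak model, or stay None
collection = client.create_collection(
    name="product-embeddings",
    dimension=768,
    description="Catalog item embeddings",
    metric=VectorCollectionMetric.COLLECTION_METRIC_L2_SQUARED,
    vectorizer=None,
    multi_tenant=False,
)

# Enumerate existing collections, then clean up by name
collections = client.list_collections()
client.delete_collection_by_name(name="product-embeddings")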