frogml-core 0.0.113__py3-none-any.whl → 0.0.115__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- frogml_core/__init__.py +1 -1
- frogml_core/clients/administration/authentication/client.py +2 -2
- frogml_core/clients/batch_job_management/client.py +4 -4
- frogml_core/clients/build_orchestrator/build_model_request_getter.py +6 -6
- frogml_core/clients/build_orchestrator/client.py +12 -12
- frogml_core/clients/build_orchestrator/internal_client.py +10 -10
- frogml_core/frogml_client/build_api_helpers/build_api_steps.py +3 -3
- frogml_core/inner/build_logic/constants/upload_tag.py +7 -7
- frogml_core/inner/build_logic/interface/context_interface.py +1 -1
- frogml_core/inner/build_logic/phases/phase_010_fetch_model/fetch_strategy_manager/strategy/strategy.py +4 -4
- frogml_core/inner/build_logic/phases/phase_010_fetch_model/set_version_step.py +3 -3
- frogml_core/inner/build_logic/phases/phase_020_remote_register_frogml_build/start_remote_build_step.py +3 -3
- frogml_core/inner/build_logic/phases/phase_020_remote_register_frogml_build/upload_step.py +11 -9
- frogml_core/inner/build_logic/tools/ignore_files.py +3 -3
- frogml_core/inner/di_configuration/__init__.py +0 -6
- frogml_core/model/adapters/__init__.py +1 -1
- frogml_core/model/analytics_logging.py +1 -1
- frogml_core/model/tools/adapters/input.py +6 -6
- frogml_core/model/tools/adapters/output.py +8 -8
- frogml_core/model/tools/run_model_locally.py +2 -2
- frogml_core/model/utils/feature_utils.py +1 -1
- {frogml_core-0.0.113.dist-info → frogml_core-0.0.115.dist-info}/METADATA +1 -1
- {frogml_core-0.0.113.dist-info → frogml_core-0.0.115.dist-info}/RECORD +30 -137
- frogml_services_mock/mocks/analytics_api.py +6 -6
- frogml_services_mock/mocks/ecosystem_service_api.py +2 -2
- frogml_services_mock/mocks/frogml_mocks.py +0 -11
- frogml_services_mock/services_mock.py +4 -52
- frogml_storage/__init__.py +1 -1
- frogml_core/clients/prompt_manager/__init__.py +0 -0
- frogml_core/clients/prompt_manager/model_descriptor_mapper.py +0 -196
- frogml_core/clients/prompt_manager/prompt_manager_client.py +0 -190
- frogml_core/clients/prompt_manager/prompt_proto_mapper.py +0 -264
- frogml_core/clients/vector_store/__init__.py +0 -2
- frogml_core/clients/vector_store/management_client.py +0 -127
- frogml_core/clients/vector_store/serving_client.py +0 -157
- frogml_core/clients/workspace_manager/__init__.py +0 -1
- frogml_core/clients/workspace_manager/client.py +0 -224
- frogml_core/llmops/__init__.py +0 -0
- frogml_core/llmops/generation/__init__.py +0 -0
- frogml_core/llmops/generation/_steaming.py +0 -78
- frogml_core/llmops/generation/base.py +0 -5
- frogml_core/llmops/generation/chat/__init__.py +0 -0
- frogml_core/llmops/generation/chat/openai/LICENSE.txt +0 -201
- frogml_core/llmops/generation/chat/openai/types/__init__.py +0 -0
- frogml_core/llmops/generation/chat/openai/types/chat/__init__.py +0 -0
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion.py +0 -88
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_assistant_message_param.py +0 -65
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_chunk.py +0 -153
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_content_part_text_param.py +0 -28
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_call_option_param.py +0 -25
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_message_param.py +0 -33
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message.py +0 -56
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_param.py +0 -34
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call.py +0 -46
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call_param.py +0 -44
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_named_tool_choice_param.py +0 -32
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_role.py +0 -20
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_system_message_param.py +0 -35
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_token_logprob.py +0 -71
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_choice_option_param.py +0 -28
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_message_param.py +0 -31
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_param.py +0 -29
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_user_message_param.py +0 -35
- frogml_core/llmops/generation/chat/openai/types/chat/completion_create_params.py +0 -279
- frogml_core/llmops/generation/chat/openai/types/completion_choice.py +0 -47
- frogml_core/llmops/generation/chat/openai/types/completion_create_params.py +0 -209
- frogml_core/llmops/generation/chat/openai/types/completion_usage.py +0 -30
- frogml_core/llmops/generation/chat/openai/types/model.py +0 -35
- frogml_core/llmops/generation/chat/openai/types/shared/__init__.py +0 -3
- frogml_core/llmops/generation/chat/openai/types/shared/error_object.py +0 -27
- frogml_core/llmops/generation/chat/openai/types/shared/function_definition.py +0 -49
- frogml_core/llmops/generation/chat/openai/types/shared/function_parameters.py +0 -20
- frogml_core/llmops/generation/chat/openai/types/shared_params/__init__.py +0 -2
- frogml_core/llmops/generation/chat/openai/types/shared_params/function_definition.py +0 -49
- frogml_core/llmops/generation/chat/openai/types/shared_params/function_parameters.py +0 -22
- frogml_core/llmops/generation/streaming.py +0 -26
- frogml_core/llmops/model/__init__.py +0 -0
- frogml_core/llmops/model/descriptor.py +0 -40
- frogml_core/llmops/prompt/__init__.py +0 -0
- frogml_core/llmops/prompt/base.py +0 -136
- frogml_core/llmops/prompt/chat/__init__.py +0 -0
- frogml_core/llmops/prompt/chat/message.py +0 -24
- frogml_core/llmops/prompt/chat/template.py +0 -113
- frogml_core/llmops/prompt/chat/value.py +0 -10
- frogml_core/llmops/prompt/manager.py +0 -138
- frogml_core/llmops/prompt/template.py +0 -24
- frogml_core/llmops/prompt/value.py +0 -14
- frogml_core/llmops/provider/__init__.py +0 -0
- frogml_core/llmops/provider/chat.py +0 -44
- frogml_core/llmops/provider/openai/__init__.py +0 -0
- frogml_core/llmops/provider/openai/client.py +0 -126
- frogml_core/llmops/provider/openai/provider.py +0 -93
- frogml_core/vector_store/__init__.py +0 -4
- frogml_core/vector_store/client.py +0 -151
- frogml_core/vector_store/collection.py +0 -429
- frogml_core/vector_store/filters.py +0 -359
- frogml_core/vector_store/inference_client.py +0 -105
- frogml_core/vector_store/rest_helpers.py +0 -81
- frogml_core/vector_store/utils/__init__.py +0 -0
- frogml_core/vector_store/utils/filter_utils.py +0 -23
- frogml_core/vector_store/utils/upsert_utils.py +0 -218
- frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.py +0 -77
- frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.pyi +0 -417
- frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2_grpc.py +0 -441
- frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.py +0 -69
- frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.pyi +0 -415
- frogml_proto/qwak/prompt/v1/prompt/prompt_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/collection/collection_pb2.py +0 -46
- frogml_proto/qwak/vectors/v1/collection/collection_pb2.pyi +0 -287
- frogml_proto/qwak/vectors/v1/collection/collection_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.py +0 -60
- frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.pyi +0 -258
- frogml_proto/qwak/vectors/v1/collection/collection_service_pb2_grpc.py +0 -304
- frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.py +0 -28
- frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.pyi +0 -41
- frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/filters_pb2.py +0 -52
- frogml_proto/qwak/vectors/v1/filters_pb2.pyi +0 -297
- frogml_proto/qwak/vectors/v1/filters_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/vector_pb2.py +0 -38
- frogml_proto/qwak/vectors/v1/vector_pb2.pyi +0 -142
- frogml_proto/qwak/vectors/v1/vector_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/vector_service_pb2.py +0 -53
- frogml_proto/qwak/vectors/v1/vector_service_pb2.pyi +0 -243
- frogml_proto/qwak/vectors/v1/vector_service_pb2_grpc.py +0 -201
- frogml_proto/qwak/workspace/workspace_pb2.py +0 -50
- frogml_proto/qwak/workspace/workspace_pb2.pyi +0 -331
- frogml_proto/qwak/workspace/workspace_pb2_grpc.py +0 -4
- frogml_proto/qwak/workspace/workspace_service_pb2.py +0 -84
- frogml_proto/qwak/workspace/workspace_service_pb2.pyi +0 -393
- frogml_proto/qwak/workspace/workspace_service_pb2_grpc.py +0 -507
- frogml_services_mock/mocks/prompt_manager_service.py +0 -281
- frogml_services_mock/mocks/vector_serving_api.py +0 -159
- frogml_services_mock/mocks/vectors_management_api.py +0 -97
- frogml_services_mock/mocks/workspace_manager_service_mock.py +0 -202
- /frogml_core/model/adapters/output_adapters/{qwak_with_default_fallback.py → frogml_with_default_fallback.py} +0 -0
- {frogml_core-0.0.113.dist-info → frogml_core-0.0.115.dist-info}/WHEEL +0 -0
@@ -1,196 +0,0 @@
|
|
1
|
-
from typing import List, Optional, Union
|
2
|
-
|
3
|
-
from google.protobuf import json_format
|
4
|
-
from google.protobuf.struct_pb2 import ListValue, Struct
|
5
|
-
|
6
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
7
|
-
OpenAICapabilities as ProtoOpenAICapabilities,
|
8
|
-
)
|
9
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
10
|
-
OpenAIChatAPI as ProtoOpenAIChatAPI,
|
11
|
-
)
|
12
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
13
|
-
OpenAIChatModelParams as ProtoOpenAIChatModelParams,
|
14
|
-
)
|
15
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
16
|
-
OpenAIModelDescriptor as ProtoOpenAIModelDescriptor,
|
17
|
-
)
|
18
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
19
|
-
ToolChoice as ProtoToolChoice,
|
20
|
-
)
|
21
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
22
|
-
Tools as ProtoTools,
|
23
|
-
)
|
24
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
25
|
-
PromptModelDescriptor as ProtoPromptModelDescriptor,
|
26
|
-
)
|
27
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
28
|
-
PromptOpenAIProvider as ProtoPromptOpenAIProvider,
|
29
|
-
)
|
30
|
-
from frogml_core.exceptions import FrogmlException
|
31
|
-
from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion_named_tool_choice_param import (
|
32
|
-
ChatCompletionNamedToolChoiceParam,
|
33
|
-
)
|
34
|
-
from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion_tool_param import (
|
35
|
-
ChatCompletionToolParam,
|
36
|
-
)
|
37
|
-
from frogml_core.llmops.model.descriptor import ModelDescriptor, OpenAIChat
|
38
|
-
|
39
|
-
|
40
|
-
class ModelDescriptorMapper:
|
41
|
-
@staticmethod
|
42
|
-
def _from_tool_choice(
|
43
|
-
openai_chat_params: ProtoOpenAIChatModelParams,
|
44
|
-
) -> Optional[Union[str, ChatCompletionNamedToolChoiceParam]]:
|
45
|
-
_tool_choice: Optional[Union[str, ChatCompletionNamedToolChoiceParam]] = None
|
46
|
-
if openai_chat_params.HasField("tool_choice"):
|
47
|
-
if openai_chat_params.tool_choice.WhichOneof("value_type") == "json":
|
48
|
-
_tool_choice = json_format.MessageToDict(
|
49
|
-
openai_chat_params.tool_choice.json
|
50
|
-
)
|
51
|
-
elif openai_chat_params.tool_choice.WhichOneof("value_type") == "literal":
|
52
|
-
_tool_choice = openai_chat_params.tool_choice.literal
|
53
|
-
return _tool_choice
|
54
|
-
|
55
|
-
@staticmethod
|
56
|
-
def from_openai_chat_capability(
|
57
|
-
model_id: str, openai_chat_params: ProtoOpenAIChatModelParams
|
58
|
-
) -> OpenAIChat:
|
59
|
-
p = openai_chat_params
|
60
|
-
_tool_choice: Union[str, ChatCompletionNamedToolChoiceParam] = (
|
61
|
-
ModelDescriptorMapper._from_tool_choice(
|
62
|
-
openai_chat_params=openai_chat_params
|
63
|
-
)
|
64
|
-
)
|
65
|
-
_tools: List[ChatCompletionToolParam] = []
|
66
|
-
|
67
|
-
if p.HasField("tools_spec"):
|
68
|
-
for tool in p.tools_spec.tools:
|
69
|
-
_tools.append(json_format.MessageToDict(tool))
|
70
|
-
|
71
|
-
return OpenAIChat(
|
72
|
-
model_id=model_id,
|
73
|
-
frequency_penalty=(
|
74
|
-
p.frequency_penalty if p.HasField("frequency_penalty") else None
|
75
|
-
),
|
76
|
-
logit_bias=(
|
77
|
-
{k: int(v) for k, v in p.logit_bias.items()}
|
78
|
-
if p.HasField("logit_bias")
|
79
|
-
else None
|
80
|
-
),
|
81
|
-
logprobs=p.logprobs if p.HasField("logprobs") else None,
|
82
|
-
max_tokens=p.max_tokens if p.HasField("max_tokens") else None,
|
83
|
-
n=p.n if p.HasField("n") else None,
|
84
|
-
presence_penalty=(
|
85
|
-
p.presence_penalty if p.HasField("presence_penalty") else None
|
86
|
-
),
|
87
|
-
response_format=(
|
88
|
-
p.response_format if p.HasField("response_format") else None
|
89
|
-
), # noqa
|
90
|
-
seed=p.seed if p.HasField("seed") else None,
|
91
|
-
stop=[_ for _ in p.stop] if p.HasField("stop") else None,
|
92
|
-
temperature=p.temperature if p.HasField("temperature") else None,
|
93
|
-
top_p=p.top_p if p.HasField("top_p") else None,
|
94
|
-
top_logprobs=p.top_logprobs if p.HasField("top_logprobs") else None,
|
95
|
-
user=p.user if p.HasField("user") else None,
|
96
|
-
tool_choice=_tool_choice if p.HasField("tool_choice") else None,
|
97
|
-
tools=_tools if p.HasField("tools_spec") else None,
|
98
|
-
)
|
99
|
-
|
100
|
-
@staticmethod
|
101
|
-
def from_prompt_openai_provider(
|
102
|
-
open_ai_provider: ProtoPromptOpenAIProvider,
|
103
|
-
) -> ModelDescriptor:
|
104
|
-
descriptor: ProtoOpenAIModelDescriptor = (
|
105
|
-
open_ai_provider.open_ai_model_descriptor
|
106
|
-
)
|
107
|
-
model_id: str = descriptor.model_id
|
108
|
-
|
109
|
-
if descriptor.capabilities.WhichOneof("optional_chat"):
|
110
|
-
return ModelDescriptorMapper.from_openai_chat_capability(
|
111
|
-
model_id=model_id,
|
112
|
-
openai_chat_params=descriptor.capabilities.chat_api.chat_params,
|
113
|
-
)
|
114
|
-
else:
|
115
|
-
raise FrogmlException(
|
116
|
-
f"Got unsupported openai capability: {repr(open_ai_provider)}"
|
117
|
-
)
|
118
|
-
|
119
|
-
@staticmethod
|
120
|
-
def from_prompt_model_descriptor(
|
121
|
-
model_descriptor: ProtoPromptModelDescriptor,
|
122
|
-
) -> ModelDescriptor:
|
123
|
-
if model_descriptor.WhichOneof("model_provider") == "open_ai_provider":
|
124
|
-
return ModelDescriptorMapper.from_prompt_openai_provider(
|
125
|
-
model_descriptor.open_ai_provider
|
126
|
-
)
|
127
|
-
else:
|
128
|
-
raise FrogmlException(
|
129
|
-
f"Got unsupported model descriptor: {repr(model_descriptor)}"
|
130
|
-
)
|
131
|
-
|
132
|
-
@staticmethod
|
133
|
-
def to_openai_chat(model_descriptor: OpenAIChat) -> ProtoOpenAIModelDescriptor:
|
134
|
-
d: OpenAIChat = model_descriptor
|
135
|
-
logit_bias_struct = Struct()
|
136
|
-
|
137
|
-
logit_bias_struct.update(d.logit_bias) if d.logit_bias else None
|
138
|
-
|
139
|
-
stop_list_value = ListValue()
|
140
|
-
stop_list_value.extend(d.stop) if d.stop else None
|
141
|
-
tool_choice_proto: ProtoToolChoice
|
142
|
-
|
143
|
-
if isinstance(d.tool_choice, str):
|
144
|
-
tool_choice_proto = ProtoToolChoice(literal=d.tool_choice)
|
145
|
-
elif d.tool_choice is not None:
|
146
|
-
tool_choice_struct = Struct()
|
147
|
-
tool_choice_struct.update(d.tool_choice)
|
148
|
-
tool_choice_proto = ProtoToolChoice(json=tool_choice_struct)
|
149
|
-
|
150
|
-
tools_structs = []
|
151
|
-
if d.tools:
|
152
|
-
for tool in d.tools:
|
153
|
-
s = Struct()
|
154
|
-
s.update(tool)
|
155
|
-
tools_structs.append(s)
|
156
|
-
|
157
|
-
tools_proto = ProtoTools(tools=tools_structs)
|
158
|
-
|
159
|
-
model_capabilities = ProtoOpenAICapabilities(
|
160
|
-
chat_api=ProtoOpenAIChatAPI(
|
161
|
-
chat_params=ProtoOpenAIChatModelParams(
|
162
|
-
frequency_penalty=d.frequency_penalty,
|
163
|
-
logit_bias=logit_bias_struct if d.logit_bias else None,
|
164
|
-
logprobs=d.logprobs,
|
165
|
-
max_tokens=d.max_tokens,
|
166
|
-
n=d.n,
|
167
|
-
presence_penalty=d.presence_penalty,
|
168
|
-
response_format=(
|
169
|
-
d.response_format if d.response_format else None
|
170
|
-
), # noqa
|
171
|
-
seed=d.seed,
|
172
|
-
stop=stop_list_value if d.stop else None,
|
173
|
-
temperature=d.temperature,
|
174
|
-
tool_choice=tool_choice_proto if d.tool_choice else None,
|
175
|
-
tools_spec=tools_proto if d.tools else None,
|
176
|
-
top_logprobs=d.top_logprobs,
|
177
|
-
top_p=d.top_p,
|
178
|
-
user=d.user,
|
179
|
-
)
|
180
|
-
)
|
181
|
-
)
|
182
|
-
|
183
|
-
return ProtoOpenAIModelDescriptor(
|
184
|
-
model_id=model_descriptor.model_id, capabilities=model_capabilities
|
185
|
-
)
|
186
|
-
|
187
|
-
@staticmethod
|
188
|
-
def to_model_descriptor(
|
189
|
-
model_descriptor: ModelDescriptor,
|
190
|
-
) -> Union[ProtoOpenAIModelDescriptor]:
|
191
|
-
if isinstance(model_descriptor, OpenAIChat):
|
192
|
-
return ModelDescriptorMapper.to_openai_chat(model_descriptor)
|
193
|
-
|
194
|
-
raise FrogmlException(
|
195
|
-
f"Got unsupported model descriptor: {repr(model_descriptor)}"
|
196
|
-
)
|
@@ -1,190 +0,0 @@
|
|
1
|
-
import grpc
|
2
|
-
from dependency_injector.wiring import Provide, inject
|
3
|
-
|
4
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
5
|
-
CreatePromptRequest as ProtoCreatePromptRequest,
|
6
|
-
)
|
7
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
8
|
-
CreatePromptResponse as ProtoCreatePromptResponse,
|
9
|
-
)
|
10
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
11
|
-
CreatePromptVersionRequest as ProtoCreatePromptVersionRequest,
|
12
|
-
)
|
13
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
14
|
-
CreatePromptVersionResponse as ProtoCreatePromptVersionResponse,
|
15
|
-
)
|
16
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
17
|
-
DeletePromptRequest as ProtoDeletePromptRequest,
|
18
|
-
)
|
19
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
20
|
-
DeletePromptVersionRequest as ProtoDeletePromptVersionRequest,
|
21
|
-
)
|
22
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
23
|
-
GetPromptByNameRequest as ProtoGetPromptByNameRequest,
|
24
|
-
)
|
25
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
26
|
-
GetPromptByNameResponse as ProtoGetPromptByNameResponse,
|
27
|
-
)
|
28
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
29
|
-
GetPromptVersionByPromptNameRequest as ProtoGetPromptVersionByPromptNameRequest,
|
30
|
-
)
|
31
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
32
|
-
GetPromptVersionByPromptNameResponse as ProtoGetPromptVersionByPromptNameResponse,
|
33
|
-
)
|
34
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2 import (
|
35
|
-
SetDefaultPromptVersionRequest as ProtoSetDefaultPromptVersionRequest,
|
36
|
-
)
|
37
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_manager_service_pb2_grpc import (
|
38
|
-
PromptManagerServiceStub,
|
39
|
-
)
|
40
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import Prompt as ProtoPrompt
|
41
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import PromptSpec as ProtoPromptSpec
|
42
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
43
|
-
PromptVersion as ProtoPromptVersion,
|
44
|
-
)
|
45
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
46
|
-
PromptVersionSpec as ProtoPromptVersionSpec,
|
47
|
-
)
|
48
|
-
from frogml_core.exceptions import FrogmlException
|
49
|
-
from frogml_core.inner.di_configuration import FrogmlContainer
|
50
|
-
|
51
|
-
|
52
|
-
class PromptManagerClient:
|
53
|
-
@inject
|
54
|
-
def __init__(self, grpc_channel=Provide[FrogmlContainer.core_grpc_channel]):
|
55
|
-
self._grpc_client: PromptManagerServiceStub = PromptManagerServiceStub(
|
56
|
-
grpc_channel
|
57
|
-
)
|
58
|
-
|
59
|
-
def create_prompt(
|
60
|
-
self,
|
61
|
-
name: str,
|
62
|
-
prompt_description: str,
|
63
|
-
version_spec: ProtoPromptVersionSpec,
|
64
|
-
) -> ProtoPrompt:
|
65
|
-
request = ProtoCreatePromptRequest(
|
66
|
-
prompt_name=name,
|
67
|
-
prompt_spec=ProtoPromptSpec(description=prompt_description),
|
68
|
-
prompt_version_spec=version_spec,
|
69
|
-
)
|
70
|
-
try:
|
71
|
-
response: ProtoCreatePromptResponse = self._grpc_client.CreatePrompt(
|
72
|
-
request
|
73
|
-
)
|
74
|
-
return response.prompt
|
75
|
-
except grpc.RpcError as error:
|
76
|
-
call: grpc.Call = error # noqa
|
77
|
-
if call.code() == grpc.StatusCode.ALREADY_EXISTS:
|
78
|
-
raise FrogmlException(f"Prompt with name: {name} already exists")
|
79
|
-
elif call.code() == grpc.StatusCode.INVALID_ARGUMENT:
|
80
|
-
raise FrogmlException(
|
81
|
-
f"Got an illegal prompt specification: {call.details()}"
|
82
|
-
)
|
83
|
-
else:
|
84
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
85
|
-
|
86
|
-
def create_prompt_version(
|
87
|
-
self,
|
88
|
-
name: str,
|
89
|
-
version_spec: ProtoPromptVersionSpec,
|
90
|
-
set_default: bool,
|
91
|
-
) -> ProtoPromptVersion:
|
92
|
-
request = ProtoCreatePromptVersionRequest(
|
93
|
-
prompt_name=name, prompt_version_spec=version_spec, set_default=set_default
|
94
|
-
)
|
95
|
-
try:
|
96
|
-
response: ProtoCreatePromptVersionResponse = (
|
97
|
-
self._grpc_client.CreatePromptVersion(request)
|
98
|
-
)
|
99
|
-
return response.prompt_version
|
100
|
-
except grpc.RpcError as error:
|
101
|
-
call: grpc.Call = error # noqa
|
102
|
-
if call.code() == grpc.StatusCode.NOT_FOUND:
|
103
|
-
raise FrogmlException(
|
104
|
-
f"Can not update prompt: '{name}', prompt was not found"
|
105
|
-
)
|
106
|
-
elif call.code() == grpc.StatusCode.INVALID_ARGUMENT:
|
107
|
-
raise FrogmlException(
|
108
|
-
f"Got an illegal prompt specification: {call.details()}"
|
109
|
-
)
|
110
|
-
else:
|
111
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
112
|
-
|
113
|
-
def delete_prompt(self, name: str):
|
114
|
-
try:
|
115
|
-
self._grpc_client.DeletePrompt(ProtoDeletePromptRequest(prompt_name=name))
|
116
|
-
except grpc.RpcError as error:
|
117
|
-
call: grpc.Call = error # noqa
|
118
|
-
if call.code() == grpc.StatusCode.NOT_FOUND:
|
119
|
-
raise FrogmlException(f"Prompt named '{name}' was not found")
|
120
|
-
else:
|
121
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
122
|
-
|
123
|
-
def delete_prompt_version(self, name: str, version: int):
|
124
|
-
try:
|
125
|
-
self._grpc_client.DeletePromptVersion(
|
126
|
-
ProtoDeletePromptVersionRequest(
|
127
|
-
prompt_name=name, version_number=version
|
128
|
-
)
|
129
|
-
)
|
130
|
-
except grpc.RpcError as error:
|
131
|
-
call: grpc.Call = error # noqa
|
132
|
-
if call.code() == grpc.StatusCode.NOT_FOUND:
|
133
|
-
raise FrogmlException(str(call.details()))
|
134
|
-
elif call.code() == grpc.StatusCode.FAILED_PRECONDITION:
|
135
|
-
raise FrogmlException(
|
136
|
-
f"Cannot delete the default version '{version}' of a prompt '{name}',"
|
137
|
-
f" please set another version as the default to delete this version."
|
138
|
-
)
|
139
|
-
else:
|
140
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
141
|
-
|
142
|
-
def get_prompt_by_name(self, name: str) -> ProtoPrompt:
|
143
|
-
"""
|
144
|
-
Get prompt's default version
|
145
|
-
"""
|
146
|
-
try:
|
147
|
-
response: ProtoGetPromptByNameResponse = self._grpc_client.GetPromptByName(
|
148
|
-
ProtoGetPromptByNameRequest(prompt_name=name)
|
149
|
-
)
|
150
|
-
return response.prompt
|
151
|
-
except grpc.RpcError as error:
|
152
|
-
call: grpc.Call = error # noqa
|
153
|
-
if call.code() == grpc.StatusCode.NOT_FOUND:
|
154
|
-
raise FrogmlException(str(call.details()))
|
155
|
-
else:
|
156
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
157
|
-
|
158
|
-
def get_prompt_version_by_name(self, name: str, version: int) -> ProtoPromptVersion:
|
159
|
-
"""
|
160
|
-
Get prompt specific version
|
161
|
-
"""
|
162
|
-
try:
|
163
|
-
response: ProtoGetPromptVersionByPromptNameResponse = (
|
164
|
-
self._grpc_client.GetPromptVersionByPromptName(
|
165
|
-
ProtoGetPromptVersionByPromptNameRequest(
|
166
|
-
prompt_name=name, version_number=version
|
167
|
-
)
|
168
|
-
)
|
169
|
-
)
|
170
|
-
return response.prompt_version
|
171
|
-
except grpc.RpcError as error:
|
172
|
-
call: grpc.Call = error # noqa
|
173
|
-
if call.code() == grpc.StatusCode.NOT_FOUND:
|
174
|
-
raise FrogmlException(str(call.details()))
|
175
|
-
else:
|
176
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
177
|
-
|
178
|
-
def set_default_prompt_version(self, name: str, version: int):
|
179
|
-
try:
|
180
|
-
self._grpc_client.SetDefaultPromptVersion(
|
181
|
-
ProtoSetDefaultPromptVersionRequest(
|
182
|
-
prompt_name=name, version_number=version
|
183
|
-
)
|
184
|
-
)
|
185
|
-
except grpc.RpcError as error:
|
186
|
-
call: grpc.Call = error # noqa
|
187
|
-
if call.code() == grpc.StatusCode.NOT_FOUND:
|
188
|
-
raise FrogmlException(str(call.details()))
|
189
|
-
else:
|
190
|
-
raise FrogmlException(f"Internal Error: {call.details()}")
|
@@ -1,264 +0,0 @@
|
|
1
|
-
from typing import List, Optional
|
2
|
-
|
3
|
-
from frogml_proto.qwak.model_descriptor.open_ai_descriptor_pb2 import (
|
4
|
-
OpenAIModelDescriptor as ProtoOpenAIModelDescriptor,
|
5
|
-
)
|
6
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
7
|
-
AIPromptMessageRole as ProtoAIPromptMessageRole,
|
8
|
-
)
|
9
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
10
|
-
ChatMessage as ProtoChatMessage,
|
11
|
-
)
|
12
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
13
|
-
ChatMessageTemplate as ProtoChatMessageTemplate,
|
14
|
-
)
|
15
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
16
|
-
ChatPromptTemplate as ProtoChatPromptTemplate,
|
17
|
-
)
|
18
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
19
|
-
HumanPromptMessageRole as ProtoHumanPromptMessageRole,
|
20
|
-
)
|
21
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
22
|
-
PromptMessageRole as ProtoPromptMessageRole,
|
23
|
-
)
|
24
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
25
|
-
PromptModelDescriptor as ProtoPromptModelDescriptor,
|
26
|
-
)
|
27
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
28
|
-
PromptOpenAIProvider as ProtoPromptOpenAIProvider,
|
29
|
-
)
|
30
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
31
|
-
PromptTemplate as ProtoPromptTemplate,
|
32
|
-
)
|
33
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
34
|
-
PromptVersionDefinition as ProtoPromptVersionDefinition,
|
35
|
-
)
|
36
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
37
|
-
PromptVersionSpec as ProtoPromptVersionSpec,
|
38
|
-
)
|
39
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
40
|
-
SystemPromptMessageRole as ProtoSystemPromptMessageRole,
|
41
|
-
)
|
42
|
-
from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
|
43
|
-
TextTemplate as ProtoTextTemplate,
|
44
|
-
)
|
45
|
-
from frogml_core.clients.prompt_manager.model_descriptor_mapper import (
|
46
|
-
ModelDescriptorMapper,
|
47
|
-
)
|
48
|
-
from frogml_core.exceptions import FrogmlException
|
49
|
-
from frogml_core.llmops.model.descriptor import ChatModelDescriptor, ModelDescriptor
|
50
|
-
from frogml_core.llmops.prompt.base import BasePrompt, ChatPrompt, RegisteredPrompt
|
51
|
-
from frogml_core.llmops.prompt.chat.message import (
|
52
|
-
AIMessage,
|
53
|
-
BaseMessage,
|
54
|
-
HumanMessage,
|
55
|
-
SystemMessage,
|
56
|
-
)
|
57
|
-
from frogml_core.llmops.prompt.chat.template import (
|
58
|
-
AIMessagePromptTemplate,
|
59
|
-
BaseStringMessagePromptTemplate,
|
60
|
-
ChatPromptTemplate,
|
61
|
-
HumanMessagePromptTemplate,
|
62
|
-
SystemMessagePromptTemplate,
|
63
|
-
)
|
64
|
-
from frogml_core.llmops.prompt.template import BasePromptTemplate
|
65
|
-
|
66
|
-
|
67
|
-
class PromptProtoMapper:
|
68
|
-
@staticmethod
|
69
|
-
def from_chat_prompt_template(
|
70
|
-
chat_prompt_template: ProtoChatPromptTemplate,
|
71
|
-
) -> ChatPromptTemplate:
|
72
|
-
messages: List[BaseStringMessagePromptTemplate] = []
|
73
|
-
for chat_message in chat_prompt_template.chat_messages:
|
74
|
-
template: str = chat_message.template.text_template.template
|
75
|
-
role_type: str = chat_message.role.WhichOneof("role")
|
76
|
-
if role_type == "human_role":
|
77
|
-
messages.append(HumanMessagePromptTemplate(template=template))
|
78
|
-
elif role_type == "system_role":
|
79
|
-
messages.append(SystemMessagePromptTemplate(template=template))
|
80
|
-
elif role_type == "ai_role":
|
81
|
-
messages.append(AIMessagePromptTemplate(template=template))
|
82
|
-
else:
|
83
|
-
raise FrogmlException(
|
84
|
-
f"Got unsupported chat message type: {repr(chat_message)}"
|
85
|
-
)
|
86
|
-
|
87
|
-
return ChatPromptTemplate(messages=messages)
|
88
|
-
|
89
|
-
@staticmethod
|
90
|
-
def from_prompt_template(
|
91
|
-
prompt_template: ProtoPromptTemplate,
|
92
|
-
) -> BasePromptTemplate:
|
93
|
-
if prompt_template.WhichOneof("type") == "chat_prompt_template":
|
94
|
-
return PromptProtoMapper.from_chat_prompt_template(
|
95
|
-
chat_prompt_template=prompt_template.chat_prompt_template
|
96
|
-
)
|
97
|
-
else:
|
98
|
-
raise FrogmlException(
|
99
|
-
f"Got unsupported prompt template: {repr(prompt_template)}"
|
100
|
-
)
|
101
|
-
|
102
|
-
@staticmethod
|
103
|
-
def to_prompt_model_descriptor(
|
104
|
-
model_descriptor: ModelDescriptor,
|
105
|
-
) -> ProtoPromptModelDescriptor:
|
106
|
-
proto_model_descriptor = ModelDescriptorMapper.to_model_descriptor(
|
107
|
-
model_descriptor=model_descriptor
|
108
|
-
)
|
109
|
-
if isinstance(proto_model_descriptor, ProtoOpenAIModelDescriptor):
|
110
|
-
return ProtoPromptModelDescriptor(
|
111
|
-
open_ai_provider=ProtoPromptOpenAIProvider(
|
112
|
-
open_ai_model_descriptor=proto_model_descriptor
|
113
|
-
)
|
114
|
-
)
|
115
|
-
|
116
|
-
raise FrogmlException(
|
117
|
-
f"Got unsupported model descriptor: {repr(model_descriptor)}"
|
118
|
-
)
|
119
|
-
|
120
|
-
@staticmethod
|
121
|
-
def _prompt_template_to_chat_message(
|
122
|
-
msg: BaseStringMessagePromptTemplate,
|
123
|
-
) -> ProtoChatMessage:
|
124
|
-
role: ProtoPromptMessageRole
|
125
|
-
text_template: str = msg.template.template
|
126
|
-
|
127
|
-
if isinstance(msg, SystemMessagePromptTemplate):
|
128
|
-
role = ProtoPromptMessageRole(system_role=ProtoSystemPromptMessageRole())
|
129
|
-
elif isinstance(msg, HumanMessagePromptTemplate):
|
130
|
-
role = ProtoPromptMessageRole(human_role=ProtoHumanPromptMessageRole())
|
131
|
-
elif isinstance(msg, AIMessagePromptTemplate):
|
132
|
-
role = ProtoPromptMessageRole(ai_role=ProtoAIPromptMessageRole())
|
133
|
-
else:
|
134
|
-
raise FrogmlException(f"Got unsupported prompt template role: {repr(msg)}")
|
135
|
-
|
136
|
-
return ProtoChatMessage(
|
137
|
-
role=role,
|
138
|
-
template=ProtoChatMessageTemplate(
|
139
|
-
text_template=ProtoTextTemplate(template=text_template)
|
140
|
-
),
|
141
|
-
)
|
142
|
-
|
143
|
-
@staticmethod
def _base_message_to_chat_message(msg: BaseMessage) -> ProtoChatMessage:
    """Convert a concrete (non-template) chat message into its proto form.

    The message's class picks the proto role (AI / human / system); the
    message content becomes the literal text template. Unknown classes
    raise a FrogmlException.
    """
    # Capture the content first; role resolution follows.
    message_text: str = msg.content

    if isinstance(msg, AIMessage):
        message_role = ProtoPromptMessageRole(ai_role=ProtoAIPromptMessageRole())
    elif isinstance(msg, HumanMessage):
        message_role = ProtoPromptMessageRole(
            human_role=ProtoHumanPromptMessageRole()
        )
    elif isinstance(msg, SystemMessage):
        message_role = ProtoPromptMessageRole(
            system_role=ProtoSystemPromptMessageRole()
        )
    else:
        raise FrogmlException(f"Got unsupported prompt template role: {repr(msg)}")

    return ProtoChatMessage(
        role=message_role,
        template=ProtoChatMessageTemplate(
            text_template=ProtoTextTemplate(template=message_text)
        ),
    )
@staticmethod
def to_proto_chat_prompt_template(
    prompt_template: ChatPromptTemplate,
) -> ProtoPromptTemplate:
    """Convert a ChatPromptTemplate into a ProtoPromptTemplate.

    Template messages are dispatched by type: string-template messages go
    through _prompt_template_to_chat_message, concrete messages through
    _base_message_to_chat_message.

    NOTE(review): messages of any other type are silently skipped —
    presumably intentional best-effort behavior; confirm before relying
    on it.
    """
    chat_messages: List[ProtoChatMessage] = [
        PromptProtoMapper._prompt_template_to_chat_message(msg=message)
        if isinstance(message, BaseStringMessagePromptTemplate)
        else PromptProtoMapper._base_message_to_chat_message(msg=message)
        for message in prompt_template.messages
        # Only the two known message kinds are mapped; others are dropped.
        if isinstance(message, (BaseStringMessagePromptTemplate, BaseMessage))
    ]

    return ProtoPromptTemplate(
        chat_prompt_template=ProtoChatPromptTemplate(chat_messages=chat_messages)
    )
@staticmethod
def to_proto_prompt_template(
    prompt_template: BasePromptTemplate,
) -> ProtoPromptTemplate:
    """Convert a BasePromptTemplate into its proto representation.

    Only ChatPromptTemplate instances are supported; anything else raises
    a FrogmlException.
    """
    # Guard clause: fail fast on unsupported template types.
    if not isinstance(prompt_template, ChatPromptTemplate):
        raise FrogmlException(
            f"Got unsupported prompt template: {repr(prompt_template)}"
        )

    return PromptProtoMapper.to_proto_chat_prompt_template(
        prompt_template=prompt_template
    )
@staticmethod
def to_prompt_version_spec(
    version_description: str,
    prompt_template: BasePromptTemplate,
    model_descriptor: Optional[ModelDescriptor],
) -> ProtoPromptVersionSpec:
    """Assemble a ProtoPromptVersionSpec from its high-level parts.

    The model descriptor is optional; when absent (falsy) the proto field
    is left unset. The descriptor is mapped before the template, matching
    the original evaluation order.
    """
    proto_descriptor: Optional[ProtoPromptModelDescriptor] = (
        PromptProtoMapper.to_prompt_model_descriptor(
            model_descriptor=model_descriptor
        )
        if model_descriptor
        else None
    )

    proto_template: ProtoPromptTemplate = PromptProtoMapper.to_proto_prompt_template(
        prompt_template=prompt_template
    )

    return ProtoPromptVersionSpec(
        description=version_description,
        prompt_template=proto_template,
        model_descriptor=proto_descriptor,
    )
@staticmethod
def from_prompt_version_definition(
    prompt_version_def: ProtoPromptVersionDefinition,
) -> BasePrompt:
    """Build a BasePrompt from a proto prompt version definition.

    The version spec's prompt template is mapped back to a langchain-style
    template, and the optional model descriptor (when the proto field is
    set) back to a ModelDescriptor. A ChatPrompt is returned when the
    template is a ChatPromptTemplate and the descriptor is absent or a
    ChatModelDescriptor.

    Raises:
        FrogmlException: for any unsupported template/descriptor
            combination. (Bug fix: the original fell through and
            implicitly returned None, violating the declared
            ``-> BasePrompt`` return type and deferring the failure to
            the caller.)
    """
    version_spec = prompt_version_def.version_spec

    base_prompt_template: BasePromptTemplate = PromptProtoMapper.from_prompt_template(
        version_spec.prompt_template
    )

    model_descriptor: Optional[ModelDescriptor] = None
    if version_spec.HasField("model_descriptor"):
        model_descriptor = ModelDescriptorMapper.from_prompt_model_descriptor(
            model_descriptor=version_spec.model_descriptor
        )

    if isinstance(base_prompt_template, ChatPromptTemplate) and (
        not model_descriptor or isinstance(model_descriptor, ChatModelDescriptor)
    ):
        return ChatPrompt(template=base_prompt_template, model=model_descriptor)

    # Explicit failure instead of the original silent `return None`.
    raise FrogmlException(
        f"Got unsupported prompt version definition: {repr(prompt_version_def)}"
    )
@staticmethod
def from_prompt(
    name: str,
    prompt_description: str,
    version_description: str,
    version: int,
    target_default_version: bool,
    prompt_version_definition: ProtoPromptVersionDefinition,
) -> RegisteredPrompt:
    """Build a RegisteredPrompt from proto-level prompt version data.

    Delegates the heavy lifting of the version definition mapping to
    from_prompt_version_definition and packs the result together with the
    prompt's identifying metadata.
    """
    mapped_prompt = PromptProtoMapper.from_prompt_version_definition(
        prompt_version_def=prompt_version_definition
    )
    return RegisteredPrompt(
        name=name,
        prompt_description=prompt_description,
        version_description=version_description,
        version=version,
        _target_default_version=target_default_version,
        prompt=mapped_prompt,
    )