frogml-core 0.0.113__py3-none-any.whl → 0.0.115__py3-none-any.whl

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (137)
  1. frogml_core/__init__.py +1 -1
  2. frogml_core/clients/administration/authentication/client.py +2 -2
  3. frogml_core/clients/batch_job_management/client.py +4 -4
  4. frogml_core/clients/build_orchestrator/build_model_request_getter.py +6 -6
  5. frogml_core/clients/build_orchestrator/client.py +12 -12
  6. frogml_core/clients/build_orchestrator/internal_client.py +10 -10
  7. frogml_core/frogml_client/build_api_helpers/build_api_steps.py +3 -3
  8. frogml_core/inner/build_logic/constants/upload_tag.py +7 -7
  9. frogml_core/inner/build_logic/interface/context_interface.py +1 -1
  10. frogml_core/inner/build_logic/phases/phase_010_fetch_model/fetch_strategy_manager/strategy/strategy.py +4 -4
  11. frogml_core/inner/build_logic/phases/phase_010_fetch_model/set_version_step.py +3 -3
  12. frogml_core/inner/build_logic/phases/phase_020_remote_register_frogml_build/start_remote_build_step.py +3 -3
  13. frogml_core/inner/build_logic/phases/phase_020_remote_register_frogml_build/upload_step.py +11 -9
  14. frogml_core/inner/build_logic/tools/ignore_files.py +3 -3
  15. frogml_core/inner/di_configuration/__init__.py +0 -6
  16. frogml_core/model/adapters/__init__.py +1 -1
  17. frogml_core/model/analytics_logging.py +1 -1
  18. frogml_core/model/tools/adapters/input.py +6 -6
  19. frogml_core/model/tools/adapters/output.py +8 -8
  20. frogml_core/model/tools/run_model_locally.py +2 -2
  21. frogml_core/model/utils/feature_utils.py +1 -1
  22. {frogml_core-0.0.113.dist-info → frogml_core-0.0.115.dist-info}/METADATA +1 -1
  23. {frogml_core-0.0.113.dist-info → frogml_core-0.0.115.dist-info}/RECORD +30 -137
  24. frogml_services_mock/mocks/analytics_api.py +6 -6
  25. frogml_services_mock/mocks/ecosystem_service_api.py +2 -2
  26. frogml_services_mock/mocks/frogml_mocks.py +0 -11
  27. frogml_services_mock/services_mock.py +4 -52
  28. frogml_storage/__init__.py +1 -1
  29. frogml_core/clients/prompt_manager/__init__.py +0 -0
  30. frogml_core/clients/prompt_manager/model_descriptor_mapper.py +0 -196
  31. frogml_core/clients/prompt_manager/prompt_manager_client.py +0 -190
  32. frogml_core/clients/prompt_manager/prompt_proto_mapper.py +0 -264
  33. frogml_core/clients/vector_store/__init__.py +0 -2
  34. frogml_core/clients/vector_store/management_client.py +0 -127
  35. frogml_core/clients/vector_store/serving_client.py +0 -157
  36. frogml_core/clients/workspace_manager/__init__.py +0 -1
  37. frogml_core/clients/workspace_manager/client.py +0 -224
  38. frogml_core/llmops/__init__.py +0 -0
  39. frogml_core/llmops/generation/__init__.py +0 -0
  40. frogml_core/llmops/generation/_steaming.py +0 -78
  41. frogml_core/llmops/generation/base.py +0 -5
  42. frogml_core/llmops/generation/chat/__init__.py +0 -0
  43. frogml_core/llmops/generation/chat/openai/LICENSE.txt +0 -201
  44. frogml_core/llmops/generation/chat/openai/types/__init__.py +0 -0
  45. frogml_core/llmops/generation/chat/openai/types/chat/__init__.py +0 -0
  46. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion.py +0 -88
  47. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_assistant_message_param.py +0 -65
  48. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_chunk.py +0 -153
  49. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_content_part_text_param.py +0 -28
  50. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_call_option_param.py +0 -25
  51. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_message_param.py +0 -33
  52. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message.py +0 -56
  53. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_param.py +0 -34
  54. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call.py +0 -46
  55. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call_param.py +0 -44
  56. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_named_tool_choice_param.py +0 -32
  57. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_role.py +0 -20
  58. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_system_message_param.py +0 -35
  59. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_token_logprob.py +0 -71
  60. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_choice_option_param.py +0 -28
  61. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_message_param.py +0 -31
  62. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_param.py +0 -29
  63. frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_user_message_param.py +0 -35
  64. frogml_core/llmops/generation/chat/openai/types/chat/completion_create_params.py +0 -279
  65. frogml_core/llmops/generation/chat/openai/types/completion_choice.py +0 -47
  66. frogml_core/llmops/generation/chat/openai/types/completion_create_params.py +0 -209
  67. frogml_core/llmops/generation/chat/openai/types/completion_usage.py +0 -30
  68. frogml_core/llmops/generation/chat/openai/types/model.py +0 -35
  69. frogml_core/llmops/generation/chat/openai/types/shared/__init__.py +0 -3
  70. frogml_core/llmops/generation/chat/openai/types/shared/error_object.py +0 -27
  71. frogml_core/llmops/generation/chat/openai/types/shared/function_definition.py +0 -49
  72. frogml_core/llmops/generation/chat/openai/types/shared/function_parameters.py +0 -20
  73. frogml_core/llmops/generation/chat/openai/types/shared_params/__init__.py +0 -2
  74. frogml_core/llmops/generation/chat/openai/types/shared_params/function_definition.py +0 -49
  75. frogml_core/llmops/generation/chat/openai/types/shared_params/function_parameters.py +0 -22
  76. frogml_core/llmops/generation/streaming.py +0 -26
  77. frogml_core/llmops/model/__init__.py +0 -0
  78. frogml_core/llmops/model/descriptor.py +0 -40
  79. frogml_core/llmops/prompt/__init__.py +0 -0
  80. frogml_core/llmops/prompt/base.py +0 -136
  81. frogml_core/llmops/prompt/chat/__init__.py +0 -0
  82. frogml_core/llmops/prompt/chat/message.py +0 -24
  83. frogml_core/llmops/prompt/chat/template.py +0 -113
  84. frogml_core/llmops/prompt/chat/value.py +0 -10
  85. frogml_core/llmops/prompt/manager.py +0 -138
  86. frogml_core/llmops/prompt/template.py +0 -24
  87. frogml_core/llmops/prompt/value.py +0 -14
  88. frogml_core/llmops/provider/__init__.py +0 -0
  89. frogml_core/llmops/provider/chat.py +0 -44
  90. frogml_core/llmops/provider/openai/__init__.py +0 -0
  91. frogml_core/llmops/provider/openai/client.py +0 -126
  92. frogml_core/llmops/provider/openai/provider.py +0 -93
  93. frogml_core/vector_store/__init__.py +0 -4
  94. frogml_core/vector_store/client.py +0 -151
  95. frogml_core/vector_store/collection.py +0 -429
  96. frogml_core/vector_store/filters.py +0 -359
  97. frogml_core/vector_store/inference_client.py +0 -105
  98. frogml_core/vector_store/rest_helpers.py +0 -81
  99. frogml_core/vector_store/utils/__init__.py +0 -0
  100. frogml_core/vector_store/utils/filter_utils.py +0 -23
  101. frogml_core/vector_store/utils/upsert_utils.py +0 -218
  102. frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.py +0 -77
  103. frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.pyi +0 -417
  104. frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2_grpc.py +0 -441
  105. frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.py +0 -69
  106. frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.pyi +0 -415
  107. frogml_proto/qwak/prompt/v1/prompt/prompt_pb2_grpc.py +0 -4
  108. frogml_proto/qwak/vectors/v1/collection/collection_pb2.py +0 -46
  109. frogml_proto/qwak/vectors/v1/collection/collection_pb2.pyi +0 -287
  110. frogml_proto/qwak/vectors/v1/collection/collection_pb2_grpc.py +0 -4
  111. frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.py +0 -60
  112. frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.pyi +0 -258
  113. frogml_proto/qwak/vectors/v1/collection/collection_service_pb2_grpc.py +0 -304
  114. frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.py +0 -28
  115. frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.pyi +0 -41
  116. frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2_grpc.py +0 -4
  117. frogml_proto/qwak/vectors/v1/filters_pb2.py +0 -52
  118. frogml_proto/qwak/vectors/v1/filters_pb2.pyi +0 -297
  119. frogml_proto/qwak/vectors/v1/filters_pb2_grpc.py +0 -4
  120. frogml_proto/qwak/vectors/v1/vector_pb2.py +0 -38
  121. frogml_proto/qwak/vectors/v1/vector_pb2.pyi +0 -142
  122. frogml_proto/qwak/vectors/v1/vector_pb2_grpc.py +0 -4
  123. frogml_proto/qwak/vectors/v1/vector_service_pb2.py +0 -53
  124. frogml_proto/qwak/vectors/v1/vector_service_pb2.pyi +0 -243
  125. frogml_proto/qwak/vectors/v1/vector_service_pb2_grpc.py +0 -201
  126. frogml_proto/qwak/workspace/workspace_pb2.py +0 -50
  127. frogml_proto/qwak/workspace/workspace_pb2.pyi +0 -331
  128. frogml_proto/qwak/workspace/workspace_pb2_grpc.py +0 -4
  129. frogml_proto/qwak/workspace/workspace_service_pb2.py +0 -84
  130. frogml_proto/qwak/workspace/workspace_service_pb2.pyi +0 -393
  131. frogml_proto/qwak/workspace/workspace_service_pb2_grpc.py +0 -507
  132. frogml_services_mock/mocks/prompt_manager_service.py +0 -281
  133. frogml_services_mock/mocks/vector_serving_api.py +0 -159
  134. frogml_services_mock/mocks/vectors_management_api.py +0 -97
  135. frogml_services_mock/mocks/workspace_manager_service_mock.py +0 -202
  136. /frogml_core/model/adapters/output_adapters/{qwak_with_default_fallback.py → frogml_with_default_fallback.py} +0 -0
  137. {frogml_core-0.0.113.dist-info → frogml_core-0.0.115.dist-info}/WHEEL +0 -0
frogml_core/llmops/generation/chat/openai/types/shared/function_parameters.py
@@ -1,20 +0,0 @@
- #
- # Copyright 2024 OpenAI
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from typing import Dict
-
- __all__ = ["FunctionParameters"]
-
- FunctionParameters = Dict[str, object]
frogml_core/llmops/generation/chat/openai/types/shared_params/__init__.py
@@ -1,2 +0,0 @@
- from .function_definition import FunctionDefinition as FunctionDefinition
- from .function_parameters import FunctionParameters as FunctionParameters
frogml_core/llmops/generation/chat/openai/types/shared_params/function_definition.py
@@ -1,49 +0,0 @@
- #
- # Copyright 2024 OpenAI
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import annotations
-
- from typing_extensions import Required, TypedDict
-
- from ...types import shared_params
-
- __all__ = ["FunctionDefinition"]
-
-
- class FunctionDefinition(TypedDict, total=False):
-     name: Required[str]
-     """The name of the function to be called.
-
-     Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length
-     of 64.
-     """
-
-     description: str
-     """
-     A description of what the function does, used by the model to choose when and
-     how to call the function.
-     """
-
-     parameters: shared_params.FunctionParameters
-     """The parameters the functions accepts, described as a JSON Schema object.
-
-     See the
-     [guide](https://platform.openai.com/docs/guides/text-generation/function-calling)
-     for examples, and the
-     [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for
-     documentation about the format.
-
-     Omitting `parameters` defines a function with an empty parameter list.
-     """
frogml_core/llmops/generation/chat/openai/types/shared_params/function_parameters.py
@@ -1,22 +0,0 @@
- #
- # Copyright 2024 OpenAI
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from __future__ import annotations
-
- from typing import Dict
-
- __all__ = ["FunctionParameters"]
-
- FunctionParameters = Dict[str, object]
frogml_core/llmops/generation/streaming.py
@@ -1,26 +0,0 @@
- from abc import ABC
- from typing import Generic, TypeVar
-
- from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion_chunk import (
-     ChatCompletionChunk,
- )
-
- try:
-     from collections import Iterable
-
-     iterableABC = Iterable
- except ImportError:
-     from collections.abc import Iterable
-
-     iterableABC = Iterable
-
-
- _T = TypeVar("_T")
-
-
- class Stream(Generic[_T], ABC, iterableABC):
-     pass
-
-
- class ChatCompletionStream(Stream[ChatCompletionChunk], ABC):
-     pass
frogml_core/llmops/model/__init__.py: File without changes
frogml_core/llmops/model/descriptor.py
@@ -1,40 +0,0 @@
- from abc import ABC
- from dataclasses import dataclass, field
- from typing import Dict, Iterable, List, Optional, Union
-
- from typing_extensions import Literal
-
- from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion_tool_choice_option_param import (
-     ChatCompletionToolChoiceOptionParam,
- )
- from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion_tool_param import (
-     ChatCompletionToolParam,
- )
-
-
- class ModelDescriptor(ABC):
-     pass
-
-
- class ChatModelDescriptor(ModelDescriptor):
-     pass
-
-
- @dataclass
- class OpenAIChat(ChatModelDescriptor):
-     model_id: str
-     frequency_penalty: Optional[float] = field(default=None)
-     logit_bias: Optional[Dict[str, int]] = field(default=None)
-     logprobs: Optional[bool] = field(default=None)
-     max_tokens: Optional[int] = field(default=None)
-     n: Optional[int] = field(default=None)
-     presence_penalty: Optional[float] = field(default=None)
-     response_format: Literal["text", "json_object"] = "text"
-     seed: Optional[int] = field(default=None)
-     stop: Union[Optional[str], List[str], None] = field(default=None)
-     temperature: Optional[float] = field(default=None)
-     top_p: Optional[float] = field(default=None)
-     top_logprobs: Optional[int] = field(default=None)
-     tool_choice: Optional[ChatCompletionToolChoiceOptionParam] = field(default=None)
-     tools: Optional[Iterable[ChatCompletionToolParam]] = field(default=None)
-     user: Optional[str] = field(default=None)
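
For context, `OpenAIChat` was the only concrete `ChatModelDescriptor` and carried the OpenAI sampling parameters. A minimal usage sketch, valid only against frogml-core 0.0.113 or earlier and based solely on the dataclass above (the model id is illustrative):

```python
# Hedged sketch: this import path exists only in frogml-core <= 0.0.113,
# since descriptor.py is deleted in 0.0.115.
from frogml_core.llmops.model.descriptor import OpenAIChat

descriptor = OpenAIChat(
    model_id="gpt-4o",       # illustrative model id; the only required field
    temperature=0.2,         # unset sampling fields default to None
    max_tokens=256,
    response_format="text",  # Literal["text", "json_object"]; "text" is the default
)
```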
frogml_core/llmops/prompt/__init__.py: File without changes
frogml_core/llmops/prompt/base.py
@@ -1,136 +0,0 @@
- import os
- from abc import ABC, abstractmethod
- from dataclasses import dataclass, field
- from typing import Callable, Dict, Optional, Union
-
- from cachetools import TTLCache, cached
-
- from frogml_core.exceptions import FrogmlException
- from frogml_core.llmops.generation.base import ModelResponse
- from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion import (
-     ChatCompletion,
- )
- from frogml_core.llmops.generation.streaming import ChatCompletionStream, Stream
- from frogml_core.llmops.model.descriptor import ChatModelDescriptor, ModelDescriptor
- from frogml_core.llmops.prompt.chat.template import ChatPromptTemplate
- from frogml_core.llmops.prompt.chat.value import ChatPromptValue
- from frogml_core.llmops.prompt.value import PromptValue
- from frogml_core.llmops.provider.chat import ChatCompletionProvider
-
-
- @dataclass
- class BasePrompt(ABC):
-     @abstractmethod
-     def render(self, variables: Dict[str, any]) -> PromptValue:
-         pass
-
-     @abstractmethod
-     def invoke(
-         self,
-         variables: Optional[Dict[str, any]] = None,
-         model_override: Optional[ModelDescriptor] = None,
-         stream: bool = False,
-     ) -> Union[ModelResponse, Stream]:
-         pass
-
-
- @dataclass
- class ChatPrompt(BasePrompt):
-     template: ChatPromptTemplate
-     model: Optional[ChatModelDescriptor]
-
-     def __post_init__(self):
-         self._validate()
-
-     def _validate(self):
-         if not isinstance(self.template, ChatPromptTemplate) or (
-             self.model and not isinstance(self.model, ChatModelDescriptor)
-         ):
-             raise ValueError("ChatPrompt initiated with non-chat type fields!")
-
-     def render(self, variables: Dict[str, any]) -> ChatPromptValue:
-         return self.template.render(variables=variables)
-
-     def invoke(
-         self,
-         variables: Optional[Dict[str, any]] = None,
-         model_override: Optional[ModelDescriptor] = None,
-         stream: bool = False,
-     ) -> Union[ChatCompletion, ChatCompletionStream]:
-         if not variables:
-             variables = dict()
-         if not self.model and not model_override:
-             raise FrogmlException(
-                 "Can't invoke a prompt without a `ModelDescriptor`."
-                 " Please provide one using the model_override "
-                 "or create a ChatPrompt with a model."
-             )
-
-         return ChatCompletionProvider.invoke(
-             chat_prompt_value=self.render(variables=variables),
-             chat_model_descriptor=model_override if model_override else self.model,
-             stream=stream,
-         )
-
-
- @dataclass
- class RegisteredPrompt(BasePrompt):
-     name: str
-     prompt_description: str
-     version_description: str
-     version: int
-     _target_default_version: bool
-     prompt: BasePrompt
-     _cache: Callable[[str], "RegisteredPrompt"] = field(init=False, default=None)
-     _prompt_manager: "PromptManager" = None  # noqa
-
-     def _get_prompt_manager(self) -> "PromptManager":  # noqa
-         from frogml_core.llmops.prompt.manager import PromptManager
-
-         if not self._prompt_manager:
-             self._prompt_manager = PromptManager()
-         return self._prompt_manager
-
-     def _get_prompt_default_version_internal(self, name: str) -> "RegisteredPrompt":
-         return self._get_prompt_manager().get_prompt(name=name, version=None)
-
-     def _get_prompt_default_version(self, *, name: str) -> "RegisteredPrompt":
-         if not self._cache:
-             cache = cached(
-                 cache=TTLCache(
-                     maxsize=1, ttl=float(os.environ.get("_PROMPT_CACHE_SECONDS", "60"))
-                 ),
-                 key=lambda *args, **kwargs: kwargs["name"],
-             )
-             self._cache = cache(self._get_prompt_default_version_internal)
-         return self._cache(name=name)  # noqa
-
-     def _handle_default_version_swap(self):
-         if self._target_default_version:
-             new_default_version_prompt: RegisteredPrompt = (
-                 self._get_prompt_default_version(name=self.name)
-             )
-             self.prompt_description = new_default_version_prompt.prompt_description
-             if self.version != new_default_version_prompt.version:
-                 self.version_description = (
-                     new_default_version_prompt.version_description
-                 )
-                 self.version = new_default_version_prompt.version
-                 self.prompt = new_default_version_prompt.prompt
-
-     def render(self, variables: Dict[str, any]) -> PromptValue:
-         self._handle_default_version_swap()
-         return self.prompt.render(variables=variables)
-
-     def invoke(
-         self,
-         variables: Optional[Dict[str, any]] = None,
-         model_override: Optional[ModelDescriptor] = None,
-         stream: bool = False,
-     ) -> Union[ModelResponse, Stream]:
-         if not variables:
-             variables = dict()
-         self._handle_default_version_swap()
-         return self.prompt.invoke(
-             variables=variables, model_override=model_override, stream=stream
-         )
frogml_core/llmops/prompt/chat/__init__.py: File without changes
frogml_core/llmops/prompt/chat/message.py
@@ -1,24 +0,0 @@
- from abc import ABC
- from dataclasses import dataclass, field
-
- from frogml_core.llmops.prompt.value import PromptValue
-
-
- @dataclass
- class BaseMessage(PromptValue, ABC):
-     content: str
-     role_name: str = field(
-         init=False,
-     )
-
-
- class AIMessage(BaseMessage):
-     role_name: str = "ai"
-
-
- class HumanMessage(BaseMessage):
-     role_name: str = "human"
-
-
- class SystemMessage(BaseMessage):
-     role_name: str = "system"
frogml_core/llmops/prompt/chat/template.py
@@ -1,113 +0,0 @@
- import re
- from abc import ABC, abstractmethod
- from dataclasses import dataclass, field
- from typing import Dict, List, Tuple, Union
-
- from frogml_core.llmops.prompt.chat.message import (
-     AIMessage,
-     BaseMessage,
-     HumanMessage,
-     SystemMessage,
- )
- from frogml_core.llmops.prompt.chat.value import ChatPromptValue
- from frogml_core.llmops.prompt.template import BasePromptTemplate, StringPromptTemplate
-
-
- @dataclass
- class BaseMessagePromptTemplate(BasePromptTemplate):
-     @abstractmethod
-     def render(self, variables: Dict[str, any]) -> BaseMessage:
-         pass
-
-
- @dataclass
- class BaseStringMessagePromptTemplate(BaseMessagePromptTemplate, ABC):
-     template: StringPromptTemplate = field(init=False)
-     role_name: str = field(init=False)
-
-     def __init__(self, template: str):
-         self.template = StringPromptTemplate(template=template)
-
-
- class AIMessagePromptTemplate(BaseStringMessagePromptTemplate):
-     role_name: str = "ai"
-
-     def render(self, variables: Dict[str, any]) -> BaseMessage:
-         return AIMessage(content=self.template.render(variables=variables).to_string())
-
-
- class HumanMessagePromptTemplate(BaseStringMessagePromptTemplate):
-     role_name: str = "human"
-
-     def render(self, variables: Dict[str, any]) -> BaseMessage:
-         return HumanMessage(
-             content=self.template.render(variables=variables).to_string()
-         )
-
-
- class SystemMessagePromptTemplate(BaseStringMessagePromptTemplate):
-     role_name: str = "system"
-
-     def render(self, variables: Dict[str, any]) -> BaseMessage:
-         return SystemMessage(
-             content=self.template.render(variables=variables).to_string()
-         )
-
-
- @dataclass
- class ChatPromptTemplate(BasePromptTemplate):
-     messages: List[Union[BaseMessage, BaseStringMessagePromptTemplate]]
-
-     def render(self, variables: Dict[str, any]) -> ChatPromptValue:
-         resulting_messages: List[BaseMessage] = list()
-
-         for message in self.messages:
-             if isinstance(message, BaseMessage):
-                 resulting_messages.append(message)
-             elif isinstance(message, BaseStringMessagePromptTemplate):
-                 resulting_messages.append(message.render(variables=variables))
-             else:
-                 raise ValueError(
-                     f"Got unsupported message type: {repr(message)}. \n"
-                     "Supported messages are: "
-                     "AIMessagePromptTemplate, HumanMessagePromptTemplate, SystemMessagePromptTemplate, \n"
-                     "AIMessage, HumanMessage, SystemMessage."
-                 )
-
-         return ChatPromptValue(messages=resulting_messages)
-
-     def to_messages(self) -> List[Tuple[str, str]]:
-         """
-         Useful for integration with other libraries such as Langchain.
-
-         ```
-         ChatPromptTemplate(
-             messages=[
-                 SystemMessage("you are an assistant"),
-                 HumanMessagePromptTemplate("{{question}}")
-             ]
-         ).to_messages()
-
-         resulting in:
-
-         [("system", "you are an assistant"),
-          ("human", "{question}")]
-         ```
-
-         """
-
-         def strip_curly(string: str) -> str:
-             return re.sub(r"\{\{\s*([\w\s]+)\s*\}\}", repl=r"{\g<1>}", string=string)
-
-         if not self.messages:
-             return []
-
-         result: List[Tuple[str, str]] = []
-
-         for msg in self.messages:
-             if isinstance(msg, BaseMessage):
-                 result.append((msg.role_name, msg.content))
-             elif isinstance(msg, BaseStringMessagePromptTemplate):
-                 result.append((msg.role_name, strip_curly(msg.template.template)))
-
-         return result
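
The removed `ChatPromptTemplate` mixed literal messages with mustache-style message templates. A short sketch of the removed API, valid only against frogml-core 0.0.113 or earlier and following the `to_messages` docstring above:

```python
# Hedged sketch of the deleted template API (frogml-core <= 0.0.113).
from frogml_core.llmops.prompt.chat.message import SystemMessage
from frogml_core.llmops.prompt.chat.template import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
)

template = ChatPromptTemplate(
    messages=[
        SystemMessage("you are an assistant"),       # literal message
        HumanMessagePromptTemplate("{{question}}"),  # rendered per call
    ]
)

# Render {{variables}} into concrete messages via chevron/mustache:
value = template.render(variables={"question": "What is frogml?"})

# Or flatten to (role, template) tuples for Langchain interop,
# with {{curly}} placeholders rewritten to {curly}:
assert template.to_messages() == [
    ("system", "you are an assistant"),
    ("human", "{question}"),
]
```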
frogml_core/llmops/prompt/chat/value.py
@@ -1,10 +0,0 @@
- from dataclasses import dataclass
- from typing import List
-
- from frogml_core.llmops.prompt.chat.message import BaseMessage
- from frogml_core.llmops.prompt.value import PromptValue
-
-
- @dataclass
- class ChatPromptValue(PromptValue):
-     messages: List[BaseMessage]
frogml_core/llmops/prompt/manager.py
@@ -1,138 +0,0 @@
- from typing import Optional
-
- from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import Prompt as ProtoPrompt
- from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-     PromptVersion as ProtoPromptVersion,
- )
- from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-     PromptVersionDefinition as ProtoPromptVersionDefinition,
- )
- from frogml_proto.qwak.prompt.v1.prompt.prompt_pb2 import (
-     PromptVersionSpec as ProtoPromptVersionSpec,
- )
- from frogml_core.clients.prompt_manager.prompt_manager_client import PromptManagerClient
- from frogml_core.clients.prompt_manager.prompt_proto_mapper import PromptProtoMapper
- from frogml_core.exceptions import FrogmlException
- from frogml_core.llmops.prompt.base import BasePrompt, ChatPrompt, RegisteredPrompt
-
-
- class PromptManager:
-     _prompt_manager_client: PromptManagerClient
-
-     def __init__(self):
-         self._prompt_manager_client = PromptManagerClient()
-
-     def register(
-         self,
-         name: str,
-         prompt: BasePrompt,
-         prompt_description: Optional[str] = None,
-         version_description: Optional[str] = None,
-     ) -> RegisteredPrompt:
-         """
-         Registers a new prompt in Qwak platform. Name must be unique
-         and conform to ^[a-z0-9](?:[-_]?[a-z0-9]+)+$
-         """
-         if not isinstance(prompt, ChatPrompt):
-             raise FrogmlException(f"Got unsupported prompt type: {prompt}")
-
-         version_spec: ProtoPromptVersionSpec = PromptProtoMapper.to_prompt_version_spec(
-             version_description=version_description,
-             prompt_template=prompt.template,
-             model_descriptor=prompt.model,
-         )
-
-         registered_prompt: ProtoPrompt = self._prompt_manager_client.create_prompt(
-             name=name, prompt_description=prompt_description, version_spec=version_spec
-         )
-
-         return PromptProtoMapper.from_prompt(
-             name=registered_prompt.name,
-             prompt_description=registered_prompt.prompt_spec.description,
-             version_description=registered_prompt.default_version_definition.version_spec.description,
-             version=registered_prompt.default_version_definition.version_number,
-             target_default_version=True,
-             prompt_version_definition=registered_prompt.default_version_definition,
-         )
-
-     def update(
-         self,
-         name: str,
-         prompt: BasePrompt,
-         version_description: Optional[str] = None,
-         set_default: bool = False,
-     ) -> RegisteredPrompt:
-         """
-         Creates a new version for an existing prompt, prompt name must already exist.
-         `set_default` set to True if this version is to become the default one immediately.
-         """
-         if not isinstance(prompt, ChatPrompt):
-             raise FrogmlException(f"Got unsupported prompt type: {prompt}")
-
-         version_spec: ProtoPromptVersionSpec = PromptProtoMapper.to_prompt_version_spec(
-             version_description=version_description,
-             prompt_template=prompt.template,
-             model_descriptor=prompt.model,
-         )
-
-         prompt_version: ProtoPromptVersion = (
-             self._prompt_manager_client.create_prompt_version(
-                 name=name, version_spec=version_spec, set_default=set_default
-             )
-         )
-
-         version_for_get_request = (
-             None
-             if set_default
-             else prompt_version.prompt_version_definition.version_number
-         )
-         return self.get_prompt(name=name, version=version_for_get_request)
-
-     def set_default(self, name: str, version: int):
-         """
-         Set a version of a registered prompt named: `name`, as the default version
-         """
-         self._prompt_manager_client.set_default_prompt_version(
-             name=name, version=version
-         )
-
-     def delete_prompt(self, name: str):
-         """
-         Delete all version of a prompt, by name
-         """
-         self._prompt_manager_client.delete_prompt(name=name)
-
-     def delete_prompt_version(self, name: str, version: int):
-         """
-         Deletes a specific version of a registered prompt
-         """
-         self._prompt_manager_client.delete_prompt_version(name=name, version=version)
-
-     def get_prompt(self, name: str, version: Optional[int] = None) -> RegisteredPrompt:
-         """
-         Get a registered prompt by name. To get the default version omit the `version` param, else
-         fetch the specified version.
-         """
-
-         prompt_default_version: ProtoPrompt = (
-             self._prompt_manager_client.get_prompt_by_name(name=name)
-         )
-         prompt_version_definition: ProtoPromptVersionDefinition = (
-             prompt_default_version.default_version_definition
-         )
-         if version:
-             prompt_version: ProtoPromptVersion = (
-                 self._prompt_manager_client.get_prompt_version_by_name(
-                     name=name, version=version
-                 )
-             )
-             prompt_version_definition = prompt_version.prompt_version_definition
-
-         return PromptProtoMapper.from_prompt(
-             name=prompt_default_version.name,
-             prompt_description=prompt_default_version.prompt_spec.description,
-             version_description=prompt_version_definition.version_spec.description,
-             version=prompt_version_definition.version_number,
-             target_default_version=not bool(version),
-             prompt_version_definition=prompt_version_definition,
-         )
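
Taken together with `ChatPrompt` from prompt/base.py above, the removed `PromptManager` exposed a small registry workflow. An end-to-end sketch, valid only against frogml-core 0.0.113 or earlier and assuming an authenticated Qwak environment (the prompt name and model id are illustrative):

```python
# Hedged sketch of the deleted registry workflow (frogml-core <= 0.0.113).
from frogml_core.llmops.model.descriptor import OpenAIChat
from frogml_core.llmops.prompt.base import ChatPrompt
from frogml_core.llmops.prompt.chat.template import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
)
from frogml_core.llmops.prompt.manager import PromptManager

manager = PromptManager()

prompt = ChatPrompt(
    template=ChatPromptTemplate(
        messages=[HumanMessagePromptTemplate("{{question}}")]
    ),
    model=OpenAIChat(model_id="gpt-4o"),  # illustrative model id
)

# Names must be unique and match ^[a-z0-9](?:[-_]?[a-z0-9]+)+$
registered = manager.register(name="support-bot", prompt=prompt)

# Omitting `version` fetches the default version:
same_prompt = manager.get_prompt(name="support-bot")
```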
frogml_core/llmops/prompt/template.py
@@ -1,24 +0,0 @@
- from abc import ABC, abstractmethod
- from dataclasses import dataclass
- from typing import Dict
-
- from frogml_core.llmops.prompt.value import PromptValue, StringPromptValue
-
-
- @dataclass
- class BasePromptTemplate(ABC):
-     @abstractmethod
-     def render(self, variables: Dict[str, any]) -> PromptValue:
-         pass
-
-
- @dataclass
- class StringPromptTemplate(BasePromptTemplate):
-     template: str
-
-     def render(self, variables: Dict[str, any]) -> StringPromptValue:
-         from chevron import renderer
-
-         return StringPromptValue(
-             text=renderer.render(template=self.template, data=variables, warn=True)
-         )
frogml_core/llmops/prompt/value.py
@@ -1,14 +0,0 @@
- from abc import ABC
- from dataclasses import dataclass
-
-
- class PromptValue(ABC):
-     pass
-
-
- @dataclass
- class StringPromptValue(PromptValue):
-     text: str
-
-     def to_string(self) -> str:
-         return self.text
frogml_core/llmops/provider/__init__.py: File without changes
frogml_core/llmops/provider/chat.py
@@ -1,44 +0,0 @@
- from functools import lru_cache
- from typing import Union
-
- from frogml_core.llmops.generation.chat.openai.types.chat.chat_completion import (
-     ChatCompletion,
- )
- from frogml_core.llmops.generation.streaming import ChatCompletionStream
- from frogml_core.llmops.model.descriptor import ChatModelDescriptor, OpenAIChat
- from frogml_core.llmops.prompt.chat.value import ChatPromptValue
- from frogml_core.llmops.provider.openai.provider import OpenAIProvider
-
-
- class ChatCompletionProvider:
-     @staticmethod
-     @lru_cache(maxsize=None)
-     def _get_openai_provider():
-         return OpenAIProvider()
-
-     @staticmethod
-     def invoke(
-         chat_prompt_value: ChatPromptValue,
-         chat_model_descriptor: ChatModelDescriptor,
-         stream: bool = False,
-     ) -> Union[ChatCompletion, ChatCompletionStream]:
-         if isinstance(chat_model_descriptor, OpenAIChat):
-             return ChatCompletionProvider._invoke_openai_chat(
-                 chat_prompt_value=chat_prompt_value,
-                 chat_model_descriptor=chat_model_descriptor,
-                 stream=stream,
-             )
-         else:
-             raise ValueError("Can't invoke prompt and model combination!")
-
-     @staticmethod
-     def _invoke_openai_chat(
-         chat_prompt_value: ChatPromptValue,
-         chat_model_descriptor: OpenAIChat,
-         stream: bool = False,
-     ) -> Union[ChatCompletion, ChatCompletionStream]:
-         return ChatCompletionProvider._get_openai_provider().create_chat_completion(
-             chat_prompt_value=chat_prompt_value,
-             chat_model_descriptor=chat_model_descriptor,
-             stream=stream,
-         )
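
`ChatCompletionProvider.invoke` was the dispatch point tying descriptors, rendered prompt values, and the OpenAI provider together. A final sketch, valid only against frogml-core 0.0.113 or earlier and assuming OpenAI credentials are configured (the model id is illustrative):

```python
# Hedged sketch of the deleted provider dispatch (frogml-core <= 0.0.113).
from frogml_core.llmops.model.descriptor import OpenAIChat
from frogml_core.llmops.prompt.chat.message import HumanMessage
from frogml_core.llmops.prompt.chat.value import ChatPromptValue
from frogml_core.llmops.provider.chat import ChatCompletionProvider

completion = ChatCompletionProvider.invoke(
    chat_prompt_value=ChatPromptValue(messages=[HumanMessage("Hello!")]),
    chat_model_descriptor=OpenAIChat(model_id="gpt-4o"),  # illustrative id
    stream=False,  # stream=True returns a ChatCompletionStream instead
)
```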
frogml_core/llmops/provider/openai/__init__.py: File without changes