frogml-core 0.0.113__py3-none-any.whl → 0.0.114__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.
- frogml_core/__init__.py +1 -1
- frogml_core/inner/di_configuration/__init__.py +0 -6
- {frogml_core-0.0.113.dist-info → frogml_core-0.0.114.dist-info}/METADATA +1 -1
- {frogml_core-0.0.113.dist-info → frogml_core-0.0.114.dist-info}/RECORD +8 -115
- frogml_services_mock/mocks/frogml_mocks.py +0 -11
- frogml_services_mock/services_mock.py +0 -48
- frogml_storage/__init__.py +1 -1
- frogml_core/clients/prompt_manager/__init__.py +0 -0
- frogml_core/clients/prompt_manager/model_descriptor_mapper.py +0 -196
- frogml_core/clients/prompt_manager/prompt_manager_client.py +0 -190
- frogml_core/clients/prompt_manager/prompt_proto_mapper.py +0 -264
- frogml_core/clients/vector_store/__init__.py +0 -2
- frogml_core/clients/vector_store/management_client.py +0 -127
- frogml_core/clients/vector_store/serving_client.py +0 -157
- frogml_core/clients/workspace_manager/__init__.py +0 -1
- frogml_core/clients/workspace_manager/client.py +0 -224
- frogml_core/llmops/__init__.py +0 -0
- frogml_core/llmops/generation/__init__.py +0 -0
- frogml_core/llmops/generation/_steaming.py +0 -78
- frogml_core/llmops/generation/base.py +0 -5
- frogml_core/llmops/generation/chat/__init__.py +0 -0
- frogml_core/llmops/generation/chat/openai/LICENSE.txt +0 -201
- frogml_core/llmops/generation/chat/openai/types/__init__.py +0 -0
- frogml_core/llmops/generation/chat/openai/types/chat/__init__.py +0 -0
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion.py +0 -88
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_assistant_message_param.py +0 -65
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_chunk.py +0 -153
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_content_part_text_param.py +0 -28
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_call_option_param.py +0 -25
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_function_message_param.py +0 -33
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message.py +0 -56
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_param.py +0 -34
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call.py +0 -46
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_message_tool_call_param.py +0 -44
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_named_tool_choice_param.py +0 -32
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_role.py +0 -20
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_system_message_param.py +0 -35
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_token_logprob.py +0 -71
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_choice_option_param.py +0 -28
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_message_param.py +0 -31
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_tool_param.py +0 -29
- frogml_core/llmops/generation/chat/openai/types/chat/chat_completion_user_message_param.py +0 -35
- frogml_core/llmops/generation/chat/openai/types/chat/completion_create_params.py +0 -279
- frogml_core/llmops/generation/chat/openai/types/completion_choice.py +0 -47
- frogml_core/llmops/generation/chat/openai/types/completion_create_params.py +0 -209
- frogml_core/llmops/generation/chat/openai/types/completion_usage.py +0 -30
- frogml_core/llmops/generation/chat/openai/types/model.py +0 -35
- frogml_core/llmops/generation/chat/openai/types/shared/__init__.py +0 -3
- frogml_core/llmops/generation/chat/openai/types/shared/error_object.py +0 -27
- frogml_core/llmops/generation/chat/openai/types/shared/function_definition.py +0 -49
- frogml_core/llmops/generation/chat/openai/types/shared/function_parameters.py +0 -20
- frogml_core/llmops/generation/chat/openai/types/shared_params/__init__.py +0 -2
- frogml_core/llmops/generation/chat/openai/types/shared_params/function_definition.py +0 -49
- frogml_core/llmops/generation/chat/openai/types/shared_params/function_parameters.py +0 -22
- frogml_core/llmops/generation/streaming.py +0 -26
- frogml_core/llmops/model/__init__.py +0 -0
- frogml_core/llmops/model/descriptor.py +0 -40
- frogml_core/llmops/prompt/__init__.py +0 -0
- frogml_core/llmops/prompt/base.py +0 -136
- frogml_core/llmops/prompt/chat/__init__.py +0 -0
- frogml_core/llmops/prompt/chat/message.py +0 -24
- frogml_core/llmops/prompt/chat/template.py +0 -113
- frogml_core/llmops/prompt/chat/value.py +0 -10
- frogml_core/llmops/prompt/manager.py +0 -138
- frogml_core/llmops/prompt/template.py +0 -24
- frogml_core/llmops/prompt/value.py +0 -14
- frogml_core/llmops/provider/__init__.py +0 -0
- frogml_core/llmops/provider/chat.py +0 -44
- frogml_core/llmops/provider/openai/__init__.py +0 -0
- frogml_core/llmops/provider/openai/client.py +0 -126
- frogml_core/llmops/provider/openai/provider.py +0 -93
- frogml_core/vector_store/__init__.py +0 -4
- frogml_core/vector_store/client.py +0 -151
- frogml_core/vector_store/collection.py +0 -429
- frogml_core/vector_store/filters.py +0 -359
- frogml_core/vector_store/inference_client.py +0 -105
- frogml_core/vector_store/rest_helpers.py +0 -81
- frogml_core/vector_store/utils/__init__.py +0 -0
- frogml_core/vector_store/utils/filter_utils.py +0 -23
- frogml_core/vector_store/utils/upsert_utils.py +0 -218
- frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.py +0 -77
- frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.pyi +0 -417
- frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2_grpc.py +0 -441
- frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.py +0 -69
- frogml_proto/qwak/prompt/v1/prompt/prompt_pb2.pyi +0 -415
- frogml_proto/qwak/prompt/v1/prompt/prompt_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/collection/collection_pb2.py +0 -46
- frogml_proto/qwak/vectors/v1/collection/collection_pb2.pyi +0 -287
- frogml_proto/qwak/vectors/v1/collection/collection_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.py +0 -60
- frogml_proto/qwak/vectors/v1/collection/collection_service_pb2.pyi +0 -258
- frogml_proto/qwak/vectors/v1/collection/collection_service_pb2_grpc.py +0 -304
- frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.py +0 -28
- frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2.pyi +0 -41
- frogml_proto/qwak/vectors/v1/collection/event/collection_event_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/filters_pb2.py +0 -52
- frogml_proto/qwak/vectors/v1/filters_pb2.pyi +0 -297
- frogml_proto/qwak/vectors/v1/filters_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/vector_pb2.py +0 -38
- frogml_proto/qwak/vectors/v1/vector_pb2.pyi +0 -142
- frogml_proto/qwak/vectors/v1/vector_pb2_grpc.py +0 -4
- frogml_proto/qwak/vectors/v1/vector_service_pb2.py +0 -53
- frogml_proto/qwak/vectors/v1/vector_service_pb2.pyi +0 -243
- frogml_proto/qwak/vectors/v1/vector_service_pb2_grpc.py +0 -201
- frogml_proto/qwak/workspace/workspace_pb2.py +0 -50
- frogml_proto/qwak/workspace/workspace_pb2.pyi +0 -331
- frogml_proto/qwak/workspace/workspace_pb2_grpc.py +0 -4
- frogml_proto/qwak/workspace/workspace_service_pb2.py +0 -84
- frogml_proto/qwak/workspace/workspace_service_pb2.pyi +0 -393
- frogml_proto/qwak/workspace/workspace_service_pb2_grpc.py +0 -507
- frogml_services_mock/mocks/prompt_manager_service.py +0 -281
- frogml_services_mock/mocks/vector_serving_api.py +0 -159
- frogml_services_mock/mocks/vectors_management_api.py +0 -97
- frogml_services_mock/mocks/workspace_manager_service_mock.py +0 -202
- {frogml_core-0.0.113.dist-info → frogml_core-0.0.114.dist-info}/WHEEL +0 -0
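Taken together, these removals drop the prompt manager, LLMOps, vector store, and workspace manager modules (plus their protos and service mocks) from the wheel. As a rough sketch of the practical impact (the module paths come from the list above; the guard itself is illustrative and not part of frogml-core), code that still imports these modules can probe for them before upgrading:

```python
# Illustrative only: probe for modules that ship in frogml-core 0.0.113 but are
# removed in 0.0.114. The module paths are taken from the diff above; this
# check is a hypothetical helper, not a frogml-core API.
import importlib

REMOVED_IN_0_0_114 = [
    "frogml_core.clients.prompt_manager.prompt_manager_client",
    "frogml_core.clients.workspace_manager.client",
    "frogml_core.vector_store.collection",
]

missing = []
for module_name in REMOVED_IN_0_0_114:
    try:
        importlib.import_module(module_name)
    except ModuleNotFoundError:
        missing.append(module_name)

if missing:
    print("These frogml-core modules are no longer available:", ", ".join(missing))
```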
frogml_core/vector_store/utils/upsert_utils.py
@@ -1,218 +0,0 @@
-import os
-from functools import partial
-from math import ceil
-from multiprocessing import Pool, set_start_method
-from typing import Dict, Iterable, List, Optional, Tuple, TypeVar, Union
-
-from dependency_injector.wiring import Provide, inject
-from typeguard import typechecked
-
-from frogml_proto.qwak.vectors.v1.vector_pb2 import (
-    DoubleVector,
-    Property,
-    StoredVector,
-    VectorIdentifier,
-)
-from frogml_core.clients.vector_store.serving_client import VectorServingClient
-from frogml_core.exceptions import FrogmlException
-from frogml_core.inner.di_configuration import FrogmlContainer
-from frogml_core.vector_store.inference_client import VectorStoreInferenceClient
-
-_NaturalInput = TypeVar("T")
-_NaturalInputs = List[_NaturalInput]
-_Vector = List[float]
-_Properties = Dict[str, Union[str, int, bool, float]]
-
-_type_to_proto_property_mapping: Dict = {
-    str: "string_val",
-    bool: "bool_val",
-    int: "int_val",
-    float: "double_val",
-}
-
-
-def _build_property(key: str, value: Union[str, int, bool, float]):
-    type_val = _type_to_proto_property_mapping.get(type(value), None)
-    if not type_val:
-        raise FrogmlException(
-            f"Cannot upsert vector with property value type {type(value)}. "
-            f"Supported types are: {list(_type_to_proto_property_mapping.keys())}"
-        )
-
-    property_args = {"name": key, type_val: value}
-    return Property(**property_args)
-
-
-def _rewire_qwak_container(config):
-    # re-creating the container using the config from the original container.
-    # note that this runs in a fresh interpreter - at that point there's a running
-    # container because of the imports, but it won't necessarily have the same config
-    # as the container in the parent process that spawned this one.
-    # rewiring only the vector store + ecosystem and authentication - if using stuff from
-    # other modules pls feel free to add it here.
-    new_container = FrogmlContainer(config=config)
-    from frogml_core.clients import vector_store
-    from frogml_core.clients.administration import authentication, eco_system
-
-    new_container.wire(
-        packages=[
-            authentication,
-            eco_system,
-            vector_store,
-        ]
-    )
-
-
-@typechecked
-def _divide_chunks(lst: List, chunk_size: int):
-    if chunk_size <= 0:
-        raise FrogmlException("Chunk size must be a positive integer")
-
-    num_items: int = len(lst)
-    num_chunks: int = ceil(num_items / chunk_size)
-    for i in range(num_chunks):
-        yield lst[i * chunk_size : (i + 1) * chunk_size]
-
-
-def _get_vector_identifier(t: Union[str, Tuple[str, str]]):
-    if type(t) is str:
-        return VectorIdentifier(vector_id=t)
-    return VectorIdentifier(vector_id=t[0], tenant_id=t[1])
-
-
-def _upsert_vector_block(
-    vector_tuples: List[Tuple[Union[str, Tuple[str, str]], _Vector, _Properties]],
-    chunk_size: int,
-    collection_name: str,
-    edge_services_url: str,
-) -> None:
-    vector_serving_client: VectorServingClient = VectorServingClient(
-        edge_services_url=edge_services_url
-    )
-    for chunk in _divide_chunks(vector_tuples, chunk_size):
-        # chunk is a list of (id, vector, properties) tuples
-        vector_serving_client.upsert_vectors(
-            collection_name=collection_name,
-            vectors=[
-                StoredVector(
-                    vector_identifier=_get_vector_identifier(tpl[0]),
-                    vector=DoubleVector(element=tpl[1]),
-                    properties=[
-                        _build_property(key, value) for (key, value) in tpl[2].items()
-                    ],
-                )
-                for tpl in chunk
-            ],
-        )
-
-
-def _upsert_natural_input_block(
-    vector_tuples: List[Tuple[VectorIdentifier, _NaturalInput, _Properties]],
-    chunk_size: int,
-    vectorizer_name: str,
-    collection_name: str,
-    edge_services_url: str,
-) -> None:
-    vector_serving_client: VectorServingClient = VectorServingClient(
-        edge_services_url=edge_services_url
-    )
-    inference_client: VectorStoreInferenceClient = VectorStoreInferenceClient(
-        model_id=vectorizer_name
-    )
-    for chunk in _divide_chunks(vector_tuples, chunk_size):
-        # chunk is a list of (id, _NaturalInput, properties) tuples
-        vector_serving_client.upsert_vectors(
-            collection_name=collection_name,
-            vectors=[
-                StoredVector(
-                    vector_identifier=_get_vector_identifier(tpl[0]),
-                    vector=DoubleVector(
-                        element=inference_client.get_embedding(natural_input=tpl[1])
-                    ),
-                    properties=[
-                        _build_property(key, value) for (key, value) in tpl[2].items()
-                    ],
-                )
-                for tpl in chunk
-            ],
-        )
-
-
-@inject
-def _upsert_natural_input(
-    vector_tuples: List[Tuple[Union[str, Tuple[str, str]], _NaturalInput, _Properties]],
-    chunk_size: int,
-    vectorizer_name: str,
-    collection_name: str,
-    edge_services_url: str,
-    multiproc: bool = False,
-    max_processes: Optional[int] = None,
-    config=Provide[FrogmlContainer.config],
-):
-    if not multiproc:
-        _upsert_natural_input_block(
-            vector_tuples=vector_tuples,
-            chunk_size=chunk_size,
-            vectorizer_name=vectorizer_name,
-            collection_name=collection_name,
-            edge_services_url=edge_services_url,
-        )
-    else:
-        if max_processes is None:
-            max_processes = os.cpu_count()
-        effective_block_size: int = ceil(len(vector_tuples) / (max_processes * 4))
-
-        f = partial(
-            _upsert_natural_input_block,
-            chunk_size=chunk_size,
-            vectorizer_name=vectorizer_name,
-            collection_name=collection_name,
-            edge_services_url=edge_services_url,
-        )
-
-        blocks: Iterable[List[Tuple[str, _NaturalInput, _Properties]]] = _divide_chunks(
-            vector_tuples, effective_block_size
-        )
-        initializer = partial(_rewire_qwak_container, config=config)
-        set_start_method("spawn", force=True)
-
-        with Pool(processes=max_processes, initializer=initializer) as p:
-            p.map(f, blocks)
-
-
-@inject
-def _upsert_vectors(
-    vector_tuples: List[Tuple[Union[str, Tuple[str, str]], _Vector, _Properties]],
-    chunk_size: int,
-    collection_name: str,
-    edge_services_url: str,
-    multiproc: bool = False,
-    max_processes: Optional[int] = None,
-    config=Provide[FrogmlContainer.config],
-):
-    if not multiproc:
-        _upsert_vector_block(
-            vector_tuples=vector_tuples,
-            chunk_size=chunk_size,
-            collection_name=collection_name,
-            edge_services_url=edge_services_url,
-        )
-    else:
-        if max_processes is None:
-            max_processes = os.cpu_count()
-        effective_block_size: int = ceil(len(vector_tuples) / (max_processes * 4))
-
-        f = partial(
-            _upsert_vector_block,
-            chunk_size=chunk_size,
-            collection_name=collection_name,
-            edge_services_url=edge_services_url,
-        )
-        blocks: Iterable[List[Tuple[str, _Vector, _Properties]]] = list(
-            _divide_chunks(vector_tuples, effective_block_size)
-        )
-
-        set_start_method("spawn", force=True)
-        initializer = partial(_rewire_qwak_container, config=config)
-        with Pool(processes=max_processes, initializer=initializer) as p:
-            p.map(f, blocks)
frogml_proto/qwak/prompt/v1/prompt/prompt_manager_service_pb2.py
@@ -1,77 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: frogml_proto.qwak.prompt/v1/prompt/prompt_manager_service.proto
-"""Generated protocol buffer code."""
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import descriptor_pool as _descriptor_pool
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf.internal import builder as _builder
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from frogml_proto.qwak.prompt.v1.prompt import prompt_pb2 as qwak_dot_prompt_dot_v1_dot_prompt_dot_prompt__pb2
-from frogml_proto.qwak.model_descriptor import open_ai_descriptor_pb2 as qwak_dot_model__descriptor_dot_open__ai__descriptor__pb2
-
-
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2qwak/prompt/v1/prompt/prompt_manager_service.proto\x12\x15qwak.prompt.v1.prompt\x1a\"qwak/prompt/v1/prompt/prompt.proto\x1a.qwak/model_descriptor/open_ai_descriptor.proto\"\xa9\x01\n\x13\x43reatePromptRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x45\n\x13prompt_version_spec\x18\x02 \x01(\x0b\x32(.qwak.prompt.v1.prompt.PromptVersionSpec\x12\x36\n\x0bprompt_spec\x18\x03 \x01(\x0b\x32!.qwak.prompt.v1.prompt.PromptSpec\"E\n\x14\x43reatePromptResponse\x12-\n\x06prompt\x18\x01 \x01(\x0b\x32\x1d.qwak.prompt.v1.prompt.Prompt\"\x8d\x01\n\x1a\x43reatePromptVersionRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x45\n\x13prompt_version_spec\x18\x02 \x01(\x0b\x32(.qwak.prompt.v1.prompt.PromptVersionSpec\x12\x13\n\x0bset_default\x18\x03 \x01(\x08\"[\n\x1b\x43reatePromptVersionResponse\x12<\n\x0eprompt_version\x18\x01 \x01(\x0b\x32$.qwak.prompt.v1.prompt.PromptVersion\"b\n\x13UpdatePromptRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x36\n\x0bprompt_spec\x18\x02 \x01(\x0b\x32!.qwak.prompt.v1.prompt.PromptSpec\"\x16\n\x14UpdatePromptResponse\"o\n#GetPromptVersionByPromptNameRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x18\n\x0eversion_number\x18\x02 \x01(\x05H\x00\x42\x19\n\x17optional_version_number\"d\n$GetPromptVersionByPromptNameResponse\x12<\n\x0eprompt_version\x18\x01 \x01(\x0b\x32$.qwak.prompt.v1.prompt.PromptVersion\"-\n\x16GetPromptByNameRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\"H\n\x17GetPromptByNameResponse\x12-\n\x06prompt\x18\x01 \x01(\x0b\x32\x1d.qwak.prompt.v1.prompt.Prompt\"\x14\n\x12ListPromptsRequest\"E\n\x13ListPromptsResponse\x12.\n\x07prompts\x18\x01 \x03(\x0b\x32\x1d.qwak.prompt.v1.prompt.Prompt\"8\n!InitPaginationByPromptNameRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\"V\n\"InitPaginationByPromptNameResponse\x12\x14\n\x0crecord_count\x18\x01 \x01(\x03\x12\x1a\n\x12max_version_number\x18\x02 \x01(\x05\"\x80\x01\n%ListPromptVersionsByPromptNameRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x13\n\x0bpage_number\x18\x03 \x01(\x05\x12\x1a\n\x12max_version_number\x18\x04 \x01(\x05\"g\n&ListPromptVersionsByPromptNameResponse\x12=\n\x0fprompt_versions\x18\x01 \x03(\x0b\x32$.qwak.prompt.v1.prompt.PromptVersion\"*\n\x13\x44\x65letePromptRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\"\x16\n\x14\x44\x65letePromptResponse\"I\n\x1a\x44\x65letePromptVersionRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x16\n\x0eversion_number\x18\x03 \x01(\x05\"\x1d\n\x1b\x44\x65letePromptVersionResponse\"M\n\x1eSetDefaultPromptVersionRequest\x12\x13\n\x0bprompt_name\x18\x01 \x01(\t\x12\x16\n\x0eversion_number\x18\x02 \x01(\x05\"!\n\x1fSetDefaultPromptVersionResponse\"\x19\n\x17ListPromptBriefsRequest\"U\n\x18ListPromptBriefsResponse\x12\x39\n\rprompt_briefs\x18\x01 \x03(\x0b\x32\".qwak.prompt.v1.prompt.PromptBrief2\xf3\x0b\n\x14PromptManagerService\x12g\n\x0c\x43reatePrompt\x12*.qwak.prompt.v1.prompt.CreatePromptRequest\x1a+.qwak.prompt.v1.prompt.CreatePromptResponse\x12|\n\x13\x43reatePromptVersion\x12\x31.qwak.prompt.v1.prompt.CreatePromptVersionRequest\x1a\x32.qwak.prompt.v1.prompt.CreatePromptVersionResponse\x12g\n\x0cUpdatePrompt\x12*.qwak.prompt.v1.prompt.UpdatePromptRequest\x1a+.qwak.prompt.v1.prompt.UpdatePromptResponse\x12p\n\x0fGetPromptByName\x12-.qwak.prompt.v1.prompt.GetPromptByNameRequest\x1a..qwak.prompt.v1.prompt.GetPromptByNameResponse\x12\x64\n\x0bListPrompts\x12).qwak.prompt.v1.prompt.ListPromptsRequest\x1a*.qwak.prompt.v1.prompt.ListPromptsResponse\x12\x97\x01\n\x1cGetPromptVersionByPromptName\x12:.qwak.prompt.v1.prompt.GetPromptVersionByPromptNameRequest\x1a;.qwak.prompt.v1.prompt.GetPromptVersionByPromptNameResponse\x12\x91\x01\n\x1aInitPaginationByPromptName\x12\x38.qwak.prompt.v1.prompt.InitPaginationByPromptNameRequest\x1a\x39.qwak.prompt.v1.prompt.InitPaginationByPromptNameResponse\x12\x9d\x01\n\x1eListPromptVersionsByPromptName\x12<.qwak.prompt.v1.prompt.ListPromptVersionsByPromptNameRequest\x1a=.qwak.prompt.v1.prompt.ListPromptVersionsByPromptNameResponse\x12g\n\x0c\x44\x65letePrompt\x12*.qwak.prompt.v1.prompt.DeletePromptRequest\x1a+.qwak.prompt.v1.prompt.DeletePromptResponse\x12|\n\x13\x44\x65letePromptVersion\x12\x31.qwak.prompt.v1.prompt.DeletePromptVersionRequest\x1a\x32.qwak.prompt.v1.prompt.DeletePromptVersionResponse\x12\x88\x01\n\x17SetDefaultPromptVersion\x12\x35.qwak.prompt.v1.prompt.SetDefaultPromptVersionRequest\x1a\x36.qwak.prompt.v1.prompt.SetDefaultPromptVersionResponse\x12s\n\x10ListPromptBriefs\x12..qwak.prompt.v1.prompt.ListPromptBriefsRequest\x1a/.qwak.prompt.v1.prompt.ListPromptBriefsResponseB$\n com.qwak.ai.prompt.api.v1.promptP\x01\x62\x06proto3')
-
-_globals = globals()
-_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
-_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'qwak.prompt.v1.prompt.prompt_manager_service_pb2', _globals)
-if _descriptor._USE_C_DESCRIPTORS == False:
-
-  DESCRIPTOR._options = None
-  DESCRIPTOR._serialized_options = b'\n com.qwak.ai.prompt.api.v1.promptP\001'
-  _globals['_CREATEPROMPTREQUEST']._serialized_start=162
-  _globals['_CREATEPROMPTREQUEST']._serialized_end=331
-  _globals['_CREATEPROMPTRESPONSE']._serialized_start=333
-  _globals['_CREATEPROMPTRESPONSE']._serialized_end=402
-  _globals['_CREATEPROMPTVERSIONREQUEST']._serialized_start=405
-  _globals['_CREATEPROMPTVERSIONREQUEST']._serialized_end=546
-  _globals['_CREATEPROMPTVERSIONRESPONSE']._serialized_start=548
-  _globals['_CREATEPROMPTVERSIONRESPONSE']._serialized_end=639
-  _globals['_UPDATEPROMPTREQUEST']._serialized_start=641
-  _globals['_UPDATEPROMPTREQUEST']._serialized_end=739
-  _globals['_UPDATEPROMPTRESPONSE']._serialized_start=741
-  _globals['_UPDATEPROMPTRESPONSE']._serialized_end=763
-  _globals['_GETPROMPTVERSIONBYPROMPTNAMEREQUEST']._serialized_start=765
-  _globals['_GETPROMPTVERSIONBYPROMPTNAMEREQUEST']._serialized_end=876
-  _globals['_GETPROMPTVERSIONBYPROMPTNAMERESPONSE']._serialized_start=878
-  _globals['_GETPROMPTVERSIONBYPROMPTNAMERESPONSE']._serialized_end=978
-  _globals['_GETPROMPTBYNAMEREQUEST']._serialized_start=980
-  _globals['_GETPROMPTBYNAMEREQUEST']._serialized_end=1025
-  _globals['_GETPROMPTBYNAMERESPONSE']._serialized_start=1027
-  _globals['_GETPROMPTBYNAMERESPONSE']._serialized_end=1099
-  _globals['_LISTPROMPTSREQUEST']._serialized_start=1101
-  _globals['_LISTPROMPTSREQUEST']._serialized_end=1121
-  _globals['_LISTPROMPTSRESPONSE']._serialized_start=1123
-  _globals['_LISTPROMPTSRESPONSE']._serialized_end=1192
-  _globals['_INITPAGINATIONBYPROMPTNAMEREQUEST']._serialized_start=1194
-  _globals['_INITPAGINATIONBYPROMPTNAMEREQUEST']._serialized_end=1250
-  _globals['_INITPAGINATIONBYPROMPTNAMERESPONSE']._serialized_start=1252
-  _globals['_INITPAGINATIONBYPROMPTNAMERESPONSE']._serialized_end=1338
-  _globals['_LISTPROMPTVERSIONSBYPROMPTNAMEREQUEST']._serialized_start=1341
-  _globals['_LISTPROMPTVERSIONSBYPROMPTNAMEREQUEST']._serialized_end=1469
-  _globals['_LISTPROMPTVERSIONSBYPROMPTNAMERESPONSE']._serialized_start=1471
-  _globals['_LISTPROMPTVERSIONSBYPROMPTNAMERESPONSE']._serialized_end=1574
-  _globals['_DELETEPROMPTREQUEST']._serialized_start=1576
-  _globals['_DELETEPROMPTREQUEST']._serialized_end=1618
-  _globals['_DELETEPROMPTRESPONSE']._serialized_start=1620
-  _globals['_DELETEPROMPTRESPONSE']._serialized_end=1642
-  _globals['_DELETEPROMPTVERSIONREQUEST']._serialized_start=1644
-  _globals['_DELETEPROMPTVERSIONREQUEST']._serialized_end=1717
-  _globals['_DELETEPROMPTVERSIONRESPONSE']._serialized_start=1719
-  _globals['_DELETEPROMPTVERSIONRESPONSE']._serialized_end=1748
-  _globals['_SETDEFAULTPROMPTVERSIONREQUEST']._serialized_start=1750
-  _globals['_SETDEFAULTPROMPTVERSIONREQUEST']._serialized_end=1827
-  _globals['_SETDEFAULTPROMPTVERSIONRESPONSE']._serialized_start=1829
-  _globals['_SETDEFAULTPROMPTVERSIONRESPONSE']._serialized_end=1862
-  _globals['_LISTPROMPTBRIEFSREQUEST']._serialized_start=1864
-  _globals['_LISTPROMPTBRIEFSREQUEST']._serialized_end=1889
-  _globals['_LISTPROMPTBRIEFSRESPONSE']._serialized_start=1891
-  _globals['_LISTPROMPTBRIEFSRESPONSE']._serialized_end=1976
-  _globals['_PROMPTMANAGERSERVICE']._serialized_start=1979
-  _globals['_PROMPTMANAGERSERVICE']._serialized_end=3502
-# @@protoc_insertion_point(module_scope)