yandexcloud 0.339.0__py3-none-any.whl → 0.341.0__py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of yandexcloud might be problematic.

Files changed (115)
  1. yandex/cloud/ai/assistants/v1/searchindex/common_pb2.py +9 -3
  2. yandex/cloud/ai/assistants/v1/searchindex/common_pb2.pyi +46 -0
  3. yandex/cloud/ai/assistants/v1/searchindex/search_index_pb2.py +8 -6
  4. yandex/cloud/ai/assistants/v1/searchindex/search_index_pb2.pyi +26 -3
  5. yandex/cloud/ai/batch_inference/__init__.py +0 -0
  6. yandex/cloud/ai/batch_inference/v1/__init__.py +0 -0
  7. yandex/cloud/ai/batch_inference/v1/batch_inference_service_pb2.py +67 -0
  8. yandex/cloud/ai/batch_inference/v1/batch_inference_service_pb2.pyi +195 -0
  9. yandex/cloud/ai/batch_inference/v1/batch_inference_service_pb2_grpc.py +230 -0
  10. yandex/cloud/ai/batch_inference/v1/batch_inference_service_pb2_grpc.pyi +104 -0
  11. yandex/cloud/ai/batch_inference/v1/batch_inference_task_pb2.py +45 -0
  12. yandex/cloud/ai/batch_inference/v1/batch_inference_task_pb2.pyi +119 -0
  13. yandex/cloud/ai/batch_inference/v1/batch_inference_task_pb2_grpc.py +24 -0
  14. yandex/cloud/ai/batch_inference/v1/batch_inference_task_pb2_grpc.pyi +17 -0
  15. yandex/cloud/ai/batch_inference/v1/inference_options_pb2.py +52 -0
  16. yandex/cloud/ai/batch_inference/v1/inference_options_pb2.pyi +126 -0
  17. yandex/cloud/ai/batch_inference/v1/inference_options_pb2_grpc.py +24 -0
  18. yandex/cloud/ai/batch_inference/v1/inference_options_pb2_grpc.pyi +17 -0
  19. yandex/cloud/ai/foundation_models/v1/text_classification/text_classification_service_pb2.py +8 -8
  20. yandex/cloud/ai/foundation_models/v1/text_classification/text_classification_service_pb2.pyi +15 -7
  21. yandex/cloud/ai/foundation_models/v1/text_classification/text_classification_service_pb2_grpc.py +2 -2
  22. yandex/cloud/ai/foundation_models/v1/text_classification/text_classification_service_pb2_grpc.pyi +6 -6
  23. yandex/cloud/ai/stt/v3/stt_pb2.py +5 -3
  24. yandex/cloud/ai/stt/v3/stt_pb2.pyi +16 -0
  25. yandex/cloud/ai/translate/v2/translation_service_pb2.pyi +1 -1
  26. yandex/cloud/ai/tuning/v1/tuning_service_pb2_grpc.py +8 -4
  27. yandex/cloud/ai/tuning/v1/tuning_service_pb2_grpc.pyi +16 -4
  28. yandex/cloud/ai/vision/v1/vision_service_pb2.pyi +1 -1
  29. yandex/cloud/billing/v1/budget_pb2.pyi +4 -4
  30. yandex/cloud/cdn/v1/shielding_pb2.py +37 -0
  31. yandex/cloud/cdn/v1/shielding_pb2.pyi +41 -0
  32. yandex/cloud/cdn/v1/shielding_pb2_grpc.py +24 -0
  33. yandex/cloud/cdn/v1/shielding_pb2_grpc.pyi +17 -0
  34. yandex/cloud/cdn/v1/shielding_service_pb2.py +89 -0
  35. yandex/cloud/cdn/v1/shielding_service_pb2.pyi +201 -0
  36. yandex/cloud/cdn/v1/shielding_service_pb2_grpc.py +281 -0
  37. yandex/cloud/cdn/v1/shielding_service_pb2_grpc.pyi +141 -0
  38. yandex/cloud/cloudregistry/v1/registry_service_pb2.py +6 -8
  39. yandex/cloud/cloudrouter/v1/routing_instance_service_pb2.py +40 -24
  40. yandex/cloud/cloudrouter/v1/routing_instance_service_pb2.pyi +72 -0
  41. yandex/cloud/cloudrouter/v1/routing_instance_service_pb2_grpc.py +90 -0
  42. yandex/cloud/cloudrouter/v1/routing_instance_service_pb2_grpc.pyi +52 -0
  43. yandex/cloud/compute/v1/instancegroup/instance_group_pb2.pyi +1 -1
  44. yandex/cloud/compute/v1/instancegroup/instance_group_service_pb2.py +4 -4
  45. yandex/cloud/dataproc/v1/cluster_pb2.py +10 -10
  46. yandex/cloud/dataproc/v1/cluster_pb2.pyi +5 -1
  47. yandex/cloud/k8s/v1/cluster_service_pb2.py +80 -73
  48. yandex/cloud/k8s/v1/cluster_service_pb2_grpc.py +133 -0
  49. yandex/cloud/k8s/v1/cluster_service_pb2_grpc.pyi +61 -0
  50. yandex/cloud/kms/v1/symmetric_key_service_pb2.py +50 -48
  51. yandex/cloud/logging/v1/log_entry_pb2.py +12 -12
  52. yandex/cloud/mdb/clickhouse/v1/cluster_extension_pb2.py +46 -0
  53. yandex/cloud/mdb/clickhouse/v1/cluster_extension_pb2.pyi +53 -0
  54. yandex/cloud/mdb/clickhouse/v1/cluster_extension_pb2_grpc.py +24 -0
  55. yandex/cloud/mdb/clickhouse/v1/cluster_extension_pb2_grpc.pyi +17 -0
  56. yandex/cloud/mdb/clickhouse/v1/cluster_extension_service_pb2.py +106 -0
  57. yandex/cloud/mdb/clickhouse/v1/cluster_extension_service_pb2.pyi +248 -0
  58. yandex/cloud/mdb/clickhouse/v1/cluster_extension_service_pb2_grpc.py +315 -0
  59. yandex/cloud/mdb/clickhouse/v1/cluster_extension_service_pb2_grpc.pyi +131 -0
  60. yandex/cloud/mdb/clickhouse/v1/extension_pb2.py +44 -0
  61. yandex/cloud/mdb/clickhouse/v1/extension_pb2.pyi +59 -0
  62. yandex/cloud/mdb/clickhouse/v1/extension_pb2_grpc.py +24 -0
  63. yandex/cloud/mdb/clickhouse/v1/extension_pb2_grpc.pyi +17 -0
  64. yandex/cloud/mdb/clickhouse/v1/extension_service_pb2.py +60 -0
  65. yandex/cloud/mdb/clickhouse/v1/extension_service_pb2.pyi +72 -0
  66. yandex/cloud/mdb/clickhouse/v1/extension_service_pb2_grpc.py +141 -0
  67. yandex/cloud/mdb/clickhouse/v1/extension_service_pb2_grpc.pyi +59 -0
  68. yandex/cloud/mdb/greenplum/v1/cluster_service_pb2.py +59 -43
  69. yandex/cloud/mdb/greenplum/v1/cluster_service_pb2.pyi +67 -0
  70. yandex/cloud/mdb/greenplum/v1/cluster_service_pb2_grpc.py +44 -0
  71. yandex/cloud/mdb/greenplum/v1/cluster_service_pb2_grpc.pyi +20 -0
  72. yandex/cloud/mdb/greenplum/v1/user_pb2.py +44 -0
  73. yandex/cloud/mdb/greenplum/v1/user_pb2.pyi +35 -0
  74. yandex/cloud/mdb/greenplum/v1/user_pb2_grpc.py +24 -0
  75. yandex/cloud/mdb/greenplum/v1/user_pb2_grpc.pyi +17 -0
  76. yandex/cloud/mdb/greenplum/v1/user_service_pb2.py +101 -0
  77. yandex/cloud/mdb/greenplum/v1/user_service_pb2.pyi +187 -0
  78. yandex/cloud/mdb/greenplum/v1/user_service_pb2_grpc.py +231 -0
  79. yandex/cloud/mdb/greenplum/v1/user_service_pb2_grpc.pyi +105 -0
  80. yandex/cloud/mdb/mysql/v1/user_pb2.py +4 -4
  81. yandex/cloud/mdb/mysql/v1/user_pb2.pyi +4 -0
  82. yandex/cloud/mdb/opensearch/v1/extension/__init__.py +0 -0
  83. yandex/cloud/mdb/opensearch/v1/extension/extension_type_pb2.py +37 -0
  84. yandex/cloud/mdb/opensearch/v1/extension/extension_type_pb2.pyi +34 -0
  85. yandex/cloud/mdb/opensearch/v1/extension/extension_type_pb2_grpc.py +24 -0
  86. yandex/cloud/mdb/opensearch/v1/extension/extension_type_pb2_grpc.pyi +17 -0
  87. yandex/cloud/mdb/opensearch/v1/extension_pb2.py +47 -0
  88. yandex/cloud/mdb/opensearch/v1/extension_pb2.pyi +77 -0
  89. yandex/cloud/mdb/opensearch/v1/extension_pb2_grpc.py +24 -0
  90. yandex/cloud/mdb/opensearch/v1/extension_pb2_grpc.pyi +17 -0
  91. yandex/cloud/mdb/opensearch/v1/extension_service_pb2.py +104 -0
  92. yandex/cloud/mdb/opensearch/v1/extension_service_pb2.pyi +215 -0
  93. yandex/cloud/mdb/opensearch/v1/extension_service_pb2_grpc.py +276 -0
  94. yandex/cloud/mdb/opensearch/v1/extension_service_pb2_grpc.pyi +126 -0
  95. yandex/cloud/mdb/spqr/v1/cluster_service_pb2.py +120 -120
  96. yandex/cloud/mdb/spqr/v1/cluster_service_pb2.pyi +8 -2
  97. yandex/cloud/monitoring/v3/timeline_pb2.py +2 -2
  98. yandex/cloud/monitoring/v3/timeline_pb2.pyi +4 -2
  99. yandex/cloud/organizationmanager/v1/user_account_pb2.pyi +1 -1
  100. yandex/cloud/video/v1/channel_service_pb2.pyi +5 -2
  101. yandex/cloud/video/v1/episode_service_pb2.pyi +5 -2
  102. yandex/cloud/video/v1/playlist_service_pb2.pyi +5 -2
  103. yandex/cloud/video/v1/stream_line_service_pb2.pyi +5 -2
  104. yandex/cloud/video/v1/stream_service_pb2.pyi +5 -2
  105. yandex/cloud/video/v1/video_service_pb2.pyi +5 -2
  106. yandexcloud/__init__.py +1 -1
  107. yandexcloud/_sdk.py +1 -0
  108. yandexcloud/_wrappers/__init__.py +15 -0
  109. yandexcloud/_wrappers/spark/__init__.py +603 -0
  110. {yandexcloud-0.339.0.dist-info → yandexcloud-0.341.0.dist-info}/METADATA +2 -1
  111. {yandexcloud-0.339.0.dist-info → yandexcloud-0.341.0.dist-info}/RECORD +115 -55
  112. {yandexcloud-0.339.0.dist-info → yandexcloud-0.341.0.dist-info}/AUTHORS +0 -0
  113. {yandexcloud-0.339.0.dist-info → yandexcloud-0.341.0.dist-info}/LICENSE +0 -0
  114. {yandexcloud-0.339.0.dist-info → yandexcloud-0.341.0.dist-info}/WHEEL +0 -0
  115. {yandexcloud-0.339.0.dist-info → yandexcloud-0.341.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,126 @@
+ """
+ @generated by mypy-protobuf. Do not edit manually!
+ isort:skip_file
+ """
+
+ import builtins
+ import google.protobuf.descriptor
+ import google.protobuf.internal.enum_type_wrapper
+ import google.protobuf.message
+ import google.protobuf.struct_pb2
+ import google.protobuf.wrappers_pb2
+ import sys
+ import typing
+
+ if sys.version_info >= (3, 10):
+ import typing as typing_extensions
+ else:
+ import typing_extensions
+
+ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+ @typing.final
+ class BatchCompletionRequest(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ MODEL_URI_FIELD_NUMBER: builtins.int
+ SOURCE_DATASET_ID_FIELD_NUMBER: builtins.int
+ COMPLETION_OPTIONS_FIELD_NUMBER: builtins.int
+ DATA_LOGGING_ENABLED_FIELD_NUMBER: builtins.int
+ JSON_OBJECT_FIELD_NUMBER: builtins.int
+ JSON_SCHEMA_FIELD_NUMBER: builtins.int
+ model_uri: builtins.str
+ source_dataset_id: builtins.str
+ data_logging_enabled: builtins.bool
+ json_object: builtins.bool
+ @property
+ def completion_options(self) -> global___CompletionOptions: ...
+ @property
+ def json_schema(self) -> global___JsonSchema: ...
+ def __init__(
+ self,
+ *,
+ model_uri: builtins.str = ...,
+ source_dataset_id: builtins.str = ...,
+ completion_options: global___CompletionOptions | None = ...,
+ data_logging_enabled: builtins.bool = ...,
+ json_object: builtins.bool = ...,
+ json_schema: global___JsonSchema | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["StructuredOutput", b"StructuredOutput", "completion_options", b"completion_options", "json_object", b"json_object", "json_schema", b"json_schema"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["StructuredOutput", b"StructuredOutput", "completion_options", b"completion_options", "data_logging_enabled", b"data_logging_enabled", "json_object", b"json_object", "json_schema", b"json_schema", "model_uri", b"model_uri", "source_dataset_id", b"source_dataset_id"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing.Literal["StructuredOutput", b"StructuredOutput"]) -> typing.Literal["json_object", "json_schema"] | None: ...
+
+ global___BatchCompletionRequest = BatchCompletionRequest
+
+ @typing.final
+ class CompletionOptions(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ TEMPERATURE_FIELD_NUMBER: builtins.int
+ MAX_TOKENS_FIELD_NUMBER: builtins.int
+ REASONING_OPTIONS_FIELD_NUMBER: builtins.int
+ @property
+ def temperature(self) -> google.protobuf.wrappers_pb2.DoubleValue: ...
+ @property
+ def max_tokens(self) -> google.protobuf.wrappers_pb2.Int64Value: ...
+ @property
+ def reasoning_options(self) -> global___ReasoningOptions: ...
+ def __init__(
+ self,
+ *,
+ temperature: google.protobuf.wrappers_pb2.DoubleValue | None = ...,
+ max_tokens: google.protobuf.wrappers_pb2.Int64Value | None = ...,
+ reasoning_options: global___ReasoningOptions | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["max_tokens", b"max_tokens", "reasoning_options", b"reasoning_options", "temperature", b"temperature"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["max_tokens", b"max_tokens", "reasoning_options", b"reasoning_options", "temperature", b"temperature"]) -> None: ...
+
+ global___CompletionOptions = CompletionOptions
+
+ @typing.final
+ class ReasoningOptions(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ class _ReasoningMode:
+ ValueType = typing.NewType("ValueType", builtins.int)
+ V: typing_extensions.TypeAlias = ValueType
+
+ class _ReasoningModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ReasoningOptions._ReasoningMode.ValueType], builtins.type):
+ DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
+ REASONING_MODE_UNSPECIFIED: ReasoningOptions._ReasoningMode.ValueType # 0
+ DISABLED: ReasoningOptions._ReasoningMode.ValueType # 1
+ ENABLED_HIDDEN: ReasoningOptions._ReasoningMode.ValueType # 2
+
+ class ReasoningMode(_ReasoningMode, metaclass=_ReasoningModeEnumTypeWrapper): ...
+ REASONING_MODE_UNSPECIFIED: ReasoningOptions.ReasoningMode.ValueType # 0
+ DISABLED: ReasoningOptions.ReasoningMode.ValueType # 1
+ ENABLED_HIDDEN: ReasoningOptions.ReasoningMode.ValueType # 2
+
+ MODE_FIELD_NUMBER: builtins.int
+ mode: global___ReasoningOptions.ReasoningMode.ValueType
+ def __init__(
+ self,
+ *,
+ mode: global___ReasoningOptions.ReasoningMode.ValueType = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["mode", b"mode"]) -> None: ...
+
+ global___ReasoningOptions = ReasoningOptions
+
+ @typing.final
+ class JsonSchema(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ SCHEMA_FIELD_NUMBER: builtins.int
+ @property
+ def schema(self) -> google.protobuf.struct_pb2.Struct: ...
+ def __init__(
+ self,
+ *,
+ schema: google.protobuf.struct_pb2.Struct | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing.Literal["schema", b"schema"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing.Literal["schema", b"schema"]) -> None: ...
+
+ global___JsonSchema = JsonSchema
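
Judging by the added line count (+126) and the message names, the hunk above appears to be the new yandex/cloud/ai/batch_inference/v1/inference_options_pb2.pyi stub. A minimal construction sketch, not taken from the diff; the model URI and dataset ID are placeholders:

# Sketch only: building the new batch-inference request messages.
from google.protobuf import wrappers_pb2
from yandex.cloud.ai.batch_inference.v1 import inference_options_pb2 as options_pb2

request = options_pb2.BatchCompletionRequest(
    model_uri="gpt://<folder-id>/<model>",   # placeholder, not from this diff
    source_dataset_id="<dataset-id>",        # placeholder
    completion_options=options_pb2.CompletionOptions(
        temperature=wrappers_pb2.DoubleValue(value=0.3),
        max_tokens=wrappers_pb2.Int64Value(value=2000),
        reasoning_options=options_pb2.ReasoningOptions(
            mode=options_pb2.ReasoningOptions.DISABLED,
        ),
    ),
    json_object=True,  # one arm of the StructuredOutput oneof; json_schema is the other
)
# Standard oneof semantics: setting json_schema later would clear json_object.
assert request.WhichOneof("StructuredOutput") == "json_object"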
@@ -0,0 +1,24 @@
+ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+ """Client and server classes corresponding to protobuf-defined services."""
+ import grpc
+ import warnings
+
+
+ GRPC_GENERATED_VERSION = '1.70.0'
+ GRPC_VERSION = grpc.__version__
+ _version_not_supported = False
+
+ try:
+ from grpc._utilities import first_version_is_lower
+ _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
+ except ImportError:
+ _version_not_supported = True
+
+ if _version_not_supported:
+ raise RuntimeError(
+ f'The grpc package installed is at version {GRPC_VERSION},'
+ + f' but the generated code in yandex/cloud/ai/batch_inference/v1/inference_options_pb2_grpc.py depends on'
+ + f' grpcio>={GRPC_GENERATED_VERSION}.'
+ + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+ + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
+ )
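
The regenerated *_pb2_grpc modules pin GRPC_GENERATED_VERSION = '1.70.0' and raise RuntimeError at import time if the installed grpcio is older. A short sketch that mirrors the guard above so an application can check compatibility up front (it assumes only that grpcio is installed):

# Sketch: pre-flight check before importing the regenerated *_pb2_grpc modules.
import grpc

REQUIRED = "1.70.0"  # matches GRPC_GENERATED_VERSION in the generated code

try:
    from grpc._utilities import first_version_is_lower
    too_old = first_version_is_lower(grpc.__version__, REQUIRED)
except ImportError:  # helper is missing on older grpcio releases
    too_old = True

if too_old:
    print(f"grpcio {grpc.__version__} is older than {REQUIRED}; "
          "upgrade grpcio before importing the generated stubs")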
@@ -0,0 +1,17 @@
+ """
+ @generated by mypy-protobuf. Do not edit manually!
+ isort:skip_file
+ """
+
+ import abc
+ import collections.abc
+ import grpc
+ import grpc.aio
+ import typing
+
+ _T = typing.TypeVar("_T")
+
+ class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ...
+
+ class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg]
+ ...
@@ -26,7 +26,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
  from yandex.cloud.ai.foundation_models.v1.text_classification import text_classification_pb2 as yandex_dot_cloud_dot_ai_dot_foundation__models_dot_v1_dot_text__classification_dot_text__classification__pb2


- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nZyandex/cloud/ai/foundation_models/v1/text_classification/text_classification_service.proto\x12\x38yandex.cloud.ai.foundation_models.v1.text_classification\x1a\x1cgoogle/api/annotations.proto\x1aRyandex/cloud/ai/foundation_models/v1/text_classification/text_classification.proto\"<\n\x19TextClassificationRequest\x12\x11\n\tmodel_uri\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\"\x97\x01\n\x1aTextClassificationResponse\x12\x62\n\x0bpredictions\x18\x01 \x03(\x0b\x32M.yandex.cloud.ai.foundation_models.v1.text_classification.ClassificationLabel\x12\x15\n\rmodel_version\x18\x02 \x01(\t\"\xce\x01\n FewShotTextClassificationRequest\x12\x11\n\tmodel_uri\x18\x01 \x01(\t\x12\x18\n\x10task_description\x18\x02 \x01(\t\x12\x0e\n\x06labels\x18\x03 \x03(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12_\n\x07samples\x18\x05 \x03(\x0b\x32N.yandex.cloud.ai.foundation_models.v1.text_classification.ClassificationSample\"\x9e\x01\n!FewShotTextClassificationResponse\x12\x62\n\x0bpredictions\x18\x01 \x03(\x0b\x32M.yandex.cloud.ai.foundation_models.v1.text_classification.ClassificationLabel\x12\x15\n\rmodel_version\x18\x02 \x01(\t2\x8f\x04\n\x19TextClassificationService\x12\xe9\x01\n\x08\x43lassify\x12S.yandex.cloud.ai.foundation_models.v1.text_classification.TextClassificationRequest\x1aT.yandex.cloud.ai.foundation_models.v1.text_classification.TextClassificationResponse\"2\x82\xd3\xe4\x93\x02,\"\'/foundationModels/v1/textClassification:\x01*\x12\x85\x02\n\x0f\x46\x65wShotClassify\x12Z.yandex.cloud.ai.foundation_models.v1.text_classification.FewShotTextClassificationRequest\x1a[.yandex.cloud.ai.foundation_models.v1.text_classification.FewShotTextClassificationResponse\"9\x82\xd3\xe4\x93\x02\x33\"./foundationModels/v1/fewShotTextClassification:\x01*B\xb0\x01\n<yandex.cloud.api.ai.foundation_models.v1.text_classificationZpgithub.com/yandex-cloud/go-genproto/yandex/cloud/ai/foundation_models/v1/text_classification;text_classificationb\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nZyandex/cloud/ai/foundation_models/v1/text_classification/text_classification_service.proto\x12\x38yandex.cloud.ai.foundation_models.v1.text_classification\x1a\x1cgoogle/api/annotations.proto\x1aRyandex/cloud/ai/foundation_models/v1/text_classification/text_classification.proto\"<\n\x19TextClassificationRequest\x12\x11\n\tmodel_uri\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\"\xad\x01\n\x1aTextClassificationResponse\x12\x62\n\x0bpredictions\x18\x01 \x03(\x0b\x32M.yandex.cloud.ai.foundation_models.v1.text_classification.ClassificationLabel\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\x14\n\x0cinput_tokens\x18\x03 \x01(\x03\"\xce\x01\n FewShotTextClassificationRequest\x12\x11\n\tmodel_uri\x18\x01 \x01(\t\x12\x18\n\x10task_description\x18\x02 \x01(\t\x12\x0e\n\x06labels\x18\x03 \x03(\t\x12\x0c\n\x04text\x18\x04 \x01(\t\x12_\n\x07samples\x18\x05 \x03(\x0b\x32N.yandex.cloud.ai.foundation_models.v1.text_classification.ClassificationSample\"\xb4\x01\n!FewShotTextClassificationResponse\x12\x62\n\x0bpredictions\x18\x01 \x03(\x0b\x32M.yandex.cloud.ai.foundation_models.v1.text_classification.ClassificationLabel\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\x14\n\x0cinput_tokens\x18\x03 \x01(\x03\x32\x8f\x04\n\x19TextClassificationService\x12\xe9\x01\n\x08\x43lassify\x12S.yandex.cloud.ai.foundation_models.v1.text_classification.TextClassificationRequest\x1aT.yandex.cloud.ai.foundation_models.v1.text_classification.TextClassificationResponse\"2\x82\xd3\xe4\x93\x02,\"\'/foundationModels/v1/textClassification:\x01*\x12\x85\x02\n\x0f\x46\x65wShotClassify\x12Z.yandex.cloud.ai.foundation_models.v1.text_classification.FewShotTextClassificationRequest\x1a[.yandex.cloud.ai.foundation_models.v1.text_classification.FewShotTextClassificationResponse\"9\x82\xd3\xe4\x93\x02\x33\"./foundationModels/v1/fewShotTextClassification:\x01*B\xb0\x01\n<yandex.cloud.api.ai.foundation_models.v1.text_classificationZpgithub.com/yandex-cloud/go-genproto/yandex/cloud/ai/foundation_models/v1/text_classification;text_classificationb\x06proto3')

  _globals = globals()
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -41,11 +41,11 @@ if not _descriptor._USE_C_DESCRIPTORS:
  _globals['_TEXTCLASSIFICATIONREQUEST']._serialized_start=266
  _globals['_TEXTCLASSIFICATIONREQUEST']._serialized_end=326
  _globals['_TEXTCLASSIFICATIONRESPONSE']._serialized_start=329
- _globals['_TEXTCLASSIFICATIONRESPONSE']._serialized_end=480
- _globals['_FEWSHOTTEXTCLASSIFICATIONREQUEST']._serialized_start=483
- _globals['_FEWSHOTTEXTCLASSIFICATIONREQUEST']._serialized_end=689
- _globals['_FEWSHOTTEXTCLASSIFICATIONRESPONSE']._serialized_start=692
- _globals['_FEWSHOTTEXTCLASSIFICATIONRESPONSE']._serialized_end=850
- _globals['_TEXTCLASSIFICATIONSERVICE']._serialized_start=853
- _globals['_TEXTCLASSIFICATIONSERVICE']._serialized_end=1380
+ _globals['_TEXTCLASSIFICATIONRESPONSE']._serialized_end=502
+ _globals['_FEWSHOTTEXTCLASSIFICATIONREQUEST']._serialized_start=505
+ _globals['_FEWSHOTTEXTCLASSIFICATIONREQUEST']._serialized_end=711
+ _globals['_FEWSHOTTEXTCLASSIFICATIONRESPONSE']._serialized_start=714
+ _globals['_FEWSHOTTEXTCLASSIFICATIONRESPONSE']._serialized_end=894
+ _globals['_TEXTCLASSIFICATIONSERVICE']._serialized_start=897
+ _globals['_TEXTCLASSIFICATIONSERVICE']._serialized_end=1424
  # @@protoc_insertion_point(module_scope)
@@ -17,7 +17,7 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
  class TextClassificationRequest(google.protobuf.message.Message):
  """Request for the service to classify text with tuned model.

- The names of the classes between which the model will be distributing requests must be specified during model tuning;
+ The names of the classes between which the model will be distributing requests must be specified during model tuning;
  therefore, they are not provided in the request.

  For examples of usage, see [step-by-step guides](/docs/foundation-models/operations/classifier/additionally-trained).
@@ -49,11 +49,14 @@ class TextClassificationResponse(google.protobuf.message.Message):

  PREDICTIONS_FIELD_NUMBER: builtins.int
  MODEL_VERSION_FIELD_NUMBER: builtins.int
+ INPUT_TOKENS_FIELD_NUMBER: builtins.int
  model_version: builtins.str
  """The model version changes with each new releases."""
+ input_tokens: builtins.int
+ """Number of input tokens"""
  @property
  def predictions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_pb2.ClassificationLabel]:
- """The classification results with the `confidence`` values
+ """The classification results with the `confidence`` values
  for the probability of classifying the request text into each class.
  """

@@ -62,15 +65,16 @@ class TextClassificationResponse(google.protobuf.message.Message):
  *,
  predictions: collections.abc.Iterable[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_pb2.ClassificationLabel] | None = ...,
  model_version: builtins.str = ...,
+ input_tokens: builtins.int = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing.Literal["model_version", b"model_version", "predictions", b"predictions"]) -> None: ...
+ def ClearField(self, field_name: typing.Literal["input_tokens", b"input_tokens", "model_version", b"model_version", "predictions", b"predictions"]) -> None: ...

  global___TextClassificationResponse = TextClassificationResponse

  @typing.final
  class FewShotTextClassificationRequest(google.protobuf.message.Message):
  """Request for the service to classify text.
- For examples of usage, see [step-by-step guides](/docs/operations/classifier/readymade).
+ For examples of usage, see [step-by-step guides](/docs/foundation-models/operations/classifier/readymade).
  """

  DESCRIPTOR: google.protobuf.descriptor.Descriptor
@@ -89,7 +93,7 @@ class FewShotTextClassificationRequest(google.protobuf.message.Message):
  @property
  def labels(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
  """List of available labels for the classification result.
- Give meaningful names to label classes: this is essential for correct classification results.
+ Give meaningful names to label classes: this is essential for correct classification results.
  For example, use ``chemistry`` and ``physics`` rather than ``chm`` and ``phs`` for class names.
  """

@@ -118,11 +122,14 @@ class FewShotTextClassificationResponse(google.protobuf.message.Message):

  PREDICTIONS_FIELD_NUMBER: builtins.int
  MODEL_VERSION_FIELD_NUMBER: builtins.int
+ INPUT_TOKENS_FIELD_NUMBER: builtins.int
  model_version: builtins.str
  """The model version changes with each new releases."""
+ input_tokens: builtins.int
+ """Number of input tokens"""
  @property
  def predictions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_pb2.ClassificationLabel]:
- """The classification results with the `confidence`` values
+ """The classification results with the `confidence`` values
  for the probability of classifying the request text into each class.
  """

@@ -131,7 +138,8 @@ class FewShotTextClassificationResponse(google.protobuf.message.Message):
  *,
  predictions: collections.abc.Iterable[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_pb2.ClassificationLabel] | None = ...,
  model_version: builtins.str = ...,
+ input_tokens: builtins.int = ...,
  ) -> None: ...
- def ClearField(self, field_name: typing.Literal["model_version", b"model_version", "predictions", b"predictions"]) -> None: ...
+ def ClearField(self, field_name: typing.Literal["input_tokens", b"input_tokens", "model_version", b"model_version", "predictions", b"predictions"]) -> None: ...

  global___FewShotTextClassificationResponse = FewShotTextClassificationResponse
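
The hunks above add an input_tokens field to both TextClassificationResponse and FewShotTextClassificationResponse. A hedged sketch of reading it after a Classify call; the SDK client setup and the model_uri value are illustrative placeholders, not taken from this diff:

# Sketch only: reading the new input_tokens field from a classification response.
import yandexcloud
from yandex.cloud.ai.foundation_models.v1.text_classification import (
    text_classification_service_pb2 as tc_pb2,
    text_classification_service_pb2_grpc as tc_grpc,
)

sdk = yandexcloud.SDK(iam_token="<iam-token>")          # placeholder credentials
client = sdk.client(tc_grpc.TextClassificationServiceStub)

response = client.Classify(tc_pb2.TextClassificationRequest(
    model_uri="<tuned-classifier-uri>",                  # placeholder URI
    text="Some text to classify",
))
print(response.model_version, response.input_tokens)     # input_tokens is new in 0.341.0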
@@ -54,7 +54,7 @@ class TextClassificationServiceServicer(object):
  def Classify(self, request, context):
  """RPC method to classify text with tuned model.

- The names of the classes between which the model will be distributing requests
+ The names of the classes between which the model will be distributing requests
  must be specified during model tuning and are not provided in the request.
  """
  context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -62,7 +62,7 @@ class TextClassificationServiceServicer(object):
  raise NotImplementedError('Method not implemented!')

  def FewShotClassify(self, request, context):
- """RPC method for binary and multi-class classification.
+ """RPC method for binary and multi-class classification.

  You can provide up to 20 classes for few-shot text classification
  with optional examples.
@@ -27,7 +27,7 @@ class TextClassificationServiceStub:
  ]
  """RPC method to classify text with tuned model.

- The names of the classes between which the model will be distributing requests
+ The names of the classes between which the model will be distributing requests
  must be specified during model tuning and are not provided in the request.
  """

@@ -35,7 +35,7 @@ class TextClassificationServiceStub:
  yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationRequest,
  yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationResponse,
  ]
- """RPC method for binary and multi-class classification.
+ """RPC method for binary and multi-class classification.

  You can provide up to 20 classes for few-shot text classification
  with optional examples.
@@ -50,7 +50,7 @@ class TextClassificationServiceAsyncStub:
  ]
  """RPC method to classify text with tuned model.

- The names of the classes between which the model will be distributing requests
+ The names of the classes between which the model will be distributing requests
  must be specified during model tuning and are not provided in the request.
  """

@@ -58,7 +58,7 @@ class TextClassificationServiceAsyncStub:
  yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationRequest,
  yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationResponse,
  ]
- """RPC method for binary and multi-class classification.
+ """RPC method for binary and multi-class classification.

  You can provide up to 20 classes for few-shot text classification
  with optional examples.
@@ -75,7 +75,7 @@ class TextClassificationServiceServicer(metaclass=abc.ABCMeta):
  ) -> typing.Union[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.TextClassificationResponse, collections.abc.Awaitable[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.TextClassificationResponse]]:
  """RPC method to classify text with tuned model.

- The names of the classes between which the model will be distributing requests
+ The names of the classes between which the model will be distributing requests
  must be specified during model tuning and are not provided in the request.
  """

@@ -85,7 +85,7 @@ class TextClassificationServiceServicer(metaclass=abc.ABCMeta):
  request: yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationRequest,
  context: _ServicerContext,
  ) -> typing.Union[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationResponse, collections.abc.Awaitable[yandex.cloud.ai.foundation_models.v1.text_classification.text_classification_service_pb2.FewShotTextClassificationResponse]]:
- """RPC method for binary and multi-class classification.
+ """RPC method for binary and multi-class classification.

  You can provide up to 20 classes for few-shot text classification
  with optional examples.
@@ -24,7 +24,7 @@ _sym_db = _symbol_database.Default()



- DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n yandex/cloud/ai/stt/v3/stt.proto\x12\x10speechkit.stt.v3\"\xe2\x03\n\x18TextNormalizationOptions\x12X\n\x12text_normalization\x18\x01 \x01(\x0e\x32<.speechkit.stt.v3.TextNormalizationOptions.TextNormalization\x12\x18\n\x10profanity_filter\x18\x02 \x01(\x08\x12\x17\n\x0fliterature_text\x18\x03 \x01(\x08\x12]\n\x15phone_formatting_mode\x18\x04 \x01(\x0e\x32>.speechkit.stt.v3.TextNormalizationOptions.PhoneFormattingMode\"x\n\x11TextNormalization\x12\"\n\x1eTEXT_NORMALIZATION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aTEXT_NORMALIZATION_ENABLED\x10\x01\x12\x1f\n\x1bTEXT_NORMALIZATION_DISABLED\x10\x02\"`\n\x13PhoneFormattingMode\x12%\n!PHONE_FORMATTING_MODE_UNSPECIFIED\x10\x00\x12\"\n\x1ePHONE_FORMATTING_MODE_DISABLED\x10\x01\"\xce\x01\n\x14\x44\x65\x66\x61ultEouClassifier\x12\x43\n\x04type\x18\x01 \x01(\x0e\x32\x35.speechkit.stt.v3.DefaultEouClassifier.EouSensitivity\x12\'\n\x1fmax_pause_between_words_hint_ms\x18\x02 \x01(\x03\"H\n\x0e\x45ouSensitivity\x12\x1f\n\x1b\x45OU_SENSITIVITY_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x01\x12\x08\n\x04HIGH\x10\x02\"\x17\n\x15\x45xternalEouClassifier\"\xb2\x01\n\x14\x45ouClassifierOptions\x12\x44\n\x12\x64\x65\x66\x61ult_classifier\x18\x01 \x01(\x0b\x32&.speechkit.stt.v3.DefaultEouClassifierH\x00\x12\x46\n\x13\x65xternal_classifier\x18\x02 \x01(\x0b\x32\'.speechkit.stt.v3.ExternalEouClassifierH\x00\x42\x0c\n\nClassifier\"\xd3\x01\n\x15RecognitionClassifier\x12\x12\n\nclassifier\x18\x01 \x01(\t\x12\x45\n\x08triggers\x18\x02 \x03(\x0e\x32\x33.speechkit.stt.v3.RecognitionClassifier.TriggerType\"_\n\x0bTriggerType\x12 \n\x18TRIGGER_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x10\n\x0cON_UTTERANCE\x10\x01\x12\x0c\n\x08ON_FINAL\x10\x02\x12\x0e\n\nON_PARTIAL\x10\x03\"\\\n\x1cRecognitionClassifierOptions\x12<\n\x0b\x63lassifiers\x18\x01 \x03(\x0b\x32\'.speechkit.stt.v3.RecognitionClassifier\"\x88\x01\n\x15SpeechAnalysisOptions\x12\x1f\n\x17\x65nable_speaker_analysis\x18\x01 \x01(\x08\x12$\n\x1c\x65nable_conversation_analysis\x18\x02 \x01(\x08\x12(\n descriptive_statistics_quantiles\x18\x03 \x03(\x01\"\xc7\x01\n\x08RawAudio\x12@\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32(.speechkit.stt.v3.RawAudio.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x03\x12\x1b\n\x13\x61udio_channel_count\x18\x03 \x01(\x03\"A\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x10\n\x0cLINEAR16_PCM\x10\x01\"\xbf\x01\n\x0e\x43ontainerAudio\x12Q\n\x14\x63ontainer_audio_type\x18\x01 \x01(\x0e\x32\x33.speechkit.stt.v3.ContainerAudio.ContainerAudioType\"Z\n\x12\x43ontainerAudioType\x12$\n CONTAINER_AUDIO_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03WAV\x10\x01\x12\x0c\n\x08OGG_OPUS\x10\x02\x12\x07\n\x03MP3\x10\x03\"\x91\x01\n\x12\x41udioFormatOptions\x12/\n\traw_audio\x18\x01 \x01(\x0b\x32\x1a.speechkit.stt.v3.RawAudioH\x00\x12;\n\x0f\x63ontainer_audio\x18\x02 \x01(\x0b\x32 .speechkit.stt.v3.ContainerAudioH\x00\x42\r\n\x0b\x41udioFormat\"\xf7\x01\n\x1aLanguageRestrictionOptions\x12^\n\x10restriction_type\x18\x01 \x01(\x0e\x32\x44.speechkit.stt.v3.LanguageRestrictionOptions.LanguageRestrictionType\x12\x15\n\rlanguage_code\x18\x02 \x03(\t\"b\n\x17LanguageRestrictionType\x12)\n%LANGUAGE_RESTRICTION_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tWHITELIST\x10\x01\x12\r\n\tBLACKLIST\x10\x02\"\xb2\x03\n\x17RecognitionModelOptions\x12\r\n\x05model\x18\x01 \x01(\t\x12:\n\x0c\x61udio_format\x18\x02 \x01(\x0b\x32$.speechkit.stt.v3.AudioFormatOptions\x12\x46\n\x12text_normalization\x18\x03 
\x01(\x0b\x32*.speechkit.stt.v3.TextNormalizationOptions\x12J\n\x14language_restriction\x18\x04 \x01(\x0b\x32,.speechkit.stt.v3.LanguageRestrictionOptions\x12\\\n\x15\x61udio_processing_type\x18\x05 \x01(\x0e\x32=.speechkit.stt.v3.RecognitionModelOptions.AudioProcessingType\"Z\n\x13\x41udioProcessingType\x12%\n!AUDIO_PROCESSING_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tREAL_TIME\x10\x01\x12\r\n\tFULL_DATA\x10\x02\"\xde\x01\n\x16SpeakerLabelingOptions\x12R\n\x10speaker_labeling\x18\x01 \x01(\x0e\x32\x38.speechkit.stt.v3.SpeakerLabelingOptions.SpeakerLabeling\"p\n\x0fSpeakerLabeling\x12 \n\x1cSPEAKER_LABELING_UNSPECIFIED\x10\x00\x12\x1c\n\x18SPEAKER_LABELING_ENABLED\x10\x01\x12\x1d\n\x19SPEAKER_LABELING_DISABLED\x10\x02\"\xee\x02\n\x10StreamingOptions\x12\x44\n\x11recognition_model\x18\x01 \x01(\x0b\x32).speechkit.stt.v3.RecognitionModelOptions\x12>\n\x0e\x65ou_classifier\x18\x02 \x01(\x0b\x32&.speechkit.stt.v3.EouClassifierOptions\x12N\n\x16recognition_classifier\x18\x03 \x01(\x0b\x32..speechkit.stt.v3.RecognitionClassifierOptions\x12@\n\x0fspeech_analysis\x18\x04 \x01(\x0b\x32\'.speechkit.stt.v3.SpeechAnalysisOptions\x12\x42\n\x10speaker_labeling\x18\x05 \x01(\x0b\x32(.speechkit.stt.v3.SpeakerLabelingOptions\"\x1a\n\nAudioChunk\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"#\n\x0cSilenceChunk\x12\x13\n\x0b\x64uration_ms\x18\x01 \x01(\x03\"\x05\n\x03\x45ou\"\xe8\x01\n\x10StreamingRequest\x12=\n\x0fsession_options\x18\x01 \x01(\x0b\x32\".speechkit.stt.v3.StreamingOptionsH\x00\x12-\n\x05\x63hunk\x18\x02 \x01(\x0b\x32\x1c.speechkit.stt.v3.AudioChunkH\x00\x12\x37\n\rsilence_chunk\x18\x03 \x01(\x0b\x32\x1e.speechkit.stt.v3.SilenceChunkH\x00\x12$\n\x03\x65ou\x18\x04 \x01(\x0b\x32\x15.speechkit.stt.v3.EouH\x00\x42\x07\n\x05\x45vent\"\xe3\x02\n\x14RecognizeFileRequest\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x12\x44\n\x11recognition_model\x18\x03 \x01(\x0b\x32).speechkit.stt.v3.RecognitionModelOptions\x12N\n\x16recognition_classifier\x18\x04 \x01(\x0b\x32..speechkit.stt.v3.RecognitionClassifierOptions\x12@\n\x0fspeech_analysis\x18\x05 \x01(\x0b\x32\'.speechkit.stt.v3.SpeechAnalysisOptions\x12\x42\n\x10speaker_labeling\x18\x06 \x01(\x0b\x32(.speechkit.stt.v3.SpeakerLabelingOptionsB\r\n\x0b\x41udioSource\"@\n\x04Word\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x15\n\rstart_time_ms\x18\x02 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x03 \x01(\x03\"@\n\x12LanguageEstimation\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x13\n\x0bprobability\x18\x02 \x01(\x01\"\xbb\x01\n\x0b\x41lternative\x12%\n\x05words\x18\x01 \x03(\x0b\x32\x16.speechkit.stt.v3.Word\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x15\n\rstart_time_ms\x18\x03 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x04 \x01(\x03\x12\x12\n\nconfidence\x18\x05 \x01(\x01\x12\x37\n\tlanguages\x18\x06 \x03(\x0b\x32$.speechkit.stt.v3.LanguageEstimation\"\x1c\n\tEouUpdate\x12\x0f\n\x07time_ms\x18\x02 \x01(\x03\"a\n\x11\x41lternativeUpdate\x12\x33\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x1d.speechkit.stt.v3.Alternative\x12\x17\n\x0b\x63hannel_tag\x18\x02 \x01(\tB\x02\x18\x01\"\x99\x01\n\x0c\x41udioCursors\x12\x18\n\x10received_data_ms\x18\x01 \x01(\x03\x12\x15\n\rreset_time_ms\x18\x02 \x01(\x03\x12\x17\n\x0fpartial_time_ms\x18\x03 \x01(\x03\x12\x15\n\rfinal_time_ms\x18\x04 \x01(\x03\x12\x13\n\x0b\x66inal_index\x18\x05 \x01(\x03\x12\x13\n\x0b\x65ou_time_ms\x18\x06 \x01(\x03\"n\n\x0f\x46inalRefinement\x12\x13\n\x0b\x66inal_index\x18\x01 \x01(\x03\x12>\n\x0fnormalized_text\x18\x02 
\x01(\x0b\x32#.speechkit.stt.v3.AlternativeUpdateH\x00\x42\x06\n\x04Type\"L\n\nStatusCode\x12-\n\tcode_type\x18\x01 \x01(\x0e\x32\x1a.speechkit.stt.v3.CodeType\x12\x0f\n\x07message\x18\x02 \x01(\t\"4\n\x0bSessionUuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x17\n\x0fuser_request_id\x18\x02 \x01(\t\"K\n\x0fPhraseHighlight\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x15\n\rstart_time_ms\x18\x02 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x03 \x01(\x03\"?\n\x1aRecognitionClassifierLabel\x12\r\n\x05label\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x01\"\xa6\x01\n\x1bRecognitionClassifierResult\x12\x12\n\nclassifier\x18\x01 \x01(\t\x12\x35\n\nhighlights\x18\x02 \x03(\x0b\x32!.speechkit.stt.v3.PhraseHighlight\x12<\n\x06labels\x18\x03 \x03(\x0b\x32,.speechkit.stt.v3.RecognitionClassifierLabel\"\xc7\x02\n\x1bRecognitionClassifierUpdate\x12M\n\x0bwindow_type\x18\x01 \x01(\x0e\x32\x38.speechkit.stt.v3.RecognitionClassifierUpdate.WindowType\x12\x15\n\rstart_time_ms\x18\x02 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x03 \x01(\x03\x12H\n\x11\x63lassifier_result\x18\x04 \x01(\x0b\x32-.speechkit.stt.v3.RecognitionClassifierResult\"c\n\nWindowType\x12\x1f\n\x17WINDOW_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x12\n\x0eLAST_UTTERANCE\x10\x01\x12\x0e\n\nLAST_FINAL\x10\x02\x12\x10\n\x0cLAST_PARTIAL\x10\x03\"\xbb\x01\n\x15\x44\x65scriptiveStatistics\x12\x0b\n\x03min\x18\x01 \x01(\x01\x12\x0b\n\x03max\x18\x02 \x01(\x01\x12\x0c\n\x04mean\x18\x03 \x01(\x01\x12\x0b\n\x03std\x18\x04 \x01(\x01\x12\x43\n\tquantiles\x18\x05 \x03(\x0b\x32\x30.speechkit.stt.v3.DescriptiveStatistics.Quantile\x1a(\n\x08Quantile\x12\r\n\x05level\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01\"D\n\x16\x41udioSegmentBoundaries\x12\x15\n\rstart_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x02 \x01(\x03\"\x87\x06\n\x0fSpeakerAnalysis\x12\x13\n\x0bspeaker_tag\x18\x01 \x01(\t\x12\x41\n\x0bwindow_type\x18\x02 \x01(\x0e\x32,.speechkit.stt.v3.SpeakerAnalysis.WindowType\x12\x43\n\x11speech_boundaries\x18\x03 \x01(\x0b\x32(.speechkit.stt.v3.AudioSegmentBoundaries\x12\x17\n\x0ftotal_speech_ms\x18\x04 \x01(\x03\x12\x14\n\x0cspeech_ratio\x18\x05 \x01(\x01\x12\x18\n\x10total_silence_ms\x18\x06 \x01(\x03\x12\x15\n\rsilence_ratio\x18\x07 \x01(\x01\x12\x13\n\x0bwords_count\x18\x08 \x01(\x03\x12\x15\n\rletters_count\x18\t \x01(\x03\x12\x41\n\x10words_per_second\x18\n \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x43\n\x12letters_per_second\x18\x0b \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x44\n\x13words_per_utterance\x18\x0c \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x46\n\x15letters_per_utterance\x18\r \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x17\n\x0futterance_count\x18\x0e \x01(\x03\x12N\n\x1dutterance_duration_estimation\x18\x0f \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\"L\n\nWindowType\x12\x1f\n\x17WINDOW_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\t\n\x05TOTAL\x10\x01\x12\x12\n\x0eLAST_UTTERANCE\x10\x02\"\x85\x06\n\x14\x43onversationAnalysis\x12I\n\x17\x63onversation_boundaries\x18\x01 \x01(\x0b\x32(.speechkit.stt.v3.AudioSegmentBoundaries\x12.\n&total_simultaneous_silence_duration_ms\x18\x02 \x01(\x03\x12(\n total_simultaneous_silence_ratio\x18\x03 \x01(\x01\x12Y\n(simultaneous_silence_duration_estimation\x18\x04 \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12-\n%total_simultaneous_speech_duration_ms\x18\x05 \x01(\x03\x12\'\n\x1ftotal_simultaneous_speech_ratio\x18\x06 \x01(\x01\x12X\n\'simultaneous_speech_duration_estimation\x18\x07 
\x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12W\n\x12speaker_interrupts\x18\x08 \x03(\x0b\x32;.speechkit.stt.v3.ConversationAnalysis.InterruptsEvaluation\x12 \n\x18total_speech_duration_ms\x18\t \x01(\x03\x12\x1a\n\x12total_speech_ratio\x18\n \x01(\x01\x1a\xa3\x01\n\x14InterruptsEvaluation\x12\x13\n\x0bspeaker_tag\x18\x01 \x01(\t\x12\x18\n\x10interrupts_count\x18\x02 \x01(\x03\x12\x1e\n\x16interrupts_duration_ms\x18\x03 \x01(\x03\x12<\n\ninterrupts\x18\x04 \x03(\x0b\x32(.speechkit.stt.v3.AudioSegmentBoundaries\"\xa5\x05\n\x11StreamingResponse\x12\x33\n\x0csession_uuid\x18\x01 \x01(\x0b\x32\x1d.speechkit.stt.v3.SessionUuid\x12\x35\n\raudio_cursors\x18\x02 \x01(\x0b\x32\x1e.speechkit.stt.v3.AudioCursors\x12\x1d\n\x15response_wall_time_ms\x18\x03 \x01(\x03\x12\x36\n\x07partial\x18\x04 \x01(\x0b\x32#.speechkit.stt.v3.AlternativeUpdateH\x00\x12\x34\n\x05\x66inal\x18\x05 \x01(\x0b\x32#.speechkit.stt.v3.AlternativeUpdateH\x00\x12\x31\n\neou_update\x18\x06 \x01(\x0b\x32\x1b.speechkit.stt.v3.EouUpdateH\x00\x12=\n\x10\x66inal_refinement\x18\x07 \x01(\x0b\x32!.speechkit.stt.v3.FinalRefinementH\x00\x12\x33\n\x0bstatus_code\x18\x08 \x01(\x0b\x32\x1c.speechkit.stt.v3.StatusCodeH\x00\x12J\n\x11\x63lassifier_update\x18\n \x01(\x0b\x32-.speechkit.stt.v3.RecognitionClassifierUpdateH\x00\x12=\n\x10speaker_analysis\x18\x0b \x01(\x0b\x32!.speechkit.stt.v3.SpeakerAnalysisH\x00\x12G\n\x15\x63onversation_analysis\x18\x0c \x01(\x0b\x32&.speechkit.stt.v3.ConversationAnalysisH\x00\x12\x13\n\x0b\x63hannel_tag\x18\t \x01(\tB\x07\n\x05\x45vent\"0\n\x18\x44\x65leteRecognitionRequest\x12\x14\n\x0coperation_id\x18\x01 \x01(\t*O\n\x08\x43odeType\x12\x1d\n\x15\x43ODE_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x0b\n\x07WORKING\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\n\n\x06\x43LOSED\x10\x03\x42\\\n\x1ayandex.cloud.api.ai.stt.v3Z>github.com/yandex-cloud/go-genproto/yandex/cloud/ai/stt/v3;sttb\x06proto3')
+ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n yandex/cloud/ai/stt/v3/stt.proto\x12\x10speechkit.stt.v3\"\xe2\x03\n\x18TextNormalizationOptions\x12X\n\x12text_normalization\x18\x01 \x01(\x0e\x32<.speechkit.stt.v3.TextNormalizationOptions.TextNormalization\x12\x18\n\x10profanity_filter\x18\x02 \x01(\x08\x12\x17\n\x0fliterature_text\x18\x03 \x01(\x08\x12]\n\x15phone_formatting_mode\x18\x04 \x01(\x0e\x32>.speechkit.stt.v3.TextNormalizationOptions.PhoneFormattingMode\"x\n\x11TextNormalization\x12\"\n\x1eTEXT_NORMALIZATION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aTEXT_NORMALIZATION_ENABLED\x10\x01\x12\x1f\n\x1bTEXT_NORMALIZATION_DISABLED\x10\x02\"`\n\x13PhoneFormattingMode\x12%\n!PHONE_FORMATTING_MODE_UNSPECIFIED\x10\x00\x12\"\n\x1ePHONE_FORMATTING_MODE_DISABLED\x10\x01\"\xce\x01\n\x14\x44\x65\x66\x61ultEouClassifier\x12\x43\n\x04type\x18\x01 \x01(\x0e\x32\x35.speechkit.stt.v3.DefaultEouClassifier.EouSensitivity\x12\'\n\x1fmax_pause_between_words_hint_ms\x18\x02 \x01(\x03\"H\n\x0e\x45ouSensitivity\x12\x1f\n\x1b\x45OU_SENSITIVITY_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x01\x12\x08\n\x04HIGH\x10\x02\"\x17\n\x15\x45xternalEouClassifier\"\xb2\x01\n\x14\x45ouClassifierOptions\x12\x44\n\x12\x64\x65\x66\x61ult_classifier\x18\x01 \x01(\x0b\x32&.speechkit.stt.v3.DefaultEouClassifierH\x00\x12\x46\n\x13\x65xternal_classifier\x18\x02 \x01(\x0b\x32\'.speechkit.stt.v3.ExternalEouClassifierH\x00\x42\x0c\n\nClassifier\"\xd3\x01\n\x15RecognitionClassifier\x12\x12\n\nclassifier\x18\x01 \x01(\t\x12\x45\n\x08triggers\x18\x02 \x03(\x0e\x32\x33.speechkit.stt.v3.RecognitionClassifier.TriggerType\"_\n\x0bTriggerType\x12 \n\x18TRIGGER_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x10\n\x0cON_UTTERANCE\x10\x01\x12\x0c\n\x08ON_FINAL\x10\x02\x12\x0e\n\nON_PARTIAL\x10\x03\"\\\n\x1cRecognitionClassifierOptions\x12<\n\x0b\x63lassifiers\x18\x01 \x03(\x0b\x32\'.speechkit.stt.v3.RecognitionClassifier\"\x88\x01\n\x15SpeechAnalysisOptions\x12\x1f\n\x17\x65nable_speaker_analysis\x18\x01 \x01(\x08\x12$\n\x1c\x65nable_conversation_analysis\x18\x02 \x01(\x08\x12(\n descriptive_statistics_quantiles\x18\x03 \x03(\x01\"\xc7\x01\n\x08RawAudio\x12@\n\x0e\x61udio_encoding\x18\x01 \x01(\x0e\x32(.speechkit.stt.v3.RawAudio.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x03\x12\x1b\n\x13\x61udio_channel_count\x18\x03 \x01(\x03\"A\n\rAudioEncoding\x12\x1e\n\x1a\x41UDIO_ENCODING_UNSPECIFIED\x10\x00\x12\x10\n\x0cLINEAR16_PCM\x10\x01\"\xbf\x01\n\x0e\x43ontainerAudio\x12Q\n\x14\x63ontainer_audio_type\x18\x01 \x01(\x0e\x32\x33.speechkit.stt.v3.ContainerAudio.ContainerAudioType\"Z\n\x12\x43ontainerAudioType\x12$\n CONTAINER_AUDIO_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03WAV\x10\x01\x12\x0c\n\x08OGG_OPUS\x10\x02\x12\x07\n\x03MP3\x10\x03\"\x91\x01\n\x12\x41udioFormatOptions\x12/\n\traw_audio\x18\x01 \x01(\x0b\x32\x1a.speechkit.stt.v3.RawAudioH\x00\x12;\n\x0f\x63ontainer_audio\x18\x02 \x01(\x0b\x32 .speechkit.stt.v3.ContainerAudioH\x00\x42\r\n\x0b\x41udioFormat\"\xf7\x01\n\x1aLanguageRestrictionOptions\x12^\n\x10restriction_type\x18\x01 \x01(\x0e\x32\x44.speechkit.stt.v3.LanguageRestrictionOptions.LanguageRestrictionType\x12\x15\n\rlanguage_code\x18\x02 \x03(\t\"b\n\x17LanguageRestrictionType\x12)\n%LANGUAGE_RESTRICTION_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tWHITELIST\x10\x01\x12\r\n\tBLACKLIST\x10\x02\"\xb2\x03\n\x17RecognitionModelOptions\x12\r\n\x05model\x18\x01 \x01(\t\x12:\n\x0c\x61udio_format\x18\x02 \x01(\x0b\x32$.speechkit.stt.v3.AudioFormatOptions\x12\x46\n\x12text_normalization\x18\x03 
\x01(\x0b\x32*.speechkit.stt.v3.TextNormalizationOptions\x12J\n\x14language_restriction\x18\x04 \x01(\x0b\x32,.speechkit.stt.v3.LanguageRestrictionOptions\x12\\\n\x15\x61udio_processing_type\x18\x05 \x01(\x0e\x32=.speechkit.stt.v3.RecognitionModelOptions.AudioProcessingType\"Z\n\x13\x41udioProcessingType\x12%\n!AUDIO_PROCESSING_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tREAL_TIME\x10\x01\x12\r\n\tFULL_DATA\x10\x02\"\xde\x01\n\x16SpeakerLabelingOptions\x12R\n\x10speaker_labeling\x18\x01 \x01(\x0e\x32\x38.speechkit.stt.v3.SpeakerLabelingOptions.SpeakerLabeling\"p\n\x0fSpeakerLabeling\x12 \n\x1cSPEAKER_LABELING_UNSPECIFIED\x10\x00\x12\x1c\n\x18SPEAKER_LABELING_ENABLED\x10\x01\x12\x1d\n\x19SPEAKER_LABELING_DISABLED\x10\x02\"\xee\x02\n\x10StreamingOptions\x12\x44\n\x11recognition_model\x18\x01 \x01(\x0b\x32).speechkit.stt.v3.RecognitionModelOptions\x12>\n\x0e\x65ou_classifier\x18\x02 \x01(\x0b\x32&.speechkit.stt.v3.EouClassifierOptions\x12N\n\x16recognition_classifier\x18\x03 \x01(\x0b\x32..speechkit.stt.v3.RecognitionClassifierOptions\x12@\n\x0fspeech_analysis\x18\x04 \x01(\x0b\x32\'.speechkit.stt.v3.SpeechAnalysisOptions\x12\x42\n\x10speaker_labeling\x18\x05 \x01(\x0b\x32(.speechkit.stt.v3.SpeakerLabelingOptions\"\x1a\n\nAudioChunk\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\"#\n\x0cSilenceChunk\x12\x13\n\x0b\x64uration_ms\x18\x01 \x01(\x03\"\x05\n\x03\x45ou\"\xe8\x01\n\x10StreamingRequest\x12=\n\x0fsession_options\x18\x01 \x01(\x0b\x32\".speechkit.stt.v3.StreamingOptionsH\x00\x12-\n\x05\x63hunk\x18\x02 \x01(\x0b\x32\x1c.speechkit.stt.v3.AudioChunkH\x00\x12\x37\n\rsilence_chunk\x18\x03 \x01(\x0b\x32\x1e.speechkit.stt.v3.SilenceChunkH\x00\x12$\n\x03\x65ou\x18\x04 \x01(\x0b\x32\x15.speechkit.stt.v3.EouH\x00\x42\x07\n\x05\x45vent\"\xe3\x02\n\x14RecognizeFileRequest\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x12\x44\n\x11recognition_model\x18\x03 \x01(\x0b\x32).speechkit.stt.v3.RecognitionModelOptions\x12N\n\x16recognition_classifier\x18\x04 \x01(\x0b\x32..speechkit.stt.v3.RecognitionClassifierOptions\x12@\n\x0fspeech_analysis\x18\x05 \x01(\x0b\x32\'.speechkit.stt.v3.SpeechAnalysisOptions\x12\x42\n\x10speaker_labeling\x18\x06 \x01(\x0b\x32(.speechkit.stt.v3.SpeakerLabelingOptionsB\r\n\x0b\x41udioSource\"@\n\x04Word\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x15\n\rstart_time_ms\x18\x02 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x03 \x01(\x03\"@\n\x12LanguageEstimation\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x13\n\x0bprobability\x18\x02 \x01(\x01\"\xbb\x01\n\x0b\x41lternative\x12%\n\x05words\x18\x01 \x03(\x0b\x32\x16.speechkit.stt.v3.Word\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x15\n\rstart_time_ms\x18\x03 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x04 \x01(\x03\x12\x12\n\nconfidence\x18\x05 \x01(\x01\x12\x37\n\tlanguages\x18\x06 \x03(\x0b\x32$.speechkit.stt.v3.LanguageEstimation\"\x1c\n\tEouUpdate\x12\x0f\n\x07time_ms\x18\x02 \x01(\x03\"a\n\x11\x41lternativeUpdate\x12\x33\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x1d.speechkit.stt.v3.Alternative\x12\x17\n\x0b\x63hannel_tag\x18\x02 \x01(\tB\x02\x18\x01\"\x99\x01\n\x0c\x41udioCursors\x12\x18\n\x10received_data_ms\x18\x01 \x01(\x03\x12\x15\n\rreset_time_ms\x18\x02 \x01(\x03\x12\x17\n\x0fpartial_time_ms\x18\x03 \x01(\x03\x12\x15\n\rfinal_time_ms\x18\x04 \x01(\x03\x12\x13\n\x0b\x66inal_index\x18\x05 \x01(\x03\x12\x13\n\x0b\x65ou_time_ms\x18\x06 \x01(\x03\"n\n\x0f\x46inalRefinement\x12\x13\n\x0b\x66inal_index\x18\x01 \x01(\x03\x12>\n\x0fnormalized_text\x18\x02 
\x01(\x0b\x32#.speechkit.stt.v3.AlternativeUpdateH\x00\x42\x06\n\x04Type\"L\n\nStatusCode\x12-\n\tcode_type\x18\x01 \x01(\x0e\x32\x1a.speechkit.stt.v3.CodeType\x12\x0f\n\x07message\x18\x02 \x01(\t\"4\n\x0bSessionUuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\x12\x17\n\x0fuser_request_id\x18\x02 \x01(\t\"K\n\x0fPhraseHighlight\x12\x0c\n\x04text\x18\x01 \x01(\t\x12\x15\n\rstart_time_ms\x18\x02 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x03 \x01(\x03\"?\n\x1aRecognitionClassifierLabel\x12\r\n\x05label\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x01\"\xa6\x01\n\x1bRecognitionClassifierResult\x12\x12\n\nclassifier\x18\x01 \x01(\t\x12\x35\n\nhighlights\x18\x02 \x03(\x0b\x32!.speechkit.stt.v3.PhraseHighlight\x12<\n\x06labels\x18\x03 \x03(\x0b\x32,.speechkit.stt.v3.RecognitionClassifierLabel\"\xc7\x02\n\x1bRecognitionClassifierUpdate\x12M\n\x0bwindow_type\x18\x01 \x01(\x0e\x32\x38.speechkit.stt.v3.RecognitionClassifierUpdate.WindowType\x12\x15\n\rstart_time_ms\x18\x02 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x03 \x01(\x03\x12H\n\x11\x63lassifier_result\x18\x04 \x01(\x0b\x32-.speechkit.stt.v3.RecognitionClassifierResult\"c\n\nWindowType\x12\x1f\n\x17WINDOW_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x12\n\x0eLAST_UTTERANCE\x10\x01\x12\x0e\n\nLAST_FINAL\x10\x02\x12\x10\n\x0cLAST_PARTIAL\x10\x03\"\xbb\x01\n\x15\x44\x65scriptiveStatistics\x12\x0b\n\x03min\x18\x01 \x01(\x01\x12\x0b\n\x03max\x18\x02 \x01(\x01\x12\x0c\n\x04mean\x18\x03 \x01(\x01\x12\x0b\n\x03std\x18\x04 \x01(\x01\x12\x43\n\tquantiles\x18\x05 \x03(\x0b\x32\x30.speechkit.stt.v3.DescriptiveStatistics.Quantile\x1a(\n\x08Quantile\x12\r\n\x05level\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01\"D\n\x16\x41udioSegmentBoundaries\x12\x15\n\rstart_time_ms\x18\x01 \x01(\x03\x12\x13\n\x0b\x65nd_time_ms\x18\x02 \x01(\x03\"\x87\x06\n\x0fSpeakerAnalysis\x12\x13\n\x0bspeaker_tag\x18\x01 \x01(\t\x12\x41\n\x0bwindow_type\x18\x02 \x01(\x0e\x32,.speechkit.stt.v3.SpeakerAnalysis.WindowType\x12\x43\n\x11speech_boundaries\x18\x03 \x01(\x0b\x32(.speechkit.stt.v3.AudioSegmentBoundaries\x12\x17\n\x0ftotal_speech_ms\x18\x04 \x01(\x03\x12\x14\n\x0cspeech_ratio\x18\x05 \x01(\x01\x12\x18\n\x10total_silence_ms\x18\x06 \x01(\x03\x12\x15\n\rsilence_ratio\x18\x07 \x01(\x01\x12\x13\n\x0bwords_count\x18\x08 \x01(\x03\x12\x15\n\rletters_count\x18\t \x01(\x03\x12\x41\n\x10words_per_second\x18\n \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x43\n\x12letters_per_second\x18\x0b \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x44\n\x13words_per_utterance\x18\x0c \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x46\n\x15letters_per_utterance\x18\r \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12\x17\n\x0futterance_count\x18\x0e \x01(\x03\x12N\n\x1dutterance_duration_estimation\x18\x0f \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\"L\n\nWindowType\x12\x1f\n\x17WINDOW_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\t\n\x05TOTAL\x10\x01\x12\x12\n\x0eLAST_UTTERANCE\x10\x02\"\x85\x06\n\x14\x43onversationAnalysis\x12I\n\x17\x63onversation_boundaries\x18\x01 \x01(\x0b\x32(.speechkit.stt.v3.AudioSegmentBoundaries\x12.\n&total_simultaneous_silence_duration_ms\x18\x02 \x01(\x03\x12(\n total_simultaneous_silence_ratio\x18\x03 \x01(\x01\x12Y\n(simultaneous_silence_duration_estimation\x18\x04 \x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12-\n%total_simultaneous_speech_duration_ms\x18\x05 \x01(\x03\x12\'\n\x1ftotal_simultaneous_speech_ratio\x18\x06 \x01(\x01\x12X\n\'simultaneous_speech_duration_estimation\x18\x07 
\x01(\x0b\x32\'.speechkit.stt.v3.DescriptiveStatistics\x12W\n\x12speaker_interrupts\x18\x08 \x03(\x0b\x32;.speechkit.stt.v3.ConversationAnalysis.InterruptsEvaluation\x12 \n\x18total_speech_duration_ms\x18\t \x01(\x03\x12\x1a\n\x12total_speech_ratio\x18\n \x01(\x01\x1a\xa3\x01\n\x14InterruptsEvaluation\x12\x13\n\x0bspeaker_tag\x18\x01 \x01(\t\x12\x18\n\x10interrupts_count\x18\x02 \x01(\x03\x12\x1e\n\x16interrupts_duration_ms\x18\x03 \x01(\x03\x12<\n\ninterrupts\x18\x04 \x03(\x0b\x32(.speechkit.stt.v3.AudioSegmentBoundaries\"\xa5\x05\n\x11StreamingResponse\x12\x33\n\x0csession_uuid\x18\x01 \x01(\x0b\x32\x1d.speechkit.stt.v3.SessionUuid\x12\x35\n\raudio_cursors\x18\x02 \x01(\x0b\x32\x1e.speechkit.stt.v3.AudioCursors\x12\x1d\n\x15response_wall_time_ms\x18\x03 \x01(\x03\x12\x36\n\x07partial\x18\x04 \x01(\x0b\x32#.speechkit.stt.v3.AlternativeUpdateH\x00\x12\x34\n\x05\x66inal\x18\x05 \x01(\x0b\x32#.speechkit.stt.v3.AlternativeUpdateH\x00\x12\x31\n\neou_update\x18\x06 \x01(\x0b\x32\x1b.speechkit.stt.v3.EouUpdateH\x00\x12=\n\x10\x66inal_refinement\x18\x07 \x01(\x0b\x32!.speechkit.stt.v3.FinalRefinementH\x00\x12\x33\n\x0bstatus_code\x18\x08 \x01(\x0b\x32\x1c.speechkit.stt.v3.StatusCodeH\x00\x12J\n\x11\x63lassifier_update\x18\n \x01(\x0b\x32-.speechkit.stt.v3.RecognitionClassifierUpdateH\x00\x12=\n\x10speaker_analysis\x18\x0b \x01(\x0b\x32!.speechkit.stt.v3.SpeakerAnalysisH\x00\x12G\n\x15\x63onversation_analysis\x18\x0c \x01(\x0b\x32&.speechkit.stt.v3.ConversationAnalysisH\x00\x12\x13\n\x0b\x63hannel_tag\x18\t \x01(\tB\x07\n\x05\x45vent\"0\n\x18\x44\x65leteRecognitionRequest\x12\x14\n\x0coperation_id\x18\x01 \x01(\t\"Y\n\x15StreamingResponseList\x12@\n\x13streaming_responses\x18\x01 \x03(\x0b\x32#.speechkit.stt.v3.StreamingResponse*O\n\x08\x43odeType\x12\x1d\n\x15\x43ODE_TYPE_UNSPECIFIED\x10\x00\x1a\x02\x08\x01\x12\x0b\n\x07WORKING\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\n\n\x06\x43LOSED\x10\x03\x42\\\n\x1ayandex.cloud.api.ai.stt.v3Z>github.com/yandex-cloud/go-genproto/yandex/cloud/ai/stt/v3;sttb\x06proto3')

  _globals = globals()
  _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -42,8 +42,8 @@ if not _descriptor._USE_C_DESCRIPTORS:
42
42
  _globals['_RECOGNITIONCLASSIFIERUPDATE_WINDOWTYPE'].values_by_name["WINDOW_TYPE_UNSPECIFIED"]._serialized_options = b'\010\001'
43
43
  _globals['_SPEAKERANALYSIS_WINDOWTYPE'].values_by_name["WINDOW_TYPE_UNSPECIFIED"]._loaded_options = None
44
44
  _globals['_SPEAKERANALYSIS_WINDOWTYPE'].values_by_name["WINDOW_TYPE_UNSPECIFIED"]._serialized_options = b'\010\001'
45
- _globals['_CODETYPE']._serialized_start=7927
46
- _globals['_CODETYPE']._serialized_end=8006
45
+ _globals['_CODETYPE']._serialized_start=8018
46
+ _globals['_CODETYPE']._serialized_end=8097
47
47
  _globals['_TEXTNORMALIZATIONOPTIONS']._serialized_start=55
48
48
  _globals['_TEXTNORMALIZATIONOPTIONS']._serialized_end=537
49
49
  _globals['_TEXTNORMALIZATIONOPTIONS_TEXTNORMALIZATION']._serialized_start=319
@@ -146,4 +146,6 @@ if not _descriptor._USE_C_DESCRIPTORS:
  _globals['_STREAMINGRESPONSE']._serialized_end=7875
  _globals['_DELETERECOGNITIONREQUEST']._serialized_start=7877
  _globals['_DELETERECOGNITIONREQUEST']._serialized_end=7925
+ _globals['_STREAMINGRESPONSELIST']._serialized_start=7927
+ _globals['_STREAMINGRESPONSELIST']._serialized_end=8016
  # @@protoc_insertion_point(module_scope)
@@ -1450,3 +1450,19 @@ class DeleteRecognitionRequest(google.protobuf.message.Message):
  def ClearField(self, field_name: typing.Literal["operation_id", b"operation_id"]) -> None: ...
 
  global___DeleteRecognitionRequest = DeleteRecognitionRequest
+
+ @typing.final
+ class StreamingResponseList(google.protobuf.message.Message):
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+ STREAMING_RESPONSES_FIELD_NUMBER: builtins.int
+ @property
+ def streaming_responses(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StreamingResponse]: ...
+ def __init__(
+ self,
+ *,
+ streaming_responses: collections.abc.Iterable[global___StreamingResponse] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing.Literal["streaming_responses", b"streaming_responses"]) -> None: ...
+
+ global___StreamingResponseList = StreamingResponseList
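
The stub above types the new StreamingResponseList wrapper added to stt_pb2 in this release. A minimal usage sketch, assuming yandexcloud 0.341.0 is installed (the channel_tag values are made-up placeholders, not taken from this diff):

    from yandex.cloud.ai.stt.v3 import stt_pb2

    # Collect individual streaming responses into the new list message.
    responses = [
        stt_pb2.StreamingResponse(channel_tag="1"),
        stt_pb2.StreamingResponse(channel_tag="2"),
    ]
    batch = stt_pb2.StreamingResponseList(streaming_responses=responses)
    assert len(batch.streaming_responses) == 2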
@@ -194,7 +194,7 @@ class DetectLanguageRequest(google.protobuf.message.Message):
  """The text to detect the language for."""
  folder_id: builtins.str
  """ID of the folder to which you have access.
- Required for authorization with a user account (see [yandex.cloud.iam.v1.UserAccount] resource).
+ Required for authorization with a user account.
  Don't specify this field if you make the request on behalf of a service account.
  """
  @property
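
For context, the folder_id field documented in this hunk is only set when authenticating as a user account; a hedged sketch (the folder ID below is a placeholder, not from this diff):

    from yandex.cloud.ai.translate.v2 import translation_service_pb2

    request = translation_service_pb2.DetectLanguageRequest(
        text="Bonjour le monde",
        folder_id="b1g-example-folder",  # placeholder; omit when using a service account
    )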
@@ -138,25 +138,29 @@ class TuningServiceServicer(object):
  raise NotImplementedError('Method not implemented!')
 
  def CreateDraft(self, request, context):
- """Missing associated documentation comment in .proto file."""
+ """Unimplemented
+ """
  context.set_code(grpc.StatusCode.UNIMPLEMENTED)
  context.set_details('Method not implemented!')
  raise NotImplementedError('Method not implemented!')
 
  def UpdateDraft(self, request, context):
- """Missing associated documentation comment in .proto file."""
+ """Unimplemented
+ """
  context.set_code(grpc.StatusCode.UNIMPLEMENTED)
  context.set_details('Method not implemented!')
  raise NotImplementedError('Method not implemented!')
 
  def DeleteDraft(self, request, context):
- """Missing associated documentation comment in .proto file."""
+ """Unimplemented
+ """
  context.set_code(grpc.StatusCode.UNIMPLEMENTED)
  context.set_details('Method not implemented!')
  raise NotImplementedError('Method not implemented!')
 
  def TuneDraft(self, request, context):
- """Missing associated documentation comment in .proto file."""
+ """Unimplemented
+ """
  context.set_code(grpc.StatusCode.UNIMPLEMENTED)
  context.set_details('Method not implemented!')
  raise NotImplementedError('Method not implemented!')
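
The draft methods above only gain an explicit "Unimplemented" docstring; the generated base class still answers them by setting grpc.StatusCode.UNIMPLEMENTED and raising. A sketch of wiring a servicer subclass into a server, assuming grpcio is installed (nothing here goes beyond the generated API shown in this file):

    from concurrent import futures

    import grpc
    from yandex.cloud.ai.tuning.v1 import tuning_service_pb2_grpc

    class TuningService(tuning_service_pb2_grpc.TuningServiceServicer):
        # Methods that are not overridden keep the generated behaviour:
        # set UNIMPLEMENTED on the context and raise NotImplementedError.
        pass

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    tuning_service_pb2_grpc.add_TuningServiceServicer_to_server(TuningService(), server)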
@@ -59,21 +59,25 @@ class TuningServiceStub:
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftRequest,
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftResponse,
  ]
+ """Unimplemented"""
 
  UpdateDraft: grpc.UnaryUnaryMultiCallable[
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftRequest,
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftResponse,
  ]
+ """Unimplemented"""
 
  DeleteDraft: grpc.UnaryUnaryMultiCallable[
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftRequest,
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftResponse,
  ]
+ """Unimplemented"""
 
  TuneDraft: grpc.UnaryUnaryMultiCallable[
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.TuneDraftRequest,
  yandex.cloud.operation.operation_pb2.Operation,
  ]
+ """Unimplemented"""
 
  class TuningServiceAsyncStub:
  Tune: grpc.aio.UnaryUnaryMultiCallable[
@@ -115,21 +119,25 @@ class TuningServiceAsyncStub:
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftRequest,
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftResponse,
  ]
+ """Unimplemented"""
 
  UpdateDraft: grpc.aio.UnaryUnaryMultiCallable[
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftRequest,
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftResponse,
  ]
+ """Unimplemented"""
 
  DeleteDraft: grpc.aio.UnaryUnaryMultiCallable[
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftRequest,
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftResponse,
  ]
+ """Unimplemented"""
 
  TuneDraft: grpc.aio.UnaryUnaryMultiCallable[
  yandex.cloud.ai.tuning.v1.tuning_service_pb2.TuneDraftRequest,
  yandex.cloud.operation.operation_pb2.Operation,
  ]
+ """Unimplemented"""
 
  class TuningServiceServicer(metaclass=abc.ABCMeta):
  @abc.abstractmethod
@@ -186,27 +194,31 @@ class TuningServiceServicer(metaclass=abc.ABCMeta):
  self,
  request: yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftRequest,
  context: _ServicerContext,
- ) -> typing.Union[yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftResponse, collections.abc.Awaitable[yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftResponse]]: ...
+ ) -> typing.Union[yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftResponse, collections.abc.Awaitable[yandex.cloud.ai.tuning.v1.tuning_service_pb2.CreateTuningDraftResponse]]:
+ """Unimplemented"""
 
  @abc.abstractmethod
  def UpdateDraft(
  self,
  request: yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftRequest,
  context: _ServicerContext,
- ) -> typing.Union[yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftResponse, collections.abc.Awaitable[yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftResponse]]: ...
+ ) -> typing.Union[yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftResponse, collections.abc.Awaitable[yandex.cloud.ai.tuning.v1.tuning_service_pb2.UpdateTuningDraftResponse]]:
+ """Unimplemented"""
 
  @abc.abstractmethod
  def DeleteDraft(
  self,
  request: yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftRequest,
  context: _ServicerContext,
- ) -> typing.Union[yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftResponse, collections.abc.Awaitable[yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftResponse]]: ...
+ ) -> typing.Union[yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftResponse, collections.abc.Awaitable[yandex.cloud.ai.tuning.v1.tuning_service_pb2.DeleteTuningDraftResponse]]:
+ """Unimplemented"""
 
  @abc.abstractmethod
  def TuneDraft(
  self,
  request: yandex.cloud.ai.tuning.v1.tuning_service_pb2.TuneDraftRequest,
  context: _ServicerContext,
- ) -> typing.Union[yandex.cloud.operation.operation_pb2.Operation, collections.abc.Awaitable[yandex.cloud.operation.operation_pb2.Operation]]: ...
+ ) -> typing.Union[yandex.cloud.operation.operation_pb2.Operation, collections.abc.Awaitable[yandex.cloud.operation.operation_pb2.Operation]]:
+ """Unimplemented"""
 
  def add_TuningServiceServicer_to_server(servicer: TuningServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ...
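
On the client side, the CreateDraft/UpdateDraft/DeleteDraft/TuneDraft callables typed above are used like any other generated stub method. A hedged sketch only; the endpoint is a placeholder and auth metadata is omitted:

    import grpc
    from yandex.cloud.ai.tuning.v1 import tuning_service_pb2, tuning_service_pb2_grpc

    channel = grpc.secure_channel("example.endpoint:443", grpc.ssl_channel_credentials())
    stub = tuning_service_pb2_grpc.TuningServiceStub(channel)

    # Per the stub types above: CreateTuningDraftRequest -> CreateTuningDraftResponse.
    response = stub.CreateDraft(tuning_service_pb2.CreateTuningDraftRequest())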
@@ -32,7 +32,7 @@ class BatchAnalyzeRequest(google.protobuf.message.Message):
  FOLDER_ID_FIELD_NUMBER: builtins.int
  folder_id: builtins.str
  """ID of the folder to which you have access.
- Required for authorization with a user account (see [yandex.cloud.iam.v1.UserAccount] resource).
+ Required for authorization with a user account.
  Don't specify this field if you make the request on behalf of a service account.
  """
  @property
@@ -177,7 +177,7 @@ class CostBudgetSpec(google.protobuf.message.Message):
  """
  @property
  def notification_user_account_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
- """IDs of the [yandex.cloud.iam.v1.UserAccount].
+ """User account IDs.
  Specified users will be be notified if the budget exceeds.
  """
 
@@ -237,7 +237,7 @@ class ExpenseBudgetSpec(google.protobuf.message.Message):
  """
  @property
  def notification_user_account_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
- """IDs of the [yandex.cloud.iam.v1.UserAccount].
+ """User account IDs.
  Specified users will be be notified if the budget exceeds.
  """
 
@@ -291,7 +291,7 @@ class BalanceBudgetSpec(google.protobuf.message.Message):
  """
  @property
  def notification_user_account_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
- """IDs of the [yandex.cloud.iam.v1.UserAccount].
+ """User account IDs.
  Specified users will be be notified if the budget exceeds.
  """
 
@@ -393,7 +393,7 @@ class ThresholdRule(google.protobuf.message.Message):
  """
  @property
  def notification_user_account_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
- """IDs of the [yandex.cloud.iam.v1.UserAccount].
+ """User account IDs.
  Specified users will be be notified if the threshold exceeds.
  """