qwak-core 0.4.378__py3-none-any.whl → 0.5.12__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
Files changed (36)
  1. _qwak_proto/qwak/administration/account/v1/account_pb2.py +20 -18
  2. _qwak_proto/qwak/administration/account/v1/account_pb2.pyi +21 -2
  3. _qwak_proto/qwak/admiral/secret/v0/secret_pb2.py +16 -14
  4. _qwak_proto/qwak/admiral/secret/v0/secret_pb2.pyi +21 -2
  5. _qwak_proto/qwak/builds/build_values_pb2.py +24 -18
  6. _qwak_proto/qwak/builds/build_values_pb2.pyi +21 -1
  7. _qwak_proto/qwak/execution/v1/streaming_aggregation_pb2.py +18 -11
  8. _qwak_proto/qwak/execution/v1/streaming_aggregation_pb2.pyi +71 -1
  9. _qwak_proto/qwak/feature_store/features/feature_set_pb2.py +4 -4
  10. _qwak_proto/qwak/feature_store/features/feature_set_pb2.pyi +4 -0
  11. _qwak_proto/qwak/feature_store/features/feature_set_types_pb2.py +60 -58
  12. _qwak_proto/qwak/feature_store/features/feature_set_types_pb2.pyi +7 -2
  13. _qwak_proto/qwak/kube_deployment_captain/batch_job_pb2.py +40 -40
  14. _qwak_proto/qwak/kube_deployment_captain/batch_job_pb2.pyi +7 -1
  15. _qwak_proto/qwak/model_group/model_group_repository_details_pb2.py +16 -12
  16. _qwak_proto/qwak/model_group/model_group_repository_details_pb2.pyi +44 -6
  17. _qwak_proto/qwak/projects/projects_pb2.py +17 -15
  18. _qwak_proto/qwak/secret_service/secret_service_pb2.pyi +1 -1
  19. qwak/__init__.py +1 -1
  20. qwak/clients/feature_store/execution_management_client.py +28 -0
  21. qwak/exceptions/__init__.py +1 -0
  22. qwak/exceptions/qwak_grpc_address_exception.py +9 -0
  23. qwak/feature_store/execution/streaming_backfill.py +48 -0
  24. qwak/feature_store/feature_sets/streaming.py +84 -63
  25. qwak/feature_store/feature_sets/streaming_backfill.py +88 -124
  26. qwak/inner/const.py +2 -6
  27. qwak/inner/di_configuration/__init__.py +1 -67
  28. qwak/inner/di_configuration/dependency_wiring.py +98 -0
  29. qwak/inner/tool/grpc/grpc_tools.py +123 -3
  30. qwak/llmops/generation/chat/openai/types/chat/chat_completion.py +24 -6
  31. qwak/llmops/generation/chat/openai/types/chat/chat_completion_chunk.py +44 -8
  32. qwak/llmops/generation/chat/openai/types/chat/chat_completion_message.py +6 -3
  33. {qwak_core-0.4.378.dist-info → qwak_core-0.5.12.dist-info}/METADATA +4 -6
  34. {qwak_core-0.4.378.dist-info → qwak_core-0.5.12.dist-info}/RECORD +36 -33
  35. qwak_services_mock/mocks/execution_management_service.py +9 -1
  36. {qwak_core-0.4.378.dist-info → qwak_core-0.5.12.dist-info}/WHEEL +0 -0
qwak/feature_store/feature_sets/streaming_backfill.py CHANGED
@@ -1,24 +1,18 @@
- from abc import ABC, abstractmethod
  from dataclasses import dataclass
  from datetime import datetime, timezone
  from typing import List, Optional, Set, Union

  from _qwak_proto.qwak.feature_store.features.execution_pb2 import (
-     BackfillExecutionSpec as ProtoBackfillExecutionSpec,
+     ExecutionSpec as ProtoExecutionSpec,
  )
- from _qwak_proto.qwak.feature_store.features.feature_set_types_pb2 import (
-     BackfillBatchDataSourceSpec as ProtoBackfillBatchDataSourceSpec,
-     BackfillDataSourceSpec as ProtoBackfillDataSourceSpec,
-     BackfillSpec as ProtoBackfillSpec,
- )
- from _qwak_proto.qwak.feature_store.sources.batch_pb2 import (
-     BatchSource as ProtoBatchSource,
+ from _qwak_proto.qwak.execution.v1.streaming_aggregation_pb2 import (
+     StreamingAggregationBackfillIngestion as ProtoStreamingAggregationBackfillIngestion,
+     BackfillDataSource as ProtoBackfillDataSource,
+     TimeRange as ProtoTimeRange,
  )
  from google.protobuf.timestamp_pb2 import Timestamp as ProtoTimestamp
- from qwak.clients.feature_store import FeatureRegistryClient
  from qwak.exceptions import QwakException
  from qwak.feature_store._common.artifact_utils import ArtifactSpec, ArtifactsUploader
- from qwak.feature_store._common.feature_set_utils import get_batch_source_for_featureset
  from qwak.feature_store.feature_sets.execution_spec import ClusterTemplate
  from qwak.feature_store.feature_sets.transformations import SparkSqlTransformation

@@ -26,36 +20,15 @@ _BACKFILL_ = "_qwak_backfill_specification"


  @dataclass
- class DataSourceBackfillSpec(ABC):
+ class BackfillDataSource:
      data_source_name: str
-
-     @abstractmethod
-     def _to_proto(self, feature_registry: FeatureRegistryClient):
-         pass
-
-     @classmethod
-     def _from_proto(cls, proto: ProtoBackfillDataSourceSpec):
-         function_mapping = {"batch_data_source_spec": BackfillBatchDataSourceSpec}
-
-         backfill_source_type: str = proto.WhichOneof("type")
-
-         if backfill_source_type in function_mapping:
-             function_class = function_mapping.get(backfill_source_type)
-             return function_class._from_proto(proto)
-
-         raise QwakException(
-             f"Got unsupported backfill source type {backfill_source_type} for streaming backfill"
-         )
-
-
- @dataclass
- class BackfillBatchDataSourceSpec(DataSourceBackfillSpec):
      start_datetime: Optional[datetime] = None
      end_datetime: Optional[datetime] = None

-     def _to_proto(
-         self, feature_registry: FeatureRegistryClient
-     ) -> ProtoBackfillBatchDataSourceSpec:
+     def __post_init__(self):
+         self._validate()
+
+     def _to_proto(self) -> ProtoBackfillDataSource:
          start_timestamp: Optional[ProtoTimestamp] = None
          end_timestamp: Optional[ProtoTimestamp] = None

@@ -67,63 +40,94 @@ class BackfillBatchDataSourceSpec(DataSourceBackfillSpec):
          start_timestamp = ProtoTimestamp()
          start_timestamp.FromDatetime(self.start_datetime.astimezone(timezone.utc))

-         proto_data_source: ProtoBatchSource = get_batch_source_for_featureset(
-             batch_ds_name=self.data_source_name, feature_registry=feature_registry
-         )
-
-         return ProtoBackfillBatchDataSourceSpec(
-             data_source=proto_data_source,
+         time_range = ProtoTimeRange(
              start_timestamp=start_timestamp,
              end_timestamp=end_timestamp,
          )

+         return ProtoBackfillDataSource(
+             data_source_name=self.data_source_name,
+             time_range=time_range,
+         )
+
      @classmethod
-     def _from_proto(
-         cls, proto: ProtoBackfillDataSourceSpec
-     ) -> "BackfillBatchDataSourceSpec":
+     def _from_proto(cls, proto: ProtoBackfillDataSource) -> "BackfillDataSource":
          start_datetime: Optional[datetime] = None
          end_datetime: Optional[datetime] = None

-         batch_backfill_spec: ProtoBackfillBatchDataSourceSpec = (
-             proto.batch_data_source_spec
-         )
+         time_range: ProtoTimeRange = proto.time_range

-         proto_start_timestamp: ProtoTimestamp = batch_backfill_spec.start_timestamp
-         proto_end_timestamp: ProtoTimestamp = batch_backfill_spec.end_timestamp
+         proto_start_timestamp: Optional[ProtoTimestamp] = (
+             time_range.start_timestamp if time_range.start_timestamp else None
+         )
+         proto_end_timestamp: Optional[ProtoTimestamp] = (
+             time_range.end_timestamp if time_range.end_timestamp else None
+         )

-         start_datetime = datetime.fromtimestamp(
-             proto_start_timestamp.seconds + proto_start_timestamp.nanos / 1e9
+         start_datetime = (
+             datetime.fromtimestamp(
+                 proto_start_timestamp.seconds + proto_start_timestamp.nanos / 1e9
+             )
+             if proto_start_timestamp
+             else None
          )

-         end_datetime = datetime.fromtimestamp(
-             proto_end_timestamp.seconds + proto_end_timestamp.nanos / 1e9
+         end_datetime = (
+             datetime.fromtimestamp(
+                 proto_end_timestamp.seconds + proto_end_timestamp.nanos / 1e9
+             )
+             if proto_end_timestamp
+             else None
          )

          return cls(
-             data_source_name=batch_backfill_spec.data_source.name,
+             data_source_name=proto.data_source_name,
              start_datetime=start_datetime,
              end_datetime=end_datetime,
          )

+     def _validate(self):
+         if self.start_datetime and self.end_datetime:
+             if self.start_datetime >= self.end_datetime:
+                 raise QwakException(
+                     f"Backfill data source {self.data_source_name} has invalid time range: "
+                     f"start_datetime {self.start_datetime} is after or equal end_datetime {self.end_datetime}."
+                 )
+
+         if not self.data_source_name:
+             raise QwakException(
+                 "Backfill data source must have a valid data source name."
+             )
+

  @dataclass
  class StreamingBackfill:
+     featureset_name: str
      start_datetime: datetime
      end_datetime: datetime
-     data_sources_specs: List[DataSourceBackfillSpec]
+     data_sources: List[BackfillDataSource]
      transform: "SparkSqlTransformation"
      cluster_template: Optional[ClusterTemplate] = ClusterTemplate.SMALL

      def __post_init__(self):
-         if not self.data_sources_specs:
+         if not self.featureset_name:
+             raise QwakException("featureset_name must be provided for backfill.")
+
+         if not self.start_datetime or not self.end_datetime:
              raise QwakException(
-                 "Trying to create a streaming backfill with no data sources. "
-                 "At least one data source has to be provided when trying to create a streaming backfill."
+                 "For Streaming backfill, start_datetime and end_datetime are mandatory fields."
              )

-         if not self.start_datetime or not self.end_datetime:
+         if self.start_datetime >= self.end_datetime:
              raise QwakException(
-                 "For backfill, start_datetime and end_datetime are mandatory fields."
+                 f"Backfill has invalid time range: "
+                 f"start_datetime {self.start_datetime} is after or equal end_datetime {self.end_datetime}."
+             )
+
+         if not self.data_sources:
+             raise QwakException(
+                 "Trying to create a streaming backfill with no data sources. "
+                 "At least one data source has to be provided when trying to create a streaming backfill."
              )

          if type(self.transform) is not SparkSqlTransformation:
@@ -135,7 +139,7 @@ class StreamingBackfill:

      def _validate_unique_sources(self):
          source_names: List[str] = [
-             data_source.data_source_name for data_source in self.data_sources_specs
+             data_source.data_source_name for data_source in self.data_sources
          ]
          duplicates: Set[str] = {
              item for item in source_names if source_names.count(item) > 1
@@ -146,23 +150,14 @@ class StreamingBackfill:
              f"Found these duplicates: {', '.join(set(duplicates))}"
          )

-     def _validate_tile_size(self, initial_tile_size: int):
-         if self.end_datetime.timestamp() % initial_tile_size != 0:
-             raise QwakException(
-                 f"Chosen backfill end datetime is invalid,"
-                 f" it has to be exactly dividable by slice size of {initial_tile_size} seconds."
-             )
-
      def _to_proto(
          self,
-         feature_registry: FeatureRegistryClient,
-         featureset_name: str,
          original_instance_module_path: str,
-     ) -> ProtoBackfillSpec:
+     ) -> ProtoStreamingAggregationBackfillIngestion:
          artifact_url: Optional[str] = None
          artifact_spec: Optional[ArtifactSpec] = ArtifactsUploader.get_artifact_spec(
              transformation=self.transform,
-             featureset_name=f"{featureset_name}-backfill",
+             featureset_name=f"{self.featureset_name}-backfill",
              __instance_module_path__=original_instance_module_path,
          )

@@ -175,85 +170,54 @@ class StreamingBackfill:
          start_timestamp = ProtoTimestamp()
          start_timestamp.FromDatetime(self.start_datetime.astimezone(timezone.utc))

-         return ProtoBackfillSpec(
+         return ProtoStreamingAggregationBackfillIngestion(
+             featureset_name=self.featureset_name,
              start_timestamp=start_timestamp,
              end_timestamp=end_timestamp,
-             execution_spec=ProtoBackfillExecutionSpec(
-                 **{"cluster_template": ClusterTemplate.to_proto(self.cluster_template)}
+             execution_spec=ProtoExecutionSpec(
+                 cluster_template=ClusterTemplate.to_proto(self.cluster_template)
              ),
              transformation=self.transform._to_proto(artifact_path=artifact_url),
              data_source_specs=[
-                 ProtoBackfillDataSourceSpec(
-                     batch_data_source_spec=data_source_spec._to_proto(
-                         feature_registry=feature_registry
-                     )
-                 )
-                 for data_source_spec in self.data_sources_specs
+                 data_source._to_proto() for data_source in self.data_sources
              ],
          )

      @classmethod
-     def _from_proto(cls, proto: ProtoBackfillSpec):
-         datetime.fromtimestamp(
-             proto.start_timestamp.seconds + proto.start_timestamp.nanos / 1e9
-         )
-
-         data_sources_specs = [
-             BackfillBatchDataSourceSpec._from_proto(ds)
-             for ds in proto.data_source_specs
+     def _from_proto(cls, proto: ProtoStreamingAggregationBackfillIngestion):
+         backfill_data_sources = [
+             BackfillDataSource._from_proto(ds) for ds in proto.data_source_specs
          ]

          return cls(
+             featureset_name=proto.featureset_name,
              start_datetime=datetime.fromtimestamp(
                  proto.start_timestamp.seconds + proto.start_timestamp.nanos / 1e9
              ),
              end_datetime=datetime.fromtimestamp(
                  proto.end_timestamp.seconds + proto.end_timestamp.nanos / 1e9
              ),
-             data_sources_specs=data_sources_specs,
+             data_sources=backfill_data_sources,
              transform=SparkSqlTransformation._from_proto(
                  proto.transformation.sql_transformation
              ),
+             cluster_template=(
+                 ClusterTemplate.from_proto(proto.execution_spec.cluster_template)
+                 if proto.execution_spec.cluster_template
+                 else None
+             ),
          )

      @staticmethod
      def _get_normalized_backfill_sources_spec(
-         data_sources: Union[List[str], List[DataSourceBackfillSpec]],
-     ) -> List[DataSourceBackfillSpec]:
-         # reformat all data source specs to 'DataSourceBackfillSpec'
+         data_sources: Union[List[str], List[BackfillDataSource]],
+     ) -> List[BackfillDataSource]:
+         # reformat all data source names to 'BackfillDataSource'
          return [
              (
-                 BackfillBatchDataSourceSpec(data_source_name=data_source)
+                 BackfillDataSource(data_source_name=data_source)
                  if isinstance(data_source, str)
                  else data_source
              )
              for data_source in data_sources
          ]
-
-     @classmethod
-     def set_streaming_backfill_on_function(
-         cls,
-         function,
-         start_date: datetime,
-         end_date: datetime,
-         data_sources: Union[List[str], List[DataSourceBackfillSpec]],
-         backfill_transformation: SparkSqlTransformation,
-         backfill_cluster_template: Optional[ClusterTemplate] = ClusterTemplate.SMALL,
-     ):
-         setattr(
-             function,
-             _BACKFILL_,
-             cls(
-                 start_datetime=start_date,
-                 end_datetime=end_date,
-                 data_sources_specs=StreamingBackfill._get_normalized_backfill_sources_spec(
-                     data_sources
-                 ),
-                 transform=backfill_transformation,
-                 cluster_template=backfill_cluster_template,
-             ),
-         )
-
-     @staticmethod
-     def get_streaming_backfill_from_function(function):
-         return getattr(function, _BACKFILL_, None)
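
Taken together, the streaming_backfill.py changes collapse the old DataSourceBackfillSpec/BackfillBatchDataSourceSpec hierarchy into a single BackfillDataSource dataclass, move featureset_name onto StreamingBackfill itself, validate time ranges eagerly in __post_init__, and drop the FeatureRegistryClient dependency from serialization. A minimal usage sketch of the new API, assuming SparkSqlTransformation accepts a SQL string; the feature set name, data source name, and dates are illustrative, not from the package:

    from datetime import datetime

    from qwak.feature_store.feature_sets.streaming_backfill import (
        BackfillDataSource,
        StreamingBackfill,
    )
    from qwak.feature_store.feature_sets.transformations import SparkSqlTransformation

    # Hypothetical feature set and data source names.
    backfill = StreamingBackfill(
        featureset_name="user-transactions",  # now a field on the spec itself
        start_datetime=datetime(2024, 1, 1),
        end_datetime=datetime(2024, 2, 1),
        data_sources=[
            # Optional per-source time range; __post_init__ raises QwakException
            # if start_datetime >= end_datetime or the name is empty.
            BackfillDataSource(
                data_source_name="transactions-archive",
                start_datetime=datetime(2024, 1, 1),
                end_datetime=datetime(2024, 2, 1),
            ),
        ],
        transform=SparkSqlTransformation("SELECT * FROM transactions_archive"),
    )

    # Serialization no longer takes a FeatureRegistryClient or featureset_name:
    # proto = backfill._to_proto(original_instance_module_path=__file__)
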
qwak/inner/const.py CHANGED
@@ -33,10 +33,6 @@ class QwakConstants:

      TOKEN_AUDIENCE: str = "https://auth-token.qwak.ai/"  # nosec B105

-     QWAK_AUTHENTICATION_URL = "https://grpc.qwak.ai/api/v1/authentication/qwak-api-key"
-
-     QWAK_AUTHENTICATED_USER_ENDPOINT: str = (
-         "https://grpc.qwak.ai/api/v0/runtime/get-authenticated-user-context"
-     )
-
      QWAK_APP_URL: str = "https://app.qwak.ai"
+
+     CONTROL_PLANE_GRPC_ADDRESS_ENVAR_NAME: str = "CONTROL_PLANE_GRPC_ADDRESS"
qwak/inner/di_configuration/__init__.py CHANGED
@@ -1,69 +1,3 @@
- import os
-
  from .account import UserAccountConfiguration
  from .containers import QwakContainer
-
-
- def wire_dependencies():
-     container = QwakContainer()
-
-     default_config_file = os.path.join(os.path.dirname(__file__), "config.yml")
-     container.config.from_yaml(default_config_file)
-
-     from qwak.clients import (
-         administration,
-         alert_management,
-         alerts_registry,
-         analytics,
-         audience,
-         automation_management,
-         autoscaling,
-         batch_job_management,
-         build_orchestrator,
-         data_versioning,
-         deployment,
-         feature_store,
-         file_versioning,
-         instance_template,
-         integration_management,
-         kube_deployment_captain,
-         logging_client,
-         model_management,
-         project,
-         prompt_manager,
-         system_secret,
-         user_application_instance,
-         vector_store,
-         workspace_manager,
-     )
-
-     container.wire(
-         packages=[
-             administration,
-             alert_management,
-             audience,
-             automation_management,
-             autoscaling,
-             analytics,
-             batch_job_management,
-             build_orchestrator,
-             data_versioning,
-             deployment,
-             file_versioning,
-             instance_template,
-             kube_deployment_captain,
-             logging_client,
-             model_management,
-             project,
-             feature_store,
-             user_application_instance,
-             alerts_registry,
-             workspace_manager,
-             vector_store,
-             integration_management,
-             system_secret,
-             prompt_manager,
-         ]
-     )
-
-     return container
+ from .dependency_wiring import wire_dependencies
qwak/inner/di_configuration/dependency_wiring.py ADDED
@@ -0,0 +1,98 @@
+ import os
+ from pathlib import Path
+ from typing import Optional
+
+ from qwak.inner.const import QwakConstants
+ from qwak.inner.di_configuration import QwakContainer
+ from qwak.inner.tool.grpc.grpc_tools import validate_grpc_address
+ from qwak.tools.logger import get_qwak_logger
+
+
+ logger = get_qwak_logger()
+
+ __DEFAULT_CONFIG_FILE_PATH: Path = Path(__file__).parent / "config.yml"
+
+
+ def wire_dependencies():
+     container = QwakContainer()
+
+     container.config.from_yaml(__DEFAULT_CONFIG_FILE_PATH)
+     control_plane_grpc_address_override: Optional[str] = os.getenv(
+         QwakConstants.CONTROL_PLANE_GRPC_ADDRESS_ENVAR_NAME
+     )
+
+     if control_plane_grpc_address_override:
+         validate_grpc_address(control_plane_grpc_address_override)
+         __override_control_plane_grpc_address(
+             container, control_plane_grpc_address_override
+         )
+
+     from qwak.clients import (
+         administration,
+         alert_management,
+         alerts_registry,
+         analytics,
+         audience,
+         automation_management,
+         autoscaling,
+         batch_job_management,
+         build_orchestrator,
+         data_versioning,
+         deployment,
+         feature_store,
+         file_versioning,
+         instance_template,
+         integration_management,
+         kube_deployment_captain,
+         logging_client,
+         model_management,
+         project,
+         prompt_manager,
+         system_secret,
+         user_application_instance,
+         vector_store,
+         workspace_manager,
+     )
+
+     container.wire(
+         packages=[
+             administration,
+             alert_management,
+             audience,
+             automation_management,
+             autoscaling,
+             analytics,
+             batch_job_management,
+             build_orchestrator,
+             data_versioning,
+             deployment,
+             file_versioning,
+             instance_template,
+             kube_deployment_captain,
+             logging_client,
+             model_management,
+             project,
+             feature_store,
+             user_application_instance,
+             alerts_registry,
+             workspace_manager,
+             vector_store,
+             integration_management,
+             system_secret,
+             prompt_manager,
+         ]
+     )
+
+     return container
+
+
+ def __override_control_plane_grpc_address(
+     container: "QwakContainer", control_plane_grpc_address_override: str
+ ):
+     logger.debug(
+         "Overriding control plane gRPC address from environment variable to %s.",
+         control_plane_grpc_address_override,
+     )
+     container.config.grpc.core.address.from_value(
+         control_plane_grpc_address_override.strip()
+     )
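
The new dependency_wiring module keeps the wiring list from the old __init__.py, but first consults the CONTROL_PLANE_GRPC_ADDRESS environment variable (the constant added to QwakConstants above) and, after validating it, overrides the container's grpc.core.address config value. A sketch of the intended flow, with a made-up address:

    import os

    # Hypothetical address; the variable name comes from
    # QwakConstants.CONTROL_PLANE_GRPC_ADDRESS_ENVAR_NAME above.
    os.environ["CONTROL_PLANE_GRPC_ADDRESS"] = "control-plane.example.internal"

    from qwak.inner.di_configuration import wire_dependencies

    # validate_grpc_address() runs before the override; an address carrying a
    # scheme, port, path, or malformed hostname raises QwakGrpcAddressException.
    container = wire_dependencies()
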
qwak/inner/tool/grpc/grpc_tools.py CHANGED
@@ -1,15 +1,18 @@
  import logging
+ import re
  import time
  from abc import ABC, abstractmethod
  from random import randint
  from typing import Callable, Optional, Tuple
+ from urllib.parse import urlparse, ParseResult

  import grpc
- from qwak.exceptions import QwakException

+ from qwak.exceptions import QwakException, QwakGrpcAddressException
  from .grpc_auth import Auth0Client

  logger = logging.getLogger()
+ HOSTNAME_REGEX: str = r"^(?!-)(?:[A-Za-z0-9-]{1,63}\.)*[A-Za-z0-9-]{1,63}(?<!-)$"


  def create_grpc_channel(
@@ -19,7 +22,7 @@ def create_grpc_channel(
      auth_metadata_plugin: grpc.AuthMetadataPlugin = None,
      timeout: int = 100,
      options=None,
-     backoff_options={},
+     backoff_options=None,
      max_attempts=4,
      status_for_retry=(grpc.StatusCode.UNAVAILABLE,),
      attempt=0,
@@ -40,6 +43,9 @@
          status_for_retry: grpc statuses to retry upon
      Returns: Returns a grpc.Channel
      """
+     if backoff_options is None:
+         backoff_options = {}
+
      if not url:
          raise QwakException("Unable to create gRPC channel. URL has not been defined.")

@@ -101,11 +107,14 @@ def create_grpc_channel_or_none(
      auth_metadata_plugin: grpc.AuthMetadataPlugin = None,
      timeout: int = 30,
      options=None,
-     backoff_options={},
+     backoff_options=None,
      max_attempts=2,
      status_for_retry=(grpc.StatusCode.UNAVAILABLE,),
      attempt=0,
  ) -> Callable[[Optional[str], Optional[bool]], Optional[grpc.Channel]]:
+     if backoff_options is None:
+         backoff_options = {}
+
      def deferred_channel(
          url_overwrite: Optional[str] = None, ssl_overwrite: Optional[bool] = None
      ):
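
Both signatures above trade the mutable default backoff_options={} for None plus an in-body guard, avoiding Python's shared-mutable-default pitfall: a dict default is evaluated once at function definition and the same object is reused across every call. A standalone illustration of the bug the change prevents (not code from this package):

    def buggy(backoff_options={}):
        # The same dict object persists across calls.
        backoff_options["calls"] = backoff_options.get("calls", 0) + 1
        return backoff_options

    print(buggy())  # {'calls': 1}
    print(buggy())  # {'calls': 2} -- state leaked from the previous call

    def fixed(backoff_options=None):
        if backoff_options is None:
            backoff_options = {}  # fresh dict per call, as in the diff above
        backoff_options["calls"] = backoff_options.get("calls", 0) + 1
        return backoff_options

    print(fixed())  # {'calls': 1}
    print(fixed())  # {'calls': 1}
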
@@ -129,6 +138,117 @@ def create_grpc_channel_or_none(
      return deferred_channel


+ def validate_grpc_address(
+     grpc_address: str,
+     is_port_specification_allowed: bool = False,
+     is_url_scheme_allowed: bool = False,
+ ):
+     """
+     Validate gRPC address format
+     Args:
+         grpc_address (str): gRPC address to validate
+         is_port_specification_allowed (bool): Whether to allow port specification in the address
+         is_url_scheme_allowed (bool): Whether to allow URL scheme in the address
+     Raises:
+         QwakGrpcAddressException: If the gRPC address is invalid
+     """
+     parsed_grpc_address: ParseResult = parse_address(grpc_address)
+     hostname: str = get_hostname_from_address(parsed_grpc_address)
+     validate_paths_are_not_included_in_address(parsed_grpc_address)
+
+     if not is_url_scheme_allowed:
+         __validate_url_scheme_not_included_in_address(parsed_grpc_address)
+
+     if not is_port_specification_allowed:
+         __validate_port_not_included_in_address(parsed_grpc_address)
+
+     if not is_valid_hostname(hostname):
+         raise QwakGrpcAddressException(
+             "gRPC address must be a simple hostname or fully qualified domain name.",
+             parsed_grpc_address,
+         )
+
+
+ def validate_paths_are_not_included_in_address(
+     parsed_grpc_address: ParseResult,
+ ) -> None:
+     has_invalid_path: bool = (
+         parsed_grpc_address.path not in {"", "/"}
+         or parsed_grpc_address.query
+         or parsed_grpc_address.fragment
+     )
+
+     if has_invalid_path:
+         raise QwakGrpcAddressException(
+             "gRPC address must not contain paths, queries, or fragments.",
+             parsed_grpc_address,
+         )
+
+
+ def get_hostname_from_address(parsed_grpc_address: ParseResult) -> str:
+     hostname: Optional[str] = parsed_grpc_address.hostname
+     if not hostname:
+         raise QwakGrpcAddressException(
+             "gRPC address must contain a valid hostname.", parsed_grpc_address
+         )
+
+     return hostname
+
+
+ def __validate_url_scheme_not_included_in_address(
+     parsed_grpc_address: ParseResult,
+ ) -> None:
+     if parsed_grpc_address.scheme:
+         raise QwakGrpcAddressException(
+             "URL scheme is not allowed in the gRPC address.", parsed_grpc_address
+         )
+
+
+ def __validate_port_not_included_in_address(parsed_grpc_address: ParseResult):
+     try:
+         port: Optional[int] = parsed_grpc_address.port
+     except ValueError as exc:
+         raise QwakGrpcAddressException(
+             "Invalid port specification in the gRPC address.", parsed_grpc_address
+         ) from exc
+
+     if port:
+         raise QwakGrpcAddressException(
+             "Port specification is not allowed in the gRPC address.",
+             parsed_grpc_address,
+         )
+
+
+ def parse_address(grpc_address: str) -> ParseResult:
+     if not grpc_address or not grpc_address.strip():
+         raise QwakGrpcAddressException(
+             "gRPC address must not be empty or whitespace.", grpc_address
+         )
+
+     trimmed_address: str = grpc_address.strip()
+     parsed_address: ParseResult = urlparse(
+         trimmed_address if "://" in trimmed_address else f"//{trimmed_address}"
+     )
+
+     return parsed_address
+
+
+ def is_valid_hostname(hostname: str) -> bool:
+     """
+     Validate that the supplied hostname conforms to RFC-style label rules:
+     anchored pattern enforces full-string validation, negative lookahead/lookbehind block
+     leading or trailing hyphens per label, and each dot-separated label must be 1-63
+     alphanumeric/hyphen characters.
+
+     Args:
+         hostname (str): The hostname to validate.
+     Returns:
+         bool: True if the hostname is valid, False otherwise.
+     """
+     hostname_pattern: re.Pattern = re.compile(HOSTNAME_REGEX)
+     return bool(hostname_pattern.fullmatch(hostname))
+
+
  class SleepingPolicy(ABC):
      @abstractmethod
      def sleep(self, try_i: int):
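
The new validate_grpc_address helper parses the address with urllib.parse.urlparse, prefixing // when no scheme is present so the host lands in netloc, then rejects schemes, ports, paths, queries, fragments, and hostnames that fail the anchored HOSTNAME_REGEX. A quick sketch of the expected accept/reject behavior; the addresses are illustrative:

    from qwak.exceptions import QwakGrpcAddressException
    from qwak.inner.tool.grpc.grpc_tools import validate_grpc_address

    validate_grpc_address("grpc.qwak.ai")  # plain FQDN: passes

    for bad in (
        "https://grpc.qwak.ai",  # URL scheme not allowed by default
        "grpc.qwak.ai:443",      # port not allowed by default
        "grpc.qwak.ai/api",      # paths are always rejected
        "-bad-.qwak.ai",         # label starts/ends with a hyphen
    ):
        try:
            validate_grpc_address(bad)
        except QwakGrpcAddressException:
            pass  # expected

    # Ports and schemes can be opted into explicitly:
    validate_grpc_address("grpc.qwak.ai:443", is_port_specification_allowed=True)
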