feldera 0.148.0__tar.gz → 0.150.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of feldera might be problematic. Click here for more details.
- {feldera-0.148.0 → feldera-0.150.0}/PKG-INFO +2 -1
- {feldera-0.148.0 → feldera-0.150.0}/feldera/_callback_runner.py +4 -1
- {feldera-0.148.0 → feldera-0.150.0}/feldera/enums.py +8 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/pipeline.py +34 -31
- {feldera-0.148.0 → feldera-0.150.0}/feldera/pipeline_builder.py +5 -2
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/feldera_client.py +19 -7
- {feldera-0.148.0 → feldera-0.150.0}/feldera/testutils.py +23 -0
- feldera-0.150.0/feldera/testutils_oidc.py +368 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera.egg-info/PKG-INFO +2 -1
- {feldera-0.148.0 → feldera-0.150.0}/feldera.egg-info/SOURCES.txt +1 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera.egg-info/requires.txt +1 -0
- {feldera-0.148.0 → feldera-0.150.0}/pyproject.toml +3 -2
- {feldera-0.148.0 → feldera-0.150.0}/README.md +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/__init__.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/_helpers.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/output_handler.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/__init__.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/_helpers.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/_httprequests.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/config.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/errors.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/feldera_config.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/pipeline.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/sql_table.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/rest/sql_view.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/runtime_config.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/stats.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera/tests/test_datafusionize.py +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera.egg-info/dependency_links.txt +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/feldera.egg-info/top_level.txt +0 -0
- {feldera-0.148.0 → feldera-0.150.0}/setup.cfg +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: feldera
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.150.0
|
|
4
4
|
Summary: The feldera python client
|
|
5
5
|
Author-email: Feldera Team <dev@feldera.com>
|
|
6
6
|
License: MIT
|
|
@@ -20,6 +20,7 @@ Requires-Dist: typing-extensions
|
|
|
20
20
|
Requires-Dist: numpy>=2.2.4
|
|
21
21
|
Requires-Dist: pretty-errors
|
|
22
22
|
Requires-Dist: ruff>=0.6.9
|
|
23
|
+
Requires-Dist: PyJWT>=2.8.0
|
|
23
24
|
|
|
24
25
|
# Feldera Python SDK
|
|
25
26
|
|
|
@@ -6,6 +6,7 @@ from queue import Queue, Empty
|
|
|
6
6
|
import pandas as pd
|
|
7
7
|
from feldera import FelderaClient
|
|
8
8
|
from feldera._helpers import dataframe_from_response
|
|
9
|
+
from feldera.enums import PipelineFieldSelector
|
|
9
10
|
|
|
10
11
|
|
|
11
12
|
class _CallbackRunnerInstruction(Enum):
|
|
@@ -38,7 +39,9 @@ class CallbackRunner(Thread):
|
|
|
38
39
|
:meta private:
|
|
39
40
|
"""
|
|
40
41
|
|
|
41
|
-
pipeline = self.client.get_pipeline(
|
|
42
|
+
pipeline = self.client.get_pipeline(
|
|
43
|
+
self.pipeline_name, PipelineFieldSelector.ALL
|
|
44
|
+
)
|
|
42
45
|
|
|
43
46
|
schemas = pipeline.tables + pipeline.views
|
|
44
47
|
for schema in schemas:
|
|
@@ -336,3 +336,11 @@ class FaultToleranceModel(Enum):
|
|
|
336
336
|
raise ValueError(
|
|
337
337
|
f"Unknown value '{value}' for enum {FaultToleranceModel.__name__}"
|
|
338
338
|
)
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
class PipelineFieldSelector(Enum):
|
|
342
|
+
ALL = "all"
|
|
343
|
+
"""Select all fields of a pipeline."""
|
|
344
|
+
|
|
345
|
+
STATUS = "status"
|
|
346
|
+
"""Select only the fields required to know the status of a pipeline."""
|
|
@@ -12,6 +12,7 @@ from queue import Queue
|
|
|
12
12
|
|
|
13
13
|
from feldera.rest.errors import FelderaAPIError
|
|
14
14
|
from feldera.enums import (
|
|
15
|
+
PipelineFieldSelector,
|
|
15
16
|
PipelineStatus,
|
|
16
17
|
ProgramStatus,
|
|
17
18
|
CheckpointStatus,
|
|
@@ -60,14 +61,16 @@ class Pipeline:
|
|
|
60
61
|
# block until the callback runner is ready
|
|
61
62
|
queue.join()
|
|
62
63
|
|
|
63
|
-
def refresh(self):
|
|
64
|
+
def refresh(self, field_selector: PipelineFieldSelector):
|
|
64
65
|
"""
|
|
65
66
|
Calls the backend to get the updated, latest version of the pipeline.
|
|
66
67
|
|
|
68
|
+
:param field_selector: Choose what pipeline information to refresh; see PipelineFieldSelector enum definition.
|
|
69
|
+
|
|
67
70
|
:raises FelderaConnectionError: If there is an issue connecting to the backend.
|
|
68
71
|
"""
|
|
69
72
|
|
|
70
|
-
self._inner = self.client.get_pipeline(self.name)
|
|
73
|
+
self._inner = self.client.get_pipeline(self.name, field_selector)
|
|
71
74
|
|
|
72
75
|
def status(self) -> PipelineStatus:
|
|
73
76
|
"""
|
|
@@ -75,7 +78,7 @@ class Pipeline:
|
|
|
75
78
|
"""
|
|
76
79
|
|
|
77
80
|
try:
|
|
78
|
-
self.refresh()
|
|
81
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
79
82
|
return PipelineStatus.from_str(self._inner.deployment_status)
|
|
80
83
|
|
|
81
84
|
except FelderaAPIError as err:
|
|
@@ -124,7 +127,7 @@ class Pipeline:
|
|
|
124
127
|
|
|
125
128
|
ensure_dataframe_has_columns(df)
|
|
126
129
|
|
|
127
|
-
pipeline = self.client.get_pipeline(self.name)
|
|
130
|
+
pipeline = self.client.get_pipeline(self.name, PipelineFieldSelector.ALL)
|
|
128
131
|
if table_name.lower() != "now" and table_name.lower() not in [
|
|
129
132
|
tbl.name.lower() for tbl in pipeline.tables
|
|
130
133
|
]:
|
|
@@ -672,7 +675,7 @@ metrics"""
|
|
|
672
675
|
"""
|
|
673
676
|
|
|
674
677
|
try:
|
|
675
|
-
inner = client.get_pipeline(name)
|
|
678
|
+
inner = client.get_pipeline(name, PipelineFieldSelector.ALL)
|
|
676
679
|
return Pipeline._from_inner(inner, client)
|
|
677
680
|
except FelderaAPIError as err:
|
|
678
681
|
if err.status_code == 404:
|
|
@@ -948,7 +951,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
948
951
|
Return the program SQL code of the pipeline.
|
|
949
952
|
"""
|
|
950
953
|
|
|
951
|
-
self.refresh()
|
|
954
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
952
955
|
return self._inner.program_code
|
|
953
956
|
|
|
954
957
|
def modify(
|
|
@@ -988,7 +991,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
988
991
|
Return the storage status of the pipeline.
|
|
989
992
|
"""
|
|
990
993
|
|
|
991
|
-
self.refresh()
|
|
994
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
992
995
|
return StorageStatus.from_str(self._inner.storage_status)
|
|
993
996
|
|
|
994
997
|
def program_status(self) -> ProgramStatus:
|
|
@@ -1000,7 +1003,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1000
1003
|
Rust code to a binary.
|
|
1001
1004
|
"""
|
|
1002
1005
|
|
|
1003
|
-
self.refresh()
|
|
1006
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1004
1007
|
return ProgramStatus.from_value(self._inner.program_status)
|
|
1005
1008
|
|
|
1006
1009
|
def program_status_since(self) -> datetime:
|
|
@@ -1008,7 +1011,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1008
1011
|
Return the timestamp when the current program status was set.
|
|
1009
1012
|
"""
|
|
1010
1013
|
|
|
1011
|
-
self.refresh()
|
|
1014
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1012
1015
|
return datetime.fromisoformat(self._inner.program_status_since)
|
|
1013
1016
|
|
|
1014
1017
|
def udf_rust(self) -> str:
|
|
@@ -1016,7 +1019,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1016
1019
|
Return the Rust code for UDFs.
|
|
1017
1020
|
"""
|
|
1018
1021
|
|
|
1019
|
-
self.refresh()
|
|
1022
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1020
1023
|
return self._inner.udf_rust
|
|
1021
1024
|
|
|
1022
1025
|
def udf_toml(self) -> str:
|
|
@@ -1024,7 +1027,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1024
1027
|
Return the Rust dependencies required by UDFs (in the TOML format).
|
|
1025
1028
|
"""
|
|
1026
1029
|
|
|
1027
|
-
self.refresh()
|
|
1030
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1028
1031
|
return self._inner.udf_toml
|
|
1029
1032
|
|
|
1030
1033
|
def program_config(self) -> Mapping[str, Any]:
|
|
@@ -1032,7 +1035,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1032
1035
|
Return the program config of the pipeline.
|
|
1033
1036
|
"""
|
|
1034
1037
|
|
|
1035
|
-
self.refresh()
|
|
1038
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1036
1039
|
return self._inner.program_config
|
|
1037
1040
|
|
|
1038
1041
|
def runtime_config(self) -> RuntimeConfig:
|
|
@@ -1040,7 +1043,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1040
1043
|
Return the runtime config of the pipeline.
|
|
1041
1044
|
"""
|
|
1042
1045
|
|
|
1043
|
-
self.refresh()
|
|
1046
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1044
1047
|
return RuntimeConfig.from_dict(self._inner.runtime_config)
|
|
1045
1048
|
|
|
1046
1049
|
def set_runtime_config(self, runtime_config: RuntimeConfig):
|
|
@@ -1065,7 +1068,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1065
1068
|
Return the ID of the pipeline.
|
|
1066
1069
|
"""
|
|
1067
1070
|
|
|
1068
|
-
self.refresh()
|
|
1071
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1069
1072
|
return self._inner.id
|
|
1070
1073
|
|
|
1071
1074
|
def description(self) -> str:
|
|
@@ -1073,7 +1076,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1073
1076
|
Return the description of the pipeline.
|
|
1074
1077
|
"""
|
|
1075
1078
|
|
|
1076
|
-
self.refresh()
|
|
1079
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1077
1080
|
return self._inner.description
|
|
1078
1081
|
|
|
1079
1082
|
def tables(self) -> List[SQLTable]:
|
|
@@ -1081,7 +1084,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1081
1084
|
Return the tables of the pipeline.
|
|
1082
1085
|
"""
|
|
1083
1086
|
|
|
1084
|
-
self.refresh()
|
|
1087
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1085
1088
|
return self._inner.tables
|
|
1086
1089
|
|
|
1087
1090
|
def views(self) -> List[SQLView]:
|
|
@@ -1089,7 +1092,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1089
1092
|
Return the views of the pipeline.
|
|
1090
1093
|
"""
|
|
1091
1094
|
|
|
1092
|
-
self.refresh()
|
|
1095
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1093
1096
|
return self._inner.views
|
|
1094
1097
|
|
|
1095
1098
|
def created_at(self) -> datetime:
|
|
@@ -1097,7 +1100,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1097
1100
|
Return the creation time of the pipeline.
|
|
1098
1101
|
"""
|
|
1099
1102
|
|
|
1100
|
-
self.refresh()
|
|
1103
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1101
1104
|
return datetime.fromisoformat(self._inner.created_at)
|
|
1102
1105
|
|
|
1103
1106
|
def version(self) -> int:
|
|
@@ -1105,7 +1108,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1105
1108
|
Return the version of the pipeline.
|
|
1106
1109
|
"""
|
|
1107
1110
|
|
|
1108
|
-
self.refresh()
|
|
1111
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1109
1112
|
return self._inner.version
|
|
1110
1113
|
|
|
1111
1114
|
def program_version(self) -> int:
|
|
@@ -1113,7 +1116,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1113
1116
|
Return the program version of the pipeline.
|
|
1114
1117
|
"""
|
|
1115
1118
|
|
|
1116
|
-
self.refresh()
|
|
1119
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1117
1120
|
return self._inner.program_version
|
|
1118
1121
|
|
|
1119
1122
|
def deployment_status_since(self) -> datetime:
|
|
@@ -1122,7 +1125,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1122
1125
|
was set.
|
|
1123
1126
|
"""
|
|
1124
1127
|
|
|
1125
|
-
self.refresh()
|
|
1128
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1126
1129
|
return datetime.fromisoformat(self._inner.deployment_status_since)
|
|
1127
1130
|
|
|
1128
1131
|
def deployment_config(self) -> Mapping[str, Any]:
|
|
@@ -1130,7 +1133,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1130
1133
|
Return the deployment config of the pipeline.
|
|
1131
1134
|
"""
|
|
1132
1135
|
|
|
1133
|
-
self.refresh()
|
|
1136
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1134
1137
|
return self._inner.deployment_config
|
|
1135
1138
|
|
|
1136
1139
|
def deployment_desired_status(self) -> DeploymentDesiredStatus:
|
|
@@ -1139,7 +1142,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1139
1142
|
This is the next state that the pipeline should transition to.
|
|
1140
1143
|
"""
|
|
1141
1144
|
|
|
1142
|
-
self.refresh()
|
|
1145
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1143
1146
|
return DeploymentDesiredStatus.from_str(self._inner.deployment_desired_status)
|
|
1144
1147
|
|
|
1145
1148
|
def deployment_resources_desired_status(self) -> DeploymentResourcesDesiredStatus:
|
|
@@ -1147,7 +1150,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1147
1150
|
Return the desired status of the the deployment resources.
|
|
1148
1151
|
"""
|
|
1149
1152
|
|
|
1150
|
-
self.refresh()
|
|
1153
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1151
1154
|
return DeploymentResourcesDesiredStatus.from_str(
|
|
1152
1155
|
self._inner.deployment_resources_desired_status
|
|
1153
1156
|
)
|
|
@@ -1157,7 +1160,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1157
1160
|
Return the status of the deployment resources.
|
|
1158
1161
|
"""
|
|
1159
1162
|
|
|
1160
|
-
self.refresh()
|
|
1163
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1161
1164
|
return DeploymentResourcesStatus.from_str(
|
|
1162
1165
|
self._inner.deployment_resources_status
|
|
1163
1166
|
)
|
|
@@ -1167,7 +1170,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1167
1170
|
Return the deployment runtime desired status.
|
|
1168
1171
|
"""
|
|
1169
1172
|
|
|
1170
|
-
self.refresh()
|
|
1173
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1171
1174
|
return DeploymentRuntimeDesiredStatus.from_str(
|
|
1172
1175
|
self._inner.deployment_runtime_desired_status
|
|
1173
1176
|
)
|
|
@@ -1177,7 +1180,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1177
1180
|
Return the deployment runtime status.
|
|
1178
1181
|
"""
|
|
1179
1182
|
|
|
1180
|
-
self.refresh()
|
|
1183
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1181
1184
|
return DeploymentRuntimeStatus.from_str(self._inner.deployment_runtime_status)
|
|
1182
1185
|
|
|
1183
1186
|
def deployment_error(self) -> Mapping[str, Any]:
|
|
@@ -1186,7 +1189,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1186
1189
|
Returns an empty string if there is no error.
|
|
1187
1190
|
"""
|
|
1188
1191
|
|
|
1189
|
-
self.refresh()
|
|
1192
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1190
1193
|
return self._inner.deployment_error
|
|
1191
1194
|
|
|
1192
1195
|
def deployment_location(self) -> str:
|
|
@@ -1196,7 +1199,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1196
1199
|
at runtime (a TCP port number or a URI).
|
|
1197
1200
|
"""
|
|
1198
1201
|
|
|
1199
|
-
self.refresh()
|
|
1202
|
+
self.refresh(PipelineFieldSelector.STATUS)
|
|
1200
1203
|
return self._inner.deployment_location
|
|
1201
1204
|
|
|
1202
1205
|
def program_info(self) -> Mapping[str, Any]:
|
|
@@ -1207,7 +1210,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1207
1210
|
and the SQL program schema.
|
|
1208
1211
|
"""
|
|
1209
1212
|
|
|
1210
|
-
self.refresh()
|
|
1213
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1211
1214
|
return self._inner.program_info
|
|
1212
1215
|
|
|
1213
1216
|
def program_error(self) -> Mapping[str, Any]:
|
|
@@ -1217,7 +1220,7 @@ pipeline '{self.name}' to sync checkpoint '{uuid}'"""
|
|
|
1217
1220
|
`sql_compilation` and `rust_compilation` will be 0.
|
|
1218
1221
|
"""
|
|
1219
1222
|
|
|
1220
|
-
self.refresh()
|
|
1223
|
+
self.refresh(PipelineFieldSelector.ALL)
|
|
1221
1224
|
return self._inner.program_error
|
|
1222
1225
|
|
|
1223
1226
|
def errors(self) -> List[Mapping[str, Any]]:
|
|
@@ -4,7 +4,7 @@ from typing import Optional
|
|
|
4
4
|
from feldera.rest.feldera_client import FelderaClient
|
|
5
5
|
from feldera.rest.pipeline import Pipeline as InnerPipeline
|
|
6
6
|
from feldera.pipeline import Pipeline
|
|
7
|
-
from feldera.enums import CompilationProfile
|
|
7
|
+
from feldera.enums import CompilationProfile, PipelineFieldSelector
|
|
8
8
|
from feldera.runtime_config import RuntimeConfig
|
|
9
9
|
from feldera.rest.errors import FelderaAPIError
|
|
10
10
|
|
|
@@ -60,7 +60,10 @@ class PipelineBuilder:
|
|
|
60
60
|
raise ValueError("Name and SQL are required to create a pipeline")
|
|
61
61
|
|
|
62
62
|
try:
|
|
63
|
-
if
|
|
63
|
+
if (
|
|
64
|
+
self.client.get_pipeline(self.name, PipelineFieldSelector.STATUS)
|
|
65
|
+
is not None
|
|
66
|
+
):
|
|
64
67
|
raise RuntimeError(f"Pipeline with name {self.name} already exists")
|
|
65
68
|
except FelderaAPIError as err:
|
|
66
69
|
if err.error_code != "UnknownPipelineName":
|
|
@@ -7,6 +7,7 @@ from decimal import Decimal
|
|
|
7
7
|
from typing import Generator, Mapping
|
|
8
8
|
from urllib.parse import quote
|
|
9
9
|
|
|
10
|
+
from feldera.enums import PipelineFieldSelector
|
|
10
11
|
from feldera.rest.config import Config
|
|
11
12
|
from feldera.rest.feldera_config import FelderaConfig
|
|
12
13
|
from feldera.rest.errors import FelderaTimeoutError, FelderaAPIError
|
|
@@ -93,14 +94,19 @@ class FelderaClient:
|
|
|
93
94
|
|
|
94
95
|
return FelderaClient(f"http://127.0.0.1:{port}")
|
|
95
96
|
|
|
96
|
-
def get_pipeline(
|
|
97
|
+
def get_pipeline(
|
|
98
|
+
self, pipeline_name: str, field_selector: PipelineFieldSelector
|
|
99
|
+
) -> Pipeline:
|
|
97
100
|
"""
|
|
98
101
|
Get a pipeline by name
|
|
99
102
|
|
|
100
103
|
:param pipeline_name: The name of the pipeline
|
|
104
|
+
:param field_selector: Choose what pipeline information to refresh; see PipelineFieldSelector enum definition.
|
|
101
105
|
"""
|
|
102
106
|
|
|
103
|
-
resp = self.http.get(
|
|
107
|
+
resp = self.http.get(
|
|
108
|
+
f"/pipelines/{pipeline_name}?selector={field_selector.value}"
|
|
109
|
+
)
|
|
104
110
|
|
|
105
111
|
return Pipeline.from_dict(resp)
|
|
106
112
|
|
|
@@ -130,12 +136,14 @@ class FelderaClient:
|
|
|
130
136
|
wait = ["Pending", "CompilingSql", "SqlCompiled", "CompilingRust"]
|
|
131
137
|
|
|
132
138
|
while True:
|
|
133
|
-
p = self.get_pipeline(name)
|
|
139
|
+
p = self.get_pipeline(name, PipelineFieldSelector.STATUS)
|
|
134
140
|
status = p.program_status
|
|
135
141
|
|
|
136
142
|
if status == "Success":
|
|
137
|
-
return
|
|
143
|
+
return self.get_pipeline(name, PipelineFieldSelector.ALL)
|
|
138
144
|
elif status not in wait:
|
|
145
|
+
p = self.get_pipeline(name, PipelineFieldSelector.ALL)
|
|
146
|
+
|
|
139
147
|
# error handling for SQL compilation errors
|
|
140
148
|
if status == "SqlError":
|
|
141
149
|
sql_errors = p.program_error["sql_compilation"]["messages"]
|
|
@@ -181,7 +189,7 @@ class FelderaClient:
|
|
|
181
189
|
f"transition to '{state}' state"
|
|
182
190
|
)
|
|
183
191
|
|
|
184
|
-
resp = self.get_pipeline(pipeline_name)
|
|
192
|
+
resp = self.get_pipeline(pipeline_name, PipelineFieldSelector.STATUS)
|
|
185
193
|
status = resp.deployment_status
|
|
186
194
|
|
|
187
195
|
if status.lower() == state.lower():
|
|
@@ -502,7 +510,9 @@ Reason: The pipeline is in a STOPPED state due to the following error:
|
|
|
502
510
|
start = time.monotonic()
|
|
503
511
|
|
|
504
512
|
while time.monotonic() - start < timeout_s:
|
|
505
|
-
status = self.get_pipeline(
|
|
513
|
+
status = self.get_pipeline(
|
|
514
|
+
pipeline_name, PipelineFieldSelector.STATUS
|
|
515
|
+
).deployment_status
|
|
506
516
|
|
|
507
517
|
if status == "Stopped":
|
|
508
518
|
return
|
|
@@ -536,7 +546,9 @@ Reason: The pipeline is in a STOPPED state due to the following error:
|
|
|
536
546
|
start = time.monotonic()
|
|
537
547
|
|
|
538
548
|
while time.monotonic() - start < timeout_s:
|
|
539
|
-
status = self.get_pipeline(
|
|
549
|
+
status = self.get_pipeline(
|
|
550
|
+
pipeline_name, PipelineFieldSelector.STATUS
|
|
551
|
+
).storage_status
|
|
540
552
|
|
|
541
553
|
if status == "Cleared":
|
|
542
554
|
return
|
|
@@ -15,6 +15,28 @@ from feldera.runtime_config import Resources, RuntimeConfig
|
|
|
15
15
|
from feldera.rest import FelderaClient
|
|
16
16
|
|
|
17
17
|
API_KEY = os.environ.get("FELDERA_API_KEY")
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
# OIDC authentication support
|
|
21
|
+
def _get_oidc_token():
|
|
22
|
+
"""Get OIDC token if environment is configured, otherwise return None"""
|
|
23
|
+
try:
|
|
24
|
+
from feldera.testutils_oidc import get_oidc_test_helper
|
|
25
|
+
|
|
26
|
+
oidc_helper = get_oidc_test_helper()
|
|
27
|
+
if oidc_helper is not None:
|
|
28
|
+
return oidc_helper.obtain_access_token()
|
|
29
|
+
except ImportError:
|
|
30
|
+
pass
|
|
31
|
+
return None
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def _get_effective_api_key():
|
|
35
|
+
"""Get effective API key - OIDC token takes precedence over static API key"""
|
|
36
|
+
oidc_token = _get_oidc_token()
|
|
37
|
+
return oidc_token if oidc_token else API_KEY
|
|
38
|
+
|
|
39
|
+
|
|
18
40
|
BASE_URL = (
|
|
19
41
|
os.environ.get("FELDERA_HOST")
|
|
20
42
|
or os.environ.get("FELDERA_BASE_URL")
|
|
@@ -44,6 +66,7 @@ class _LazyClient:
|
|
|
44
66
|
if self._client is None:
|
|
45
67
|
self._client = FelderaClient(
|
|
46
68
|
connection_timeout=10,
|
|
69
|
+
api_key=_get_effective_api_key(),
|
|
47
70
|
)
|
|
48
71
|
return self._client
|
|
49
72
|
|
|
@@ -0,0 +1,368 @@
|
|
|
1
|
+
"""
|
|
2
|
+
OIDC Authentication Test Helper
|
|
3
|
+
|
|
4
|
+
Utilities for testing OIDC authentication integration with remote providers.
|
|
5
|
+
Provides token generation, validation helpers, and test configuration.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import os
|
|
9
|
+
import time
|
|
10
|
+
import json
|
|
11
|
+
import requests
|
|
12
|
+
import jwt
|
|
13
|
+
import logging
|
|
14
|
+
from typing import Optional, Dict, Any
|
|
15
|
+
from dataclasses import dataclass
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
|
|
19
|
+
class OidcTestConfig:
|
|
20
|
+
"""Configuration for OIDC authentication tests using Resource Owner Password Flow"""
|
|
21
|
+
|
|
22
|
+
issuer: str
|
|
23
|
+
client_id: str
|
|
24
|
+
client_secret: str
|
|
25
|
+
username: str
|
|
26
|
+
password: str
|
|
27
|
+
scope: str = "openid profile email"
|
|
28
|
+
|
|
29
|
+
@classmethod
|
|
30
|
+
def from_environment(cls) -> Optional["OidcTestConfig"]:
|
|
31
|
+
"""Load OIDC test configuration from environment variables"""
|
|
32
|
+
issuer = os.getenv("OIDC_TEST_ISSUER")
|
|
33
|
+
client_id = os.getenv("OIDC_TEST_CLIENT_ID")
|
|
34
|
+
client_secret = os.getenv("OIDC_TEST_CLIENT_SECRET")
|
|
35
|
+
username = os.getenv("OIDC_TEST_USERNAME")
|
|
36
|
+
password = os.getenv("OIDC_TEST_PASSWORD")
|
|
37
|
+
|
|
38
|
+
# All fields are required
|
|
39
|
+
if not all([issuer, client_id, client_secret, username, password]):
|
|
40
|
+
return None
|
|
41
|
+
|
|
42
|
+
return cls(
|
|
43
|
+
issuer=issuer,
|
|
44
|
+
client_id=client_id,
|
|
45
|
+
client_secret=client_secret,
|
|
46
|
+
username=username,
|
|
47
|
+
password=password,
|
|
48
|
+
scope=os.getenv("OIDC_TEST_SCOPE", "openid profile email"),
|
|
49
|
+
)
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class OidcTestHelper:
|
|
53
|
+
"""Helper class for OIDC authentication testing"""
|
|
54
|
+
|
|
55
|
+
def __init__(self, config: OidcTestConfig):
|
|
56
|
+
self.config = config
|
|
57
|
+
self._discovery_doc = None
|
|
58
|
+
self._jwks = None
|
|
59
|
+
self._access_token = None
|
|
60
|
+
self._token_expires_at = 0
|
|
61
|
+
|
|
62
|
+
def get_discovery_document(self) -> Dict[str, Any]:
|
|
63
|
+
"""Fetch and cache the OIDC discovery document"""
|
|
64
|
+
if self._discovery_doc is None:
|
|
65
|
+
discovery_url = (
|
|
66
|
+
f"{self.config.issuer.rstrip('/')}/.well-known/openid-configuration"
|
|
67
|
+
)
|
|
68
|
+
response = requests.get(discovery_url, timeout=30)
|
|
69
|
+
response.raise_for_status()
|
|
70
|
+
self._discovery_doc = response.json()
|
|
71
|
+
return self._discovery_doc
|
|
72
|
+
|
|
73
|
+
def get_jwks(self) -> Dict[str, Any]:
|
|
74
|
+
"""Fetch and cache the JSON Web Key Set"""
|
|
75
|
+
if self._jwks is None:
|
|
76
|
+
discovery_doc = self.get_discovery_document()
|
|
77
|
+
jwks_uri = discovery_doc["jwks_uri"]
|
|
78
|
+
response = requests.get(jwks_uri, timeout=30)
|
|
79
|
+
response.raise_for_status()
|
|
80
|
+
self._jwks = response.json()
|
|
81
|
+
return self._jwks
|
|
82
|
+
|
|
83
|
+
def get_token_endpoint(self) -> str:
|
|
84
|
+
"""Get the token endpoint URL from discovery document"""
|
|
85
|
+
discovery_doc = self.get_discovery_document()
|
|
86
|
+
token_endpoint = discovery_doc.get("token_endpoint")
|
|
87
|
+
if not token_endpoint:
|
|
88
|
+
raise ValueError("Token endpoint not found in OIDC discovery document")
|
|
89
|
+
return token_endpoint
|
|
90
|
+
|
|
91
|
+
def obtain_access_token(self, pytest_cache=None) -> str:
|
|
92
|
+
"""
|
|
93
|
+
Obtain access token using environment variable set by pytest master node.
|
|
94
|
+
|
|
95
|
+
The actual token fetching is handled by pytest_configure hooks in conftest.py,
|
|
96
|
+
which guarantees only one auth request per test session across all workers.
|
|
97
|
+
|
|
98
|
+
If OIDC is configured but no token is available, this will fail fast.
|
|
99
|
+
"""
|
|
100
|
+
logger = logging.getLogger(__name__)
|
|
101
|
+
current_time = time.time()
|
|
102
|
+
|
|
103
|
+
# Check environment variable for cross-process token sharing
|
|
104
|
+
token_data = get_cached_token_from_env()
|
|
105
|
+
if token_data:
|
|
106
|
+
logger.info("Using environment variable cached access token")
|
|
107
|
+
# Cache in instance for future calls to avoid repeated parsing
|
|
108
|
+
self._access_token = token_data["access_token"]
|
|
109
|
+
self._token_expires_at = token_data["expires_at"]
|
|
110
|
+
return token_data["access_token"]
|
|
111
|
+
|
|
112
|
+
# Fallback: Check instance cache
|
|
113
|
+
if self._access_token and current_time < self._token_expires_at - 30:
|
|
114
|
+
logger.info("Using instance cached access token")
|
|
115
|
+
return self._access_token
|
|
116
|
+
|
|
117
|
+
# If OIDC is configured but no token is available, this is a critical failure
|
|
118
|
+
raise RuntimeError(
|
|
119
|
+
"OIDC authentication is configured but no valid token is available. "
|
|
120
|
+
"This indicates the oidc_token_fixture failed to retrieve a token. "
|
|
121
|
+
"Check OIDC configuration and ensure pytest hooks ran properly."
|
|
122
|
+
)
|
|
123
|
+
|
|
124
|
+
def decode_token_claims(self, token: str) -> Dict[str, Any]:
|
|
125
|
+
"""Decode JWT token claims without signature verification (for testing)"""
|
|
126
|
+
return jwt.decode(token, options={"verify_signature": False})
|
|
127
|
+
|
|
128
|
+
def is_token_expired(self, token: str) -> bool:
|
|
129
|
+
"""Check if a JWT token is expired"""
|
|
130
|
+
try:
|
|
131
|
+
claims = self.decode_token_claims(token)
|
|
132
|
+
exp = claims.get("exp")
|
|
133
|
+
if exp is None:
|
|
134
|
+
return False # No expiration claim
|
|
135
|
+
return time.time() > exp
|
|
136
|
+
except Exception:
|
|
137
|
+
return True # Invalid token is considered expired
|
|
138
|
+
|
|
139
|
+
def validate_token_structure(self, token: str) -> bool:
|
|
140
|
+
"""Validate that token has correct JWT structure"""
|
|
141
|
+
try:
|
|
142
|
+
# Just check if it can be decoded (ignoring signature)
|
|
143
|
+
self.decode_token_claims(token)
|
|
144
|
+
return True
|
|
145
|
+
except Exception:
|
|
146
|
+
return False
|
|
147
|
+
|
|
148
|
+
def create_authenticated_headers(self) -> Dict[str, str]:
|
|
149
|
+
"""Create HTTP headers with valid authentication token"""
|
|
150
|
+
token = self.obtain_access_token()
|
|
151
|
+
return {"Accept": "application/json", "Authorization": f"Bearer {token}"}
|
|
152
|
+
|
|
153
|
+
def get_malformed_test_tokens(self) -> Dict[str, str]:
|
|
154
|
+
"""Get various malformed tokens for negative testing"""
|
|
155
|
+
return {
|
|
156
|
+
"malformed_structure": "not.a.jwt",
|
|
157
|
+
"empty": "",
|
|
158
|
+
"malformed_header": "eyJhbGciOiJub25lIn0.eyJzdWIiOiJ0ZXN0In0.invalid",
|
|
159
|
+
"wrong_issuer": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJpc3MiOiJodHRwczovL3dyb25nLWlzc3Vlci5jb20iLCJhdWQiOiJ0ZXN0IiwiZXhwIjo5OTk5OTk5OTk5fQ.signature",
|
|
160
|
+
}
|
|
161
|
+
|
|
162
|
+
|
|
163
|
+
def skip_if_oidc_not_configured():
|
|
164
|
+
"""Decorator to skip tests if OIDC test environment is not configured"""
|
|
165
|
+
import pytest
|
|
166
|
+
|
|
167
|
+
config = OidcTestConfig.from_environment()
|
|
168
|
+
return pytest.mark.skipif(
|
|
169
|
+
config is None,
|
|
170
|
+
reason="OIDC test environment not configured. Set OIDC_TEST_ISSUER, OIDC_TEST_CLIENT_ID, OIDC_TEST_CLIENT_SECRET, OIDC_TEST_USERNAME, OIDC_TEST_PASSWORD",
|
|
171
|
+
)
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
# Global test helper instance (lazy loaded)
|
|
175
|
+
_test_helper = None
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def get_oidc_test_helper() -> Optional[OidcTestHelper]:
|
|
179
|
+
"""Get global OIDC test helper instance"""
|
|
180
|
+
global _test_helper
|
|
181
|
+
if _test_helper is None:
|
|
182
|
+
config = OidcTestConfig.from_environment()
|
|
183
|
+
if config:
|
|
184
|
+
_test_helper = OidcTestHelper(config)
|
|
185
|
+
return _test_helper
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def parse_cached_token(env_token: str) -> Optional[Dict[str, Any]]:
|
|
189
|
+
"""
|
|
190
|
+
Parse and validate a base64-encoded token from environment variable.
|
|
191
|
+
|
|
192
|
+
Args:
|
|
193
|
+
env_token: Base64-encoded JSON token data from environment variable
|
|
194
|
+
|
|
195
|
+
Returns:
|
|
196
|
+
Parsed token data dict if valid, None if invalid or expired
|
|
197
|
+
"""
|
|
198
|
+
try:
|
|
199
|
+
import base64
|
|
200
|
+
|
|
201
|
+
token_json = base64.b64decode(env_token.encode()).decode()
|
|
202
|
+
token_data = json.loads(token_json)
|
|
203
|
+
return token_data
|
|
204
|
+
except Exception as e:
|
|
205
|
+
logging.getLogger(__name__).warning(f"Failed to parse cached token: {e}")
|
|
206
|
+
return None
|
|
207
|
+
|
|
208
|
+
|
|
209
|
+
def is_token_valid(token_data: Dict[str, Any], buffer_seconds: int = 30) -> bool:
|
|
210
|
+
"""
|
|
211
|
+
Check if token data is valid and not expired.
|
|
212
|
+
|
|
213
|
+
Args:
|
|
214
|
+
token_data: Dictionary containing token information
|
|
215
|
+
buffer_seconds: Safety buffer before expiration (default 30 seconds)
|
|
216
|
+
|
|
217
|
+
Returns:
|
|
218
|
+
True if token is valid and not expired, False otherwise
|
|
219
|
+
"""
|
|
220
|
+
if not token_data:
|
|
221
|
+
return False
|
|
222
|
+
|
|
223
|
+
access_token = token_data.get("access_token")
|
|
224
|
+
expires_at = token_data.get("expires_at", 0)
|
|
225
|
+
|
|
226
|
+
if not access_token:
|
|
227
|
+
return False
|
|
228
|
+
|
|
229
|
+
current_time = time.time()
|
|
230
|
+
return current_time < expires_at - buffer_seconds
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def encode_token_for_env(token_data: Dict[str, Any]) -> str:
    """
    Encode token data as base64 for storage in environment variables.

    Args:
        token_data: Dictionary containing token information

    Returns:
        Base64-encoded JSON string suitable for environment variable storage
    """
    import base64

    # Serialize to JSON first, then base64 so the value is a single
    # newline-free ASCII string safe for environment variables.
    serialized = json.dumps(token_data).encode()
    return base64.b64encode(serialized).decode()
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
def get_cached_token_from_env(
    env_var_name: str = "FELDERA_PYTEST_OIDC_TOKEN",
) -> Optional[Dict[str, Any]]:
    """
    Retrieve and validate cached token from environment variable.

    Args:
        env_var_name: Name of environment variable containing cached token

    Returns:
        Valid token data if available and not expired, None otherwise
    """
    import os

    raw = os.getenv(env_var_name)
    if raw:
        parsed = parse_cached_token(raw)
        # Only hand back tokens that both parsed and are not (about to be)
        # expired; anything else forces the caller to fetch a fresh one.
        if parsed and is_token_valid(parsed):
            return parsed
    return None
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def setup_token_cache() -> Optional[Dict[str, Any]]:
    """
    Set up OIDC token cache in environment variable if not already present.

    This function:
    1. Checks if a valid token is already cached
    2. If not, fetches a new token
    3. Stores the token in environment variable for cross-process access

    Used by both pytest hooks and demo runners to ensure consistent token caching.

    Returns:
        Token data if successfully cached, None if OIDC not configured
    """
    import os

    # Reuse an already-cached, still-valid token when one exists.
    existing = get_cached_token_from_env()
    if existing:
        print("🔐 AUTH: Using existing cached OIDC token")
        return existing

    fresh = fetch_oidc_token()
    if not fresh:
        # fetch_oidc_token returns None only when OIDC is not configured.
        print("🔐 AUTH: No OIDC configuration found, using fallback authentication")
        return None

    # Persist in the environment so child processes can reuse the token.
    os.environ["FELDERA_PYTEST_OIDC_TOKEN"] = encode_token_for_env(fresh)
    print("🔐 AUTH: Token cached in environment for cross-process access")
    return fresh
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
def fetch_oidc_token() -> Optional[Dict[str, Any]]:
    """
    Fetch OIDC token using Resource Owner Password Grant flow.

    This function is used by both pytest hooks and demo runners to ensure
    consistent token fetching behavior across the entire test infrastructure.

    Returns:
        Dict containing access_token, expires_at, and cached_at if successful,
        None if OIDC is not configured.

    Raises:
        RuntimeError: If OIDC is configured but the token request fails.
    """
    helper = get_oidc_test_helper()
    if helper is None:
        # No OIDC configuration present; caller falls back to other auth.
        return None

    print("🔐 AUTH: Fetching OIDC token")

    try:
        cfg = helper.config
        # Resource Owner Password Grant form body (RFC 6749 §4.3).
        form = {
            "grant_type": "password",
            "username": cfg.username,
            "password": cfg.password,
            "client_id": cfg.client_id,
            "client_secret": cfg.client_secret,
            "scope": cfg.scope,
            "audience": "feldera-api",
        }
        response = requests.post(
            helper.get_token_endpoint(),
            data=form,
            headers={
                "Content-Type": "application/x-www-form-urlencoded",
                "Accept": "application/json",
            },
            timeout=30,
        )

        if not response.ok:
            print(f"🔐 AUTH: ❌ Token request FAILED: {response.status_code}")
            raise Exception(
                f"Token request failed: {response.status_code} - {response.text}"
            )

        payload = response.json()
        print("🔐 AUTH: ✅ Token request SUCCESS!")

        # Default lifetime of one hour when the IdP omits expires_in.
        lifetime = payload.get("expires_in", 3600)
        return {
            "access_token": payload["access_token"],
            "expires_at": time.time() + lifetime,
            "cached_at": time.time(),
        }

    except Exception as e:
        # OIDC is configured, so a failed fetch is fatal for the test run:
        # surface it loudly instead of silently falling back.
        print(f"🔐 AUTH: CRITICAL FAILURE - Failed to fetch OIDC token: {e}")
        raise RuntimeError(
            f"OIDC authentication is configured but token retrieval failed: {e}"
        ) from e
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: feldera
|
|
3
|
-
Version: 0.
|
|
3
|
+
Version: 0.150.0
|
|
4
4
|
Summary: The feldera python client
|
|
5
5
|
Author-email: Feldera Team <dev@feldera.com>
|
|
6
6
|
License: MIT
|
|
@@ -20,6 +20,7 @@ Requires-Dist: typing-extensions
|
|
|
20
20
|
Requires-Dist: numpy>=2.2.4
|
|
21
21
|
Requires-Dist: pretty-errors
|
|
22
22
|
Requires-Dist: ruff>=0.6.9
|
|
23
|
+
Requires-Dist: PyJWT>=2.8.0
|
|
23
24
|
|
|
24
25
|
# Feldera Python SDK
|
|
25
26
|
|
|
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
|
|
|
6
6
|
name = "feldera"
|
|
7
7
|
readme = "README.md"
|
|
8
8
|
description = "The feldera python client"
|
|
9
|
-
version = "0.
|
|
9
|
+
version = "0.150.0"
|
|
10
10
|
license = { text = "MIT" }
|
|
11
11
|
requires-python = ">=3.10"
|
|
12
12
|
authors = [
|
|
@@ -28,6 +28,7 @@ dependencies = [
|
|
|
28
28
|
"numpy>=2.2.4",
|
|
29
29
|
"pretty-errors",
|
|
30
30
|
"ruff>=0.6.9",
|
|
31
|
+
"PyJWT>=2.8.0",
|
|
31
32
|
]
|
|
32
33
|
[project.urls]
|
|
33
34
|
Homepage = "https://www.feldera.com"
|
|
@@ -43,7 +44,7 @@ dev-dependencies = [
|
|
|
43
44
|
"pytest>=8.3.5",
|
|
44
45
|
"sphinx-rtd-theme==2.0.0",
|
|
45
46
|
"sphinx==7.3.7",
|
|
46
|
-
"simplejson==3.20.1"
|
|
47
|
+
"simplejson==3.20.1",
|
|
47
48
|
]
|
|
48
49
|
|
|
49
50
|
[tool.pytest.ini_options]
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|