cribl-control-plane 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cribl-control-plane has been flagged as potentially problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/destinations.py +10 -8
- cribl_control_plane/errors/healthstatus_error.py +1 -1
- cribl_control_plane/groups_sdk.py +10 -10
- cribl_control_plane/{healthinfo.py → health.py} +3 -1
- cribl_control_plane/models/__init__.py +21 -27
- cribl_control_plane/models/healthstatus.py +3 -3
- cribl_control_plane/models/input.py +4 -4
- cribl_control_plane/models/inputappscope.py +5 -5
- cribl_control_plane/models/inputcollection.py +2 -2
- cribl_control_plane/models/inputconfluentcloud.py +17 -3
- cribl_control_plane/models/inputcribl.py +5 -5
- cribl_control_plane/models/inputcriblhttp.py +3 -3
- cribl_control_plane/models/inputcribllakehttp.py +3 -3
- cribl_control_plane/models/inputcriblmetrics.py +5 -5
- cribl_control_plane/models/inputcribltcp.py +3 -3
- cribl_control_plane/models/inputdatadogagent.py +3 -3
- cribl_control_plane/models/inputedgeprometheus.py +3 -3
- cribl_control_plane/models/inputelastic.py +3 -3
- cribl_control_plane/models/inputeventhub.py +3 -3
- cribl_control_plane/models/inputfile.py +5 -5
- cribl_control_plane/models/inputfirehose.py +3 -3
- cribl_control_plane/models/inputgooglepubsub.py +16 -9
- cribl_control_plane/models/inputgrafana.py +20 -6
- cribl_control_plane/models/inputhttp.py +3 -3
- cribl_control_plane/models/inputhttpraw.py +3 -3
- cribl_control_plane/models/inputjournalfiles.py +3 -3
- cribl_control_plane/models/inputkafka.py +17 -3
- cribl_control_plane/models/inputkinesis.py +3 -3
- cribl_control_plane/models/inputkubeevents.py +5 -5
- cribl_control_plane/models/inputkubelogs.py +5 -5
- cribl_control_plane/models/inputkubemetrics.py +5 -5
- cribl_control_plane/models/inputloki.py +10 -3
- cribl_control_plane/models/inputmodeldriventelemetry.py +3 -3
- cribl_control_plane/models/inputmsk.py +17 -3
- cribl_control_plane/models/inputnetflow.py +3 -3
- cribl_control_plane/models/inputoffice365mgmt.py +3 -3
- cribl_control_plane/models/inputoffice365msgtrace.py +3 -3
- cribl_control_plane/models/inputoffice365service.py +3 -3
- cribl_control_plane/models/inputopentelemetry.py +3 -3
- cribl_control_plane/models/inputprometheus.py +3 -3
- cribl_control_plane/models/inputprometheusrw.py +3 -3
- cribl_control_plane/models/inputrawudp.py +3 -3
- cribl_control_plane/models/inputsnmp.py +3 -3
- cribl_control_plane/models/inputsplunk.py +3 -3
- cribl_control_plane/models/inputsplunkhec.py +3 -3
- cribl_control_plane/models/inputsplunksearch.py +3 -3
- cribl_control_plane/models/inputsqs.py +3 -3
- cribl_control_plane/models/inputsystemmetrics.py +5 -5
- cribl_control_plane/models/inputsystemstate.py +5 -5
- cribl_control_plane/models/inputtcp.py +3 -3
- cribl_control_plane/models/inputtcpjson.py +3 -3
- cribl_control_plane/models/inputwef.py +3 -3
- cribl_control_plane/models/inputwindowsmetrics.py +5 -5
- cribl_control_plane/models/inputwiz.py +3 -3
- cribl_control_plane/models/inputzscalerhec.py +3 -3
- cribl_control_plane/models/output.py +14 -14
- cribl_control_plane/models/outputazureblob.py +3 -3
- cribl_control_plane/models/outputazuredataexplorer.py +3 -3
- cribl_control_plane/models/outputazureeventhub.py +3 -3
- cribl_control_plane/models/outputclickhouse.py +3 -3
- cribl_control_plane/models/outputcloudwatch.py +3 -3
- cribl_control_plane/models/outputconfluentcloud.py +17 -3
- cribl_control_plane/models/outputcriblhttp.py +5 -5
- cribl_control_plane/models/outputcribllake.py +5 -5
- cribl_control_plane/models/outputcribltcp.py +5 -5
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +3 -3
- cribl_control_plane/models/outputdatadog.py +5 -5
- cribl_control_plane/models/outputdataset.py +5 -5
- cribl_control_plane/models/outputdevnull.py +5 -5
- cribl_control_plane/models/outputdiskspool.py +5 -5
- cribl_control_plane/models/outputdls3.py +5 -5
- cribl_control_plane/models/outputdynatracehttp.py +3 -3
- cribl_control_plane/models/outputdynatraceotlp.py +3 -3
- cribl_control_plane/models/outputelasticcloud.py +3 -3
- cribl_control_plane/models/outputexabeam.py +3 -3
- cribl_control_plane/models/outputgooglecloudlogging.py +3 -3
- cribl_control_plane/models/outputgooglecloudstorage.py +5 -5
- cribl_control_plane/models/outputgrafanacloud.py +24 -10
- cribl_control_plane/models/outputgraphite.py +3 -3
- cribl_control_plane/models/outputhumiohec.py +3 -3
- cribl_control_plane/models/outputkafka.py +17 -3
- cribl_control_plane/models/outputkinesis.py +3 -3
- cribl_control_plane/models/outputloki.py +14 -0
- cribl_control_plane/models/outputminio.py +3 -3
- cribl_control_plane/models/outputmsk.py +17 -3
- cribl_control_plane/models/outputnewrelic.py +5 -5
- cribl_control_plane/models/outputnewrelicevents.py +3 -3
- cribl_control_plane/models/outputring.py +5 -5
- cribl_control_plane/models/outputs3.py +5 -5
- cribl_control_plane/models/outputsecuritylake.py +3 -3
- cribl_control_plane/models/outputsentinel.py +3 -3
- cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
- cribl_control_plane/models/outputservicenow.py +3 -3
- cribl_control_plane/models/outputsns.py +3 -3
- cribl_control_plane/models/outputsplunk.py +3 -3
- cribl_control_plane/models/outputsplunkhec.py +5 -5
- cribl_control_plane/models/outputsqs.py +3 -3
- cribl_control_plane/models/outputstatsd.py +3 -3
- cribl_control_plane/models/outputstatsdext.py +3 -3
- cribl_control_plane/models/outputsyslog.py +5 -5
- cribl_control_plane/models/outputtcpjson.py +5 -5
- cribl_control_plane/models/outputwebhook.py +5 -5
- cribl_control_plane/models/outputxsiam.py +5 -5
- cribl_control_plane/nodes.py +100 -90
- cribl_control_plane/pipelines.py +20 -20
- cribl_control_plane/sdk.py +6 -6
- cribl_control_plane/sources.py +2 -0
- cribl_control_plane/versioning.py +14 -14
- {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/METADATA +24 -28
- {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/RECORD +112 -115
- cribl_control_plane/deployments.py +0 -185
- cribl_control_plane/models/restartresponse.py +0 -26
- cribl_control_plane/models/updateworkersrestartop.py +0 -24
- {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/WHEEL +0 -0
cribl_control_plane/_version.py
CHANGED
@@ -3,10 +3,10 @@
 import importlib.metadata
 
 __title__: str = "cribl-control-plane"
-__version__: str = "0.0.24"
-__openapi_doc_version__: str = "4.14.0-alpha.
+__version__: str = "0.0.26"
+__openapi_doc_version__: str = "4.14.0-alpha.1755082027273-8cf9b57a"
 __gen_version__: str = "2.660.0"
-__user_agent__: str = "speakeasy-sdk/python 0.0.
+__user_agent__: str = "speakeasy-sdk/python 0.0.26 2.660.0 4.14.0-alpha.1755082027273-8cf9b57a cribl-control-plane"
 
 try:
     if __package__ is not None:
cribl_control_plane/destinations.py
CHANGED
@@ -10,6 +10,8 @@ from typing import Any, List, Mapping, Optional, Union, cast
 
 
 class Destinations(BaseSDK):
+    r"""Actions related to Destinations"""
+
     def list(
         self,
         *,
@@ -884,7 +886,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def clear_pq(
         self,
         *,
         id: str,
@@ -971,7 +973,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def clear_pq_async(
         self,
         *,
         id: str,
@@ -1058,7 +1060,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_pq_status(
         self,
         *,
         id: str,
@@ -1145,7 +1147,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_pq_status_async(
         self,
         *,
         id: str,
@@ -1232,7 +1234,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_sample(
         self,
         *,
         id: str,
@@ -1321,7 +1323,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_sample_async(
         self,
         *,
         id: str,
@@ -1410,7 +1412,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def create_sample(
         self,
         *,
         id: str,
@@ -1511,7 +1513,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def create_sample_async(
         self,
         *,
         id: str,
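The previously anonymous wrappers on `Destinations` now carry explicit names (`clear_pq`, `get_pq_status`, `get_sample`, `create_sample`, each with an `*_async` twin). A minimal usage sketch follows; the `client.destinations` attribute on the root SDK is an assumption (it is not shown in this diff), and only the `id` keyword parameter visible in the hunks is used.

```python
# Hedged sketch: `client` is assumed to be a configured root SDK instance and
# `client.destinations` the attribute holding this sub-SDK; neither appears in this diff.
def inspect_destination(client, dest_id: str):
    pq_status = client.destinations.get_pq_status(id=dest_id)  # persistent-queue status
    client.destinations.clear_pq(id=dest_id)                   # clear the persistent queue
    sample = client.destinations.get_sample(id=dest_id)        # fetch sample data for this Destination
    # create_sample(id=..., ...) and the *_async variants follow the same naming scheme;
    # their remaining parameters are not visible in these hunks.
    return pq_status, sample
```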
cribl_control_plane/groups_sdk.py
CHANGED
@@ -190,7 +190,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def create(
         self,
         *,
         product: models.CreateProductsGroupsByProductProduct,
@@ -361,7 +361,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def create_async(
         self,
         *,
         product: models.CreateProductsGroupsByProductProduct,
@@ -532,7 +532,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def list(
         self,
         *,
         product: models.GetProductsGroupsByProductProduct,
@@ -624,7 +624,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def list_async(
         self,
         *,
         product: models.GetProductsGroupsByProductProduct,
@@ -1408,7 +1408,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def deploy(
         self,
         *,
         id: str,
@@ -1515,7 +1515,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def deploy_async(
         self,
         *,
         id: str,
@@ -1622,7 +1622,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_team_acl(
         self,
         *,
         product: models.GetProductsGroupsACLTeamsByProductAndIDProduct,
@@ -1717,7 +1717,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_team_acl_async(
         self,
         *,
         product: models.GetProductsGroupsACLTeamsByProductAndIDProduct,
@@ -1812,7 +1812,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_acl(
         self,
         *,
         id: str,
@@ -1902,7 +1902,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_acl_async(
         self,
         *,
         id: str,
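`GroupsSDK` gets the same treatment: the generated wrappers are now `create`, `list`, `deploy`, `get_team_acl`, and `get_acl`, plus their async twins. A hedged sketch, assuming the sub-SDK is reachable as `client.groups` (the attribute name is not shown in this diff) and using only the first keyword parameter visible in each hunk:

```python
from cribl_control_plane import models


# Hedged sketch: `client.groups` is an assumed attribute name; any additional
# parameters of these methods are not visible in the hunks above.
def deploy_group(client, product: models.GetProductsGroupsByProductProduct, group_id: str):
    groups = client.groups.list(product=product)  # an unnamed wrapper in 0.0.24
    client.groups.deploy(id=group_id)             # deploy pending config for the Group
    return client.groups.get_acl(id=group_id), groups
```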
cribl_control_plane/{healthinfo.py → health.py}
CHANGED
@@ -8,7 +8,9 @@ from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_res
 from typing import Any, Mapping, Optional
 
 
-class
+class Health(BaseSDK):
+    r"""Actions related to REST server health"""
+
     def get(
         self,
         *,
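The renamed module now exposes a `Health` sub-SDK with a `get` method. A small sketch, assuming the root client surfaces it as `client.health` and that `get()` returns the `HealthStatus` model shown further down; neither assumption is confirmed by this diff.

```python
# Hedged sketch: `client.health` is an assumed attribute name on the root SDK,
# and the return type of get() is assumed to be models.HealthStatus.
def leader_is_healthy(client) -> bool:
    status = client.health.get()
    # Status is a str Enum (see models/healthstatus.py below), so comparing
    # against the raw string value works.
    return status.status == "healthy"
```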
cribl_control_plane/models/__init__.py
CHANGED
@@ -342,12 +342,7 @@ if TYPE_CHECKING:
     )
     from .hbcriblinfo import Config, ConfigTypedDict, HBCriblInfo, HBCriblInfoTypedDict
     from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
-    from .healthstatus import (
-        HealthStatus,
-        HealthStatusStatus,
-        HealthStatusTypedDict,
-        Role,
-    )
+    from .healthstatus import HealthStatus, HealthStatusTypedDict, Role, Status
     from .heartbeatmetadata import (
         HeartbeatMetadata,
         HeartbeatMetadataAws,
@@ -444,6 +439,7 @@ if TYPE_CHECKING:
         InputConfluentCloudPq,
         InputConfluentCloudPqTypedDict,
         InputConfluentCloudSASLMechanism,
+        InputConfluentCloudSchemaType,
         InputConfluentCloudTLSSettingsClientSide,
         InputConfluentCloudTLSSettingsClientSideTypedDict,
         InputConfluentCloudType,
@@ -852,6 +848,7 @@ if TYPE_CHECKING:
         InputKafkaPq,
         InputKafkaPqTypedDict,
         InputKafkaSASLMechanism,
+        InputKafkaSchemaType,
         InputKafkaTLSSettingsClientSide,
         InputKafkaTLSSettingsClientSideTypedDict,
         InputKafkaType,
@@ -1003,6 +1000,7 @@ if TYPE_CHECKING:
         InputMskMode,
         InputMskPq,
         InputMskPqTypedDict,
+        InputMskSchemaType,
         InputMskSignatureVersion,
         InputMskTLSSettingsClientSide,
         InputMskTLSSettingsClientSideTypedDict,
@@ -1806,6 +1804,7 @@ if TYPE_CHECKING:
         OutputConfluentCloudQueueFullBehavior,
         OutputConfluentCloudRecordDataFormat,
         OutputConfluentCloudSASLMechanism,
+        OutputConfluentCloudSchemaType,
         OutputConfluentCloudTLSSettingsClientSide,
         OutputConfluentCloudTLSSettingsClientSideTypedDict,
         OutputConfluentCloudType,
@@ -2309,6 +2308,7 @@ if TYPE_CHECKING:
         OutputKafkaQueueFullBehavior,
         OutputKafkaRecordDataFormat,
         OutputKafkaSASLMechanism,
+        OutputKafkaSchemaType,
         OutputKafkaTLSSettingsClientSide,
         OutputKafkaTLSSettingsClientSideTypedDict,
         OutputKafkaType,
@@ -2391,6 +2391,7 @@ if TYPE_CHECKING:
         OutputMskPqControlsTypedDict,
         OutputMskQueueFullBehavior,
         OutputMskRecordDataFormat,
+        OutputMskSchemaType,
         OutputMskSignatureVersion,
         OutputMskTLSSettingsClientSide,
         OutputMskTLSSettingsClientSideTypedDict,
@@ -2938,11 +2939,6 @@ if TYPE_CHECKING:
     )
     from .rbacresource import RbacResource
     from .resourcepolicy import ResourcePolicy, ResourcePolicyTypedDict
-    from .restartresponse import (
-        RestartResponse,
-        RestartResponseStatus,
-        RestartResponseTypedDict,
-    )
     from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
     from .routeconf import RouteConf, RouteConfTypedDict
     from .routes import (
@@ -3020,10 +3016,6 @@ if TYPE_CHECKING:
         UpdateRoutesByIDResponse,
         UpdateRoutesByIDResponseTypedDict,
     )
-    from .updateworkersrestartop import (
-        UpdateWorkersRestartResponse,
-        UpdateWorkersRestartResponseTypedDict,
-    )
     from .useraccesscontrollist import (
         UserAccessControlList,
         UserAccessControlListTypedDict,
@@ -3330,7 +3322,6 @@ __all__ = [
     "HBLeaderInfo",
     "HBLeaderInfoTypedDict",
     "HealthStatus",
-    "HealthStatusStatus",
     "HealthStatusTypedDict",
     "HeartbeatMetadata",
     "HeartbeatMetadataAws",
@@ -3428,6 +3419,7 @@ __all__ = [
     "InputConfluentCloudPq",
     "InputConfluentCloudPqTypedDict",
     "InputConfluentCloudSASLMechanism",
+    "InputConfluentCloudSchemaType",
     "InputConfluentCloudTLSSettingsClientSide",
     "InputConfluentCloudTLSSettingsClientSideTypedDict",
     "InputConfluentCloudType",
@@ -3771,6 +3763,7 @@ __all__ = [
     "InputKafkaPq",
     "InputKafkaPqTypedDict",
     "InputKafkaSASLMechanism",
+    "InputKafkaSchemaType",
     "InputKafkaTLSSettingsClientSide",
     "InputKafkaTLSSettingsClientSideTypedDict",
     "InputKafkaType",
@@ -3904,6 +3897,7 @@ __all__ = [
     "InputMskMode",
     "InputMskPq",
     "InputMskPqTypedDict",
+    "InputMskSchemaType",
     "InputMskSignatureVersion",
     "InputMskTLSSettingsClientSide",
     "InputMskTLSSettingsClientSideTypedDict",
@@ -4603,6 +4597,7 @@ __all__ = [
     "OutputConfluentCloudQueueFullBehavior",
     "OutputConfluentCloudRecordDataFormat",
     "OutputConfluentCloudSASLMechanism",
+    "OutputConfluentCloudSchemaType",
     "OutputConfluentCloudTLSSettingsClientSide",
     "OutputConfluentCloudTLSSettingsClientSideTypedDict",
     "OutputConfluentCloudType",
@@ -5038,6 +5033,7 @@ __all__ = [
     "OutputKafkaQueueFullBehavior",
     "OutputKafkaRecordDataFormat",
     "OutputKafkaSASLMechanism",
+    "OutputKafkaSchemaType",
     "OutputKafkaTLSSettingsClientSide",
     "OutputKafkaTLSSettingsClientSideTypedDict",
     "OutputKafkaType",
@@ -5112,6 +5108,7 @@ __all__ = [
     "OutputMskPqControlsTypedDict",
     "OutputMskQueueFullBehavior",
     "OutputMskRecordDataFormat",
+    "OutputMskSchemaType",
     "OutputMskSignatureVersion",
     "OutputMskTLSSettingsClientSide",
     "OutputMskTLSSettingsClientSideTypedDict",
@@ -5604,9 +5601,6 @@ __all__ = [
     "ResourcePolicyTypedDict",
     "ResourceTypeLabel",
     "ResourceTypeLabelTypedDict",
-    "RestartResponse",
-    "RestartResponseStatus",
-    "RestartResponseTypedDict",
     "Role",
     "RouteCloneConf",
     "RouteCloneConfTypedDict",
@@ -5636,6 +5630,7 @@ __all__ = [
     "ServicesTypedDict",
     "ShardIteratorStart",
     "ShardLoadBalancing",
+    "Status",
     "Subscription",
     "SubscriptionMetadatum",
     "SubscriptionMetadatumTypedDict",
@@ -5690,8 +5685,6 @@ __all__ = [
     "UpdateRoutesByIDRequestTypedDict",
     "UpdateRoutesByIDResponse",
     "UpdateRoutesByIDResponseTypedDict",
-    "UpdateWorkersRestartResponse",
-    "UpdateWorkersRestartResponseTypedDict",
     "UserAccessControlList",
     "UserAccessControlListTypedDict",
     "UsersAndGroups",
@@ -5957,9 +5950,9 @@ _dynamic_imports: dict[str, str] = {
     "HBLeaderInfo": ".hbleaderinfo",
     "HBLeaderInfoTypedDict": ".hbleaderinfo",
     "HealthStatus": ".healthstatus",
-    "HealthStatusStatus": ".healthstatus",
     "HealthStatusTypedDict": ".healthstatus",
     "Role": ".healthstatus",
+    "Status": ".healthstatus",
     "HeartbeatMetadata": ".heartbeatmetadata",
     "HeartbeatMetadataAws": ".heartbeatmetadata",
     "HeartbeatMetadataAwsTypedDict": ".heartbeatmetadata",
@@ -6048,6 +6041,7 @@ _dynamic_imports: dict[str, str] = {
     "InputConfluentCloudPq": ".inputconfluentcloud",
     "InputConfluentCloudPqTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudSASLMechanism": ".inputconfluentcloud",
+    "InputConfluentCloudSchemaType": ".inputconfluentcloud",
     "InputConfluentCloudTLSSettingsClientSide": ".inputconfluentcloud",
     "InputConfluentCloudTLSSettingsClientSideTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudType": ".inputconfluentcloud",
@@ -6416,6 +6410,7 @@ _dynamic_imports: dict[str, str] = {
     "InputKafkaPq": ".inputkafka",
     "InputKafkaPqTypedDict": ".inputkafka",
     "InputKafkaSASLMechanism": ".inputkafka",
+    "InputKafkaSchemaType": ".inputkafka",
     "InputKafkaTLSSettingsClientSide": ".inputkafka",
     "InputKafkaTLSSettingsClientSideTypedDict": ".inputkafka",
     "InputKafkaType": ".inputkafka",
@@ -6551,6 +6546,7 @@ _dynamic_imports: dict[str, str] = {
     "InputMskMode": ".inputmsk",
     "InputMskPq": ".inputmsk",
     "InputMskPqTypedDict": ".inputmsk",
+    "InputMskSchemaType": ".inputmsk",
     "InputMskSignatureVersion": ".inputmsk",
     "InputMskTLSSettingsClientSide": ".inputmsk",
     "InputMskTLSSettingsClientSideTypedDict": ".inputmsk",
@@ -7290,6 +7286,7 @@ _dynamic_imports: dict[str, str] = {
     "OutputConfluentCloudQueueFullBehavior": ".outputconfluentcloud",
     "OutputConfluentCloudRecordDataFormat": ".outputconfluentcloud",
     "OutputConfluentCloudSASLMechanism": ".outputconfluentcloud",
+    "OutputConfluentCloudSchemaType": ".outputconfluentcloud",
     "OutputConfluentCloudTLSSettingsClientSide": ".outputconfluentcloud",
     "OutputConfluentCloudTLSSettingsClientSideTypedDict": ".outputconfluentcloud",
     "OutputConfluentCloudType": ".outputconfluentcloud",
@@ -7749,6 +7746,7 @@ _dynamic_imports: dict[str, str] = {
     "OutputKafkaQueueFullBehavior": ".outputkafka",
     "OutputKafkaRecordDataFormat": ".outputkafka",
     "OutputKafkaSASLMechanism": ".outputkafka",
+    "OutputKafkaSchemaType": ".outputkafka",
     "OutputKafkaTLSSettingsClientSide": ".outputkafka",
     "OutputKafkaTLSSettingsClientSideTypedDict": ".outputkafka",
     "OutputKafkaType": ".outputkafka",
@@ -7823,6 +7821,7 @@ _dynamic_imports: dict[str, str] = {
     "OutputMskPqControlsTypedDict": ".outputmsk",
     "OutputMskQueueFullBehavior": ".outputmsk",
     "OutputMskRecordDataFormat": ".outputmsk",
+    "OutputMskSchemaType": ".outputmsk",
     "OutputMskSignatureVersion": ".outputmsk",
     "OutputMskTLSSettingsClientSide": ".outputmsk",
     "OutputMskTLSSettingsClientSideTypedDict": ".outputmsk",
@@ -8306,9 +8305,6 @@ _dynamic_imports: dict[str, str] = {
     "RbacResource": ".rbacresource",
     "ResourcePolicy": ".resourcepolicy",
     "ResourcePolicyTypedDict": ".resourcepolicy",
-    "RestartResponse": ".restartresponse",
-    "RestartResponseStatus": ".restartresponse",
-    "RestartResponseTypedDict": ".restartresponse",
     "RouteCloneConf": ".routecloneconf",
     "RouteCloneConfTypedDict": ".routecloneconf",
     "RouteConf": ".routeconf",
@@ -8367,8 +8363,6 @@ _dynamic_imports: dict[str, str] = {
     "UpdateRoutesByIDRequestTypedDict": ".updateroutesbyidop",
     "UpdateRoutesByIDResponse": ".updateroutesbyidop",
     "UpdateRoutesByIDResponseTypedDict": ".updateroutesbyidop",
-    "UpdateWorkersRestartResponse": ".updateworkersrestartop",
-    "UpdateWorkersRestartResponseTypedDict": ".updateworkersrestartop",
     "UserAccessControlList": ".useraccesscontrollist",
     "UserAccessControlListTypedDict": ".useraccesscontrollist",
 }
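Beyond the new `*SchemaType` exports and the `HealthStatusStatus` → `Status` rename, the export surface shrinks: the `RestartResponse*` and `UpdateWorkersRestart*` names disappear along with `deployments.py`, `restartresponse.py`, and `updateworkersrestartop.py`. A hedged compatibility sketch for code that needs to import across both 0.0.24 and 0.0.26:

```python
# Imports that resolved in 0.0.24 now fail, because the names were dropped from
# cribl_control_plane.models and the backing modules were deleted.
try:
    from cribl_control_plane.models import RestartResponse  # removed in 0.0.26
except ImportError:
    RestartResponse = None  # handle the restart-response type being unavailable

try:
    from cribl_control_plane.models import Status  # new name in 0.0.26
except ImportError:
    from cribl_control_plane.models import HealthStatusStatus as Status  # 0.0.24 fallback
```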
cribl_control_plane/models/healthstatus.py
CHANGED
@@ -13,20 +13,20 @@ class Role(str, Enum):
     STANDBY = "standby"
 
 
-class HealthStatusStatus(str, Enum):
+class Status(str, Enum):
     HEALTHY = "healthy"
     SHUTTING_DOWN = "shutting down"
     STANDBY = "standby"
 
 
 class HealthStatusTypedDict(TypedDict):
-    status: HealthStatusStatus
+    status: Status
     start_time: float
     role: NotRequired[Role]
 
 
 class HealthStatus(BaseModel):
-    status: HealthStatusStatus
+    status: Status
 
     start_time: Annotated[float, pydantic.Field(alias="startTime")]
 
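Only the enum's class name changes; its members are untouched. A short sketch of consuming the model with the new name, using the fields shown above (treating `role` as optional, per the TypedDict, is an assumption about the BaseModel side, which is cut off in this hunk):

```python
from cribl_control_plane.models import HealthStatus, Role, Status


def describe(health: HealthStatus) -> str:
    # Status is a str Enum, so members compare equal to their string values.
    state = "up" if health.status == Status.HEALTHY else health.status.value
    role = health.role or Role.STANDBY  # assumed optional, mirroring NotRequired[Role] above
    return f"{state} since {health.start_time} ({role.value})"
```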
cribl_control_plane/models/input.py
CHANGED
@@ -91,8 +91,8 @@ InputTypedDict = TypeAliasType(
         InputSnmpTypedDict,
         InputCriblTCPTypedDict,
         InputNetflowTypedDict,
-        InputGooglePubsubTypedDict,
         InputTcpjsonTypedDict,
+        InputGooglePubsubTypedDict,
         InputOffice365ServiceTypedDict,
         InputTCPTypedDict,
         InputWizTypedDict,
@@ -117,8 +117,8 @@ InputTypedDict = TypeAliasType(
         InputElasticTypedDict,
         InputSplunkHecTypedDict,
         InputOffice365MsgTraceTypedDict,
-        InputLokiTypedDict,
         InputPrometheusRwTypedDict,
+        InputLokiTypedDict,
         InputCrowdstrikeTypedDict,
         InputPrometheusTypedDict,
         InputEdgePrometheusTypedDict,
@@ -156,8 +156,8 @@ Input = TypeAliasType(
         InputSnmp,
         InputCriblTCP,
         InputNetflow,
-        InputGooglePubsub,
         InputTcpjson,
+        InputGooglePubsub,
         InputOffice365Service,
         InputTCP,
         InputWiz,
@@ -182,8 +182,8 @@ Input = TypeAliasType(
         InputElastic,
         InputSplunkHec,
         InputOffice365MsgTrace,
-        InputLoki,
         InputPrometheusRw,
+        InputLoki,
         InputCrowdstrike,
         InputPrometheus,
         InputEdgePrometheus,
cribl_control_plane/models/inputappscope.py
CHANGED
@@ -255,9 +255,9 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
 
 
 class InputAppscopeTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputAppscopeType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -313,11 +313,11 @@ class InputAppscopeTypedDict(TypedDict):
 
 
 class InputAppscope(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputAppscopeType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputcollection.py
CHANGED
@@ -115,7 +115,7 @@ class InputCollectionMetadatum(BaseModel):
 
 
 class InputCollectionTypedDict(TypedDict):
-    id: str
+    id: NotRequired[str]
     r"""Unique ID for this input"""
     type: NotRequired[InputCollectionType]
     disabled: NotRequired[bool]
@@ -146,7 +146,7 @@ class InputCollectionTypedDict(TypedDict):
 
 
 class InputCollection(BaseModel):
-    id: str
+    id: Optional[str] = None
     r"""Unique ID for this input"""
 
     type: Optional[InputCollectionType] = InputCollectionType.COLLECTION
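The pattern in `inputappscope.py` and `inputcollection.py` (and the other `+5 -5` input models listed above) is the same: `id` moves from a required field to `NotRequired` / `Optional[str] = None`. A hedged sketch of what that allows, assuming `InputCollectionTypedDict` and `InputCollectionType` are re-exported from `cribl_control_plane.models` like the rest of the generated types and that no other required keys exist beyond those visible here:

```python
from cribl_control_plane import models

# As of 0.0.26 the "id" key may be omitted when building a Collection Source payload;
# the default shown above (InputCollectionType.COLLECTION) still applies to "type".
conf: models.InputCollectionTypedDict = {
    "type": models.InputCollectionType.COLLECTION,
    # "id": "my-collection",  # no longer required
}
```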
cribl_control_plane/models/inputconfluentcloud.py
CHANGED
@@ -162,6 +162,13 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     ] = None
 
 
+class InputConfluentCloudSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -262,6 +269,8 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -283,6 +292,11 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -340,13 +354,13 @@ class InputConfluentCloudMetadatum(BaseModel):
 
 
 class InputConfluentCloudTypedDict(TypedDict):
+    type: InputConfluentCloudType
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputConfluentCloudType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -420,6 +434,8 @@ class InputConfluentCloudTypedDict(TypedDict):
 
 
 class InputConfluentCloud(BaseModel):
+    type: InputConfluentCloudType
+
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
 
@@ -429,8 +445,6 @@ class InputConfluentCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputConfluentCloudType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
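The Kafka-family Sources and Destinations (Confluent Cloud, Kafka, MSK, and their Output counterparts) gain a `schema_type` setting on the Schema Registry block, defaulting to Avro, while `type` becomes a required field on the model. A minimal sketch using the classes shown above; it assumes `InputConfluentCloudKafkaSchemaRegistryAuthentication` is re-exported from `cribl_control_plane.models` and that the generated models accept Python field names (not only aliases) at construction, which is typical for this SDK but not confirmed by this diff.

```python
from cribl_control_plane import models

# Hedged sketch: switch the schema registry block to JSON decoding (the default is AVRO).
registry = models.InputConfluentCloudKafkaSchemaRegistryAuthentication(
    schema_registry_url="https://schema-registry.example.com:8081",  # hypothetical URL
    schema_type=models.InputConfluentCloudSchemaType.JSON,           # new in 0.0.26
)
```

The matching `InputKafkaSchemaType`, `InputMskSchemaType`, and `Output*SchemaType` enums appear in the models/__init__.py hunks above and follow the same shape.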
cribl_control_plane/models/inputcribl.py
CHANGED
@@ -97,9 +97,9 @@ class InputCriblMetadatum(BaseModel):
 
 
 class InputCriblTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputCriblType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -121,11 +121,11 @@ class InputCriblTypedDict(TypedDict):
 
 
 class InputCribl(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputCriblType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputcriblhttp.py
CHANGED
@@ -171,11 +171,11 @@ class InputCriblHTTPMetadatum(BaseModel):
 
 
 class InputCriblHTTPTypedDict(TypedDict):
+    type: InputCriblHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputCriblHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -223,14 +223,14 @@ class InputCriblHTTPTypedDict(TypedDict):
 
 
 class InputCriblHTTP(BaseModel):
+    type: InputCriblHTTPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputCriblHTTPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None