cribl-control-plane 0.0.25__py3-none-any.whl → 0.0.26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/destinations.py +10 -8
- cribl_control_plane/errors/healthstatus_error.py +1 -1
- cribl_control_plane/groups_sdk.py +10 -10
- cribl_control_plane/{healthinfo.py → health.py} +3 -1
- cribl_control_plane/models/__init__.py +3 -27
- cribl_control_plane/models/healthstatus.py +3 -3
- cribl_control_plane/models/inputappscope.py +5 -5
- cribl_control_plane/models/inputcollection.py +2 -2
- cribl_control_plane/models/inputconfluentcloud.py +3 -3
- cribl_control_plane/models/inputcribl.py +5 -5
- cribl_control_plane/models/inputcriblhttp.py +3 -3
- cribl_control_plane/models/inputcribllakehttp.py +3 -3
- cribl_control_plane/models/inputcriblmetrics.py +5 -5
- cribl_control_plane/models/inputcribltcp.py +3 -3
- cribl_control_plane/models/inputdatadogagent.py +3 -3
- cribl_control_plane/models/inputedgeprometheus.py +3 -3
- cribl_control_plane/models/inputelastic.py +3 -3
- cribl_control_plane/models/inputeventhub.py +3 -3
- cribl_control_plane/models/inputfile.py +5 -5
- cribl_control_plane/models/inputfirehose.py +3 -3
- cribl_control_plane/models/inputgooglepubsub.py +3 -3
- cribl_control_plane/models/inputgrafana.py +6 -6
- cribl_control_plane/models/inputhttp.py +3 -3
- cribl_control_plane/models/inputhttpraw.py +3 -3
- cribl_control_plane/models/inputjournalfiles.py +3 -3
- cribl_control_plane/models/inputkafka.py +3 -3
- cribl_control_plane/models/inputkinesis.py +3 -3
- cribl_control_plane/models/inputkubeevents.py +5 -5
- cribl_control_plane/models/inputkubelogs.py +5 -5
- cribl_control_plane/models/inputkubemetrics.py +5 -5
- cribl_control_plane/models/inputloki.py +3 -3
- cribl_control_plane/models/inputmodeldriventelemetry.py +3 -3
- cribl_control_plane/models/inputmsk.py +3 -3
- cribl_control_plane/models/inputnetflow.py +3 -3
- cribl_control_plane/models/inputoffice365mgmt.py +3 -3
- cribl_control_plane/models/inputoffice365msgtrace.py +3 -3
- cribl_control_plane/models/inputoffice365service.py +3 -3
- cribl_control_plane/models/inputopentelemetry.py +3 -3
- cribl_control_plane/models/inputprometheus.py +3 -3
- cribl_control_plane/models/inputprometheusrw.py +3 -3
- cribl_control_plane/models/inputrawudp.py +3 -3
- cribl_control_plane/models/inputsnmp.py +3 -3
- cribl_control_plane/models/inputsplunk.py +3 -3
- cribl_control_plane/models/inputsplunkhec.py +3 -3
- cribl_control_plane/models/inputsplunksearch.py +3 -3
- cribl_control_plane/models/inputsqs.py +3 -3
- cribl_control_plane/models/inputsystemmetrics.py +5 -5
- cribl_control_plane/models/inputsystemstate.py +5 -5
- cribl_control_plane/models/inputtcp.py +3 -3
- cribl_control_plane/models/inputtcpjson.py +3 -3
- cribl_control_plane/models/inputwef.py +3 -3
- cribl_control_plane/models/inputwindowsmetrics.py +5 -5
- cribl_control_plane/models/inputwiz.py +3 -3
- cribl_control_plane/models/inputzscalerhec.py +3 -3
- cribl_control_plane/models/outputazureblob.py +3 -3
- cribl_control_plane/models/outputazuredataexplorer.py +3 -3
- cribl_control_plane/models/outputazureeventhub.py +3 -3
- cribl_control_plane/models/outputclickhouse.py +3 -3
- cribl_control_plane/models/outputcloudwatch.py +3 -3
- cribl_control_plane/models/outputconfluentcloud.py +3 -3
- cribl_control_plane/models/outputcriblhttp.py +5 -5
- cribl_control_plane/models/outputcribllake.py +5 -5
- cribl_control_plane/models/outputcribltcp.py +5 -5
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +3 -3
- cribl_control_plane/models/outputdatadog.py +5 -5
- cribl_control_plane/models/outputdataset.py +5 -5
- cribl_control_plane/models/outputdevnull.py +5 -5
- cribl_control_plane/models/outputdiskspool.py +5 -5
- cribl_control_plane/models/outputdls3.py +3 -3
- cribl_control_plane/models/outputdynatracehttp.py +3 -3
- cribl_control_plane/models/outputdynatraceotlp.py +3 -3
- cribl_control_plane/models/outputelasticcloud.py +3 -3
- cribl_control_plane/models/outputexabeam.py +3 -3
- cribl_control_plane/models/outputgooglecloudlogging.py +3 -3
- cribl_control_plane/models/outputgooglecloudstorage.py +3 -3
- cribl_control_plane/models/outputgrafanacloud.py +10 -10
- cribl_control_plane/models/outputgraphite.py +3 -3
- cribl_control_plane/models/outputhumiohec.py +3 -3
- cribl_control_plane/models/outputkafka.py +3 -3
- cribl_control_plane/models/outputkinesis.py +3 -3
- cribl_control_plane/models/outputminio.py +3 -3
- cribl_control_plane/models/outputmsk.py +3 -3
- cribl_control_plane/models/outputnewrelic.py +5 -5
- cribl_control_plane/models/outputnewrelicevents.py +3 -3
- cribl_control_plane/models/outputring.py +5 -5
- cribl_control_plane/models/outputs3.py +3 -3
- cribl_control_plane/models/outputsecuritylake.py +3 -3
- cribl_control_plane/models/outputsentinel.py +3 -3
- cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
- cribl_control_plane/models/outputservicenow.py +3 -3
- cribl_control_plane/models/outputsns.py +3 -3
- cribl_control_plane/models/outputsplunk.py +3 -3
- cribl_control_plane/models/outputsplunkhec.py +5 -5
- cribl_control_plane/models/outputsqs.py +3 -3
- cribl_control_plane/models/outputstatsd.py +3 -3
- cribl_control_plane/models/outputstatsdext.py +3 -3
- cribl_control_plane/models/outputsyslog.py +5 -5
- cribl_control_plane/models/outputtcpjson.py +5 -5
- cribl_control_plane/models/outputwebhook.py +5 -5
- cribl_control_plane/models/outputxsiam.py +5 -5
- cribl_control_plane/nodes.py +252 -68
- cribl_control_plane/sdk.py +8 -12
- cribl_control_plane/sources.py +2 -0
- cribl_control_plane/versioning.py +10 -10
- {cribl_control_plane-0.0.25.dist-info → cribl_control_plane-0.0.26.dist-info}/METADATA +20 -27
- {cribl_control_plane-0.0.25.dist-info → cribl_control_plane-0.0.26.dist-info}/RECORD +108 -112
- cribl_control_plane/deployments.py +0 -185
- cribl_control_plane/models/restartresponse.py +0 -26
- cribl_control_plane/models/updateworkersrestartop.py +0 -24
- cribl_control_plane/workers_sdk.py +0 -187
- {cribl_control_plane-0.0.25.dist-info → cribl_control_plane-0.0.26.dist-info}/WHEEL +0 -0
cribl_control_plane/_version.py
CHANGED

@@ -3,10 +3,10 @@
 import importlib.metadata
 
 __title__: str = "cribl-control-plane"
-__version__: str = "0.0.25"
-__openapi_doc_version__: str = "4.14.0-alpha.
+__version__: str = "0.0.26"
+__openapi_doc_version__: str = "4.14.0-alpha.1755082027273-8cf9b57a"
 __gen_version__: str = "2.660.0"
-__user_agent__: str = "speakeasy-sdk/python 0.0.
+__user_agent__: str = "speakeasy-sdk/python 0.0.26 2.660.0 4.14.0-alpha.1755082027273-8cf9b57a cribl-control-plane"
 
 try:
     if __package__ is not None:
cribl_control_plane/destinations.py
CHANGED

@@ -10,6 +10,8 @@ from typing import Any, List, Mapping, Optional, Union, cast
 
 
 class Destinations(BaseSDK):
+    r"""Actions related to Destinations"""
+
     def list(
         self,
         *,
@@ -884,7 +886,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def clear_pq(
         self,
         *,
         id: str,
@@ -971,7 +973,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def clear_pq_async(
         self,
         *,
         id: str,
@@ -1058,7 +1060,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_pq_status(
         self,
         *,
         id: str,
@@ -1145,7 +1147,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_pq_status_async(
         self,
         *,
         id: str,
@@ -1232,7 +1234,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_sample(
         self,
         *,
         id: str,
@@ -1321,7 +1323,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_sample_async(
         self,
         *,
         id: str,
@@ -1410,7 +1412,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def create_sample(
         self,
         *,
         id: str,
@@ -1511,7 +1513,7 @@ class Destinations(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def create_sample_async(
         self,
         *,
         id: str,
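The Destinations hunks above name the persistent-queue and sample operations explicitly (clear_pq, get_pq_status, get_sample, create_sample, plus their *_async variants). Below is a minimal usage sketch, not taken from the package docs: the root client class name, its constructor arguments, and the `destinations` attribute are assumptions; the method names and their keyword-only `id` parameter come from the hunks.

# Hypothetical sketch of the renamed Destinations methods in 0.0.26.
from cribl_control_plane import CriblControlPlane  # root SDK class name assumed

client = CriblControlPlane(server_url="https://leader.example.com/api/v1")  # constructor args assumed

client.destinations.clear_pq(id="my-destination")                   # clear the Destination's persistent queue
pq_status = client.destinations.get_pq_status(id="my-destination")  # fetch persistent-queue status
sample = client.destinations.get_sample(id="my-destination")        # fetch sample data for the Destination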
cribl_control_plane/groups_sdk.py
CHANGED

@@ -190,7 +190,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def create(
         self,
         *,
         product: models.CreateProductsGroupsByProductProduct,
@@ -361,7 +361,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def create_async(
         self,
         *,
         product: models.CreateProductsGroupsByProductProduct,
@@ -532,7 +532,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def list(
         self,
         *,
         product: models.GetProductsGroupsByProductProduct,
@@ -624,7 +624,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def list_async(
         self,
         *,
         product: models.GetProductsGroupsByProductProduct,
@@ -1408,7 +1408,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def deploy(
         self,
         *,
         id: str,
@@ -1515,7 +1515,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def deploy_async(
         self,
         *,
         id: str,
@@ -1622,7 +1622,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_team_acl(
         self,
         *,
         product: models.GetProductsGroupsACLTeamsByProductAndIDProduct,
@@ -1717,7 +1717,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_team_acl_async(
         self,
         *,
         product: models.GetProductsGroupsACLTeamsByProductAndIDProduct,
@@ -1812,7 +1812,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    def
+    def get_acl(
         self,
         *,
         id: str,
@@ -1902,7 +1902,7 @@ class GroupsSDK(BaseSDK):
 
         raise errors.APIError("Unexpected response received", http_res)
 
-    async def
+    async def get_acl_async(
         self,
         *,
         id: str,
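GroupsSDK gets the same treatment: the previously unnamed methods become create, list, deploy, get_team_acl, and get_acl (with *_async twins). A hedged sketch follows; the `groups` attribute and the enum member names are assumptions, while the keyword-only parameters (product, id) are taken from the hunks.

# Hypothetical sketch; attribute and enum member names are assumptions.
from cribl_control_plane import CriblControlPlane, models  # class name assumed

client = CriblControlPlane(server_url="https://leader.example.com/api/v1")  # constructor args assumed

group = client.groups.create(product=models.CreateProductsGroupsByProductProduct.STREAM)  # member name assumed
groups = client.groups.list(product=models.GetProductsGroupsByProductProduct.STREAM)      # member name assumed
client.groups.deploy(id="default")
acl = client.groups.get_acl(id="default")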
cribl_control_plane/{healthinfo.py → health.py}
CHANGED

@@ -8,7 +8,9 @@ from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_res
 from typing import Any, Mapping, Optional
 
 
-class
+class Health(BaseSDK):
+    r"""Actions related to REST server health"""
+
     def get(
         self,
         *,
cribl_control_plane/models/__init__.py
CHANGED

@@ -342,12 +342,7 @@ if TYPE_CHECKING:
     )
     from .hbcriblinfo import Config, ConfigTypedDict, HBCriblInfo, HBCriblInfoTypedDict
     from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
-    from .healthstatus import (
-        HealthStatus,
-        HealthStatusStatus,
-        HealthStatusTypedDict,
-        Role,
-    )
+    from .healthstatus import HealthStatus, HealthStatusTypedDict, Role, Status
     from .heartbeatmetadata import (
         HeartbeatMetadata,
         HeartbeatMetadataAws,
@@ -2944,11 +2939,6 @@ if TYPE_CHECKING:
     )
     from .rbacresource import RbacResource
     from .resourcepolicy import ResourcePolicy, ResourcePolicyTypedDict
-    from .restartresponse import (
-        RestartResponse,
-        RestartResponseStatus,
-        RestartResponseTypedDict,
-    )
     from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
     from .routeconf import RouteConf, RouteConfTypedDict
     from .routes import (
@@ -3026,10 +3016,6 @@ if TYPE_CHECKING:
         UpdateRoutesByIDResponse,
         UpdateRoutesByIDResponseTypedDict,
     )
-    from .updateworkersrestartop import (
-        UpdateWorkersRestartResponse,
-        UpdateWorkersRestartResponseTypedDict,
-    )
     from .useraccesscontrollist import (
         UserAccessControlList,
         UserAccessControlListTypedDict,
@@ -3336,7 +3322,6 @@ __all__ = [
     "HBLeaderInfo",
     "HBLeaderInfoTypedDict",
     "HealthStatus",
-    "HealthStatusStatus",
     "HealthStatusTypedDict",
     "HeartbeatMetadata",
     "HeartbeatMetadataAws",
@@ -5616,9 +5601,6 @@ __all__ = [
     "ResourcePolicyTypedDict",
     "ResourceTypeLabel",
     "ResourceTypeLabelTypedDict",
-    "RestartResponse",
-    "RestartResponseStatus",
-    "RestartResponseTypedDict",
     "Role",
     "RouteCloneConf",
     "RouteCloneConfTypedDict",
@@ -5648,6 +5630,7 @@ __all__ = [
     "ServicesTypedDict",
     "ShardIteratorStart",
     "ShardLoadBalancing",
+    "Status",
     "Subscription",
     "SubscriptionMetadatum",
     "SubscriptionMetadatumTypedDict",
@@ -5702,8 +5685,6 @@ __all__ = [
     "UpdateRoutesByIDRequestTypedDict",
     "UpdateRoutesByIDResponse",
     "UpdateRoutesByIDResponseTypedDict",
-    "UpdateWorkersRestartResponse",
-    "UpdateWorkersRestartResponseTypedDict",
     "UserAccessControlList",
     "UserAccessControlListTypedDict",
     "UsersAndGroups",
@@ -5969,9 +5950,9 @@ _dynamic_imports: dict[str, str] = {
     "HBLeaderInfo": ".hbleaderinfo",
     "HBLeaderInfoTypedDict": ".hbleaderinfo",
     "HealthStatus": ".healthstatus",
-    "HealthStatusStatus": ".healthstatus",
     "HealthStatusTypedDict": ".healthstatus",
     "Role": ".healthstatus",
+    "Status": ".healthstatus",
     "HeartbeatMetadata": ".heartbeatmetadata",
     "HeartbeatMetadataAws": ".heartbeatmetadata",
     "HeartbeatMetadataAwsTypedDict": ".heartbeatmetadata",
@@ -8324,9 +8305,6 @@ _dynamic_imports: dict[str, str] = {
     "RbacResource": ".rbacresource",
     "ResourcePolicy": ".resourcepolicy",
     "ResourcePolicyTypedDict": ".resourcepolicy",
-    "RestartResponse": ".restartresponse",
-    "RestartResponseStatus": ".restartresponse",
-    "RestartResponseTypedDict": ".restartresponse",
     "RouteCloneConf": ".routecloneconf",
     "RouteCloneConfTypedDict": ".routecloneconf",
     "RouteConf": ".routeconf",
@@ -8385,8 +8363,6 @@ _dynamic_imports: dict[str, str] = {
     "UpdateRoutesByIDRequestTypedDict": ".updateroutesbyidop",
     "UpdateRoutesByIDResponse": ".updateroutesbyidop",
     "UpdateRoutesByIDResponseTypedDict": ".updateroutesbyidop",
-    "UpdateWorkersRestartResponse": ".updateworkersrestartop",
-    "UpdateWorkersRestartResponseTypedDict": ".updateworkersrestartop",
     "UserAccessControlList": ".useraccesscontrollist",
     "UserAccessControlListTypedDict": ".useraccesscontrollist",
 }
cribl_control_plane/models/healthstatus.py
CHANGED

@@ -13,20 +13,20 @@ class Role(str, Enum):
     STANDBY = "standby"
 
 
-class HealthStatusStatus(str, Enum):
+class Status(str, Enum):
     HEALTHY = "healthy"
     SHUTTING_DOWN = "shutting down"
     STANDBY = "standby"
 
 
 class HealthStatusTypedDict(TypedDict):
-    status: HealthStatusStatus
+    status: Status
     start_time: float
     role: NotRequired[Role]
 
 
 class HealthStatus(BaseModel):
-    status: HealthStatusStatus
+    status: Status
 
     start_time: Annotated[float, pydantic.Field(alias="startTime")]
 
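The enum previously exported as HealthStatusStatus is now simply Status, and HealthStatus references it. A small sketch of the renamed types follows, assuming the generated BaseModel allows population by Python field name (start_time) rather than only by its startTime alias; the import path matches the models/__init__.py hunks above.

# Sketch of the renamed health-status types from models/healthstatus.py.
from cribl_control_plane.models import HealthStatus, Role, Status

hs = HealthStatus(status=Status.HEALTHY, start_time=1755082027.0, role=Role.STANDBY)
print(hs.status)  # Status.HEALTHY
print(hs.role)    # Role.STANDBY (optional field)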
cribl_control_plane/models/inputappscope.py
CHANGED

@@ -255,9 +255,9 @@ class InputAppscopeTLSSettingsServerSide(BaseModel):
 
 
 class InputAppscopeTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputAppscopeType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -313,11 +313,11 @@ class InputAppscopeTypedDict(TypedDict):
 
 
 class InputAppscope(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputAppscopeType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
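Across the input models, id moves from required to optional: NotRequired[str] in the TypedDicts and Optional[str] = None on the pydantic models, while type stays (or becomes) required. A hedged construction sketch for InputAppscope; the enum member name is assumed, and it presumes the model has no other required fields beyond those visible in the hunks.

# Sketch only: InputAppscopeType member name assumed; other required fields may exist.
from cribl_control_plane.models import InputAppscope, InputAppscopeType

src = InputAppscope(type=InputAppscopeType.APPSCOPE)  # member name assumed
assert src.id is None          # id now defaults to None instead of being required
assert src.disabled is False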
cribl_control_plane/models/inputcollection.py
CHANGED

@@ -115,7 +115,7 @@ class InputCollectionMetadatum(BaseModel):
 
 
 class InputCollectionTypedDict(TypedDict):
-    id: str
+    id: NotRequired[str]
     r"""Unique ID for this input"""
     type: NotRequired[InputCollectionType]
     disabled: NotRequired[bool]
@@ -146,7 +146,7 @@ class InputCollectionTypedDict(TypedDict):
 
 
 class InputCollection(BaseModel):
-    id: str
+    id: Optional[str] = None
     r"""Unique ID for this input"""
 
     type: Optional[InputCollectionType] = InputCollectionType.COLLECTION
cribl_control_plane/models/inputconfluentcloud.py
CHANGED

@@ -354,13 +354,13 @@ class InputConfluentCloudMetadatum(BaseModel):
 
 
 class InputConfluentCloudTypedDict(TypedDict):
+    type: InputConfluentCloudType
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputConfluentCloudType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -434,6 +434,8 @@ class InputConfluentCloudTypedDict(TypedDict):
 
 
 class InputConfluentCloud(BaseModel):
+    type: InputConfluentCloudType
+
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
 
@@ -443,8 +445,6 @@ class InputConfluentCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputConfluentCloudType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
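For the Kafka-style inputs the change runs the other way on type: it becomes a required field, declared ahead of brokers and topics, instead of an optional one defaulting to None. A hedged sketch follows; the enum member name is an assumption and other required fields may exist outside the hunks shown.

# Sketch only: required fields shown match the hunks above; enum member name assumed.
from cribl_control_plane.models import InputConfluentCloud, InputConfluentCloudType

source = InputConfluentCloud(
    type=InputConfluentCloudType.CONFLUENT_CLOUD,  # member name assumed
    brokers=["yourAccount.confluent.cloud:9092"],
    topics=["my-topic"],                           # one topic per Source, per the docstring
)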
cribl_control_plane/models/inputcribl.py
CHANGED

@@ -97,9 +97,9 @@ class InputCriblMetadatum(BaseModel):
 
 
 class InputCriblTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputCriblType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -121,11 +121,11 @@ class InputCriblTypedDict(TypedDict):
 
 
 class InputCribl(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputCriblType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputcriblhttp.py
CHANGED

@@ -171,11 +171,11 @@ class InputCriblHTTPMetadatum(BaseModel):
 
 
 class InputCriblHTTPTypedDict(TypedDict):
+    type: InputCriblHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputCriblHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -223,14 +223,14 @@ class InputCriblHTTPTypedDict(TypedDict):
 
 
 class InputCriblHTTP(BaseModel):
+    type: InputCriblHTTPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputCriblHTTPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputcribllakehttp.py
CHANGED

@@ -175,11 +175,11 @@ class InputCriblLakeHTTPMetadatum(BaseModel):
 
 
 class InputCriblLakeHTTPTypedDict(TypedDict):
+    type: InputCriblLakeHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputCriblLakeHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -227,14 +227,14 @@ class InputCriblLakeHTTPTypedDict(TypedDict):
 
 
 class InputCriblLakeHTTP(BaseModel):
+    type: InputCriblLakeHTTPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputCriblLakeHTTPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputcriblmetrics.py
CHANGED

@@ -97,9 +97,9 @@ class InputCriblmetricsMetadatum(BaseModel):
 
 
 class InputCriblmetricsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputCriblmetricsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -124,11 +124,11 @@ class InputCriblmetricsTypedDict(TypedDict):
 
 
 class InputCriblmetrics(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputCriblmetricsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputcribltcp.py
CHANGED

@@ -171,11 +171,11 @@ class InputCriblTCPMetadatum(BaseModel):
 
 
 class InputCriblTCPTypedDict(TypedDict):
+    type: InputCriblTCPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputCriblTCPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -211,14 +211,14 @@ class InputCriblTCPTypedDict(TypedDict):
 
 
 class InputCriblTCP(BaseModel):
+    type: InputCriblTCPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputCriblTCPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputdatadogagent.py
CHANGED

@@ -188,11 +188,11 @@ class InputDatadogAgentProxyMode(BaseModel):
 
 
 class InputDatadogAgentTypedDict(TypedDict):
+    type: InputDatadogAgentType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputDatadogAgentType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -241,14 +241,14 @@ class InputDatadogAgentTypedDict(TypedDict):
 
 
 class InputDatadogAgent(BaseModel):
+    type: InputDatadogAgentType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputDatadogAgentType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputedgeprometheus.py
CHANGED

@@ -248,9 +248,9 @@ class PodFilter(BaseModel):
 
 
 class InputEdgePrometheusTypedDict(TypedDict):
+    type: InputEdgePrometheusType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputEdgePrometheusType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -338,11 +338,11 @@ class InputEdgePrometheusTypedDict(TypedDict):
 
 
 class InputEdgePrometheus(BaseModel):
+    type: InputEdgePrometheusType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputEdgePrometheusType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputelastic.py
CHANGED

@@ -246,11 +246,11 @@ class InputElasticProxyMode(BaseModel):
 
 
 class InputElasticTypedDict(TypedDict):
+    type: InputElasticType
     port: float
     r"""Port to listen on"""
    id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputElasticType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -312,14 +312,14 @@ class InputElasticTypedDict(TypedDict):
 
 
 class InputElastic(BaseModel):
+    type: InputElasticType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputElasticType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputeventhub.py
CHANGED

@@ -132,13 +132,13 @@ class InputEventhubMetadatum(BaseModel):
 
 
 class InputEventhubTypedDict(TypedDict):
+    type: InputEventhubType
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to (example: yourdomain.servicebus.windows.net:9093). The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
     topics: List[str]
     r"""The name of the Event Hub (Kafka topic) to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Event Hubs Source to only a single topic."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputEventhubType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -210,6 +210,8 @@ class InputEventhubTypedDict(TypedDict):
 
 
 class InputEventhub(BaseModel):
+    type: InputEventhubType
+
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to (example: yourdomain.servicebus.windows.net:9093). The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
 
@@ -219,8 +221,6 @@ class InputEventhub(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputEventhubType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None