cribl-control-plane 0.1.0a1__py3-none-any.whl → 0.1.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/models/__init__.py +50 -27
- cribl_control_plane/models/hbcriblinfo.py +19 -3
- cribl_control_plane/models/heartbeatmetadata.py +3 -0
- cribl_control_plane/models/inputconfluentcloud.py +0 -18
- cribl_control_plane/models/inputkafka.py +0 -17
- cribl_control_plane/models/inputmsk.py +0 -17
- cribl_control_plane/models/nodeprovidedinfo.py +3 -0
- cribl_control_plane/models/output.py +3 -0
- cribl_control_plane/models/outputconfluentcloud.py +0 -18
- cribl_control_plane/models/outputdatabricks.py +439 -0
- cribl_control_plane/models/outputkafka.py +0 -17
- cribl_control_plane/models/outputmsk.py +0 -17
- cribl_control_plane/models/routeconf.py +3 -4
- {cribl_control_plane-0.1.0a1.dist-info → cribl_control_plane-0.1.0b1.dist-info}/METADATA +1 -1
- {cribl_control_plane-0.1.0a1.dist-info → cribl_control_plane-0.1.0b1.dist-info}/RECORD +17 -18
- cribl_control_plane/models/appmode.py +0 -14
- cribl_control_plane/models/routecloneconf.py +0 -13
- {cribl_control_plane-0.1.0a1.dist-info → cribl_control_plane-0.1.0b1.dist-info}/WHEEL +0 -0
cribl_control_plane/_version.py
CHANGED
@@ -3,10 +3,10 @@
 import importlib.metadata
 
 __title__: str = "cribl-control-plane"
-__version__: str = "0.1.
-__openapi_doc_version__: str = "4.
+__version__: str = "0.1.0b1"
+__openapi_doc_version__: str = "4.15.0-alpha.1758872649120-b192666f"
 __gen_version__: str = "2.716.16"
-__user_agent__: str = "speakeasy-sdk/python 0.1.
+__user_agent__: str = "speakeasy-sdk/python 0.1.0b1 2.716.16 4.15.0-alpha.1758872649120-b192666f cribl-control-plane"
 
 try:
     if __package__ is not None:
cribl_control_plane/models/__init__.py
CHANGED
@@ -12,7 +12,6 @@ if TYPE_CHECKING:
         AddHecTokenRequestMetadatumTypedDict,
         AddHecTokenRequestTypedDict,
     )
-    from .appmode import AppMode
     from .authtoken import AuthToken, AuthTokenTypedDict
     from .branchinfo import BranchInfo, BranchInfoTypedDict
     from .cacheconnection import CacheConnection, CacheConnectionTypedDict

@@ -356,7 +355,13 @@ if TYPE_CHECKING:
         Renamed,
         RenamedTypedDict,
     )
-    from .hbcriblinfo import
+    from .hbcriblinfo import (
+        Config,
+        ConfigTypedDict,
+        DistMode,
+        HBCriblInfo,
+        HBCriblInfoTypedDict,
+    )
     from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
     from .healthstatus import HealthStatus, HealthStatusTypedDict, Role, Status
     from .heartbeatmetadata import (

@@ -463,7 +468,6 @@ if TYPE_CHECKING:
         InputConfluentCloudPqControlsTypedDict,
         InputConfluentCloudPqTypedDict,
         InputConfluentCloudSASLMechanism,
-        InputConfluentCloudSchemaType,
         InputConfluentCloudTLSSettingsClientSide,
         InputConfluentCloudTLSSettingsClientSideTypedDict,
         InputConfluentCloudType,

@@ -922,7 +926,6 @@ if TYPE_CHECKING:
         InputKafkaPqControlsTypedDict,
         InputKafkaPqTypedDict,
         InputKafkaSASLMechanism,
-        InputKafkaSchemaType,
         InputKafkaTLSSettingsClientSide,
         InputKafkaTLSSettingsClientSideTypedDict,
         InputKafkaType,

@@ -1090,7 +1093,6 @@ if TYPE_CHECKING:
         InputMskPqControls,
         InputMskPqControlsTypedDict,
         InputMskPqTypedDict,
-        InputMskSchemaType,
         InputMskSignatureVersion,
         InputMskTLSSettingsClientSide,
         InputMskTLSSettingsClientSideTypedDict,

@@ -1985,7 +1987,6 @@ if TYPE_CHECKING:
         OutputConfluentCloudQueueFullBehavior,
         OutputConfluentCloudRecordDataFormat,
         OutputConfluentCloudSASLMechanism,
-        OutputConfluentCloudSchemaType,
         OutputConfluentCloudTLSSettingsClientSide,
         OutputConfluentCloudTLSSettingsClientSideTypedDict,
         OutputConfluentCloudType,

@@ -2068,6 +2069,21 @@ if TYPE_CHECKING:
         OutputCrowdstrikeNextGenSiemType,
         OutputCrowdstrikeNextGenSiemTypedDict,
     )
+    from .outputdatabricks import (
+        OutputDatabricks,
+        OutputDatabricksAuthenticationMethod,
+        OutputDatabricksBackpressureBehavior,
+        OutputDatabricksCompression,
+        OutputDatabricksCompressionLevel,
+        OutputDatabricksDataFormat,
+        OutputDatabricksDataPageVersion,
+        OutputDatabricksDiskSpaceProtection,
+        OutputDatabricksKeyValueMetadatum,
+        OutputDatabricksKeyValueMetadatumTypedDict,
+        OutputDatabricksParquetVersion,
+        OutputDatabricksType,
+        OutputDatabricksTypedDict,
+    )
     from .outputdatadog import (
         DatadogSite,
         OutputDatadog,

@@ -2486,7 +2502,6 @@ if TYPE_CHECKING:
         OutputKafkaQueueFullBehavior,
         OutputKafkaRecordDataFormat,
         OutputKafkaSASLMechanism,
-        OutputKafkaSchemaType,
         OutputKafkaTLSSettingsClientSide,
         OutputKafkaTLSSettingsClientSideTypedDict,
         OutputKafkaType,

@@ -2569,7 +2584,6 @@ if TYPE_CHECKING:
         OutputMskPqControlsTypedDict,
         OutputMskQueueFullBehavior,
         OutputMskRecordDataFormat,
-        OutputMskSchemaType,
         OutputMskSignatureVersion,
         OutputMskTLSSettingsClientSide,
         OutputMskTLSSettingsClientSideTypedDict,

@@ -3128,7 +3142,6 @@ if TYPE_CHECKING:
     from .productscore import ProductsCore
     from .rbacresource import RbacResource
     from .resourcepolicy import ResourcePolicy, ResourcePolicyTypedDict
-    from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
     from .routeconf import RouteConf, RouteConfTypedDict
     from .routes import (
         Comment,

@@ -3289,7 +3302,6 @@ __all__ = [
     "AdditionalPropertyTypedDict",
     "Allow",
     "AllowTypedDict",
-    "AppMode",
     "Audit",
     "AuditTypedDict",
     "AuthToken",

@@ -3439,6 +3451,7 @@ __all__ = [
     "DiffFilesTypedDict",
     "DisksAndFileSystems",
     "DisksAndFileSystemsTypedDict",
+    "DistMode",
     "DistributedSummary",
     "DistributedSummaryGroups",
     "DistributedSummaryGroupsTypedDict",

@@ -3708,7 +3721,6 @@ __all__ = [
     "InputConfluentCloudPqControlsTypedDict",
     "InputConfluentCloudPqTypedDict",
     "InputConfluentCloudSASLMechanism",
-    "InputConfluentCloudSchemaType",
     "InputConfluentCloudTLSSettingsClientSide",
     "InputConfluentCloudTLSSettingsClientSideTypedDict",
     "InputConfluentCloudType",

@@ -4098,7 +4110,6 @@ __all__ = [
     "InputKafkaPqControlsTypedDict",
     "InputKafkaPqTypedDict",
     "InputKafkaSASLMechanism",
-    "InputKafkaSchemaType",
     "InputKafkaTLSSettingsClientSide",
     "InputKafkaTLSSettingsClientSideTypedDict",
     "InputKafkaType",

@@ -4248,7 +4259,6 @@ __all__ = [
     "InputMskPqControls",
     "InputMskPqControlsTypedDict",
     "InputMskPqTypedDict",
-    "InputMskSchemaType",
     "InputMskSignatureVersion",
     "InputMskTLSSettingsClientSide",
     "InputMskTLSSettingsClientSideTypedDict",

@@ -5046,7 +5056,6 @@ __all__ = [
     "OutputConfluentCloudQueueFullBehavior",
     "OutputConfluentCloudRecordDataFormat",
     "OutputConfluentCloudSASLMechanism",
-    "OutputConfluentCloudSchemaType",
     "OutputConfluentCloudTLSSettingsClientSide",
     "OutputConfluentCloudTLSSettingsClientSideTypedDict",
     "OutputConfluentCloudType",

@@ -5119,6 +5128,19 @@ __all__ = [
     "OutputCrowdstrikeNextGenSiemTimeoutRetrySettingsTypedDict",
     "OutputCrowdstrikeNextGenSiemType",
     "OutputCrowdstrikeNextGenSiemTypedDict",
+    "OutputDatabricks",
+    "OutputDatabricksAuthenticationMethod",
+    "OutputDatabricksBackpressureBehavior",
+    "OutputDatabricksCompression",
+    "OutputDatabricksCompressionLevel",
+    "OutputDatabricksDataFormat",
+    "OutputDatabricksDataPageVersion",
+    "OutputDatabricksDiskSpaceProtection",
+    "OutputDatabricksKeyValueMetadatum",
+    "OutputDatabricksKeyValueMetadatumTypedDict",
+    "OutputDatabricksParquetVersion",
+    "OutputDatabricksType",
+    "OutputDatabricksTypedDict",
     "OutputDatadog",
     "OutputDatadogAuthenticationMethod",
     "OutputDatadogBackpressureBehavior",

@@ -5482,7 +5504,6 @@ __all__ = [
     "OutputKafkaQueueFullBehavior",
     "OutputKafkaRecordDataFormat",
     "OutputKafkaSASLMechanism",
-    "OutputKafkaSchemaType",
     "OutputKafkaTLSSettingsClientSide",
     "OutputKafkaTLSSettingsClientSideTypedDict",
     "OutputKafkaType",

@@ -5557,7 +5578,6 @@ __all__ = [
     "OutputMskPqControlsTypedDict",
     "OutputMskQueueFullBehavior",
     "OutputMskRecordDataFormat",
-    "OutputMskSchemaType",
     "OutputMskSignatureVersion",
     "OutputMskTLSSettingsClientSide",
     "OutputMskTLSSettingsClientSideTypedDict",

@@ -6063,8 +6083,6 @@ __all__ = [
     "ResourceTypeLabel",
     "ResourceTypeLabelTypedDict",
     "Role",
-    "RouteCloneConf",
-    "RouteCloneConfTypedDict",
     "RouteConf",
     "RouteConfTypedDict",
     "Routes",

@@ -6215,7 +6233,6 @@ _dynamic_imports: dict[str, str] = {
     "AddHecTokenRequestMetadatum": ".addhectokenrequest",
     "AddHecTokenRequestMetadatumTypedDict": ".addhectokenrequest",
     "AddHecTokenRequestTypedDict": ".addhectokenrequest",
-    "AppMode": ".appmode",
     "AuthToken": ".authtoken",
     "AuthTokenTypedDict": ".authtoken",
     "BranchInfo": ".branchinfo",

@@ -6485,6 +6502,7 @@ _dynamic_imports: dict[str, str] = {
     "RenamedTypedDict": ".gitstatusresult",
     "Config": ".hbcriblinfo",
     "ConfigTypedDict": ".hbcriblinfo",
+    "DistMode": ".hbcriblinfo",
     "HBCriblInfo": ".hbcriblinfo",
     "HBCriblInfoTypedDict": ".hbcriblinfo",
     "HBLeaderInfo": ".hbleaderinfo",

@@ -6589,7 +6607,6 @@ _dynamic_imports: dict[str, str] = {
     "InputConfluentCloudPqControlsTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudPqTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudSASLMechanism": ".inputconfluentcloud",
-    "InputConfluentCloudSchemaType": ".inputconfluentcloud",
     "InputConfluentCloudTLSSettingsClientSide": ".inputconfluentcloud",
     "InputConfluentCloudTLSSettingsClientSideTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudType": ".inputconfluentcloud",

@@ -7008,7 +7025,6 @@ _dynamic_imports: dict[str, str] = {
     "InputKafkaPqControlsTypedDict": ".inputkafka",
     "InputKafkaPqTypedDict": ".inputkafka",
     "InputKafkaSASLMechanism": ".inputkafka",
-    "InputKafkaSchemaType": ".inputkafka",
     "InputKafkaTLSSettingsClientSide": ".inputkafka",
     "InputKafkaTLSSettingsClientSideTypedDict": ".inputkafka",
     "InputKafkaType": ".inputkafka",

@@ -7160,7 +7176,6 @@ _dynamic_imports: dict[str, str] = {
     "InputMskPqControls": ".inputmsk",
     "InputMskPqControlsTypedDict": ".inputmsk",
     "InputMskPqTypedDict": ".inputmsk",
-    "InputMskSchemaType": ".inputmsk",
     "InputMskSignatureVersion": ".inputmsk",
     "InputMskTLSSettingsClientSide": ".inputmsk",
     "InputMskTLSSettingsClientSideTypedDict": ".inputmsk",

@@ -7987,7 +8002,6 @@ _dynamic_imports: dict[str, str] = {
     "OutputConfluentCloudQueueFullBehavior": ".outputconfluentcloud",
     "OutputConfluentCloudRecordDataFormat": ".outputconfluentcloud",
     "OutputConfluentCloudSASLMechanism": ".outputconfluentcloud",
-    "OutputConfluentCloudSchemaType": ".outputconfluentcloud",
     "OutputConfluentCloudTLSSettingsClientSide": ".outputconfluentcloud",
     "OutputConfluentCloudTLSSettingsClientSideTypedDict": ".outputconfluentcloud",
     "OutputConfluentCloudType": ".outputconfluentcloud",

@@ -8061,6 +8075,19 @@ _dynamic_imports: dict[str, str] = {
     "OutputCrowdstrikeNextGenSiemTimeoutRetrySettingsTypedDict": ".outputcrowdstrikenextgensiem",
     "OutputCrowdstrikeNextGenSiemType": ".outputcrowdstrikenextgensiem",
     "OutputCrowdstrikeNextGenSiemTypedDict": ".outputcrowdstrikenextgensiem",
+    "OutputDatabricks": ".outputdatabricks",
+    "OutputDatabricksAuthenticationMethod": ".outputdatabricks",
+    "OutputDatabricksBackpressureBehavior": ".outputdatabricks",
+    "OutputDatabricksCompression": ".outputdatabricks",
+    "OutputDatabricksCompressionLevel": ".outputdatabricks",
+    "OutputDatabricksDataFormat": ".outputdatabricks",
+    "OutputDatabricksDataPageVersion": ".outputdatabricks",
+    "OutputDatabricksDiskSpaceProtection": ".outputdatabricks",
+    "OutputDatabricksKeyValueMetadatum": ".outputdatabricks",
+    "OutputDatabricksKeyValueMetadatumTypedDict": ".outputdatabricks",
+    "OutputDatabricksParquetVersion": ".outputdatabricks",
+    "OutputDatabricksType": ".outputdatabricks",
+    "OutputDatabricksTypedDict": ".outputdatabricks",
     "DatadogSite": ".outputdatadog",
     "OutputDatadog": ".outputdatadog",
     "OutputDatadogAuthenticationMethod": ".outputdatadog",

@@ -8444,7 +8471,6 @@ _dynamic_imports: dict[str, str] = {
     "OutputKafkaQueueFullBehavior": ".outputkafka",
     "OutputKafkaRecordDataFormat": ".outputkafka",
     "OutputKafkaSASLMechanism": ".outputkafka",
-    "OutputKafkaSchemaType": ".outputkafka",
     "OutputKafkaTLSSettingsClientSide": ".outputkafka",
     "OutputKafkaTLSSettingsClientSideTypedDict": ".outputkafka",
     "OutputKafkaType": ".outputkafka",

@@ -8519,7 +8545,6 @@ _dynamic_imports: dict[str, str] = {
     "OutputMskPqControlsTypedDict": ".outputmsk",
     "OutputMskQueueFullBehavior": ".outputmsk",
     "OutputMskRecordDataFormat": ".outputmsk",
-    "OutputMskSchemaType": ".outputmsk",
     "OutputMskSignatureVersion": ".outputmsk",
     "OutputMskTLSSettingsClientSide": ".outputmsk",
     "OutputMskTLSSettingsClientSideTypedDict": ".outputmsk",

@@ -9015,8 +9040,6 @@ _dynamic_imports: dict[str, str] = {
     "RbacResource": ".rbacresource",
     "ResourcePolicy": ".resourcepolicy",
     "ResourcePolicyTypedDict": ".resourcepolicy",
-    "RouteCloneConf": ".routecloneconf",
-    "RouteCloneConfTypedDict": ".routecloneconf",
     "RouteConf": ".routeconf",
     "RouteConfTypedDict": ".routeconf",
     "Comment": ".routes",
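For context, the generated __init__ resolves these names lazily: _dynamic_imports maps each exported symbol to its submodule, so the new Databricks and DistMode symbols become importable directly from cribl_control_plane.models once 0.1.0b1 is installed. A minimal sketch (import paths come from the diff; the printed values are illustrative):

# Assumes cribl-control-plane 0.1.0b1 is installed.
from cribl_control_plane.models import (
    DistMode,             # new open enum, resolved via _dynamic_imports -> .hbcriblinfo
    OutputDatabricks,     # new Databricks destination model -> .outputdatabricks
    OutputDatabricksType,
)

print(DistMode.WORKER.value)                   # "worker"
print(OutputDatabricksType.DATABRICKS.value)   # "databricks"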
cribl_control_plane/models/hbcriblinfo.py
CHANGED
@@ -1,11 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from .appmode import AppMode
 from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
 from .lookupversions import LookupVersions, LookupVersionsTypedDict
+from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
 import pydantic
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional

@@ -36,9 +37,19 @@ class Config(BaseModel):
     version: Optional[str] = None
 
 
+class DistMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    EDGE = "edge"
+    WORKER = "worker"
+    SINGLE = "single"
+    MASTER = "master"
+    MANAGED_EDGE = "managed-edge"
+    OUTPOST = "outpost"
+    SEARCH_SUPERVISOR = "search-supervisor"
+
+
 class HBCriblInfoTypedDict(TypedDict):
     config: ConfigTypedDict
-    dist_mode:
+    dist_mode: DistMode
     group: str
     guid: str
     start_time: float

@@ -50,6 +61,7 @@ class HBCriblInfoTypedDict(TypedDict):
     lookup_versions: NotRequired[LookupVersionsTypedDict]
     master: NotRequired[HBLeaderInfoTypedDict]
     pid: NotRequired[float]
+    socks_enabled: NotRequired[bool]
     version: NotRequired[str]
 
 
@@ -57,7 +69,7 @@ class HBCriblInfo(BaseModel):
     config: Config
 
     dist_mode: Annotated[
-        Annotated[
+        Annotated[DistMode, PlainValidator(validate_open_enum(False))],
         pydantic.Field(alias="distMode"),
     ]
 
@@ -87,4 +99,8 @@ class HBCriblInfo(BaseModel):
 
     pid: Optional[float] = None
 
+    socks_enabled: Annotated[Optional[bool], pydantic.Field(alias="socksEnabled")] = (
+        None
+    )
+
     version: Optional[str] = None
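A short sketch of what the new DistMode enum exposes. Because it is declared with utils.OpenEnumMeta and validated through validate_open_enum(False), it appears intended to behave as an "open" enum (unlisted string values tolerated rather than rejected); the print output below is illustrative only:

from cribl_control_plane.models import DistMode

# Members taken directly from the added class above.
print([m.value for m in DistMode])
# ['edge', 'worker', 'single', 'master', 'managed-edge', 'outpost', 'search-supervisor']
print(DistMode.MANAGED_EDGE.value)  # "managed-edge"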
cribl_control_plane/models/heartbeatmetadata.py
CHANGED
@@ -17,6 +17,7 @@ class HeartbeatMetadataTags(BaseModel):
 
 class HeartbeatMetadataAwsTypedDict(TypedDict):
     enabled: bool
+    instance_id: str
     region: str
     type: str
     zone: str

@@ -26,6 +27,8 @@ class HeartbeatMetadataAwsTypedDict(TypedDict):
 class HeartbeatMetadataAws(BaseModel):
     enabled: bool
 
+    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
+
     region: str
 
     type: str
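instance_id is added as a required field on the AWS heartbeat metadata, serialized as "instanceId". A hedged example of validating a payload that includes it, assuming the fields visible in the hunk (enabled, instance_id, region, type, zone) are the model's full required set and that HeartbeatMetadataAws is exported from cribl_control_plane.models like the other generated classes:

from cribl_control_plane.models import HeartbeatMetadataAws

aws = HeartbeatMetadataAws.model_validate({
    "enabled": True,
    "instanceId": "i-0123456789abcdef0",  # new required field in 0.1.0b1 (illustrative value)
    "region": "us-east-1",
    "type": "m5.large",
    "zone": "us-east-1a",
})
print(aws.instance_id)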
cribl_control_plane/models/inputconfluentcloud.py
CHANGED
@@ -187,13 +187,6 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     ] = None
 
 
-class InputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 

@@ -304,8 +297,6 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputConfluentCloudSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -327,15 +318,6 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[InputConfluentCloudSchemaType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = InputConfluentCloudSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
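The same schema_type removal repeats across all six Kafka-family models below (inputkafka, inputmsk, outputconfluentcloud, outputkafka, outputmsk). A hedged sketch of what the schema registry auth model still accepts in 0.1.0b1; only the field names and aliases shown in the hunks are assumed, and how the model treats unknown keys such as a leftover "schemaType" depends on its (unshown) pydantic config:

from cribl_control_plane.models import InputConfluentCloudKafkaSchemaRegistryAuthentication

auth = InputConfluentCloudKafkaSchemaRegistryAuthentication.model_validate({
    "disabled": False,
    "connectionTimeout": 30000,   # alias shown in the diff; 30000 ms remains the default
    # "schemaType" no longer exists on this model in 0.1.0b1
})
print(auth.connection_timeout)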
cribl_control_plane/models/inputkafka.py
CHANGED
@@ -103,13 +103,6 @@ class InputKafkaPq(BaseModel):
     ] = None
 
 
-class InputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 

@@ -220,8 +213,6 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputKafkaSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -241,14 +232,6 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[InputKafkaSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = InputKafkaSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/inputmsk.py
CHANGED
@@ -116,13 +116,6 @@ class InputMskMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
-class InputMskSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 

@@ -233,8 +226,6 @@ class InputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputMskSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -254,14 +245,6 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[InputMskSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = InputMskSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/nodeprovidedinfo.py
CHANGED
@@ -19,6 +19,7 @@ class NodeProvidedInfoTags(BaseModel):
 
 class NodeProvidedInfoAwsTypedDict(TypedDict):
     enabled: bool
+    instance_id: str
     region: str
     type: str
     zone: str

@@ -28,6 +29,8 @@ class NodeProvidedInfoAwsTypedDict(TypedDict):
 class NodeProvidedInfoAws(BaseModel):
     enabled: bool
 
+    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
+
     region: str
 
     type: str
cribl_control_plane/models/output.py
CHANGED
@@ -18,6 +18,7 @@ from .outputcrowdstrikenextgensiem import (
     OutputCrowdstrikeNextGenSiem,
     OutputCrowdstrikeNextGenSiemTypedDict,
 )
+from .outputdatabricks import OutputDatabricks, OutputDatabricksTypedDict
 from .outputdatadog import OutputDatadog, OutputDatadogTypedDict
 from .outputdataset import OutputDataset, OutputDatasetTypedDict
 from .outputdefault import OutputDefault, OutputDefaultTypedDict

@@ -137,6 +138,7 @@ OutputTypedDict = TypeAliasType(
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
+        OutputDatabricksTypedDict,
         OutputSentinelTypedDict,
         OutputInfluxdbTypedDict,
         OutputGoogleCloudStorageTypedDict,

@@ -208,6 +210,7 @@ Output = TypeAliasType(
         OutputPrometheus,
         OutputMsk,
         OutputSentinelOneAiSiem,
+        OutputDatabricks,
         OutputSentinel,
         OutputInfluxdb,
         OutputGoogleCloudStorage,
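With OutputDatabricks added to the Output and OutputTypedDict unions, a destination payload whose type is "databricks" can now be parsed into the new model. A minimal sketch using only field names from the new module (the id value is illustrative):

from cribl_control_plane.models import OutputDatabricks

out = OutputDatabricks.model_validate({"type": "databricks", "id": "my-databricks"})
print(type(out).__name__)  # OutputDatabricks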
cribl_control_plane/models/outputconfluentcloud.py
CHANGED
@@ -123,13 +123,6 @@ class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"
 
 
-class OutputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 

@@ -240,8 +233,6 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputConfluentCloudSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -267,15 +258,6 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudSchemaType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputConfluentCloudSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/outputdatabricks.py
ADDED
@@ -0,0 +1,439 @@
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

from __future__ import annotations
from cribl_control_plane import utils
from cribl_control_plane.types import BaseModel
from cribl_control_plane.utils import validate_open_enum
from enum import Enum
import pydantic
from pydantic.functional_validators import PlainValidator
from typing import List, Optional
from typing_extensions import Annotated, NotRequired, TypedDict


class OutputDatabricksType(str, Enum):
    DATABRICKS = "databricks"


class OutputDatabricksDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Format of the output data"""

    JSON = "json"
    RAW = "raw"
    PARQUET = "parquet"


class OutputDatabricksBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""How to handle events when all receivers are exerting backpressure"""

    BLOCK = "block"
    DROP = "drop"


class OutputDatabricksDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

    BLOCK = "block"
    DROP = "drop"


class OutputDatabricksAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""

    MANUAL = "manual"
    SECRET = "secret"


class OutputDatabricksCompression(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Data compression format to apply to HTTP content before it is delivered"""

    NONE = "none"
    GZIP = "gzip"


class OutputDatabricksCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Compression level to apply before moving files to final destination"""

    BEST_SPEED = "best_speed"
    NORMAL = "normal"
    BEST_COMPRESSION = "best_compression"


class OutputDatabricksParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Determines which data types are supported and how they are represented"""

    PARQUET_1_0 = "PARQUET_1_0"
    PARQUET_2_4 = "PARQUET_2_4"
    PARQUET_2_6 = "PARQUET_2_6"


class OutputDatabricksDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

    DATA_PAGE_V1 = "DATA_PAGE_V1"
    DATA_PAGE_V2 = "DATA_PAGE_V2"


class OutputDatabricksKeyValueMetadatumTypedDict(TypedDict):
    value: str
    key: NotRequired[str]


class OutputDatabricksKeyValueMetadatum(BaseModel):
    value: str

    key: Optional[str] = ""


class OutputDatabricksTypedDict(TypedDict):
    type: OutputDatabricksType
    id: NotRequired[str]
    r"""Unique ID for this output"""
    pipeline: NotRequired[str]
    r"""Pipeline to process data before sending out to this output"""
    system_fields: NotRequired[List[str]]
    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
    environment: NotRequired[str]
    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
    streamtags: NotRequired[List[str]]
    r"""Tags for filtering and grouping in @{product}"""
    dest_path: NotRequired[str]
    r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
    stage_path: NotRequired[str]
    r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
    add_id_to_stage_path: NotRequired[bool]
    r"""Add the Output ID value to staging location"""
    remove_empty_dirs: NotRequired[bool]
    r"""Remove empty staging directories after moving files"""
    partition_expr: NotRequired[str]
    r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
    format_: NotRequired[OutputDatabricksDataFormat]
    r"""Format of the output data"""
    base_file_name: NotRequired[str]
    r"""JavaScript expression to define the output filename prefix (can be constant)"""
    file_name_suffix: NotRequired[str]
    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
    max_file_size_mb: NotRequired[float]
    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
    max_file_open_time_sec: NotRequired[float]
    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
    max_file_idle_time_sec: NotRequired[float]
    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
    max_open_files: NotRequired[float]
    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
    header_line: NotRequired[str]
    r"""If set, this line will be written to the beginning of each output file"""
    write_high_water_mark: NotRequired[float]
    r"""Buffer size used to write to a file"""
    on_backpressure: NotRequired[OutputDatabricksBackpressureBehavior]
    r"""How to handle events when all receivers are exerting backpressure"""
    deadletter_enabled: NotRequired[bool]
    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
    on_disk_full_backpressure: NotRequired[OutputDatabricksDiskSpaceProtection]
    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
    unity_auth_method: NotRequired[OutputDatabricksAuthenticationMethod]
    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
    login_url: NotRequired[str]
    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
    client_id: NotRequired[str]
    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
    client_secret: NotRequired[str]
    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
    client_text_secret: NotRequired[str]
    r"""Select or create a stored secret that references your Client ID and Client Secret"""
    scope: NotRequired[str]
    r"""OAuth scope for Unity Catalog authentication"""
    token_timeout_secs: NotRequired[float]
    r"""How often the OAuth token should be refreshed"""
    default_catalog: NotRequired[str]
    r"""Name of the catalog to use for the output"""
    default_schema: NotRequired[str]
    r"""Name of the catalog schema to use for the output"""
    events_volume_name: NotRequired[str]
    r"""Name of the events volume in Databricks"""
    over_write_files: NotRequired[bool]
    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
    description: NotRequired[str]
    compress: NotRequired[OutputDatabricksCompression]
    r"""Data compression format to apply to HTTP content before it is delivered"""
    compression_level: NotRequired[OutputDatabricksCompressionLevel]
    r"""Compression level to apply before moving files to final destination"""
    automatic_schema: NotRequired[bool]
    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
    parquet_version: NotRequired[OutputDatabricksParquetVersion]
    r"""Determines which data types are supported and how they are represented"""
    parquet_data_page_version: NotRequired[OutputDatabricksDataPageVersion]
    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
    parquet_row_group_length: NotRequired[float]
    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
    parquet_page_size: NotRequired[str]
    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
    should_log_invalid_rows: NotRequired[bool]
    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
    key_value_metadata: NotRequired[List[OutputDatabricksKeyValueMetadatumTypedDict]]
    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
    enable_statistics: NotRequired[bool]
    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
    enable_write_page_index: NotRequired[bool]
    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
    enable_page_checksum: NotRequired[bool]
    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
    empty_dir_cleanup_sec: NotRequired[float]
    r"""How frequently, in seconds, to clean up empty directories"""
    deadletter_path: NotRequired[str]
    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
    max_retry_num: NotRequired[float]
    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""


class OutputDatabricks(BaseModel):
    type: OutputDatabricksType

    id: Optional[str] = None
    r"""Unique ID for this output"""

    pipeline: Optional[str] = None
    r"""Pipeline to process data before sending out to this output"""

    system_fields: Annotated[
        Optional[List[str]], pydantic.Field(alias="systemFields")
    ] = None
    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""

    environment: Optional[str] = None
    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""

    streamtags: Optional[List[str]] = None
    r"""Tags for filtering and grouping in @{product}"""

    dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
    r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""

    stage_path: Annotated[Optional[str], pydantic.Field(alias="stagePath")] = (
        "$CRIBL_HOME/state/outputs/staging"
    )
    r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""

    add_id_to_stage_path: Annotated[
        Optional[bool], pydantic.Field(alias="addIdToStagePath")
    ] = True
    r"""Add the Output ID value to staging location"""

    remove_empty_dirs: Annotated[
        Optional[bool], pydantic.Field(alias="removeEmptyDirs")
    ] = True
    r"""Remove empty staging directories after moving files"""

    partition_expr: Annotated[Optional[str], pydantic.Field(alias="partitionExpr")] = (
        "C.Time.strftime(_time ? _time : Date.now()/1000, '%Y/%m/%d')"
    )
    r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""

    format_: Annotated[
        Annotated[
            Optional[OutputDatabricksDataFormat],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="format"),
    ] = OutputDatabricksDataFormat.JSON
    r"""Format of the output data"""

    base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
        "`CriblOut`"
    )
    r"""JavaScript expression to define the output filename prefix (can be constant)"""

    file_name_suffix: Annotated[
        Optional[str], pydantic.Field(alias="fileNameSuffix")
    ] = '`.${C.env["CRIBL_WORKER_ID"]}.${__format}${__compression === "gzip" ? ".gz" : ""}`'
    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""

    max_file_size_mb: Annotated[
        Optional[float], pydantic.Field(alias="maxFileSizeMB")
    ] = 32
    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""

    max_file_open_time_sec: Annotated[
        Optional[float], pydantic.Field(alias="maxFileOpenTimeSec")
    ] = 300
    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""

    max_file_idle_time_sec: Annotated[
        Optional[float], pydantic.Field(alias="maxFileIdleTimeSec")
    ] = 30
    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""

    max_open_files: Annotated[Optional[float], pydantic.Field(alias="maxOpenFiles")] = (
        100
    )
    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""

    header_line: Annotated[Optional[str], pydantic.Field(alias="headerLine")] = ""
    r"""If set, this line will be written to the beginning of each output file"""

    write_high_water_mark: Annotated[
        Optional[float], pydantic.Field(alias="writeHighWaterMark")
    ] = 64
    r"""Buffer size used to write to a file"""

    on_backpressure: Annotated[
        Annotated[
            Optional[OutputDatabricksBackpressureBehavior],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="onBackpressure"),
    ] = OutputDatabricksBackpressureBehavior.BLOCK
    r"""How to handle events when all receivers are exerting backpressure"""

    deadletter_enabled: Annotated[
        Optional[bool], pydantic.Field(alias="deadletterEnabled")
    ] = False
    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

    on_disk_full_backpressure: Annotated[
        Annotated[
            Optional[OutputDatabricksDiskSpaceProtection],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="onDiskFullBackpressure"),
    ] = OutputDatabricksDiskSpaceProtection.BLOCK
    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

    unity_auth_method: Annotated[
        Annotated[
            Optional[OutputDatabricksAuthenticationMethod],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="unityAuthMethod"),
    ] = OutputDatabricksAuthenticationMethod.MANUAL
    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""

    login_url: Annotated[Optional[str], pydantic.Field(alias="loginUrl")] = None
    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""

    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""

    client_secret: Annotated[Optional[str], pydantic.Field(alias="clientSecret")] = None
    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""

    client_text_secret: Annotated[
        Optional[str], pydantic.Field(alias="clientTextSecret")
    ] = None
    r"""Select or create a stored secret that references your Client ID and Client Secret"""

    scope: Optional[str] = "all-apis"
    r"""OAuth scope for Unity Catalog authentication"""

    token_timeout_secs: Annotated[
        Optional[float], pydantic.Field(alias="tokenTimeoutSecs")
    ] = 3600
    r"""How often the OAuth token should be refreshed"""

    default_catalog: Annotated[
        Optional[str], pydantic.Field(alias="defaultCatalog")
    ] = "main"
    r"""Name of the catalog to use for the output"""

    default_schema: Annotated[Optional[str], pydantic.Field(alias="defaultSchema")] = (
        "external"
    )
    r"""Name of the catalog schema to use for the output"""

    events_volume_name: Annotated[
        Optional[str], pydantic.Field(alias="eventsVolumeName")
    ] = "events"
    r"""Name of the events volume in Databricks"""

    over_write_files: Annotated[
        Optional[bool], pydantic.Field(alias="overWriteFiles")
    ] = False
    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""

    description: Optional[str] = None

    compress: Annotated[
        Optional[OutputDatabricksCompression], PlainValidator(validate_open_enum(False))
    ] = OutputDatabricksCompression.GZIP
    r"""Data compression format to apply to HTTP content before it is delivered"""

    compression_level: Annotated[
        Annotated[
            Optional[OutputDatabricksCompressionLevel],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="compressionLevel"),
    ] = OutputDatabricksCompressionLevel.BEST_SPEED
    r"""Compression level to apply before moving files to final destination"""

    automatic_schema: Annotated[
        Optional[bool], pydantic.Field(alias="automaticSchema")
    ] = False
    r"""Automatically calculate the schema based on the events of each Parquet file generated"""

    parquet_version: Annotated[
        Annotated[
            Optional[OutputDatabricksParquetVersion],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="parquetVersion"),
    ] = OutputDatabricksParquetVersion.PARQUET_2_6
    r"""Determines which data types are supported and how they are represented"""

    parquet_data_page_version: Annotated[
        Annotated[
            Optional[OutputDatabricksDataPageVersion],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="parquetDataPageVersion"),
    ] = OutputDatabricksDataPageVersion.DATA_PAGE_V2
    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""

    parquet_row_group_length: Annotated[
        Optional[float], pydantic.Field(alias="parquetRowGroupLength")
    ] = 10000
    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""

    parquet_page_size: Annotated[
        Optional[str], pydantic.Field(alias="parquetPageSize")
    ] = "1MB"
    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""

    should_log_invalid_rows: Annotated[
        Optional[bool], pydantic.Field(alias="shouldLogInvalidRows")
    ] = None
    r"""Log up to 3 rows that @{product} skips due to data mismatch"""

    key_value_metadata: Annotated[
        Optional[List[OutputDatabricksKeyValueMetadatum]],
        pydantic.Field(alias="keyValueMetadata"),
    ] = None
    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """

    enable_statistics: Annotated[
        Optional[bool], pydantic.Field(alias="enableStatistics")
    ] = True
    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""

    enable_write_page_index: Annotated[
        Optional[bool], pydantic.Field(alias="enableWritePageIndex")
    ] = True
    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""

    enable_page_checksum: Annotated[
        Optional[bool], pydantic.Field(alias="enablePageChecksum")
    ] = False
    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""

    empty_dir_cleanup_sec: Annotated[
        Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
    ] = 300
    r"""How frequently, in seconds, to clean up empty directories"""

    deadletter_path: Annotated[
        Optional[str], pydantic.Field(alias="deadletterPath")
    ] = "$CRIBL_HOME/state/outputs/dead-letter"
    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""

    max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
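A hedged sketch of constructing the new Databricks destination from a camelCase payload, using only aliases and defaults taken from the module above; the URL, client ID, and output ID values are placeholders, not real credentials or a recommended configuration:

from cribl_control_plane.models import OutputDatabricks

dest = OutputDatabricks.model_validate({
    "type": "databricks",
    "id": "databricks-events",                  # illustrative output ID
    "loginUrl": "https://your-workspace.cloud.databricks.com/oauth/token",  # placeholder from the field docs
    "clientId": "my-client-id",                  # illustrative value
    "defaultCatalog": "main",
    "format": "parquet",                         # parsed into OutputDatabricksDataFormat.PARQUET
})
# Fields left unset fall back to the generated defaults, e.g. gzip compression
# and the $CRIBL_HOME/state/outputs/staging stage path.
print(dest.format_, dest.compress, dest.stage_path)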
cribl_control_plane/models/outputkafka.py
CHANGED
@@ -40,13 +40,6 @@ class OutputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"
 
 
-class OutputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 

@@ -157,8 +150,6 @@ class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputKafkaSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]

@@ -182,14 +173,6 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputKafkaSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputKafkaSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/outputmsk.py
@@ -40,13 +40,6 @@ class OutputMskCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"


-class OutputMskSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -157,8 +150,6 @@ class OutputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputMskSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -182,14 +173,6 @@ class OutputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputMskSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputMskSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
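Both the Kafka and MSK Destinations drop their SchemaType enums and the schemaType field from the schema-registry settings, so a saved configuration that still carries that property no longer maps to a field on the regenerated models. The sketch below is one way a caller might scrub the retired key from an exported config before reusing it; the kafkaSchemaRegistry nesting and the schemaRegistryUrl key name are assumptions based on the SDK's camelCase alias convention, not something this diff confirms.

    def drop_schema_type(config: dict) -> dict:
        # Remove the retired schemaType property from a saved Kafka/MSK
        # destination config; the key layout used here is assumed, not confirmed.
        registry = config.get("kafkaSchemaRegistry")
        if isinstance(registry, dict):
            registry.pop("schemaType", None)
        return config

    legacy = {
        "type": "kafka",
        "kafkaSchemaRegistry": {
            "schemaRegistryUrl": "http://localhost:8081",
            "schemaType": "avro",  # field removed in 0.1.0b1
        },
    }
    drop_schema_type(legacy)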
cribl_control_plane/models/routeconf.py
@@ -1,10 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
-from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
-from typing import List, Optional
+from typing import Dict, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


@@ -13,7 +12,7 @@ class RouteConfTypedDict(TypedDict):
     id: str
     name: str
     pipeline: str
-    clones: NotRequired[List[RouteCloneConfTypedDict]]
+    clones: NotRequired[List[Dict[str, str]]]
     context: NotRequired[str]
     description: NotRequired[str]
     disabled: NotRequired[bool]
@@ -33,7 +32,7 @@ class RouteConf(BaseModel):

     pipeline: str

-    clones: Optional[List[RouteCloneConf]] = None
+    clones: Optional[List[Dict[str, str]]] = None

     context: Optional[str] = None

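RouteConf.clones now holds plain string-to-string dictionaries instead of RouteCloneConf objects, which declared no fields of their own (the module's removal appears at the end of this diff). A minimal sketch of the updated shape follows; it assumes id, name, and pipeline are the only required route keys, as the visible hunk suggests, and the clone keys shown are illustrative only.

    from typing import Dict, List

    # Clone entries are now free-form string-to-string maps; RouteCloneConf,
    # which this replaces, declared no fields of its own.
    clones: List[Dict[str, str]] = [{"output": "backup-destination"}]  # keys illustrative only

    # Route shaped like the updated RouteConfTypedDict (required keys per the hunk above).
    route = {
        "id": "route-1",
        "name": "syslog-to-lake",
        "pipeline": "main",
        "clones": clones,
    }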
cribl_control_plane-0.1.0b1.dist-info/RECORD
@@ -4,7 +4,7 @@ cribl_control_plane/_hooks/clientcredentials.py,sha256=p1WN7LL3PHrAf4AxXrsOZF_NB
 cribl_control_plane/_hooks/registration.py,sha256=1QZB41w6If7I9dXiOSQx6dhSc6BPWrnI5Q5bMOr4iVA,624
 cribl_control_plane/_hooks/sdkhooks.py,sha256=ggXjME1_Rdv8CVCg1XHqB83eYtbxzKyhXyfQ36Yc1gA,2816
 cribl_control_plane/_hooks/types.py,sha256=Tw_C4zTZm01rW_89VDEUpvQ8KQr1WxN0Gu_-s_fYSPc,2998
-cribl_control_plane/_version.py,sha256=
+cribl_control_plane/_version.py,sha256=fEJ_XGOE2S0NaLK3-Wh2Bp8ixPeJOi1-yjcxCI4YbvE,546
 cribl_control_plane/acl.py,sha256=8lvYOKAli4PzsQhOVaCU6YCwblPMh9jQo04L0r4HJuQ,9025
 cribl_control_plane/auth_sdk.py,sha256=3sjf1VoyWwfhSyuMDQLixgWISSf03BOZwmkiT8g5Ruw,626
 cribl_control_plane/basesdk.py,sha256=y4yIXSNVXLMd0sLS2htBFdTCI3gkPQbIWd-C671kg1I,12249
@@ -27,9 +27,8 @@ cribl_control_plane/health.py,sha256=N8pX8RHkJVtLFd4nZ8ypJPrzT_JezciEVry9s9qvCRc
 cribl_control_plane/hectokens.py,sha256=0EGgGGrM83m1YmTZwkN5S4xFkHQGnw1IZe3y6uMwmLw,19151
 cribl_control_plane/httpclient.py,sha256=Eu73urOAiZQtdUIyOUnPccxCiBbWEKrXG-JrRG3SLM4,3946
 cribl_control_plane/lakedatasets.py,sha256=7WYWcjXMzliDW1j3TQlgikc_h54IUq4lsysVy_39l38,46578
-cribl_control_plane/models/__init__.py,sha256
+cribl_control_plane/models/__init__.py,sha256=IkETDpbByd70u_j2-Pz0sWPhTJneGxWwQw9HyYcOA_8,388599
 cribl_control_plane/models/addhectokenrequest.py,sha256=mzQLKrMWlwxNheqEs5SM_yrT-gyenfCWgHKhmb5oXFQ,800
-cribl_control_plane/models/appmode.py,sha256=29pjcPnHZ7AzaIScZ2TuWEsPvhK53dBH5tfxtY95ig4,368
 cribl_control_plane/models/authtoken.py,sha256=uW0aIs8j14CQzFM2ueY5GIWFulna91cigBWQ3oPlDgY,295
 cribl_control_plane/models/branchinfo.py,sha256=jCX31O5TMG9jTjqigPvvUiBwpgPpVxHtSuhYrNykXiI,291
 cribl_control_plane/models/cacheconnection.py,sha256=IaqcKQhOxuY_SYdMpD2FqBGMbraqk8msS5DzkhvjHbQ,1802
@@ -106,15 +105,15 @@ cribl_control_plane/models/gitrevertparams.py,sha256=wMVlEcrprmZHUA01vi3CC8fMMDF
 cribl_control_plane/models/gitrevertresult.py,sha256=RQ7-QhPP7zerEEF2bUhVI_IVft7tqYVOZrNLCWeB32c,1056
 cribl_control_plane/models/gitshowresult.py,sha256=XTYNDfyix6mxWGL1bzevhttxf6OMyvVVOSoS0duMh9Y,592
 cribl_control_plane/models/gitstatusresult.py,sha256=7-pEpOnb4xzQwWo3rPBRN0tbM6UdG4KSIhkiUPyU3to,1166
-cribl_control_plane/models/hbcriblinfo.py,sha256=
+cribl_control_plane/models/hbcriblinfo.py,sha256=gpr4NYt7kAoDGSjyIND4vbMOtldcyyrLQ7p-P9GUmTM,3035
 cribl_control_plane/models/hbleaderinfo.py,sha256=SU5iM_I4sqxoTOzAQsw-rpOMfXwKl1ymze9nUrw6z6U,503
 cribl_control_plane/models/healthstatus.py,sha256=oGS-ntDNekMLdbjGQtGTDsFh7gDn_Fz9KUVyLix29m8,1056
-cribl_control_plane/models/heartbeatmetadata.py,sha256=
+cribl_control_plane/models/heartbeatmetadata.py,sha256=mKMhlT2jo0zX2UQ4qFQns2ft1zWtYBHtS96BXEvfKhs,2345
 cribl_control_plane/models/input.py,sha256=Zd6wdxKwa9pdoT3GmGKnlzwhV8oqIKG2CAnjy2khTxk,7682
 cribl_control_plane/models/inputappscope.py,sha256=4DBz29S82rynEUOuHuz_-kuB_F2lhpxsfJs_ZlaNNJ0,21057
 cribl_control_plane/models/inputazureblob.py,sha256=-T9zWYCKwsy8p3BIRYewiXea92dPNdy2bFIBaL_7Cmc,15601
 cribl_control_plane/models/inputcollection.py,sha256=1iBiUz5LLFag-n7mSqupwM1GdSgXkugumESDWOSbCog,9922
-cribl_control_plane/models/inputconfluentcloud.py,sha256=
+cribl_control_plane/models/inputconfluentcloud.py,sha256=g8rLOf7hk5B8WtQ39KvM5llrnz283TLa6BOoFES1smg,29810
 cribl_control_plane/models/inputcribl.py,sha256=d5mS1RsSVTjOrMzTm2k35vk5-ehBRuYqAAFrADxEwUo,7588
 cribl_control_plane/models/inputcriblhttp.py,sha256=KKhepZlvtOHWHPMtMGFd6KP3lzXFVoPJYBiOMJhZKL0,16213
 cribl_control_plane/models/inputcribllakehttp.py,sha256=ugxBc7LoNQpbsN0HdoYDzRVmuOly571ehNx80nQKUhM,19698
@@ -134,7 +133,7 @@ cribl_control_plane/models/inputgrafana.py,sha256=21d-FCUBUC8MIGwTVCY-_d7WMlD9uk
 cribl_control_plane/models/inputhttp.py,sha256=bCbg1LHnWc46HNXM9BFlV9xwWLuSU1i08cgRR84ufEM,18927
 cribl_control_plane/models/inputhttpraw.py,sha256=oH7XZngzKH9MhyOaPJIbsd9X_HRQJfzoERJv3ktoT5s,19314
 cribl_control_plane/models/inputjournalfiles.py,sha256=A-fL2FlhahycMrR_r63BgBAT3rhxmL7GqTJswfzWrQ8,10069
-cribl_control_plane/models/inputkafka.py,sha256=
+cribl_control_plane/models/inputkafka.py,sha256=TYC02E64yVcDh79EWVz5Y9kil7H4-k28h6yEnbX1tqQ,29352
 cribl_control_plane/models/inputkinesis.py,sha256=4feXQwIBijQ_9QTeqbDVwAA8JPbI5bXoVlbREQJ1g28,16678
 cribl_control_plane/models/inputkubeevents.py,sha256=Q6DS2jXGBZ-b2jOop4x9aeZSdjqdYCoqxCLjDeWLIcc,8360
 cribl_control_plane/models/inputkubelogs.py,sha256=ZxBd4qQzpYxKHN6lHIIRwsQ2_84ZRsZnLdm-SpW9S8g,12555
@@ -142,7 +141,7 @@ cribl_control_plane/models/inputkubemetrics.py,sha256=wgbpd-7CpZ1QTNG9cvI8Zi1-6H
 cribl_control_plane/models/inputloki.py,sha256=COV9nDP9F6aF_Vqbx8WtX3Ee2dal-4rzAur5z_LfOGE,21542
 cribl_control_plane/models/inputmetrics.py,sha256=iHB5Wgwb6qolo25SwmSKjyAbUDLYt-EiUE-5p7-qvpc,13433
 cribl_control_plane/models/inputmodeldriventelemetry.py,sha256=cW1xM0EjVYpiNo0bzF4m__8Z8iWMLRJaUoZhc-E2rfw,12429
-cribl_control_plane/models/inputmsk.py,sha256=
+cribl_control_plane/models/inputmsk.py,sha256=Qq37J7nLBrDslY5MjmCUuh9GpQlrJUPi1jycY8SqkwU,32258
 cribl_control_plane/models/inputnetflow.py,sha256=YoM2Iy31iM-pFIWKyck8cRoib1ghZ2vj1m-oTT3PTec,11389
 cribl_control_plane/models/inputoffice365mgmt.py,sha256=eSN5EuvnX6nq3XNMaKpgVuv0V4MNpmOQLUXH5agb0ho,18817
 cribl_control_plane/models/inputoffice365msgtrace.py,sha256=MLpZs6BEIlA3SB3uyi-H0ceOjdCxxsNGaI5Kjv-2JP0,21326
@@ -185,22 +184,23 @@ cribl_control_plane/models/lookupversions.py,sha256=PLk5hD1WPEIoePfJbhllePawNTa1
 cribl_control_plane/models/masterworkerentry.py,sha256=KT8bTu5t20ZwhybN8yz4MtG8CQZGpqv3I1JGjVItY7Q,2481
 cribl_control_plane/models/nodeactiveupgradestatus.py,sha256=knwgNh1octWr6oY-TadH0StJmzv0cktlJ4tc5pq_ChM,279
 cribl_control_plane/models/nodefailedupgradestatus.py,sha256=EE4tSjcWyQxASftW9xJCS8K5QjpLkjCl3YDIys4r7FA,267
-cribl_control_plane/models/nodeprovidedinfo.py,sha256=
+cribl_control_plane/models/nodeprovidedinfo.py,sha256=XElCf2bdE4ahiO7MaVmbtGziKgyJZIW7T143VPCUAvE,3742
 cribl_control_plane/models/nodeskippedupgradestatus.py,sha256=EY-U3cUPwMa3H-X-hn5gdaEBmSAP3hB9gRPdiQZs5yU,294
 cribl_control_plane/models/nodeupgradestate.py,sha256=EerzMMQeFl-iHKHsJwEIxRroH6w97S7-em9YoY2-ASk,286
 cribl_control_plane/models/nodeupgradestatus.py,sha256=Ygdb7jTFOvD6M3Fjl3brliLCKbkdX3aCwkPYjTE4Dw0,1346
-cribl_control_plane/models/output.py,sha256=
+cribl_control_plane/models/output.py,sha256=iok_sCpufe4AGE1O2mFQfz7fOX56UqSpEXbj5ps3Ixs,8830
 cribl_control_plane/models/outputazureblob.py,sha256=l3N9XWrzsjgpoV8AZ59lmHXL7ZNrGek1VyO2bA13DKQ,23132
 cribl_control_plane/models/outputazuredataexplorer.py,sha256=dHMj1qBpoNwusSety4OG3KgaKKNixTaqcdKSsxDAo2Q,31977
 cribl_control_plane/models/outputazureeventhub.py,sha256=mccmtlZjwoeyr3AagtPRleOpZKB6roJvwekRjScqa8w,15710
 cribl_control_plane/models/outputazurelogs.py,sha256=EYwBDVXq5PMZPsy36dCxaIrGSNespqwqFEhe2u3QeQI,20483
 cribl_control_plane/models/outputclickhouse.py,sha256=NkvKphhga95zlbVGlD0iaVtdBiRY0wcZqR1las5DO1Y,30465
 cribl_control_plane/models/outputcloudwatch.py,sha256=iSxxunhSekwgZTFw1HKYnzy8YzXRoDacn2pEWlWjs4k,12677
-cribl_control_plane/models/outputconfluentcloud.py,sha256=
+cribl_control_plane/models/outputconfluentcloud.py,sha256=4BQCKXjJ5Bc4C6YKDAjYimRkp4ryE2_0883bqdXAVic,27449
 cribl_control_plane/models/outputcriblhttp.py,sha256=qH_DLlLfUDMTgyjLJ-EyMytmHAJC868gYofoNP1-_ec,24062
 cribl_control_plane/models/outputcribllake.py,sha256=h-J1uPLPV3teRWJIyrJLV1oWjxiWlQAiwDoV3rNX-Ks,17914
 cribl_control_plane/models/outputcribltcp.py,sha256=wpiSsXcSBFXe2ZO0muijlA58TFVgL1Fw7FTt_zodf_8,17482
 cribl_control_plane/models/outputcrowdstrikenextgensiem.py,sha256=mFJowiqrVy6peiPXOcLZTyGtxGTLLuIpoG56gw81dMY,19864
+cribl_control_plane/models/outputdatabricks.py,sha256=SsZhmfnZ-VMxa_dcuLj0_y98NASgKHl_CDQi5UzQuGs,21879
 cribl_control_plane/models/outputdatadog.py,sha256=KUFjLBY9FtfzSuS6XiWsvru_HmAgnSfeQFw_J-T3OVk,23291
 cribl_control_plane/models/outputdataset.py,sha256=-iXA-e7awghBU6Plu-774F1P2TQN2w7oZX-JexG7hS4,21606
 cribl_control_plane/models/outputdefault.py,sha256=2tjMKYSksR-0qWLd_u3PPLXL0gZiSlUdj9JTPYeYMps,1952
@@ -222,11 +222,11 @@ cribl_control_plane/models/outputgraphite.py,sha256=XMKOU6BQZ7E2H-RGFruKE2QIqD8F
 cribl_control_plane/models/outputhoneycomb.py,sha256=vjdU1Mw7NVGiRHeT6XxcfSqJuXsdYUh7Ek_gItuC1d4,17971
 cribl_control_plane/models/outputhumiohec.py,sha256=dylW7PH9J6Ya2WHEdMMo4e941QVghdtAwumud7RcSgo,19306
 cribl_control_plane/models/outputinfluxdb.py,sha256=lSok0YjCkxnJ5YjXKTRQAXBcMhJo0Lj60qz04o8NgXY,24733
-cribl_control_plane/models/outputkafka.py,sha256=
+cribl_control_plane/models/outputkafka.py,sha256=PxL7KOrDo8QEt9daYE0RBYAqzCmYE8SWA3Ma4hyg7z8,26845
 cribl_control_plane/models/outputkinesis.py,sha256=_DwZ-GtZGta2FFKWC2BxCyJFwFGsVFuhtvfQyPk4aN0,14124
 cribl_control_plane/models/outputloki.py,sha256=TV2Bir0X9M0mkMZ9LQV-jsIYrmcIQ9ucIdtCqsURXYM,23017
 cribl_control_plane/models/outputminio.py,sha256=Fx4Zpl4DjwYbPqITqRGewIUd5vehVC1h_mshgj_PU3g,23862
-cribl_control_plane/models/outputmsk.py,sha256=
+cribl_control_plane/models/outputmsk.py,sha256=2JuqEv92JwGta8YXJ1fusvcO9BBc3Xt3XmnAFgpMk3o,29735
 cribl_control_plane/models/outputnetflow.py,sha256=xOBy2Q48SfhNT2ifAQU-bPVQ5nOpUqMJ5B40SlZ3-0o,2790
 cribl_control_plane/models/outputnewrelic.py,sha256=mktht54b3JtYdQngRPaN712dmjY1wdNh8hQo03eeNrw,20527
 cribl_control_plane/models/outputnewrelicevents.py,sha256=-WrvmX4K1t5qeh7q7ZaF7V9ol5seduQNHZYLAJfZH0c,19501
@@ -266,8 +266,7 @@ cribl_control_plane/models/pipelinefunctionconf.py,sha256=X61RPaoYpa_UZWavnQiNSa
 cribl_control_plane/models/productscore.py,sha256=iR4tV3eQI39kjOmyXM3RxJTxkisfVdio0p8nfmZ7t90,271
 cribl_control_plane/models/rbacresource.py,sha256=gN2zY3kwlIC-gL_K2N4ORuyTaKuqAttzaZaVftT1qQ4,429
 cribl_control_plane/models/resourcepolicy.py,sha256=NBWadVgjY9ctVazi9xRkj2bXg-_x_DAQXowYarTu5BU,696
-cribl_control_plane/models/
-cribl_control_plane/models/routeconf.py,sha256=whFyvzWwmEqAo_0HoTFKJTZqQ2p8kdPKaZJIlh9nS58,1451
+cribl_control_plane/models/routeconf.py,sha256=5QEcL6QMsAfoofsS8OJr8LkgCekLq2P7-byTNcepuHQ,1380
 cribl_control_plane/models/routes.py,sha256=2MRVmc4zvUjQw6moQmRYss_XaoGcaauj2Jpdb3VX8pA,2022
 cribl_control_plane/models/routesroute.py,sha256=7hFUWpgVDBj0N117IFxZRGkFqJntbe4NyBakVyMKsTY,2339
 cribl_control_plane/models/runnablejob.py,sha256=hyWHdW7SypvxfnwGcpRfXRAt7HgQWEyq3rqsm4TsEWM,812
@@ -323,6 +322,6 @@ cribl_control_plane/utils/url.py,sha256=BgGPgcTA6MRK4bF8fjP2dUopN3NzEzxWMXPBVg8N
 cribl_control_plane/utils/values.py,sha256=CcaCXEa3xHhkUDROyXZocN8f0bdITftv9Y0P9lTf0YM,3517
 cribl_control_plane/versions.py,sha256=4xdTYbM84Xyjr5qkixqNpgn2q6V8aXVYXkEPDU2Ele0,1156
 cribl_control_plane/versions_configs.py,sha256=5CKcfN4SzuyFgggrx6O8H_h3GhNyKSbfdVhSkVGZKi4,7284
-cribl_control_plane-0.1.
-cribl_control_plane-0.1.
-cribl_control_plane-0.1.
+cribl_control_plane-0.1.0b1.dist-info/METADATA,sha256=xKgYEPVnci4W3V8TLkmV4KtMnf8iFAUQUc1fKI7TsyA,38885
+cribl_control_plane-0.1.0b1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+cribl_control_plane-0.1.0b1.dist-info/RECORD,,
cribl_control_plane/models/appmode.py (file removed)
@@ -1,14 +0,0 @@
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
-from __future__ import annotations
-from cribl_control_plane import utils
-from enum import Enum
-
-
-class AppMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    SINGLE = "single"
-    MASTER = "master"
-    WORKER = "worker"
-    EDGE = "edge"
-    MANAGED_EDGE = "managed-edge"
-    OUTPOST = "outpost"
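Because appmode.py is deleted outright and its re-export is gone from the models package, importing AppMode from cribl_control_plane.models now fails. If downstream code still needs the mode strings, a local stand-in can be declared from the values the removed module defined; this is a workaround sketch using a plain Enum rather than the SDK's OpenEnumMeta, and is not part of the published package.

    from enum import Enum

    # Local replacement for the removed cribl_control_plane.models.appmode.AppMode;
    # values copied from the deleted file.
    class AppMode(str, Enum):
        SINGLE = "single"
        MASTER = "master"
        WORKER = "worker"
        EDGE = "edge"
        MANAGED_EDGE = "managed-edge"
        OUTPOST = "outpost"

    assert AppMode("worker") is AppMode.WORKER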
cribl_control_plane/models/routecloneconf.py (file removed)
@@ -1,13 +0,0 @@
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
-from __future__ import annotations
-from cribl_control_plane.types import BaseModel
-from typing_extensions import TypedDict
-
-
-class RouteCloneConfTypedDict(TypedDict):
-    pass
-
-
-class RouteCloneConf(BaseModel):
-    pass

cribl_control_plane-0.1.0b1.dist-info/WHEEL
File without changes