cribl-control-plane 0.1.0a1-py3-none-any.whl → 0.1.0b2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of cribl-control-plane has been marked as a potentially problematic release.

@@ -3,10 +3,10 @@
 import importlib.metadata

 __title__: str = "cribl-control-plane"
- __version__: str = "0.1.0a1"
- __openapi_doc_version__: str = "4.14.1-alpha.1758626754698-b3680673"
+ __version__: str = "0.1.0b2"
+ __openapi_doc_version__: str = "4.15.0-alpha.1759327501989-3e9845e9"
 __gen_version__: str = "2.716.16"
- __user_agent__: str = "speakeasy-sdk/python 0.1.0a1 2.716.16 4.14.1-alpha.1758626754698-b3680673 cribl-control-plane"
+ __user_agent__: str = "speakeasy-sdk/python 0.1.0b2 2.716.16 4.15.0-alpha.1759327501989-3e9845e9 cribl-control-plane"

 try:
 if __package__ is not None:
@@ -224,7 +224,7 @@ class GroupsSDK(BaseSDK):
 config_version: Optional[str] = None,
 deploying_worker_count: Optional[float] = None,
 description: Optional[str] = None,
- estimated_ingest_rate: Optional[float] = None,
+ estimated_ingest_rate: Optional[models.EstimatedIngestRate] = None,
 git: Optional[Union[models.Git, models.GitTypedDict]] = None,
 incompatible_worker_count: Optional[float] = None,
 inherits: Optional[str] = None,
@@ -395,7 +395,7 @@ class GroupsSDK(BaseSDK):
 config_version: Optional[str] = None,
 deploying_worker_count: Optional[float] = None,
 description: Optional[str] = None,
- estimated_ingest_rate: Optional[float] = None,
+ estimated_ingest_rate: Optional[models.EstimatedIngestRate] = None,
 git: Optional[Union[models.Git, models.GitTypedDict]] = None,
 incompatible_worker_count: Optional[float] = None,
 inherits: Optional[str] = None,
@@ -757,7 +757,7 @@ class GroupsSDK(BaseSDK):
 config_version: Optional[str] = None,
 deploying_worker_count: Optional[float] = None,
 description: Optional[str] = None,
- estimated_ingest_rate: Optional[float] = None,
+ estimated_ingest_rate: Optional[models.EstimatedIngestRate] = None,
 git: Optional[Union[models.Git, models.GitTypedDict]] = None,
 incompatible_worker_count: Optional[float] = None,
 inherits: Optional[str] = None,
@@ -931,7 +931,7 @@ class GroupsSDK(BaseSDK):
 config_version: Optional[str] = None,
 deploying_worker_count: Optional[float] = None,
 description: Optional[str] = None,
- estimated_ingest_rate: Optional[float] = None,
+ estimated_ingest_rate: Optional[models.EstimatedIngestRate] = None,
 git: Optional[Union[models.Git, models.GitTypedDict]] = None,
 incompatible_worker_count: Optional[float] = None,
 inherits: Optional[str] = None,
@@ -12,7 +12,6 @@ if TYPE_CHECKING:
 AddHecTokenRequestMetadatumTypedDict,
 AddHecTokenRequestTypedDict,
 )
- from .appmode import AppMode
 from .authtoken import AuthToken, AuthTokenTypedDict
 from .branchinfo import BranchInfo, BranchInfoTypedDict
 from .cacheconnection import CacheConnection, CacheConnectionTypedDict
@@ -23,6 +22,7 @@ if TYPE_CHECKING:
 ConfigGroup,
 ConfigGroupType,
 ConfigGroupTypedDict,
+ EstimatedIngestRate,
 Git,
 GitTypedDict,
 )
@@ -356,7 +356,13 @@ if TYPE_CHECKING:
 Renamed,
 RenamedTypedDict,
 )
- from .hbcriblinfo import Config, ConfigTypedDict, HBCriblInfo, HBCriblInfoTypedDict
+ from .hbcriblinfo import (
+ Config,
+ ConfigTypedDict,
+ DistMode,
+ HBCriblInfo,
+ HBCriblInfoTypedDict,
+ )
 from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
 from .healthstatus import HealthStatus, HealthStatusTypedDict, Role, Status
 from .heartbeatmetadata import (
@@ -463,7 +469,6 @@ if TYPE_CHECKING:
 InputConfluentCloudPqControlsTypedDict,
 InputConfluentCloudPqTypedDict,
 InputConfluentCloudSASLMechanism,
- InputConfluentCloudSchemaType,
 InputConfluentCloudTLSSettingsClientSide,
 InputConfluentCloudTLSSettingsClientSideTypedDict,
 InputConfluentCloudType,
@@ -922,7 +927,6 @@ if TYPE_CHECKING:
 InputKafkaPqControlsTypedDict,
 InputKafkaPqTypedDict,
 InputKafkaSASLMechanism,
- InputKafkaSchemaType,
 InputKafkaTLSSettingsClientSide,
 InputKafkaTLSSettingsClientSideTypedDict,
 InputKafkaType,
@@ -1090,7 +1094,6 @@ if TYPE_CHECKING:
 InputMskPqControls,
 InputMskPqControlsTypedDict,
 InputMskPqTypedDict,
- InputMskSchemaType,
 InputMskSignatureVersion,
 InputMskTLSSettingsClientSide,
 InputMskTLSSettingsClientSideTypedDict,
@@ -1985,7 +1988,6 @@ if TYPE_CHECKING:
 OutputConfluentCloudQueueFullBehavior,
 OutputConfluentCloudRecordDataFormat,
 OutputConfluentCloudSASLMechanism,
- OutputConfluentCloudSchemaType,
 OutputConfluentCloudTLSSettingsClientSide,
 OutputConfluentCloudTLSSettingsClientSideTypedDict,
 OutputConfluentCloudType,
@@ -2068,6 +2070,15 @@ if TYPE_CHECKING:
 OutputCrowdstrikeNextGenSiemType,
 OutputCrowdstrikeNextGenSiemTypedDict,
 )
+ from .outputdatabricks import (
+ OutputDatabricks,
+ OutputDatabricksAuthenticationMethod,
+ OutputDatabricksBackpressureBehavior,
+ OutputDatabricksDataFormat,
+ OutputDatabricksDiskSpaceProtection,
+ OutputDatabricksType,
+ OutputDatabricksTypedDict,
+ )
 from .outputdatadog import (
 DatadogSite,
 OutputDatadog,
@@ -2486,7 +2497,6 @@
 OutputKafkaQueueFullBehavior,
 OutputKafkaRecordDataFormat,
 OutputKafkaSASLMechanism,
- OutputKafkaSchemaType,
 OutputKafkaTLSSettingsClientSide,
 OutputKafkaTLSSettingsClientSideTypedDict,
 OutputKafkaType,
@@ -2569,7 +2579,6 @@
 OutputMskPqControlsTypedDict,
 OutputMskQueueFullBehavior,
 OutputMskRecordDataFormat,
- OutputMskSchemaType,
 OutputMskSignatureVersion,
 OutputMskTLSSettingsClientSide,
 OutputMskTLSSettingsClientSideTypedDict,
@@ -3128,7 +3137,6 @@
 from .productscore import ProductsCore
 from .rbacresource import RbacResource
 from .resourcepolicy import ResourcePolicy, ResourcePolicyTypedDict
- from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
 from .routeconf import RouteConf, RouteConfTypedDict
 from .routes import (
 Comment,
@@ -3289,7 +3297,6 @@ __all__ = [
 "AdditionalPropertyTypedDict",
 "Allow",
 "AllowTypedDict",
- "AppMode",
 "Audit",
 "AuditTypedDict",
 "AuthToken",
@@ -3439,6 +3446,7 @@ __all__ = [
 "DiffFilesTypedDict",
 "DisksAndFileSystems",
 "DisksAndFileSystemsTypedDict",
+ "DistMode",
 "DistributedSummary",
 "DistributedSummaryGroups",
 "DistributedSummaryGroupsTypedDict",
@@ -3457,6 +3465,7 @@ __all__ = [
 "EndpointType",
 "Error",
 "ErrorTypedDict",
+ "EstimatedIngestRate",
 "EventFormat",
 "Executor",
 "ExecutorSpecificSettings",
@@ -3708,7 +3717,6 @@ __all__ = [
 "InputConfluentCloudPqControlsTypedDict",
 "InputConfluentCloudPqTypedDict",
 "InputConfluentCloudSASLMechanism",
- "InputConfluentCloudSchemaType",
 "InputConfluentCloudTLSSettingsClientSide",
 "InputConfluentCloudTLSSettingsClientSideTypedDict",
 "InputConfluentCloudType",
@@ -4098,7 +4106,6 @@ __all__ = [
 "InputKafkaPqControlsTypedDict",
 "InputKafkaPqTypedDict",
 "InputKafkaSASLMechanism",
- "InputKafkaSchemaType",
 "InputKafkaTLSSettingsClientSide",
 "InputKafkaTLSSettingsClientSideTypedDict",
 "InputKafkaType",
@@ -4248,7 +4255,6 @@ __all__ = [
 "InputMskPqControls",
 "InputMskPqControlsTypedDict",
 "InputMskPqTypedDict",
- "InputMskSchemaType",
 "InputMskSignatureVersion",
 "InputMskTLSSettingsClientSide",
 "InputMskTLSSettingsClientSideTypedDict",
@@ -5046,7 +5052,6 @@ __all__ = [
 "OutputConfluentCloudQueueFullBehavior",
 "OutputConfluentCloudRecordDataFormat",
 "OutputConfluentCloudSASLMechanism",
- "OutputConfluentCloudSchemaType",
 "OutputConfluentCloudTLSSettingsClientSide",
 "OutputConfluentCloudTLSSettingsClientSideTypedDict",
 "OutputConfluentCloudType",
@@ -5119,6 +5124,13 @@ __all__ = [
 "OutputCrowdstrikeNextGenSiemTimeoutRetrySettingsTypedDict",
 "OutputCrowdstrikeNextGenSiemType",
 "OutputCrowdstrikeNextGenSiemTypedDict",
+ "OutputDatabricks",
+ "OutputDatabricksAuthenticationMethod",
+ "OutputDatabricksBackpressureBehavior",
+ "OutputDatabricksDataFormat",
+ "OutputDatabricksDiskSpaceProtection",
+ "OutputDatabricksType",
+ "OutputDatabricksTypedDict",
 "OutputDatadog",
 "OutputDatadogAuthenticationMethod",
 "OutputDatadogBackpressureBehavior",
@@ -5482,7 +5494,6 @@ __all__ = [
 "OutputKafkaQueueFullBehavior",
 "OutputKafkaRecordDataFormat",
 "OutputKafkaSASLMechanism",
- "OutputKafkaSchemaType",
 "OutputKafkaTLSSettingsClientSide",
 "OutputKafkaTLSSettingsClientSideTypedDict",
 "OutputKafkaType",
@@ -5557,7 +5568,6 @@ __all__ = [
 "OutputMskPqControlsTypedDict",
 "OutputMskQueueFullBehavior",
 "OutputMskRecordDataFormat",
- "OutputMskSchemaType",
 "OutputMskSignatureVersion",
 "OutputMskTLSSettingsClientSide",
 "OutputMskTLSSettingsClientSideTypedDict",
@@ -6063,8 +6073,6 @@ __all__ = [
 "ResourceTypeLabel",
 "ResourceTypeLabelTypedDict",
 "Role",
- "RouteCloneConf",
- "RouteCloneConfTypedDict",
 "RouteConf",
 "RouteConfTypedDict",
 "Routes",
@@ -6215,7 +6223,6 @@ _dynamic_imports: dict[str, str] = {
 "AddHecTokenRequestMetadatum": ".addhectokenrequest",
 "AddHecTokenRequestMetadatumTypedDict": ".addhectokenrequest",
 "AddHecTokenRequestTypedDict": ".addhectokenrequest",
- "AppMode": ".appmode",
 "AuthToken": ".authtoken",
 "AuthTokenTypedDict": ".authtoken",
 "BranchInfo": ".branchinfo",
@@ -6229,6 +6236,7 @@ _dynamic_imports: dict[str, str] = {
 "ConfigGroup": ".configgroup",
 "ConfigGroupType": ".configgroup",
 "ConfigGroupTypedDict": ".configgroup",
+ "EstimatedIngestRate": ".configgroup",
 "Git": ".configgroup",
 "GitTypedDict": ".configgroup",
 "ConfigGroupCloud": ".configgroupcloud",
@@ -6485,6 +6493,7 @@ _dynamic_imports: dict[str, str] = {
 "RenamedTypedDict": ".gitstatusresult",
 "Config": ".hbcriblinfo",
 "ConfigTypedDict": ".hbcriblinfo",
+ "DistMode": ".hbcriblinfo",
 "HBCriblInfo": ".hbcriblinfo",
 "HBCriblInfoTypedDict": ".hbcriblinfo",
 "HBLeaderInfo": ".hbleaderinfo",
@@ -6589,7 +6598,6 @@ _dynamic_imports: dict[str, str] = {
 "InputConfluentCloudPqControlsTypedDict": ".inputconfluentcloud",
 "InputConfluentCloudPqTypedDict": ".inputconfluentcloud",
 "InputConfluentCloudSASLMechanism": ".inputconfluentcloud",
- "InputConfluentCloudSchemaType": ".inputconfluentcloud",
 "InputConfluentCloudTLSSettingsClientSide": ".inputconfluentcloud",
 "InputConfluentCloudTLSSettingsClientSideTypedDict": ".inputconfluentcloud",
 "InputConfluentCloudType": ".inputconfluentcloud",
@@ -7008,7 +7016,6 @@ _dynamic_imports: dict[str, str] = {
 "InputKafkaPqControlsTypedDict": ".inputkafka",
 "InputKafkaPqTypedDict": ".inputkafka",
 "InputKafkaSASLMechanism": ".inputkafka",
- "InputKafkaSchemaType": ".inputkafka",
 "InputKafkaTLSSettingsClientSide": ".inputkafka",
 "InputKafkaTLSSettingsClientSideTypedDict": ".inputkafka",
 "InputKafkaType": ".inputkafka",
@@ -7160,7 +7167,6 @@ _dynamic_imports: dict[str, str] = {
 "InputMskPqControls": ".inputmsk",
 "InputMskPqControlsTypedDict": ".inputmsk",
 "InputMskPqTypedDict": ".inputmsk",
- "InputMskSchemaType": ".inputmsk",
 "InputMskSignatureVersion": ".inputmsk",
 "InputMskTLSSettingsClientSide": ".inputmsk",
 "InputMskTLSSettingsClientSideTypedDict": ".inputmsk",
@@ -7987,7 +7993,6 @@ _dynamic_imports: dict[str, str] = {
 "OutputConfluentCloudQueueFullBehavior": ".outputconfluentcloud",
 "OutputConfluentCloudRecordDataFormat": ".outputconfluentcloud",
 "OutputConfluentCloudSASLMechanism": ".outputconfluentcloud",
- "OutputConfluentCloudSchemaType": ".outputconfluentcloud",
 "OutputConfluentCloudTLSSettingsClientSide": ".outputconfluentcloud",
 "OutputConfluentCloudTLSSettingsClientSideTypedDict": ".outputconfluentcloud",
 "OutputConfluentCloudType": ".outputconfluentcloud",
@@ -8061,6 +8066,13 @@ _dynamic_imports: dict[str, str] = {
 "OutputCrowdstrikeNextGenSiemTimeoutRetrySettingsTypedDict": ".outputcrowdstrikenextgensiem",
 "OutputCrowdstrikeNextGenSiemType": ".outputcrowdstrikenextgensiem",
 "OutputCrowdstrikeNextGenSiemTypedDict": ".outputcrowdstrikenextgensiem",
+ "OutputDatabricks": ".outputdatabricks",
+ "OutputDatabricksAuthenticationMethod": ".outputdatabricks",
+ "OutputDatabricksBackpressureBehavior": ".outputdatabricks",
+ "OutputDatabricksDataFormat": ".outputdatabricks",
+ "OutputDatabricksDiskSpaceProtection": ".outputdatabricks",
+ "OutputDatabricksType": ".outputdatabricks",
+ "OutputDatabricksTypedDict": ".outputdatabricks",
 "DatadogSite": ".outputdatadog",
 "OutputDatadog": ".outputdatadog",
 "OutputDatadogAuthenticationMethod": ".outputdatadog",
@@ -8444,7 +8456,6 @@ _dynamic_imports: dict[str, str] = {
 "OutputKafkaQueueFullBehavior": ".outputkafka",
 "OutputKafkaRecordDataFormat": ".outputkafka",
 "OutputKafkaSASLMechanism": ".outputkafka",
- "OutputKafkaSchemaType": ".outputkafka",
 "OutputKafkaTLSSettingsClientSide": ".outputkafka",
 "OutputKafkaTLSSettingsClientSideTypedDict": ".outputkafka",
 "OutputKafkaType": ".outputkafka",
@@ -8519,7 +8530,6 @@ _dynamic_imports: dict[str, str] = {
 "OutputMskPqControlsTypedDict": ".outputmsk",
 "OutputMskQueueFullBehavior": ".outputmsk",
 "OutputMskRecordDataFormat": ".outputmsk",
- "OutputMskSchemaType": ".outputmsk",
 "OutputMskSignatureVersion": ".outputmsk",
 "OutputMskTLSSettingsClientSide": ".outputmsk",
 "OutputMskTLSSettingsClientSideTypedDict": ".outputmsk",
@@ -9015,8 +9025,6 @@ _dynamic_imports: dict[str, str] = {
 "RbacResource": ".rbacresource",
 "ResourcePolicy": ".resourcepolicy",
 "ResourcePolicyTypedDict": ".resourcepolicy",
- "RouteCloneConf": ".routecloneconf",
- "RouteCloneConfTypedDict": ".routecloneconf",
 "RouteConf": ".routeconf",
 "RouteConfTypedDict": ".routeconf",
 "Comment": ".routes",
@@ -14,6 +14,18 @@ from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict


+ class EstimatedIngestRate(int, Enum, metaclass=utils.OpenEnumMeta):
+ ONE_THOUSAND_AND_TWENTY_FOUR = 1024
+ FOUR_THOUSAND_AND_NINETY_SIX = 4096
+ TEN_THOUSAND_TWO_HUNDRED_AND_FORTY = 10240
+ TWO_THOUSAND_AND_FORTY_EIGHT = 2048
+ THREE_THOUSAND_AND_SEVENTY_TWO = 3072
+ FIVE_THOUSAND_ONE_HUNDRED_AND_TWENTY = 5120
+ SEVEN_THOUSAND_ONE_HUNDRED_AND_SIXTY_EIGHT = 7168
+ THIRTEEN_THOUSAND_THREE_HUNDRED_AND_TWELVE = 13312
+ FIFTEEN_THOUSAND_THREE_HUNDRED_AND_SIXTY = 15360
+
+
 class GitTypedDict(TypedDict):
 commit: NotRequired[str]
 local_changes: NotRequired[float]
@@ -40,7 +52,7 @@ class ConfigGroupTypedDict(TypedDict):
 config_version: NotRequired[str]
 deploying_worker_count: NotRequired[float]
 description: NotRequired[str]
- estimated_ingest_rate: NotRequired[float]
+ estimated_ingest_rate: NotRequired[EstimatedIngestRate]
 git: NotRequired[GitTypedDict]
 incompatible_worker_count: NotRequired[float]
 inherits: NotRequired[str]
@@ -75,7 +87,10 @@ class ConfigGroup(BaseModel):
 description: Optional[str] = None

 estimated_ingest_rate: Annotated[
- Optional[float], pydantic.Field(alias="estimatedIngestRate")
+ Annotated[
+ Optional[EstimatedIngestRate], PlainValidator(validate_open_enum(True))
+ ],
+ pydantic.Field(alias="estimatedIngestRate"),
 ] = None

 git: Optional[Git] = None
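In the hunks above, ConfigGroup.estimated_ingest_rate (and the matching GroupsSDK parameters earlier in this diff) changes from a plain float to the new EstimatedIngestRate open enum, validated with validate_open_enum(True). A minimal usage sketch, assuming the enum is re-exported from the models package as the __init__ changes indicate:

from cribl_control_plane import models

# Members are plain integers under the hood (the unit is not stated in the diff).
rate = models.EstimatedIngestRate.FOUR_THOUSAND_AND_NINETY_SIX
assert int(rate) == 4096

# Because the field is an open enum, an unlisted integer is expected to pass
# validation as well (assumption based on the open-enum validator in the diff).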
@@ -1,11 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

 from __future__ import annotations
- from .appmode import AppMode
 from .hbleaderinfo import HBLeaderInfo, HBLeaderInfoTypedDict
 from .lookupversions import LookupVersions, LookupVersionsTypedDict
+ from cribl_control_plane import utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
 import pydantic
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
@@ -36,9 +37,19 @@ class Config(BaseModel):
 version: Optional[str] = None


+ class DistMode(str, Enum, metaclass=utils.OpenEnumMeta):
+ EDGE = "edge"
+ WORKER = "worker"
+ SINGLE = "single"
+ MASTER = "master"
+ MANAGED_EDGE = "managed-edge"
+ OUTPOST = "outpost"
+ SEARCH_SUPERVISOR = "search-supervisor"
+
+
 class HBCriblInfoTypedDict(TypedDict):
 config: ConfigTypedDict
- dist_mode: AppMode
+ dist_mode: DistMode
 group: str
 guid: str
 start_time: float
@@ -50,6 +61,7 @@ class HBCriblInfoTypedDict(TypedDict):
 lookup_versions: NotRequired[LookupVersionsTypedDict]
 master: NotRequired[HBLeaderInfoTypedDict]
 pid: NotRequired[float]
+ socks_enabled: NotRequired[bool]
 version: NotRequired[str]


@@ -57,7 +69,7 @@ class HBCriblInfo(BaseModel):
 config: Config

 dist_mode: Annotated[
- Annotated[AppMode, PlainValidator(validate_open_enum(False))],
+ Annotated[DistMode, PlainValidator(validate_open_enum(False))],
 pydantic.Field(alias="distMode"),
 ]

@@ -87,4 +99,8 @@ class HBCriblInfo(BaseModel):

 pid: Optional[float] = None

+ socks_enabled: Annotated[Optional[bool], pydantic.Field(alias="socksEnabled")] = (
+ None
+ )
+
 version: Optional[str] = None
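The hbcriblinfo changes above replace the shared AppMode enum with a module-local DistMode open enum for dist_mode and add an optional socks_enabled flag (wire alias socksEnabled). A short sketch, assuming DistMode is re-exported from models as the __init__ changes indicate:

from cribl_control_plane import models

# DistMode subclasses str, so members compare equal to their raw wire values.
mode = models.DistMode.MANAGED_EDGE
assert mode == "managed-edge"

# As an open enum (OpenEnumMeta), dist_mode values not listed above are expected
# to be tolerated rather than rejected (assumption based on the open-enum machinery).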
@@ -17,6 +17,7 @@ class HeartbeatMetadataTags(BaseModel):

 class HeartbeatMetadataAwsTypedDict(TypedDict):
 enabled: bool
+ instance_id: str
 region: str
 type: str
 zone: str
@@ -26,6 +27,8 @@ class HeartbeatMetadataAwsTypedDict(TypedDict):
 class HeartbeatMetadataAws(BaseModel):
 enabled: bool

+ instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
+
 region: str

 type: str
@@ -187,13 +187,6 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
 ] = None


- class InputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
- r"""The schema format used to encode and decode event data"""
-
- AVRO = "avro"
- JSON = "json"
-
-
 class InputConfluentCloudAuthTypedDict(TypedDict):
 r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -304,8 +297,6 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
 disabled: NotRequired[bool]
 schema_registry_url: NotRequired[str]
 r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
- schema_type: NotRequired[InputConfluentCloudSchemaType]
- r"""The schema format used to encode and decode event data"""
 connection_timeout: NotRequired[float]
 r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
 request_timeout: NotRequired[float]
@@ -327,15 +318,6 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
 ] = "http://localhost:8081"
 r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

- schema_type: Annotated[
- Annotated[
- Optional[InputConfluentCloudSchemaType],
- PlainValidator(validate_open_enum(False)),
- ],
- pydantic.Field(alias="schemaType"),
- ] = InputConfluentCloudSchemaType.AVRO
- r"""The schema format used to encode and decode event data"""
-
 connection_timeout: Annotated[
 Optional[float], pydantic.Field(alias="connectionTimeout")
 ] = 30000
@@ -103,13 +103,6 @@ class InputKafkaPq(BaseModel):
 ] = None


- class InputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
- r"""The schema format used to encode and decode event data"""
-
- AVRO = "avro"
- JSON = "json"
-
-
 class InputKafkaAuthTypedDict(TypedDict):
 r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -220,8 +213,6 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
 disabled: NotRequired[bool]
 schema_registry_url: NotRequired[str]
 r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
- schema_type: NotRequired[InputKafkaSchemaType]
- r"""The schema format used to encode and decode event data"""
 connection_timeout: NotRequired[float]
 r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
 request_timeout: NotRequired[float]
@@ -241,14 +232,6 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
 ] = "http://localhost:8081"
 r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

- schema_type: Annotated[
- Annotated[
- Optional[InputKafkaSchemaType], PlainValidator(validate_open_enum(False))
- ],
- pydantic.Field(alias="schemaType"),
- ] = InputKafkaSchemaType.AVRO
- r"""The schema format used to encode and decode event data"""
-
 connection_timeout: Annotated[
 Optional[float], pydantic.Field(alias="connectionTimeout")
 ] = 30000
@@ -116,13 +116,6 @@ class InputMskMetadatum(BaseModel):
 r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


- class InputMskSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
- r"""The schema format used to encode and decode event data"""
-
- AVRO = "avro"
- JSON = "json"
-
-
 class InputMskAuthTypedDict(TypedDict):
 r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -233,8 +226,6 @@ class InputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
 disabled: NotRequired[bool]
 schema_registry_url: NotRequired[str]
 r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
- schema_type: NotRequired[InputMskSchemaType]
- r"""The schema format used to encode and decode event data"""
 connection_timeout: NotRequired[float]
 r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
 request_timeout: NotRequired[float]
@@ -254,14 +245,6 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
 ] = "http://localhost:8081"
 r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

- schema_type: Annotated[
- Annotated[
- Optional[InputMskSchemaType], PlainValidator(validate_open_enum(False))
- ],
- pydantic.Field(alias="schemaType"),
- ] = InputMskSchemaType.AVRO
- r"""The schema format used to encode and decode event data"""
-
 connection_timeout: Annotated[
 Optional[float], pydantic.Field(alias="connectionTimeout")
 ] = 30000
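The three removals above (Confluent Cloud, Kafka, and MSK sources), together with the Output*SchemaType removals in models/__init__.py, drop the per-integration SchemaType enums and the schema_type field from the Kafka Schema Registry settings, so the schema format can no longer be set through these models. A construction sketch of the slimmed-down settings object, with the import path and keyword names assumed from the diff (all fields shown there are optional):

from cribl_control_plane.models.inputkafka import (
    InputKafkaKafkaSchemaRegistryAuthentication,
)

# schema_type is no longer a field; only connection-oriented settings remain.
registry = InputKafkaKafkaSchemaRegistryAuthentication(
    disabled=False,
    schema_registry_url="http://localhost:8081",
    connection_timeout=30000,
    request_timeout=30000,
)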
@@ -19,6 +19,7 @@ class NodeProvidedInfoTags(BaseModel):

 class NodeProvidedInfoAwsTypedDict(TypedDict):
 enabled: bool
+ instance_id: str
 region: str
 type: str
 zone: str
@@ -28,6 +29,8 @@ class NodeProvidedInfoAwsTypedDict(TypedDict):
 class NodeProvidedInfoAws(BaseModel):
 enabled: bool

+ instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
+
 region: str

 type: str
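The final two hunks make instance_id (wire alias instanceId) a required field on both the heartbeat and node-provided AWS metadata models, alongside the existing enabled, region, type, and zone fields. A hedged construction sketch based only on the fields visible in this diff, assuming HeartbeatMetadataAws is re-exported from models like the other heartbeat types (the class may have additional fields not shown here):

from cribl_control_plane import models

# instance_id is now required; all values below are illustrative only.
aws_meta = models.HeartbeatMetadataAws(
    enabled=True,
    instance_id="i-0123456789abcdef0",
    region="us-west-2",
    type="m5.large",
    zone="us-west-2a",
)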