cribl-control-plane 0.1.0a1__py3-none-any.whl → 0.1.0b2__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.

This version of cribl-control-plane has been flagged as potentially problematic.

@@ -18,6 +18,7 @@ from .outputcrowdstrikenextgensiem import (
     OutputCrowdstrikeNextGenSiem,
     OutputCrowdstrikeNextGenSiemTypedDict,
 )
+from .outputdatabricks import OutputDatabricks, OutputDatabricksTypedDict
 from .outputdatadog import OutputDatadog, OutputDatadogTypedDict
 from .outputdataset import OutputDataset, OutputDatasetTypedDict
 from .outputdefault import OutputDefault, OutputDefaultTypedDict
@@ -90,31 +91,32 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputSnmpTypedDict,
         OutputNetflowTypedDict,
+        OutputSnmpTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
-        OutputStatsdExtTypedDict,
         OutputGraphiteTypedDict,
+        OutputStatsdExtTypedDict,
         OutputStatsdTypedDict,
         OutputGooglePubsubTypedDict,
-        OutputCriblTCPTypedDict,
         OutputSplunkTypedDict,
+        OutputCriblTCPTypedDict,
         OutputSnsTypedDict,
         OutputCloudwatchTypedDict,
         OutputAzureEventhubTypedDict,
-        OutputWavefrontTypedDict,
         OutputSignalfxTypedDict,
+        OutputWavefrontTypedDict,
         OutputHoneycombTypedDict,
-        OutputSumoLogicTypedDict,
-        OutputCrowdstrikeNextGenSiemTypedDict,
         OutputHumioHecTypedDict,
         OutputTcpjsonTypedDict,
+        OutputSumoLogicTypedDict,
+        OutputCrowdstrikeNextGenSiemTypedDict,
         OutputElasticCloudTypedDict,
-        OutputKinesisTypedDict,
-        OutputConfluentCloudTypedDict,
-        OutputKafkaTypedDict,
         OutputExabeamTypedDict,
+        OutputKafkaTypedDict,
+        OutputConfluentCloudTypedDict,
+        OutputKinesisTypedDict,
+        OutputDatabricksTypedDict,
         OutputNewrelicEventsTypedDict,
         OutputAzureLogsTypedDict,
         OutputSplunkLbTypedDict,
@@ -130,10 +132,10 @@ OutputTypedDict = TypeAliasType(
         OutputDynatraceHTTPTypedDict,
         OutputServiceNowTypedDict,
         OutputDynatraceOtlpTypedDict,
-        OutputElasticTypedDict,
         OutputGoogleChronicleTypedDict,
-        OutputCriblLakeTypedDict,
+        OutputElasticTypedDict,
         OutputDatadogTypedDict,
+        OutputCriblLakeTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -161,31 +163,32 @@ Output = TypeAliasType(
         OutputDevnull,
         OutputDefault,
         OutputRouter,
-        OutputSnmp,
         OutputNetflow,
+        OutputSnmp,
         OutputDiskSpool,
         OutputRing,
-        OutputStatsdExt,
         OutputGraphite,
+        OutputStatsdExt,
         OutputStatsd,
         OutputGooglePubsub,
-        OutputCriblTCP,
         OutputSplunk,
+        OutputCriblTCP,
         OutputSns,
         OutputCloudwatch,
         OutputAzureEventhub,
-        OutputWavefront,
         OutputSignalfx,
+        OutputWavefront,
         OutputHoneycomb,
-        OutputSumoLogic,
-        OutputCrowdstrikeNextGenSiem,
         OutputHumioHec,
         OutputTcpjson,
+        OutputSumoLogic,
+        OutputCrowdstrikeNextGenSiem,
         OutputElasticCloud,
-        OutputKinesis,
-        OutputConfluentCloud,
-        OutputKafka,
         OutputExabeam,
+        OutputKafka,
+        OutputConfluentCloud,
+        OutputKinesis,
+        OutputDatabricks,
         OutputNewrelicEvents,
         OutputAzureLogs,
         OutputSplunkLb,
@@ -201,10 +204,10 @@ Output = TypeAliasType(
         OutputDynatraceHTTP,
         OutputServiceNow,
         OutputDynatraceOtlp,
-        OutputElastic,
         OutputGoogleChronicle,
-        OutputCriblLake,
+        OutputElastic,
         OutputDatadog,
+        OutputCriblLake,
         OutputPrometheus,
         OutputMsk,
         OutputSentinelOneAiSiem,
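
Aside from the new OutputDatabricks entries, the shuffling inside both union aliases is cosmetic: for `typing.Union`, member order has no effect on the resulting type, and any runtime difference would only show up in a validator that tries members left to right — these models each carry a distinct `type` value, so payloads stay unambiguous. A stdlib-only check of the ordering property:

    from typing import Union

    # Union members behave as a set; declaration order does not change the type.
    assert Union[int, str] == Union[str, int]
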
@@ -123,13 +123,6 @@ class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"
 
 
-class OutputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -240,8 +233,6 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputConfluentCloudSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -267,15 +258,6 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudSchemaType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputConfluentCloudSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
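
For SDK users this is a small breaking change: the `schema_type` field (wire alias `schemaType`) and its enum are gone from the schema-registry config, so code that set them must drop them when upgrading. The same removal is repeated for the Kafka and MSK outputs in the hunks below. A minimal before/after sketch, assuming the generated package re-exports the model from `cribl_control_plane.models`:

    from cribl_control_plane.models import (
        OutputConfluentCloudKafkaSchemaRegistryAuthentication,
    )

    # 0.1.0a1 also accepted schema_type=OutputConfluentCloudSchemaType.AVRO here;
    # in 0.1.0b2 both the field and the enum no longer exist, so omit it.
    auth = OutputConfluentCloudKafkaSchemaRegistryAuthentication(
        disabled=False,
        schema_registry_url="http://localhost:8081",
    )
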
@@ -0,0 +1,282 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class OutputDatabricksType(str, Enum):
+    DATABRICKS = "databricks"
+
+
+class OutputDatabricksDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Format of the output data"""
+
+    JSON = "json"
+    RAW = "raw"
+    PARQUET = "parquet"
+
+
+class OutputDatabricksBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""How to handle events when all receivers are exerting backpressure"""
+
+    BLOCK = "block"
+    DROP = "drop"
+
+
+class OutputDatabricksDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
+
+    BLOCK = "block"
+    DROP = "drop"
+
+
+class OutputDatabricksAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
+
+    MANUAL = "manual"
+    SECRET = "secret"
+
+
+class OutputDatabricksTypedDict(TypedDict):
+    type: OutputDatabricksType
+    login_url: str
+    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
+    client_id: str
+    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data before sending out to this output"""
+    system_fields: NotRequired[List[str]]
+    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    dest_path: NotRequired[str]
+    r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
+    stage_path: NotRequired[str]
+    r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
+    add_id_to_stage_path: NotRequired[bool]
+    r"""Add the Output ID value to staging location"""
+    remove_empty_dirs: NotRequired[bool]
+    r"""Remove empty staging directories after moving files"""
+    partition_expr: NotRequired[str]
+    r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
+    format_: NotRequired[OutputDatabricksDataFormat]
+    r"""Format of the output data"""
+    base_file_name: NotRequired[str]
+    r"""JavaScript expression to define the output filename prefix (can be constant)"""
+    file_name_suffix: NotRequired[str]
+    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
+    max_file_size_mb: NotRequired[float]
+    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
+    max_file_open_time_sec: NotRequired[float]
+    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
+    max_file_idle_time_sec: NotRequired[float]
+    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
+    max_open_files: NotRequired[float]
+    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
+    header_line: NotRequired[str]
+    r"""If set, this line will be written to the beginning of each output file"""
+    write_high_water_mark: NotRequired[float]
+    r"""Buffer size used to write to a file"""
+    on_backpressure: NotRequired[OutputDatabricksBackpressureBehavior]
+    r"""How to handle events when all receivers are exerting backpressure"""
+    deadletter_enabled: NotRequired[bool]
+    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
+    on_disk_full_backpressure: NotRequired[OutputDatabricksDiskSpaceProtection]
+    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
+    unity_auth_method: NotRequired[OutputDatabricksAuthenticationMethod]
+    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
+    scope: NotRequired[str]
+    r"""OAuth scope for Unity Catalog authentication"""
+    token_timeout_secs: NotRequired[float]
+    r"""How often the OAuth token should be refreshed"""
+    default_catalog: NotRequired[str]
+    r"""Name of the catalog to use for the output"""
+    default_schema: NotRequired[str]
+    r"""Name of the catalog schema to use for the output"""
+    events_volume_name: NotRequired[str]
+    r"""Name of the events volume in Databricks"""
+    over_write_files: NotRequired[bool]
+    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
+    description: NotRequired[str]
+    client_secret: NotRequired[str]
+    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
+    client_text_secret: NotRequired[str]
+    r"""Select or create a stored text secret"""
+
+
+class OutputDatabricks(BaseModel):
+    type: OutputDatabricksType
+
+    login_url: Annotated[str, pydantic.Field(alias="loginUrl")]
+    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
+
+    client_id: Annotated[str, pydantic.Field(alias="clientId")]
+    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
+
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data before sending out to this output"""
+
+    system_fields: Annotated[
+        Optional[List[str]], pydantic.Field(alias="systemFields")
+    ] = None
+    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
+    r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
+
+    stage_path: Annotated[Optional[str], pydantic.Field(alias="stagePath")] = (
+        "$CRIBL_HOME/state/outputs/staging"
+    )
+    r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
+
+    add_id_to_stage_path: Annotated[
+        Optional[bool], pydantic.Field(alias="addIdToStagePath")
+    ] = True
+    r"""Add the Output ID value to staging location"""
+
+    remove_empty_dirs: Annotated[
+        Optional[bool], pydantic.Field(alias="removeEmptyDirs")
+    ] = True
+    r"""Remove empty staging directories after moving files"""
+
+    partition_expr: Annotated[Optional[str], pydantic.Field(alias="partitionExpr")] = (
+        "C.Time.strftime(_time ? _time : Date.now()/1000, '%Y/%m/%d')"
+    )
+    r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
+
+    format_: Annotated[
+        Annotated[
+            Optional[OutputDatabricksDataFormat],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="format"),
+    ] = OutputDatabricksDataFormat.JSON
+    r"""Format of the output data"""
+
+    base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
+        "`CriblOut`"
+    )
+    r"""JavaScript expression to define the output filename prefix (can be constant)"""
+
+    file_name_suffix: Annotated[
+        Optional[str], pydantic.Field(alias="fileNameSuffix")
+    ] = '`.${C.env["CRIBL_WORKER_ID"]}.${__format}${__compression === "gzip" ? ".gz" : ""}`'
+    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
+
+    max_file_size_mb: Annotated[
+        Optional[float], pydantic.Field(alias="maxFileSizeMB")
+    ] = 32
+    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
+
+    max_file_open_time_sec: Annotated[
+        Optional[float], pydantic.Field(alias="maxFileOpenTimeSec")
+    ] = 300
+    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
+
+    max_file_idle_time_sec: Annotated[
+        Optional[float], pydantic.Field(alias="maxFileIdleTimeSec")
+    ] = 30
+    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
+
+    max_open_files: Annotated[Optional[float], pydantic.Field(alias="maxOpenFiles")] = (
+        100
+    )
+    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
+
+    header_line: Annotated[Optional[str], pydantic.Field(alias="headerLine")] = ""
+    r"""If set, this line will be written to the beginning of each output file"""
+
+    write_high_water_mark: Annotated[
+        Optional[float], pydantic.Field(alias="writeHighWaterMark")
+    ] = 64
+    r"""Buffer size used to write to a file"""
+
+    on_backpressure: Annotated[
+        Annotated[
+            Optional[OutputDatabricksBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="onBackpressure"),
+    ] = OutputDatabricksBackpressureBehavior.BLOCK
+    r"""How to handle events when all receivers are exerting backpressure"""
+
+    deadletter_enabled: Annotated[
+        Optional[bool], pydantic.Field(alias="deadletterEnabled")
+    ] = False
+    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
+
+    on_disk_full_backpressure: Annotated[
+        Annotated[
+            Optional[OutputDatabricksDiskSpaceProtection],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="onDiskFullBackpressure"),
+    ] = OutputDatabricksDiskSpaceProtection.BLOCK
+    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
+
+    unity_auth_method: Annotated[
+        Annotated[
+            Optional[OutputDatabricksAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="unityAuthMethod"),
+    ] = OutputDatabricksAuthenticationMethod.MANUAL
+    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
+
+    scope: Optional[str] = "all-apis"
+    r"""OAuth scope for Unity Catalog authentication"""
+
+    token_timeout_secs: Annotated[
+        Optional[float], pydantic.Field(alias="tokenTimeoutSecs")
+    ] = 3600
+    r"""How often the OAuth token should be refreshed"""
+
+    default_catalog: Annotated[
+        Optional[str], pydantic.Field(alias="defaultCatalog")
+    ] = "main"
+    r"""Name of the catalog to use for the output"""
+
+    default_schema: Annotated[Optional[str], pydantic.Field(alias="defaultSchema")] = (
+        "external"
+    )
+    r"""Name of the catalog schema to use for the output"""
+
+    events_volume_name: Annotated[
+        Optional[str], pydantic.Field(alias="eventsVolumeName")
+    ] = "events"
+    r"""Name of the events volume in Databricks"""
+
+    over_write_files: Annotated[
+        Optional[bool], pydantic.Field(alias="overWriteFiles")
+    ] = False
+    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
+
+    description: Optional[str] = None
+
+    client_secret: Annotated[Optional[str], pydantic.Field(alias="clientSecret")] = None
+    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
+
+    client_text_secret: Annotated[
+        Optional[str], pydantic.Field(alias="clientTextSecret")
+    ] = None
+    r"""Select or create a stored text secret"""
@@ -40,13 +40,6 @@ class OutputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"
 
 
-class OutputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -157,8 +150,6 @@ class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputKafkaSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -182,14 +173,6 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputKafkaSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputKafkaSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -40,13 +40,6 @@ class OutputMskCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"
 
 
-class OutputMskSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -157,8 +150,6 @@ class OutputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputMskSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -182,14 +173,6 @@ class OutputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputMskSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputMskSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -1,10 +1,9 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from .routecloneconf import RouteCloneConf, RouteCloneConfTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
-from typing import List, Optional
+from typing import Dict, List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
@@ -13,7 +12,7 @@ class RouteConfTypedDict(TypedDict):
     id: str
     name: str
     pipeline: str
-    clones: NotRequired[List[RouteCloneConfTypedDict]]
+    clones: NotRequired[List[Dict[str, str]]]
     context: NotRequired[str]
     description: NotRequired[str]
     disabled: NotRequired[bool]
@@ -33,7 +32,7 @@ class RouteConf(BaseModel):
 
     pipeline: str
 
-    clones: Optional[List[RouteCloneConf]] = None
+    clones: Optional[List[Dict[str, str]]] = None
 
     context: Optional[str] = None
 
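
The `RouteConf` change drops the dedicated `RouteCloneConf` model in favor of plain string-to-string dicts, so clone entries are now free-form mappings rather than a fixed schema. A hedged sketch — the dict key is illustrative, and this assumes `id`, `name`, and `pipeline` are the only required fields, as the visible hunks suggest:

    from cribl_control_plane.models import RouteConf

    # In 0.1.0a1 each clone was a RouteCloneConf instance; in 0.1.0b2 any
    # Dict[str, str] validates. The "output" key below is hypothetical.
    route = RouteConf(
        id="route-1",
        name="archive-clone",
        pipeline="main",
        clones=[{"output": "s3-archive"}],
    )
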
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cribl-control-plane
-Version: 0.1.0a1
+Version: 0.1.0b2
 Summary: Python Client SDK Generated by Speakeasy.
 Author: Speakeasy
 Requires-Python: >=3.9.2
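
Note the pre-release semantics of the version bump: under PEP 440, 0.1.0b2 sorts after 0.1.0a1 but before a final 0.1.0, and pip will only select either pre-release when --pre is passed or the version is pinned explicitly. A quick check with the packaging library:

    from packaging.version import Version

    # PEP 440 ordering: alpha < beta < final release.
    assert Version("0.1.0a1") < Version("0.1.0b2") < Version("0.1.0")
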