cribl-control-plane 0.0.15__py3-none-any.whl → 0.0.16__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (55); a migration sketch follows the list.
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/{outputs.py → destinations.py} +69 -71
  3. cribl_control_plane/models/__init__.py +5234 -2
  4. cribl_control_plane/models/createinputop.py +19253 -2
  5. cribl_control_plane/models/createoutputop.py +20578 -4
  6. cribl_control_plane/models/createoutputtestbyidop.py +2 -2
  7. cribl_control_plane/models/deleteoutputbyidop.py +2 -2
  8. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  9. cribl_control_plane/models/getoutputbyidop.py +2 -2
  10. cribl_control_plane/models/getoutputpqbyidop.py +2 -2
  11. cribl_control_plane/models/getoutputsamplesbyidop.py +2 -2
  12. cribl_control_plane/models/inputconfluentcloud.py +5 -3
  13. cribl_control_plane/models/inputcriblhttp.py +5 -3
  14. cribl_control_plane/models/inputcribllakehttp.py +5 -3
  15. cribl_control_plane/models/inputcribltcp.py +5 -3
  16. cribl_control_plane/models/inputdatadogagent.py +5 -3
  17. cribl_control_plane/models/inputedgeprometheus.py +5 -3
  18. cribl_control_plane/models/inputelastic.py +5 -3
  19. cribl_control_plane/models/inputeventhub.py +5 -3
  20. cribl_control_plane/models/inputfirehose.py +5 -3
  21. cribl_control_plane/models/inputgooglepubsub.py +5 -3
  22. cribl_control_plane/models/inputgrafana_union.py +10 -6
  23. cribl_control_plane/models/inputhttp.py +5 -3
  24. cribl_control_plane/models/inputhttpraw.py +5 -3
  25. cribl_control_plane/models/inputjournalfiles.py +5 -3
  26. cribl_control_plane/models/inputkafka.py +5 -3
  27. cribl_control_plane/models/inputkinesis.py +5 -3
  28. cribl_control_plane/models/inputloki.py +5 -3
  29. cribl_control_plane/models/inputmodeldriventelemetry.py +6 -5
  30. cribl_control_plane/models/inputmsk.py +5 -3
  31. cribl_control_plane/models/inputnetflow.py +5 -3
  32. cribl_control_plane/models/inputoffice365mgmt.py +5 -3
  33. cribl_control_plane/models/inputoffice365msgtrace.py +5 -5
  34. cribl_control_plane/models/inputoffice365service.py +5 -5
  35. cribl_control_plane/models/inputopentelemetry.py +5 -3
  36. cribl_control_plane/models/inputprometheus.py +5 -3
  37. cribl_control_plane/models/inputprometheusrw.py +5 -3
  38. cribl_control_plane/models/inputrawudp.py +5 -3
  39. cribl_control_plane/models/inputsnmp.py +5 -3
  40. cribl_control_plane/models/inputsplunk.py +5 -3
  41. cribl_control_plane/models/inputsplunkhec.py +5 -3
  42. cribl_control_plane/models/inputsplunksearch.py +5 -3
  43. cribl_control_plane/models/inputsqs.py +5 -3
  44. cribl_control_plane/models/inputtcp.py +5 -3
  45. cribl_control_plane/models/inputtcpjson.py +5 -3
  46. cribl_control_plane/models/inputwef.py +5 -3
  47. cribl_control_plane/models/inputwiz.py +5 -3
  48. cribl_control_plane/models/inputzscalerhec.py +5 -3
  49. cribl_control_plane/models/listoutputop.py +2 -2
  50. cribl_control_plane/models/updateoutputbyidop.py +4 -4
  51. cribl_control_plane/sdk.py +3 -5
  52. cribl_control_plane/sources.py +8 -10
  53. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.16.dist-info}/METADATA +13 -13
  54. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.16.dist-info}/RECORD +55 -55
  55. {cribl_control_plane-0.0.15.dist-info → cribl_control_plane-0.0.16.dist-info}/WHEEL +0 -0
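
Two changes dominate this release. First, outputs.py is renamed to destinations.py, with "Output" replaced by "Destination" in docstrings throughout the per-operation models, and with small related changes in sdk.py and sources.py. Second, the type discriminator on every input model moves from required to optional. A hedged migration sketch for the rename follows; the client class and property names are assumptions inferred from the renamed module and are not confirmed by this diff.

# Hedged sketch of the rename's likely surface effect. The class name
# CriblControlPlane, the server_url parameter, and the outputs/destinations
# property names are assumptions inferred from the outputs.py ->
# destinations.py rename; none of them are confirmed by this diff.
from cribl_control_plane import CriblControlPlane  # assumed export

client = CriblControlPlane(server_url="https://leader.example.com:9000")  # placeholder URL

# 0.0.15 (assumed): client.outputs.<operation>(...)
# 0.0.16 (assumed): client.destinations.<operation>(...)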

cribl_control_plane/models/createoutputtestbyidop.py
@@ -11,7 +11,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class CreateOutputTestByIDRequestTypedDict(TypedDict):
     id: str
-    r"""Output Id"""
+    r"""Destination Id"""
     output_test_request: OutputTestRequestTypedDict
     r"""OutputTestRequest object"""
 
@@ -20,7 +20,7 @@ class CreateOutputTestByIDRequest(BaseModel):
     id: Annotated[
         str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
     ]
-    r"""Output Id"""
+    r"""Destination Id"""
 
     output_test_request: Annotated[
         OutputTestRequest,

cribl_control_plane/models/deleteoutputbyidop.py
@@ -21,7 +21,7 @@ class DeleteOutputByIDRequest(BaseModel):
 
 
 class DeleteOutputByIDResponseTypedDict(TypedDict):
-    r"""a list of Output objects"""
+    r"""a list of Destination objects"""
 
     count: NotRequired[int]
     r"""number of items present in the items array"""
@@ -29,7 +29,7 @@ class DeleteOutputByIDResponseTypedDict(TypedDict):
 
 
 class DeleteOutputByIDResponse(BaseModel):
-    r"""a list of Output objects"""
+    r"""a list of Destination objects"""
 
     count: Optional[int] = None
     r"""number of items present in the items array"""

cribl_control_plane/models/deleteoutputpqbyidop.py
@@ -9,14 +9,14 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class DeleteOutputPqByIDRequestTypedDict(TypedDict):
     id: str
-    r"""Output Id"""
+    r"""Destination Id"""
 
 
 class DeleteOutputPqByIDRequest(BaseModel):
     id: Annotated[
         str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
     ]
-    r"""Output Id"""
+    r"""Destination Id"""
 
 
 class DeleteOutputPqByIDResponseTypedDict(TypedDict):
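
The same docstring-only substitution repeats in the get, samples, list, and update operation models that follow. A minimal sketch of what this means for callers, assuming the request model is re-exported from cribl_control_plane.models (the re-export list is not shown in this diff): field names and request shapes are unchanged, so existing call sites keep working.

# Minimal sketch: the Output -> Destination rename is documentation-only.
# The import path below is an assumption; the models/__init__.py re-exports
# are not shown in this diff.
from cribl_control_plane.models import DeleteOutputPqByIDRequest

req = DeleteOutputPqByIDRequest(id="my-destination")  # documented as "Destination Id" in 0.0.16
print(req.id)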

cribl_control_plane/models/getoutputbyidop.py
@@ -21,7 +21,7 @@ class GetOutputByIDRequest(BaseModel):
 
 
 class GetOutputByIDResponseTypedDict(TypedDict):
-    r"""a list of Output objects"""
+    r"""a list of Destination objects"""
 
     count: NotRequired[int]
     r"""number of items present in the items array"""
@@ -29,7 +29,7 @@ class GetOutputByIDResponseTypedDict(TypedDict):
 
 
 class GetOutputByIDResponse(BaseModel):
-    r"""a list of Output objects"""
+    r"""a list of Destination objects"""
 
     count: Optional[int] = None
     r"""number of items present in the items array"""

cribl_control_plane/models/getoutputpqbyidop.py
@@ -9,14 +9,14 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class GetOutputPqByIDRequestTypedDict(TypedDict):
     id: str
-    r"""Output Id"""
+    r"""Destination Id"""
 
 
 class GetOutputPqByIDRequest(BaseModel):
     id: Annotated[
         str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
     ]
-    r"""Output Id"""
+    r"""Destination Id"""
 
 
 class GetOutputPqByIDResponseTypedDict(TypedDict):

cribl_control_plane/models/getoutputsamplesbyidop.py
@@ -10,14 +10,14 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class GetOutputSamplesByIDRequestTypedDict(TypedDict):
     id: str
-    r"""Output Id"""
+    r"""Destination Id"""
 
 
 class GetOutputSamplesByIDRequest(BaseModel):
     id: Annotated[
         str, FieldMetadata(path=PathParamMetadata(style="simple", explode=False))
     ]
-    r"""Output Id"""
+    r"""Destination Id"""
 
 
 class GetOutputSamplesByIDResponseTypedDict(TypedDict):

cribl_control_plane/models/inputconfluentcloud.py
@@ -363,13 +363,13 @@ class InputConfluentCloudMetadatum(BaseModel):
 
 
 class InputConfluentCloudTypedDict(TypedDict):
-    type: InputConfluentCloudType
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputConfluentCloudType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -443,8 +443,6 @@ class InputConfluentCloudTypedDict(TypedDict):
 
 
 class InputConfluentCloud(BaseModel):
-    type: Annotated[InputConfluentCloudType, PlainValidator(validate_open_enum(False))]
-
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092"""
 
@@ -454,6 +452,10 @@ class InputConfluentCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputConfluentCloudType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
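
This three-part pattern repeats verbatim in each input model below: the required type key is dropped from the TypedDict, the required Annotated field is dropped from the pydantic model, and both are re-added as optional with a None default. A minimal sketch of the behavioral change, assuming InputConfluentCloud is re-exported from cribl_control_plane.models:

# Minimal sketch, import path assumed. In 0.0.15 constructing the model
# without type raised a pydantic validation error; in 0.0.16 the field is
# optional and defaults to None.
from cribl_control_plane.models import InputConfluentCloud

source = InputConfluentCloud(
    brokers=["yourAccount.confluent.cloud:9092"],  # example host from the field docstring
    topics=["my-topic"],  # hypothetical topic name
)
print(source.type)  # -> None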

cribl_control_plane/models/inputcriblhttp.py
@@ -186,11 +186,11 @@ class InputCriblHTTPMetadatum(BaseModel):
 
 
 class InputCriblHTTPTypedDict(TypedDict):
-    type: InputCriblHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputCriblHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -238,14 +238,16 @@ class InputCriblHTTPTypedDict(TypedDict):
 
 
 class InputCriblHTTP(BaseModel):
-    type: Annotated[InputCriblHTTPType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputCriblHTTPType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputcribllakehttp.py
@@ -187,11 +187,11 @@ class InputCriblLakeHTTPMetadatum(BaseModel):
 
 
 class InputCriblLakeHTTPTypedDict(TypedDict):
-    type: InputCriblLakeHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputCriblLakeHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -239,14 +239,16 @@ class InputCriblLakeHTTPTypedDict(TypedDict):
 
 
 class InputCriblLakeHTTP(BaseModel):
-    type: Annotated[InputCriblLakeHTTPType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputCriblLakeHTTPType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputcribltcp.py
@@ -186,11 +186,11 @@ class InputCriblTCPMetadatum(BaseModel):
 
 
 class InputCriblTCPTypedDict(TypedDict):
-    type: InputCriblTCPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputCriblTCPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -226,14 +226,16 @@ class InputCriblTCPTypedDict(TypedDict):
 
 
 class InputCriblTCP(BaseModel):
-    type: Annotated[InputCriblTCPType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputCriblTCPType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputdatadogagent.py
@@ -204,11 +204,11 @@ class InputDatadogAgentProxyMode(BaseModel):
 
 
 class InputDatadogAgentTypedDict(TypedDict):
-    type: InputDatadogAgentType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputDatadogAgentType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -257,14 +257,16 @@ class InputDatadogAgentTypedDict(TypedDict):
 
 
 class InputDatadogAgent(BaseModel):
-    type: Annotated[InputDatadogAgentType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputDatadogAgentType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputedgeprometheus.py
@@ -263,9 +263,9 @@ class PodFilter(BaseModel):
 
 
 class InputEdgePrometheusTypedDict(TypedDict):
-    type: InputEdgePrometheusType
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputEdgePrometheusType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -353,11 +353,13 @@ class InputEdgePrometheusTypedDict(TypedDict):
 
 
 class InputEdgePrometheus(BaseModel):
-    type: Annotated[InputEdgePrometheusType, PlainValidator(validate_open_enum(False))]
-
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputEdgePrometheusType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputelastic.py
@@ -265,11 +265,11 @@ class InputElasticProxyMode(BaseModel):
 
 
 class InputElasticTypedDict(TypedDict):
-    type: InputElasticType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputElasticType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -331,14 +331,16 @@ class InputElasticTypedDict(TypedDict):
 
 
 class InputElastic(BaseModel):
-    type: Annotated[InputElasticType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputElasticType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputeventhub.py
@@ -141,13 +141,13 @@ class InputEventhubMetadatum(BaseModel):
 
 
 class InputEventhubTypedDict(TypedDict):
-    type: InputEventhubType
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to (example: yourdomain.servicebus.windows.net:9093). The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
     topics: List[str]
     r"""The name of the Event Hub (Kafka topic) to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Event Hubs Source to only a single topic."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputEventhubType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -219,8 +219,6 @@ class InputEventhubTypedDict(TypedDict):
 
 
 class InputEventhub(BaseModel):
-    type: Annotated[InputEventhubType, PlainValidator(validate_open_enum(False))]
-
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to (example: yourdomain.servicebus.windows.net:9093). The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
 
@@ -230,6 +228,10 @@ class InputEventhub(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputEventhubType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputfirehose.py
@@ -186,11 +186,11 @@ class InputFirehoseMetadatum(BaseModel):
 
 
 class InputFirehoseTypedDict(TypedDict):
-    type: InputFirehoseType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputFirehoseType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -238,14 +238,16 @@ class InputFirehoseTypedDict(TypedDict):
 
 
 class InputFirehose(BaseModel):
-    type: Annotated[InputFirehoseType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputFirehoseType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputgooglepubsub.py
@@ -115,13 +115,13 @@ class InputGooglePubsubMetadatum(BaseModel):
 
 
 class InputGooglePubsubTypedDict(TypedDict):
-    type: InputGooglePubsubType
     topic_name: str
     r"""ID of the topic to receive events from"""
     subscription_name: str
     r"""ID of the subscription to use when receiving events"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputGooglePubsubType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -162,8 +162,6 @@ class InputGooglePubsubTypedDict(TypedDict):
 
 
 class InputGooglePubsub(BaseModel):
-    type: Annotated[InputGooglePubsubType, PlainValidator(validate_open_enum(False))]
-
     topic_name: Annotated[str, pydantic.Field(alias="topicName")]
     r"""ID of the topic to receive events from"""
 
@@ -173,6 +171,10 @@ class InputGooglePubsub(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputGooglePubsubType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputgrafana_union.py
@@ -452,11 +452,11 @@ class InputGrafanaMetadatum2(BaseModel):
 
 
 class InputGrafana2TypedDict(TypedDict):
-    type: InputGrafanaType2
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputGrafanaType2]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -508,14 +508,16 @@ class InputGrafana2TypedDict(TypedDict):
 
 
 class InputGrafana2(BaseModel):
-    type: Annotated[InputGrafanaType2, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputGrafanaType2], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -1065,11 +1067,11 @@ class InputGrafanaMetadatum1(BaseModel):
 
 
 class InputGrafana1TypedDict(TypedDict):
-    type: InputGrafanaType1
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputGrafanaType1]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -1121,14 +1123,16 @@ class InputGrafana1TypedDict(TypedDict):
 
 
 class InputGrafana1(BaseModel):
-    type: Annotated[InputGrafanaType1, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputGrafanaType1], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputhttp.py
@@ -217,11 +217,11 @@ class InputHTTPAuthTokensExt(BaseModel):
 
 
 class InputHTTPTypedDict(TypedDict):
-    type: InputHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -278,14 +278,16 @@ class InputHTTPTypedDict(TypedDict):
 
 
 class InputHTTP(BaseModel):
-    type: Annotated[InputHTTPType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputHTTPType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputhttpraw.py
@@ -217,11 +217,11 @@ class InputHTTPRawAuthTokensExt(BaseModel):
 
 
 class InputHTTPRawTypedDict(TypedDict):
-    type: InputHTTPRawType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputHTTPRawType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -279,14 +279,16 @@ class InputHTTPRawTypedDict(TypedDict):
 
 
 class InputHTTPRaw(BaseModel):
-    type: Annotated[InputHTTPRawType, PlainValidator(validate_open_enum(False))]
-
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputHTTPRawType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputjournalfiles.py
@@ -120,13 +120,13 @@ class InputJournalFilesMetadatum(BaseModel):
 
 
 class InputJournalFilesTypedDict(TypedDict):
-    type: InputJournalFilesType
     path: str
     r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
    journals: List[str]
     r"""The full path of discovered journals are matched against this wildcard list."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputJournalFilesType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -155,8 +155,6 @@ class InputJournalFilesTypedDict(TypedDict):
 
 
 class InputJournalFiles(BaseModel):
-    type: Annotated[InputJournalFilesType, PlainValidator(validate_open_enum(False))]
-
     path: str
     r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
 
@@ -166,6 +164,10 @@ class InputJournalFiles(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputJournalFilesType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkafka.py
@@ -359,13 +359,13 @@ class InputKafkaMetadatum(BaseModel):
 
 
 class InputKafkaTypedDict(TypedDict):
-    type: InputKafkaType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputKafkaType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -439,8 +439,6 @@ class InputKafkaTypedDict(TypedDict):
 
 
 class InputKafka(BaseModel):
-    type: Annotated[InputKafkaType, PlainValidator(validate_open_enum(False))]
-
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
 
@@ -450,6 +448,10 @@ class InputKafka(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputKafkaType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkinesis.py
@@ -142,13 +142,13 @@ class InputKinesisMetadatum(BaseModel):
 
 
 class InputKinesisTypedDict(TypedDict):
-    type: InputKinesisType
     stream_name: str
     r"""Kinesis Data Stream to read data from"""
     region: str
     r"""Region where the Kinesis stream is located"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
+    type: NotRequired[InputKinesisType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -209,8 +209,6 @@ class InputKinesisTypedDict(TypedDict):
 
 
 class InputKinesis(BaseModel):
-    type: Annotated[InputKinesisType, PlainValidator(validate_open_enum(False))]
-
     stream_name: Annotated[str, pydantic.Field(alias="streamName")]
     r"""Kinesis Data Stream to read data from"""
 
@@ -220,6 +218,10 @@ class InputKinesis(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
+    type: Annotated[
+        Optional[InputKinesisType], PlainValidator(validate_open_enum(False))
+    ] = None
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None