cribl-control-plane 0.0.25__py3-none-any.whl → 0.0.26__py3-none-any.whl

This diff compares the contents of publicly available package versions as published to their public registries, and is provided for informational purposes only.

Warning: this version of cribl-control-plane is marked as a potentially problematic release.

Files changed (112)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/destinations.py +10 -8
  3. cribl_control_plane/errors/healthstatus_error.py +1 -1
  4. cribl_control_plane/groups_sdk.py +10 -10
  5. cribl_control_plane/{healthinfo.py → health.py} +3 -1
  6. cribl_control_plane/models/__init__.py +3 -27
  7. cribl_control_plane/models/healthstatus.py +3 -3
  8. cribl_control_plane/models/inputappscope.py +5 -5
  9. cribl_control_plane/models/inputcollection.py +2 -2
  10. cribl_control_plane/models/inputconfluentcloud.py +3 -3
  11. cribl_control_plane/models/inputcribl.py +5 -5
  12. cribl_control_plane/models/inputcriblhttp.py +3 -3
  13. cribl_control_plane/models/inputcribllakehttp.py +3 -3
  14. cribl_control_plane/models/inputcriblmetrics.py +5 -5
  15. cribl_control_plane/models/inputcribltcp.py +3 -3
  16. cribl_control_plane/models/inputdatadogagent.py +3 -3
  17. cribl_control_plane/models/inputedgeprometheus.py +3 -3
  18. cribl_control_plane/models/inputelastic.py +3 -3
  19. cribl_control_plane/models/inputeventhub.py +3 -3
  20. cribl_control_plane/models/inputfile.py +5 -5
  21. cribl_control_plane/models/inputfirehose.py +3 -3
  22. cribl_control_plane/models/inputgooglepubsub.py +3 -3
  23. cribl_control_plane/models/inputgrafana.py +6 -6
  24. cribl_control_plane/models/inputhttp.py +3 -3
  25. cribl_control_plane/models/inputhttpraw.py +3 -3
  26. cribl_control_plane/models/inputjournalfiles.py +3 -3
  27. cribl_control_plane/models/inputkafka.py +3 -3
  28. cribl_control_plane/models/inputkinesis.py +3 -3
  29. cribl_control_plane/models/inputkubeevents.py +5 -5
  30. cribl_control_plane/models/inputkubelogs.py +5 -5
  31. cribl_control_plane/models/inputkubemetrics.py +5 -5
  32. cribl_control_plane/models/inputloki.py +3 -3
  33. cribl_control_plane/models/inputmodeldriventelemetry.py +3 -3
  34. cribl_control_plane/models/inputmsk.py +3 -3
  35. cribl_control_plane/models/inputnetflow.py +3 -3
  36. cribl_control_plane/models/inputoffice365mgmt.py +3 -3
  37. cribl_control_plane/models/inputoffice365msgtrace.py +3 -3
  38. cribl_control_plane/models/inputoffice365service.py +3 -3
  39. cribl_control_plane/models/inputopentelemetry.py +3 -3
  40. cribl_control_plane/models/inputprometheus.py +3 -3
  41. cribl_control_plane/models/inputprometheusrw.py +3 -3
  42. cribl_control_plane/models/inputrawudp.py +3 -3
  43. cribl_control_plane/models/inputsnmp.py +3 -3
  44. cribl_control_plane/models/inputsplunk.py +3 -3
  45. cribl_control_plane/models/inputsplunkhec.py +3 -3
  46. cribl_control_plane/models/inputsplunksearch.py +3 -3
  47. cribl_control_plane/models/inputsqs.py +3 -3
  48. cribl_control_plane/models/inputsystemmetrics.py +5 -5
  49. cribl_control_plane/models/inputsystemstate.py +5 -5
  50. cribl_control_plane/models/inputtcp.py +3 -3
  51. cribl_control_plane/models/inputtcpjson.py +3 -3
  52. cribl_control_plane/models/inputwef.py +3 -3
  53. cribl_control_plane/models/inputwindowsmetrics.py +5 -5
  54. cribl_control_plane/models/inputwiz.py +3 -3
  55. cribl_control_plane/models/inputzscalerhec.py +3 -3
  56. cribl_control_plane/models/outputazureblob.py +3 -3
  57. cribl_control_plane/models/outputazuredataexplorer.py +3 -3
  58. cribl_control_plane/models/outputazureeventhub.py +3 -3
  59. cribl_control_plane/models/outputclickhouse.py +3 -3
  60. cribl_control_plane/models/outputcloudwatch.py +3 -3
  61. cribl_control_plane/models/outputconfluentcloud.py +3 -3
  62. cribl_control_plane/models/outputcriblhttp.py +5 -5
  63. cribl_control_plane/models/outputcribllake.py +5 -5
  64. cribl_control_plane/models/outputcribltcp.py +5 -5
  65. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +3 -3
  66. cribl_control_plane/models/outputdatadog.py +5 -5
  67. cribl_control_plane/models/outputdataset.py +5 -5
  68. cribl_control_plane/models/outputdevnull.py +5 -5
  69. cribl_control_plane/models/outputdiskspool.py +5 -5
  70. cribl_control_plane/models/outputdls3.py +3 -3
  71. cribl_control_plane/models/outputdynatracehttp.py +3 -3
  72. cribl_control_plane/models/outputdynatraceotlp.py +3 -3
  73. cribl_control_plane/models/outputelasticcloud.py +3 -3
  74. cribl_control_plane/models/outputexabeam.py +3 -3
  75. cribl_control_plane/models/outputgooglecloudlogging.py +3 -3
  76. cribl_control_plane/models/outputgooglecloudstorage.py +3 -3
  77. cribl_control_plane/models/outputgrafanacloud.py +10 -10
  78. cribl_control_plane/models/outputgraphite.py +3 -3
  79. cribl_control_plane/models/outputhumiohec.py +3 -3
  80. cribl_control_plane/models/outputkafka.py +3 -3
  81. cribl_control_plane/models/outputkinesis.py +3 -3
  82. cribl_control_plane/models/outputminio.py +3 -3
  83. cribl_control_plane/models/outputmsk.py +3 -3
  84. cribl_control_plane/models/outputnewrelic.py +5 -5
  85. cribl_control_plane/models/outputnewrelicevents.py +3 -3
  86. cribl_control_plane/models/outputring.py +5 -5
  87. cribl_control_plane/models/outputs3.py +3 -3
  88. cribl_control_plane/models/outputsecuritylake.py +3 -3
  89. cribl_control_plane/models/outputsentinel.py +3 -3
  90. cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
  91. cribl_control_plane/models/outputservicenow.py +3 -3
  92. cribl_control_plane/models/outputsns.py +3 -3
  93. cribl_control_plane/models/outputsplunk.py +3 -3
  94. cribl_control_plane/models/outputsplunkhec.py +5 -5
  95. cribl_control_plane/models/outputsqs.py +3 -3
  96. cribl_control_plane/models/outputstatsd.py +3 -3
  97. cribl_control_plane/models/outputstatsdext.py +3 -3
  98. cribl_control_plane/models/outputsyslog.py +5 -5
  99. cribl_control_plane/models/outputtcpjson.py +5 -5
  100. cribl_control_plane/models/outputwebhook.py +5 -5
  101. cribl_control_plane/models/outputxsiam.py +5 -5
  102. cribl_control_plane/nodes.py +252 -68
  103. cribl_control_plane/sdk.py +8 -12
  104. cribl_control_plane/sources.py +2 -0
  105. cribl_control_plane/versioning.py +10 -10
  106. {cribl_control_plane-0.0.25.dist-info → cribl_control_plane-0.0.26.dist-info}/METADATA +20 -27
  107. {cribl_control_plane-0.0.25.dist-info → cribl_control_plane-0.0.26.dist-info}/RECORD +108 -112
  108. cribl_control_plane/deployments.py +0 -185
  109. cribl_control_plane/models/restartresponse.py +0 -26
  110. cribl_control_plane/models/updateworkersrestartop.py +0 -24
  111. cribl_control_plane/workers_sdk.py +0 -187
  112. {cribl_control_plane-0.0.25.dist-info → cribl_control_plane-0.0.26.dist-info}/WHEEL +0 -0
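
The model diffs that follow all apply the same two changes to each Source input model: the type discriminator becomes a required field declared first on the class, and id becomes optional (NotRequired[str] on the TypedDict, Optional[str] = None on the pydantic model) where 0.0.25 required it. Below is a minimal sketch of what that means for calling code; it assumes the models are re-exported from cribl_control_plane.models and that "file" is a valid InputFileType value, neither of which is confirmed by this diff.

# Illustrative sketch only: the import path, constructor shape, and enum value
# are assumptions inferred from the field changes shown in this diff.
from cribl_control_plane import models

# 0.0.25: InputFile required an explicit id (id: str).
# 0.0.26: type is required, id is Optional[str] = None.
source = models.InputFile(
    type="file",      # required discriminator (InputFileType); string value assumed
    # id="in-file",   # now optional; may be omitted
    disabled=False,
)

Making type mandatory while relaxing id is consistent with a discriminated-union style of input configuration, but that reading is an inference from the diff, not something stated in the release.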
cribl_control_plane/models/inputfile.py
@@ -104,9 +104,9 @@ class InputFileMetadatum(BaseModel):
 
 
 class InputFileTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputFileType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -158,11 +158,11 @@ class InputFileTypedDict(TypedDict):
 
 
 class InputFile(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputFileType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputfirehose.py
@@ -171,11 +171,11 @@ class InputFirehoseMetadatum(BaseModel):
 
 
 class InputFirehoseTypedDict(TypedDict):
+    type: InputFirehoseType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputFirehoseType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -223,14 +223,14 @@ class InputFirehoseTypedDict(TypedDict):
 
 
 class InputFirehose(BaseModel):
+    type: InputFirehoseType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputFirehoseType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputgooglepubsub.py
@@ -105,11 +105,11 @@ class InputGooglePubsubMetadatum(BaseModel):
 
 
 class InputGooglePubsubTypedDict(TypedDict):
+    type: InputGooglePubsubType
     subscription_name: str
     r"""ID of the subscription to use when receiving events. When Monitor subscription is enabled, the fully qualified subscription name must be entered. Example: projects/myProject/subscriptions/mySubscription"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputGooglePubsubType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -154,14 +154,14 @@ class InputGooglePubsubTypedDict(TypedDict):
 
 
 class InputGooglePubsub(BaseModel):
+    type: InputGooglePubsubType
+
     subscription_name: Annotated[str, pydantic.Field(alias="subscriptionName")]
     r"""ID of the subscription to use when receiving events. When Monitor subscription is enabled, the fully qualified subscription name must be entered. Example: projects/myProject/subscriptions/mySubscription"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputGooglePubsubType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputgrafana.py
@@ -429,11 +429,11 @@ class InputGrafanaMetadatum2(BaseModel):
 
 
 class InputGrafanaGrafana2TypedDict(TypedDict):
+    type: InputGrafanaType2
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputGrafanaType2]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -487,14 +487,14 @@ class InputGrafanaGrafana2TypedDict(TypedDict):
 
 
 class InputGrafanaGrafana2(BaseModel):
+    type: InputGrafanaType2
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputGrafanaType2] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -1029,11 +1029,11 @@ class InputGrafanaMetadatum1(BaseModel):
 
 
 class InputGrafanaGrafana1TypedDict(TypedDict):
+    type: InputGrafanaType1
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputGrafanaType1]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -1087,14 +1087,14 @@ class InputGrafanaGrafana1TypedDict(TypedDict):
 
 
 class InputGrafanaGrafana1(BaseModel):
+    type: InputGrafanaType1
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputGrafanaType1] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputhttp.py
@@ -202,11 +202,11 @@ class InputHTTPAuthTokensExt(BaseModel):
 
 
 class InputHTTPTypedDict(TypedDict):
+    type: InputHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -263,14 +263,14 @@ class InputHTTPTypedDict(TypedDict):
 
 
 class InputHTTP(BaseModel):
+    type: InputHTTPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputHTTPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputhttpraw.py
@@ -202,11 +202,11 @@ class InputHTTPRawAuthTokensExt(BaseModel):
 
 
 class InputHTTPRawTypedDict(TypedDict):
+    type: InputHTTPRawType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputHTTPRawType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -264,14 +264,14 @@ class InputHTTPRawTypedDict(TypedDict):
 
 
 class InputHTTPRaw(BaseModel):
+    type: InputHTTPRawType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputHTTPRawType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputjournalfiles.py
@@ -112,13 +112,13 @@ class InputJournalFilesMetadatum(BaseModel):
 
 
 class InputJournalFilesTypedDict(TypedDict):
+    type: InputJournalFilesType
     path: str
     r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
     journals: List[str]
     r"""The full path of discovered journals are matched against this wildcard list."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputJournalFilesType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -147,6 +147,8 @@ class InputJournalFilesTypedDict(TypedDict):
 
 
 class InputJournalFiles(BaseModel):
+    type: InputJournalFilesType
+
     path: str
     r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
 
@@ -156,8 +158,6 @@ class InputJournalFiles(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputJournalFilesType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkafka.py
@@ -346,13 +346,13 @@ class InputKafkaMetadatum(BaseModel):
 
 
 class InputKafkaTypedDict(TypedDict):
+    type: InputKafkaType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputKafkaType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -426,6 +426,8 @@ class InputKafkaTypedDict(TypedDict):
 
 
 class InputKafka(BaseModel):
+    type: InputKafkaType
+
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
 
@@ -435,8 +437,6 @@ class InputKafka(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputKafkaType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkinesis.py
@@ -135,13 +135,13 @@ class InputKinesisMetadatum(BaseModel):
 
 
 class InputKinesisTypedDict(TypedDict):
+    type: InputKinesisType
     stream_name: str
    r"""Kinesis Data Stream to read data from"""
     region: str
     r"""Region where the Kinesis stream is located"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputKinesisType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -202,6 +202,8 @@ class InputKinesisTypedDict(TypedDict):
 
 
 class InputKinesis(BaseModel):
+    type: InputKinesisType
+
     stream_name: Annotated[str, pydantic.Field(alias="streamName")]
     r"""Kinesis Data Stream to read data from"""
 
@@ -211,8 +213,6 @@ class InputKinesis(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputKinesisType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkubeevents.py
@@ -112,9 +112,9 @@ class InputKubeEventsMetadatum(BaseModel):
 
 
 class InputKubeEventsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputKubeEventsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -137,11 +137,11 @@ class InputKubeEventsTypedDict(TypedDict):
 
 
 class InputKubeEvents(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputKubeEventsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkubelogs.py
@@ -151,9 +151,9 @@ class InputKubeLogsDiskSpooling(BaseModel):
 
 
 class InputKubeLogsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputKubeLogsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -187,11 +187,11 @@ class InputKubeLogsTypedDict(TypedDict):
 
 
 class InputKubeLogs(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputKubeLogsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputkubemetrics.py
@@ -154,9 +154,9 @@ class InputKubeMetricsPersistence(BaseModel):
 
 
 class InputKubeMetricsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputKubeMetricsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -182,11 +182,11 @@ class InputKubeMetricsTypedDict(TypedDict):
 
 
 class InputKubeMetrics(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputKubeMetricsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputloki.py
@@ -212,11 +212,11 @@ class InputLokiOauthHeader(BaseModel):
 
 
 class InputLokiTypedDict(TypedDict):
+    type: InputLokiType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputLokiType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -292,14 +292,14 @@ class InputLokiTypedDict(TypedDict):
 
 
 class InputLoki(BaseModel):
+    type: InputLokiType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputLokiType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputmodeldriventelemetry.py
@@ -170,9 +170,9 @@ class InputModelDrivenTelemetryMetadatum(BaseModel):
 
 
 class InputModelDrivenTelemetryTypedDict(TypedDict):
+    type: InputModelDrivenTelemetryType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputModelDrivenTelemetryType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -202,11 +202,11 @@ class InputModelDrivenTelemetryTypedDict(TypedDict):
 
 
 class InputModelDrivenTelemetry(BaseModel):
+    type: InputModelDrivenTelemetryType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputModelDrivenTelemetryType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputmsk.py
@@ -339,6 +339,7 @@ class InputMskTLSSettingsClientSide(BaseModel):
 
 
 class InputMskTypedDict(TypedDict):
+    type: InputMskType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
     topics: List[str]
@@ -347,7 +348,6 @@ class InputMskTypedDict(TypedDict):
     r"""Region where the MSK cluster is located"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputMskType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -441,6 +441,8 @@ class InputMskTypedDict(TypedDict):
 
 
 class InputMsk(BaseModel):
+    type: InputMskType
+
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
 
@@ -453,8 +455,6 @@ class InputMsk(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputMskType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputnetflow.py
@@ -97,9 +97,9 @@ class InputNetflowMetadatum(BaseModel):
 
 
 class InputNetflowTypedDict(TypedDict):
+    type: InputNetflowType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputNetflowType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -140,11 +140,11 @@ class InputNetflowTypedDict(TypedDict):
 
 
 class InputNetflow(BaseModel):
+    type: InputNetflowType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputNetflowType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputoffice365mgmt.py
@@ -211,13 +211,13 @@ class InputOffice365MgmtAuthenticationMethod(str, Enum):
 
 
 class InputOffice365MgmtTypedDict(TypedDict):
+    type: InputOffice365MgmtType
     tenant_id: str
     r"""Office 365 Azure Tenant ID"""
     app_id: str
     r"""Office 365 Azure Application ID"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputOffice365MgmtType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -265,6 +265,8 @@ class InputOffice365MgmtTypedDict(TypedDict):
 
 
 class InputOffice365Mgmt(BaseModel):
+    type: InputOffice365MgmtType
+
     tenant_id: Annotated[str, pydantic.Field(alias="tenantId")]
     r"""Office 365 Azure Tenant ID"""
 
@@ -274,8 +276,6 @@ class InputOffice365Mgmt(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputOffice365MgmtType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputoffice365msgtrace.py
@@ -216,9 +216,9 @@ class CertOptions(BaseModel):
 
 
 class InputOffice365MsgTraceTypedDict(TypedDict):
+    type: InputOffice365MsgTraceType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputOffice365MsgTraceType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -289,11 +289,11 @@ class InputOffice365MsgTraceTypedDict(TypedDict):
 
 
 class InputOffice365MsgTrace(BaseModel):
+    type: InputOffice365MsgTraceType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputOffice365MsgTraceType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputoffice365service.py
@@ -213,13 +213,13 @@ class InputOffice365ServiceAuthenticationMethod(str, Enum):
 
 
 class InputOffice365ServiceTypedDict(TypedDict):
+    type: InputOffice365ServiceType
     tenant_id: str
     r"""Office 365 Azure Tenant ID"""
     app_id: str
     r"""Office 365 Azure Application ID"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputOffice365ServiceType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -263,6 +263,8 @@ class InputOffice365ServiceTypedDict(TypedDict):
 
 
 class InputOffice365Service(BaseModel):
+    type: InputOffice365ServiceType
+
     tenant_id: Annotated[str, pydantic.Field(alias="tenantId")]
     r"""Office 365 Azure Tenant ID"""
 
@@ -272,8 +274,6 @@ class InputOffice365Service(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputOffice365ServiceType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputopentelemetry.py
@@ -230,9 +230,9 @@ class InputOpenTelemetryOauthHeader(BaseModel):
 
 
 class InputOpenTelemetryTypedDict(TypedDict):
+    type: InputOpenTelemetryType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputOpenTelemetryType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -315,11 +315,11 @@ class InputOpenTelemetryTypedDict(TypedDict):
 
 
 class InputOpenTelemetry(BaseModel):
+    type: InputOpenTelemetryType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputOpenTelemetryType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputprometheus.py
@@ -166,9 +166,9 @@ class InputPrometheusSignatureVersion(str, Enum):
 
 
 class InputPrometheusTypedDict(TypedDict):
+    type: InputPrometheusType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputPrometheusType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -254,11 +254,11 @@ class InputPrometheusTypedDict(TypedDict):
 
 
 class InputPrometheus(BaseModel):
+    type: InputPrometheusType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputPrometheusType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputprometheusrw.py
@@ -212,11 +212,11 @@ class InputPrometheusRwOauthHeader(BaseModel):
 
 
 class InputPrometheusRwTypedDict(TypedDict):
+    type: InputPrometheusRwType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputPrometheusRwType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -290,14 +290,14 @@ class InputPrometheusRwTypedDict(TypedDict):
 
 
 class InputPrometheusRw(BaseModel):
+    type: InputPrometheusRwType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputPrometheusRwType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

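
The same shift applies to the TypedDict variants used for dict-style payloads: under 0.0.26 the "type" key is required while "id" may be omitted. A hedged sketch using InputPrometheusRwTypedDict from the hunk above; the key names and which keys are required come from the diff, but the string value supplied for the type field is an assumption.

from cribl_control_plane.models import InputPrometheusRwTypedDict

# "type" and "port" are required keys in 0.0.26; "id" is NotRequired.
payload: InputPrometheusRwTypedDict = {
    "type": "prometheus_rw",  # assumed value for InputPrometheusRwType
    "port": 9090,             # still required: port to listen on
    # "id" omitted; it is optional in 0.0.26
}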