cribl-control-plane 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (115)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/destinations.py +10 -8
  3. cribl_control_plane/errors/healthstatus_error.py +1 -1
  4. cribl_control_plane/groups_sdk.py +10 -10
  5. cribl_control_plane/{healthinfo.py → health.py} +3 -1
  6. cribl_control_plane/models/__init__.py +21 -27
  7. cribl_control_plane/models/healthstatus.py +3 -3
  8. cribl_control_plane/models/input.py +4 -4
  9. cribl_control_plane/models/inputappscope.py +5 -5
  10. cribl_control_plane/models/inputcollection.py +2 -2
  11. cribl_control_plane/models/inputconfluentcloud.py +17 -3
  12. cribl_control_plane/models/inputcribl.py +5 -5
  13. cribl_control_plane/models/inputcriblhttp.py +3 -3
  14. cribl_control_plane/models/inputcribllakehttp.py +3 -3
  15. cribl_control_plane/models/inputcriblmetrics.py +5 -5
  16. cribl_control_plane/models/inputcribltcp.py +3 -3
  17. cribl_control_plane/models/inputdatadogagent.py +3 -3
  18. cribl_control_plane/models/inputedgeprometheus.py +3 -3
  19. cribl_control_plane/models/inputelastic.py +3 -3
  20. cribl_control_plane/models/inputeventhub.py +3 -3
  21. cribl_control_plane/models/inputfile.py +5 -5
  22. cribl_control_plane/models/inputfirehose.py +3 -3
  23. cribl_control_plane/models/inputgooglepubsub.py +16 -9
  24. cribl_control_plane/models/inputgrafana.py +20 -6
  25. cribl_control_plane/models/inputhttp.py +3 -3
  26. cribl_control_plane/models/inputhttpraw.py +3 -3
  27. cribl_control_plane/models/inputjournalfiles.py +3 -3
  28. cribl_control_plane/models/inputkafka.py +17 -3
  29. cribl_control_plane/models/inputkinesis.py +3 -3
  30. cribl_control_plane/models/inputkubeevents.py +5 -5
  31. cribl_control_plane/models/inputkubelogs.py +5 -5
  32. cribl_control_plane/models/inputkubemetrics.py +5 -5
  33. cribl_control_plane/models/inputloki.py +10 -3
  34. cribl_control_plane/models/inputmodeldriventelemetry.py +3 -3
  35. cribl_control_plane/models/inputmsk.py +17 -3
  36. cribl_control_plane/models/inputnetflow.py +3 -3
  37. cribl_control_plane/models/inputoffice365mgmt.py +3 -3
  38. cribl_control_plane/models/inputoffice365msgtrace.py +3 -3
  39. cribl_control_plane/models/inputoffice365service.py +3 -3
  40. cribl_control_plane/models/inputopentelemetry.py +3 -3
  41. cribl_control_plane/models/inputprometheus.py +3 -3
  42. cribl_control_plane/models/inputprometheusrw.py +3 -3
  43. cribl_control_plane/models/inputrawudp.py +3 -3
  44. cribl_control_plane/models/inputsnmp.py +3 -3
  45. cribl_control_plane/models/inputsplunk.py +3 -3
  46. cribl_control_plane/models/inputsplunkhec.py +3 -3
  47. cribl_control_plane/models/inputsplunksearch.py +3 -3
  48. cribl_control_plane/models/inputsqs.py +3 -3
  49. cribl_control_plane/models/inputsystemmetrics.py +5 -5
  50. cribl_control_plane/models/inputsystemstate.py +5 -5
  51. cribl_control_plane/models/inputtcp.py +3 -3
  52. cribl_control_plane/models/inputtcpjson.py +3 -3
  53. cribl_control_plane/models/inputwef.py +3 -3
  54. cribl_control_plane/models/inputwindowsmetrics.py +5 -5
  55. cribl_control_plane/models/inputwiz.py +3 -3
  56. cribl_control_plane/models/inputzscalerhec.py +3 -3
  57. cribl_control_plane/models/output.py +14 -14
  58. cribl_control_plane/models/outputazureblob.py +3 -3
  59. cribl_control_plane/models/outputazuredataexplorer.py +3 -3
  60. cribl_control_plane/models/outputazureeventhub.py +3 -3
  61. cribl_control_plane/models/outputclickhouse.py +3 -3
  62. cribl_control_plane/models/outputcloudwatch.py +3 -3
  63. cribl_control_plane/models/outputconfluentcloud.py +17 -3
  64. cribl_control_plane/models/outputcriblhttp.py +5 -5
  65. cribl_control_plane/models/outputcribllake.py +5 -5
  66. cribl_control_plane/models/outputcribltcp.py +5 -5
  67. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +3 -3
  68. cribl_control_plane/models/outputdatadog.py +5 -5
  69. cribl_control_plane/models/outputdataset.py +5 -5
  70. cribl_control_plane/models/outputdevnull.py +5 -5
  71. cribl_control_plane/models/outputdiskspool.py +5 -5
  72. cribl_control_plane/models/outputdls3.py +5 -5
  73. cribl_control_plane/models/outputdynatracehttp.py +3 -3
  74. cribl_control_plane/models/outputdynatraceotlp.py +3 -3
  75. cribl_control_plane/models/outputelasticcloud.py +3 -3
  76. cribl_control_plane/models/outputexabeam.py +3 -3
  77. cribl_control_plane/models/outputgooglecloudlogging.py +3 -3
  78. cribl_control_plane/models/outputgooglecloudstorage.py +5 -5
  79. cribl_control_plane/models/outputgrafanacloud.py +24 -10
  80. cribl_control_plane/models/outputgraphite.py +3 -3
  81. cribl_control_plane/models/outputhumiohec.py +3 -3
  82. cribl_control_plane/models/outputkafka.py +17 -3
  83. cribl_control_plane/models/outputkinesis.py +3 -3
  84. cribl_control_plane/models/outputloki.py +14 -0
  85. cribl_control_plane/models/outputminio.py +3 -3
  86. cribl_control_plane/models/outputmsk.py +17 -3
  87. cribl_control_plane/models/outputnewrelic.py +5 -5
  88. cribl_control_plane/models/outputnewrelicevents.py +3 -3
  89. cribl_control_plane/models/outputring.py +5 -5
  90. cribl_control_plane/models/outputs3.py +5 -5
  91. cribl_control_plane/models/outputsecuritylake.py +3 -3
  92. cribl_control_plane/models/outputsentinel.py +3 -3
  93. cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
  94. cribl_control_plane/models/outputservicenow.py +3 -3
  95. cribl_control_plane/models/outputsns.py +3 -3
  96. cribl_control_plane/models/outputsplunk.py +3 -3
  97. cribl_control_plane/models/outputsplunkhec.py +5 -5
  98. cribl_control_plane/models/outputsqs.py +3 -3
  99. cribl_control_plane/models/outputstatsd.py +3 -3
  100. cribl_control_plane/models/outputstatsdext.py +3 -3
  101. cribl_control_plane/models/outputsyslog.py +5 -5
  102. cribl_control_plane/models/outputtcpjson.py +5 -5
  103. cribl_control_plane/models/outputwebhook.py +5 -5
  104. cribl_control_plane/models/outputxsiam.py +5 -5
  105. cribl_control_plane/nodes.py +100 -90
  106. cribl_control_plane/pipelines.py +20 -20
  107. cribl_control_plane/sdk.py +6 -6
  108. cribl_control_plane/sources.py +2 -0
  109. cribl_control_plane/versioning.py +14 -14
  110. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/METADATA +24 -28
  111. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/RECORD +112 -115
  112. cribl_control_plane/deployments.py +0 -185
  113. cribl_control_plane/models/restartresponse.py +0 -26
  114. cribl_control_plane/models/updateworkersrestartop.py +0 -24
  115. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/WHEEL +0 -0

cribl_control_plane/models/inputtcpjson.py

@@ -178,11 +178,11 @@ class InputTcpjsonAuthenticationMethod(str, Enum):
 
 
 class InputTcpjsonTypedDict(TypedDict):
+    type: InputTcpjsonType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputTcpjsonType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -226,14 +226,14 @@ class InputTcpjsonTypedDict(TypedDict):
 
 
 class InputTcpjson(BaseModel):
+    type: InputTcpjsonType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputTcpjsonType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
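
In 0.0.26 the `type` discriminator on source models such as `InputTcpjson` moves from optional to required, so omitting it now fails model validation. A minimal sketch of the new construction pattern, assuming the models package re-exports these names and that the enum exposes a member along these lines (verify against the `InputTcpjsonType` shipped in your installed version):

```python
from cribl_control_plane.models import InputTcpjson, InputTcpjsonType

# 0.0.24 let `type` be omitted; 0.0.26 requires it at construction time.
source = InputTcpjson(
    type=InputTcpjsonType.TCPJSON,  # hypothetical member name -- check the enum
    port=10070,
    id="in-tcpjson-example",
)
```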

cribl_control_plane/models/inputwef.py

@@ -298,11 +298,11 @@ class InputWefMetadatum(BaseModel):
 
 
 class InputWefTypedDict(TypedDict):
+    type: InputWefType
     subscriptions: List[SubscriptionTypedDict]
     r"""Subscriptions to events on forwarding endpoints"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputWefType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -358,14 +358,14 @@ class InputWefTypedDict(TypedDict):
 
 
 class InputWef(BaseModel):
+    type: InputWefType
+
     subscriptions: List[Subscription]
     r"""Subscriptions to events on forwarding endpoints"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputWefType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputwindowsmetrics.py

@@ -358,9 +358,9 @@ class InputWindowsMetricsPersistence(BaseModel):
 
 
 class InputWindowsMetricsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputWindowsMetricsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -388,11 +388,11 @@ class InputWindowsMetricsTypedDict(TypedDict):
 
 
 class InputWindowsMetrics(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputWindowsMetricsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
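
The Windows Metrics models flip the other way: `id` drops from required to optional (defaulting to `None`) while `type` stays required. A hedged sketch, again assuming the re-exported names and an illustrative enum member:

```python
from cribl_control_plane.models import InputWindowsMetrics, InputWindowsMetricsType

# `id` may now be omitted; it stays None until one is assigned.
metrics_source = InputWindowsMetrics(
    type=InputWindowsMetricsType.WINDOWS_METRICS,  # hypothetical member name
)
print(metrics_source.id)  # None
```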

cribl_control_plane/models/inputwiz.py

@@ -181,6 +181,7 @@ class InputWizAuthenticationMethod(str, Enum):
 
 
 class InputWizTypedDict(TypedDict):
+    type: InputWizType
     auth_url: str
     r"""The authentication URL to generate an OAuth token"""
     client_id: str
@@ -188,7 +189,6 @@ class InputWizTypedDict(TypedDict):
     content_config: List[InputWizContentConfigTypedDict]
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputWizType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -230,6 +230,8 @@ class InputWizTypedDict(TypedDict):
 
 
 class InputWiz(BaseModel):
+    type: InputWizType
+
     auth_url: Annotated[str, pydantic.Field(alias="authUrl")]
     r"""The authentication URL to generate an OAuth token"""
 
@@ -243,8 +245,6 @@ class InputWiz(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputWizType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/inputzscalerhec.py

@@ -227,11 +227,11 @@ class InputZscalerHecMetadatum(BaseModel):
 
 
 class InputZscalerHecTypedDict(TypedDict):
+    type: InputZscalerHecType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputZscalerHecType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -290,14 +290,14 @@ class InputZscalerHecTypedDict(TypedDict):
 
 
 class InputZscalerHec(BaseModel):
+    type: InputZscalerHecType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputZscalerHecType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None

cribl_control_plane/models/output.py

@@ -90,25 +90,25 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputSnmpTypedDict,
         OutputNetflowTypedDict,
+        OutputSnmpTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
+        OutputStatsdExtTypedDict,
         OutputGraphiteTypedDict,
         OutputStatsdTypedDict,
-        OutputStatsdExtTypedDict,
         OutputGooglePubsubTypedDict,
         OutputCriblTCPTypedDict,
-        OutputSplunkTypedDict,
         OutputSnsTypedDict,
+        OutputSplunkTypedDict,
         OutputCloudwatchTypedDict,
         OutputSyslogTypedDict,
         OutputAzureEventhubTypedDict,
         OutputWavefrontTypedDict,
         OutputSignalfxTypedDict,
         OutputHoneycombTypedDict,
-        OutputTcpjsonTypedDict,
         OutputSumoLogicTypedDict,
+        OutputTcpjsonTypedDict,
         OutputHumioHecTypedDict,
         OutputElasticCloudTypedDict,
         OutputCrowdstrikeNextGenSiemTypedDict,
@@ -125,15 +125,15 @@ OutputTypedDict = TypeAliasType(
         OutputXsiamTypedDict,
         OutputFilesystemTypedDict,
         OutputDatasetTypedDict,
-        OutputLokiTypedDict,
         OutputSplunkHecTypedDict,
         OutputDynatraceHTTPTypedDict,
         OutputServiceNowTypedDict,
+        OutputLokiTypedDict,
         OutputDynatraceOtlpTypedDict,
-        OutputElasticTypedDict,
         OutputGoogleChronicleTypedDict,
-        OutputCriblLakeTypedDict,
+        OutputElasticTypedDict,
         OutputDatadogTypedDict,
+        OutputCriblLakeTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -161,25 +161,25 @@ Output = TypeAliasType(
         OutputDevnull,
         OutputDefault,
         OutputRouter,
-        OutputSnmp,
         OutputNetflow,
+        OutputSnmp,
         OutputDiskSpool,
         OutputRing,
+        OutputStatsdExt,
         OutputGraphite,
         OutputStatsd,
-        OutputStatsdExt,
         OutputGooglePubsub,
         OutputCriblTCP,
-        OutputSplunk,
         OutputSns,
+        OutputSplunk,
         OutputCloudwatch,
         OutputSyslog,
         OutputAzureEventhub,
         OutputWavefront,
         OutputSignalfx,
         OutputHoneycomb,
-        OutputTcpjson,
         OutputSumoLogic,
+        OutputTcpjson,
         OutputHumioHec,
         OutputElasticCloud,
         OutputCrowdstrikeNextGenSiem,
@@ -196,15 +196,15 @@ Output = TypeAliasType(
         OutputXsiam,
         OutputFilesystem,
         OutputDataset,
-        OutputLoki,
         OutputSplunkHec,
         OutputDynatraceHTTP,
         OutputServiceNow,
+        OutputLoki,
         OutputDynatraceOtlp,
-        OutputElastic,
         OutputGoogleChronicle,
-        OutputCriblLake,
+        OutputElastic,
         OutputDatadog,
+        OutputCriblLake,
         OutputPrometheus,
         OutputMsk,
         OutputSentinelOneAiSiem,

cribl_control_plane/models/outputazureblob.py

@@ -101,11 +101,11 @@ class OutputAzureBlobCertificate(BaseModel):
 
 
 class OutputAzureBlobTypedDict(TypedDict):
+    type: OutputAzureBlobType
    container_name: str
     r"""The Azure Blob Storage container name. Name can include only lowercase letters, numbers, and hyphens. For dynamic container names, enter a JavaScript expression within quotes or backticks, to be evaluated at initialization. The expression can evaluate to a constant value and can reference Global Variables, such as `myContainer-${C.env[\"CRIBL_WORKER_ID\"]}`."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputAzureBlobType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -205,14 +205,14 @@ class OutputAzureBlobTypedDict(TypedDict):
 
 
 class OutputAzureBlob(BaseModel):
+    type: OutputAzureBlobType
+
     container_name: Annotated[str, pydantic.Field(alias="containerName")]
     r"""The Azure Blob Storage container name. Name can include only lowercase letters, numbers, and hyphens. For dynamic container names, enter a JavaScript expression within quotes or backticks, to be evaluated at initialization. The expression can evaluate to a constant value and can reference Global Variables, such as `myContainer-${C.env[\"CRIBL_WORKER_ID\"]}`."""
 
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputAzureBlobType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputazuredataexplorer.py

@@ -211,6 +211,7 @@ class OutputAzureDataExplorerPqControls(BaseModel):
 
 
 class OutputAzureDataExplorerTypedDict(TypedDict):
+    type: OutputAzureDataExplorerType
     cluster_url: str
     r"""The base URI for your cluster. Typically, `https://<cluster>.<region>.kusto.windows.net`."""
     database: str
@@ -225,7 +226,6 @@ class OutputAzureDataExplorerTypedDict(TypedDict):
     r"""Scope to pass in the OAuth request parameter"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputAzureDataExplorerType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -339,6 +339,8 @@ class OutputAzureDataExplorerTypedDict(TypedDict):
 
 
 class OutputAzureDataExplorer(BaseModel):
+    type: OutputAzureDataExplorerType
+
     cluster_url: Annotated[str, pydantic.Field(alias="clusterUrl")]
     r"""The base URI for your cluster. Typically, `https://<cluster>.<region>.kusto.windows.net`."""
 
@@ -360,8 +362,6 @@ class OutputAzureDataExplorer(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputAzureDataExplorerType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputazureeventhub.py

@@ -103,13 +103,13 @@ class OutputAzureEventhubPqControls(BaseModel):
 
 
 class OutputAzureEventhubTypedDict(TypedDict):
+    type: OutputAzureEventhubType
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to, eg. yourdomain.servicebus.windows.net:9093. The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
     topic: str
     r"""The name of the Event Hub (Kafka Topic) to publish events. Can be overwritten using field __topicOut."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputAzureEventhubType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -166,6 +166,8 @@ class OutputAzureEventhubTypedDict(TypedDict):
 
 
 class OutputAzureEventhub(BaseModel):
+    type: OutputAzureEventhubType
+
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to, eg. yourdomain.servicebus.windows.net:9093. The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
 
@@ -175,8 +177,6 @@ class OutputAzureEventhub(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputAzureEventhubType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputclickhouse.py

@@ -264,6 +264,7 @@ class OutputClickHousePqControls(BaseModel):
 
 
 class OutputClickHouseTypedDict(TypedDict):
+    type: OutputClickHouseType
     url: str
     r"""URL of the ClickHouse instance. Example: http://localhost:8123/"""
     database: str
@@ -271,7 +272,6 @@ class OutputClickHouseTypedDict(TypedDict):
     r"""Name of the ClickHouse table where data will be inserted. Name can contain letters (A-Z, a-z), numbers (0-9), and the character \"_\", and must start with either a letter or the character \"_\"."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputClickHouseType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -374,6 +374,8 @@ class OutputClickHouseTypedDict(TypedDict):
 
 
 class OutputClickHouse(BaseModel):
+    type: OutputClickHouseType
+
     url: str
     r"""URL of the ClickHouse instance. Example: http://localhost:8123/"""
 
@@ -385,8 +387,6 @@ class OutputClickHouse(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputClickHouseType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputcloudwatch.py

@@ -59,6 +59,7 @@ class OutputCloudwatchPqControls(BaseModel):
 
 
 class OutputCloudwatchTypedDict(TypedDict):
+    type: OutputCloudwatchType
     log_group_name: str
     r"""CloudWatch log group to associate events with"""
     log_stream_name: str
@@ -67,7 +68,6 @@ class OutputCloudwatchTypedDict(TypedDict):
     r"""Region where the CloudWatchLogs is located"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputCloudwatchType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -121,6 +121,8 @@ class OutputCloudwatchTypedDict(TypedDict):
 
 
 class OutputCloudwatch(BaseModel):
+    type: OutputCloudwatchType
+
     log_group_name: Annotated[str, pydantic.Field(alias="logGroupName")]
     r"""CloudWatch log group to associate events with"""
 
@@ -133,8 +135,6 @@ class OutputCloudwatch(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputCloudwatchType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputconfluentcloud.py

@@ -114,6 +114,13 @@ class OutputConfluentCloudCompression(str, Enum):
     LZ4 = "lz4"
 
 
+class OutputConfluentCloudSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -214,6 +221,8 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -239,6 +248,11 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[OutputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
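
Alongside the `type` change below, the Confluent Cloud schema registry block gains a `schema_type` setting (`schemaType` on the wire) that defaults to Avro, with JSON as the alternative; the matching +17 -3 deltas on outputkafka.py and outputmsk.py suggest the same addition there. A minimal sketch, assuming the generated pydantic v2 models accept population by field name, as Speakeasy SDKs typically do:

```python
from cribl_control_plane.models import (
    OutputConfluentCloudKafkaSchemaRegistryAuthentication,
    OutputConfluentCloudSchemaType,
)

# New in 0.0.26: choose the schema format used to encode and decode event data.
registry = OutputConfluentCloudKafkaSchemaRegistryAuthentication(
    schema_type=OutputConfluentCloudSchemaType.JSON,  # default is AVRO
)
# Dumping by alias should emit the wire-format key `schemaType`.
print(registry.model_dump(by_alias=True).get("schemaType"))
```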

cribl_control_plane/models/outputconfluentcloud.py (continued)

@@ -331,13 +345,13 @@ class OutputConfluentCloudPqControls(BaseModel):
 
 
 class OutputConfluentCloudTypedDict(TypedDict):
+    type: OutputConfluentCloudType
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092."""
     topic: str
     r"""The topic to publish events to. Can be overridden using the __topicOut field."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputConfluentCloudType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -401,6 +415,8 @@ class OutputConfluentCloudTypedDict(TypedDict):
 
 
 class OutputConfluentCloud(BaseModel):
+    type: OutputConfluentCloudType
+
     brokers: List[str]
     r"""List of Confluent Cloud bootstrap servers to use, such as yourAccount.confluent.cloud:9092."""
 
@@ -410,8 +426,6 @@ class OutputConfluentCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputConfluentCloudType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputcriblhttp.py

@@ -221,9 +221,9 @@ class OutputCriblHTTPPqControls(BaseModel):
 
 
 class OutputCriblHTTPTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputCriblHTTPType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -299,11 +299,11 @@ class OutputCriblHTTPTypedDict(TypedDict):
 
 
 class OutputCriblHTTP(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputCriblHTTPType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputcribllake.py

@@ -76,9 +76,9 @@ class OutputCriblLakeFormat(str, Enum):
 
 
 class OutputCriblLakeTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputCriblLakeType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -166,11 +166,11 @@ class OutputCriblLakeTypedDict(TypedDict):
 
 
 class OutputCriblLake(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputCriblLakeType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputcribltcp.py

@@ -170,9 +170,9 @@ class OutputCriblTCPPqControls(BaseModel):
 
 
 class OutputCriblTCPTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputCriblTCPType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -231,11 +231,11 @@ class OutputCriblTCPTypedDict(TypedDict):
 
 
 class OutputCriblTCP(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputCriblTCPType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputcrowdstrikenextgensiem.py

@@ -138,13 +138,13 @@ class OutputCrowdstrikeNextGenSiemPqControls(BaseModel):
 
 
 class OutputCrowdstrikeNextGenSiemTypedDict(TypedDict):
+    type: OutputCrowdstrikeNextGenSiemType
     url: str
     r"""URL provided from a CrowdStrike data connector.
     Example: https://ingest.<region>.crowdstrike.com/api/ingest/hec/<connection-id>/v1/services/collector
     """
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputCrowdstrikeNextGenSiemType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -217,6 +217,8 @@ class OutputCrowdstrikeNextGenSiemTypedDict(TypedDict):
 
 
 class OutputCrowdstrikeNextGenSiem(BaseModel):
+    type: OutputCrowdstrikeNextGenSiemType
+
     url: str
     r"""URL provided from a CrowdStrike data connector.
     Example: https://ingest.<region>.crowdstrike.com/api/ingest/hec/<connection-id>/v1/services/collector
@@ -225,8 +227,6 @@ class OutputCrowdstrikeNextGenSiem(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputCrowdstrikeNextGenSiemType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputdatadog.py

@@ -163,9 +163,9 @@ class OutputDatadogPqControls(BaseModel):
 
 
 class OutputDatadogTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputDatadogType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -256,11 +256,11 @@ class OutputDatadogTypedDict(TypedDict):
 
 
 class OutputDatadog(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputDatadogType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputdataset.py

@@ -151,9 +151,9 @@ class OutputDatasetPqControls(BaseModel):
 
 
 class OutputDatasetTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputDatasetType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -234,11 +234,11 @@ class OutputDatasetTypedDict(TypedDict):
 
 
 class OutputDataset(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputDatasetType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputdevnull.py

@@ -13,9 +13,9 @@ class OutputDevnullType(str, Enum):
 
 
 class OutputDevnullTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputDevnullType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -27,11 +27,11 @@ class OutputDevnullTypedDict(TypedDict):
 
 
 class OutputDevnull(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputDevnullType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
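
The same `id`/`type` swap runs through the destination models, so the simplest possible output construction now looks like this (hedged sketch; the enum member name is hypothetical, check `OutputDevnullType` in your installed version):

```python
from cribl_control_plane.models import OutputDevnull, OutputDevnullType

# `type` is the only required field as of 0.0.26; `id` defaults to None.
sink = OutputDevnull(type=OutputDevnullType.DEVNULL)
```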