cribl-control-plane 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl

This diff compares the contents of publicly available package versions as released to their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of cribl-control-plane might be problematic.

Files changed (115)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/destinations.py +10 -8
  3. cribl_control_plane/errors/healthstatus_error.py +1 -1
  4. cribl_control_plane/groups_sdk.py +10 -10
  5. cribl_control_plane/{healthinfo.py → health.py} +3 -1
  6. cribl_control_plane/models/__init__.py +21 -27
  7. cribl_control_plane/models/healthstatus.py +3 -3
  8. cribl_control_plane/models/input.py +4 -4
  9. cribl_control_plane/models/inputappscope.py +5 -5
  10. cribl_control_plane/models/inputcollection.py +2 -2
  11. cribl_control_plane/models/inputconfluentcloud.py +17 -3
  12. cribl_control_plane/models/inputcribl.py +5 -5
  13. cribl_control_plane/models/inputcriblhttp.py +3 -3
  14. cribl_control_plane/models/inputcribllakehttp.py +3 -3
  15. cribl_control_plane/models/inputcriblmetrics.py +5 -5
  16. cribl_control_plane/models/inputcribltcp.py +3 -3
  17. cribl_control_plane/models/inputdatadogagent.py +3 -3
  18. cribl_control_plane/models/inputedgeprometheus.py +3 -3
  19. cribl_control_plane/models/inputelastic.py +3 -3
  20. cribl_control_plane/models/inputeventhub.py +3 -3
  21. cribl_control_plane/models/inputfile.py +5 -5
  22. cribl_control_plane/models/inputfirehose.py +3 -3
  23. cribl_control_plane/models/inputgooglepubsub.py +16 -9
  24. cribl_control_plane/models/inputgrafana.py +20 -6
  25. cribl_control_plane/models/inputhttp.py +3 -3
  26. cribl_control_plane/models/inputhttpraw.py +3 -3
  27. cribl_control_plane/models/inputjournalfiles.py +3 -3
  28. cribl_control_plane/models/inputkafka.py +17 -3
  29. cribl_control_plane/models/inputkinesis.py +3 -3
  30. cribl_control_plane/models/inputkubeevents.py +5 -5
  31. cribl_control_plane/models/inputkubelogs.py +5 -5
  32. cribl_control_plane/models/inputkubemetrics.py +5 -5
  33. cribl_control_plane/models/inputloki.py +10 -3
  34. cribl_control_plane/models/inputmodeldriventelemetry.py +3 -3
  35. cribl_control_plane/models/inputmsk.py +17 -3
  36. cribl_control_plane/models/inputnetflow.py +3 -3
  37. cribl_control_plane/models/inputoffice365mgmt.py +3 -3
  38. cribl_control_plane/models/inputoffice365msgtrace.py +3 -3
  39. cribl_control_plane/models/inputoffice365service.py +3 -3
  40. cribl_control_plane/models/inputopentelemetry.py +3 -3
  41. cribl_control_plane/models/inputprometheus.py +3 -3
  42. cribl_control_plane/models/inputprometheusrw.py +3 -3
  43. cribl_control_plane/models/inputrawudp.py +3 -3
  44. cribl_control_plane/models/inputsnmp.py +3 -3
  45. cribl_control_plane/models/inputsplunk.py +3 -3
  46. cribl_control_plane/models/inputsplunkhec.py +3 -3
  47. cribl_control_plane/models/inputsplunksearch.py +3 -3
  48. cribl_control_plane/models/inputsqs.py +3 -3
  49. cribl_control_plane/models/inputsystemmetrics.py +5 -5
  50. cribl_control_plane/models/inputsystemstate.py +5 -5
  51. cribl_control_plane/models/inputtcp.py +3 -3
  52. cribl_control_plane/models/inputtcpjson.py +3 -3
  53. cribl_control_plane/models/inputwef.py +3 -3
  54. cribl_control_plane/models/inputwindowsmetrics.py +5 -5
  55. cribl_control_plane/models/inputwiz.py +3 -3
  56. cribl_control_plane/models/inputzscalerhec.py +3 -3
  57. cribl_control_plane/models/output.py +14 -14
  58. cribl_control_plane/models/outputazureblob.py +3 -3
  59. cribl_control_plane/models/outputazuredataexplorer.py +3 -3
  60. cribl_control_plane/models/outputazureeventhub.py +3 -3
  61. cribl_control_plane/models/outputclickhouse.py +3 -3
  62. cribl_control_plane/models/outputcloudwatch.py +3 -3
  63. cribl_control_plane/models/outputconfluentcloud.py +17 -3
  64. cribl_control_plane/models/outputcriblhttp.py +5 -5
  65. cribl_control_plane/models/outputcribllake.py +5 -5
  66. cribl_control_plane/models/outputcribltcp.py +5 -5
  67. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +3 -3
  68. cribl_control_plane/models/outputdatadog.py +5 -5
  69. cribl_control_plane/models/outputdataset.py +5 -5
  70. cribl_control_plane/models/outputdevnull.py +5 -5
  71. cribl_control_plane/models/outputdiskspool.py +5 -5
  72. cribl_control_plane/models/outputdls3.py +5 -5
  73. cribl_control_plane/models/outputdynatracehttp.py +3 -3
  74. cribl_control_plane/models/outputdynatraceotlp.py +3 -3
  75. cribl_control_plane/models/outputelasticcloud.py +3 -3
  76. cribl_control_plane/models/outputexabeam.py +3 -3
  77. cribl_control_plane/models/outputgooglecloudlogging.py +3 -3
  78. cribl_control_plane/models/outputgooglecloudstorage.py +5 -5
  79. cribl_control_plane/models/outputgrafanacloud.py +24 -10
  80. cribl_control_plane/models/outputgraphite.py +3 -3
  81. cribl_control_plane/models/outputhumiohec.py +3 -3
  82. cribl_control_plane/models/outputkafka.py +17 -3
  83. cribl_control_plane/models/outputkinesis.py +3 -3
  84. cribl_control_plane/models/outputloki.py +14 -0
  85. cribl_control_plane/models/outputminio.py +3 -3
  86. cribl_control_plane/models/outputmsk.py +17 -3
  87. cribl_control_plane/models/outputnewrelic.py +5 -5
  88. cribl_control_plane/models/outputnewrelicevents.py +3 -3
  89. cribl_control_plane/models/outputring.py +5 -5
  90. cribl_control_plane/models/outputs3.py +5 -5
  91. cribl_control_plane/models/outputsecuritylake.py +3 -3
  92. cribl_control_plane/models/outputsentinel.py +3 -3
  93. cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
  94. cribl_control_plane/models/outputservicenow.py +3 -3
  95. cribl_control_plane/models/outputsns.py +3 -3
  96. cribl_control_plane/models/outputsplunk.py +3 -3
  97. cribl_control_plane/models/outputsplunkhec.py +5 -5
  98. cribl_control_plane/models/outputsqs.py +3 -3
  99. cribl_control_plane/models/outputstatsd.py +3 -3
  100. cribl_control_plane/models/outputstatsdext.py +3 -3
  101. cribl_control_plane/models/outputsyslog.py +5 -5
  102. cribl_control_plane/models/outputtcpjson.py +5 -5
  103. cribl_control_plane/models/outputwebhook.py +5 -5
  104. cribl_control_plane/models/outputxsiam.py +5 -5
  105. cribl_control_plane/nodes.py +100 -90
  106. cribl_control_plane/pipelines.py +20 -20
  107. cribl_control_plane/sdk.py +6 -6
  108. cribl_control_plane/sources.py +2 -0
  109. cribl_control_plane/versioning.py +14 -14
  110. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/METADATA +24 -28
  111. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/RECORD +112 -115
  112. cribl_control_plane/deployments.py +0 -185
  113. cribl_control_plane/models/restartresponse.py +0 -26
  114. cribl_control_plane/models/updateworkersrestartop.py +0 -24
  115. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/WHEEL +0 -0

cribl_control_plane/models/outputdiskspool.py

@@ -20,9 +20,9 @@ class OutputDiskSpoolCompression(str, Enum):
 
 
 class OutputDiskSpoolTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputDiskSpoolType
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -45,11 +45,11 @@ class OutputDiskSpoolTypedDict(TypedDict):
 
 
 class OutputDiskSpool(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputDiskSpoolType
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
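
This id/type swap is the recurring breaking change in 0.0.26: in almost every output model, `id` moves from required to optional while `type` becomes required, so code that constructed an output without a `type` now fails validation. A minimal before/after sketch, assuming the models are re-exported from `cribl_control_plane.models` and that the single-member type enum is named `DISK_SPOOL` (neither detail is shown in this excerpt):

    from cribl_control_plane.models import OutputDiskSpool, OutputDiskSpoolType

    # 0.0.24: id was required, type was optional.
    # OutputDiskSpool(id="my-spool")  # in 0.0.26 this raises a pydantic ValidationError

    # 0.0.26: type is required, id defaults to None.
    out = OutputDiskSpool(
        type=OutputDiskSpoolType.DISK_SPOOL,  # assumed member name
        id="my-spool",                        # optional as of this release
    )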

cribl_control_plane/models/outputdls3.py

@@ -121,11 +121,11 @@ class OutputDlS3KeyValueMetadatum(BaseModel):
 
 
 class OutputDlS3TypedDict(TypedDict):
+    type: OutputDlS3Type
     bucket: str
     r"""Name of the destination S3 bucket. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputDlS3Type]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -161,7 +161,7 @@ class OutputDlS3TypedDict(TypedDict):
     add_id_to_stage_path: NotRequired[bool]
     r"""Add the Output ID value to staging location"""
     dest_path: NotRequired[str]
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
     object_acl: NotRequired[OutputDlS3ObjectACL]
     r"""Object ACL to assign to uploaded objects"""
     storage_class: NotRequired[OutputDlS3StorageClass]
@@ -243,14 +243,14 @@
 
 
 class OutputDlS3(BaseModel):
+    type: OutputDlS3Type
+
     bucket: str
     r"""Name of the destination S3 bucket. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""
 
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputDlS3Type] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
@@ -328,7 +328,7 @@ class OutputDlS3(BaseModel):
     r"""Add the Output ID value to staging location"""
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
 
     object_acl: Annotated[
         Optional[OutputDlS3ObjectACL], pydantic.Field(alias="objectACL")
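
The docstring correction above (`append` → `prepend`) is cosmetic, but the field itself is easy to misuse: `dest_path` (wire name `destPath`) holds a JavaScript expression string evaluated at init time, not a literal path. A sketch of the aliased round trip, assuming the same re-exported names and a `DL_S3` enum member (both assumptions):

    from cribl_control_plane.models import OutputDlS3, OutputDlS3Type

    out = OutputDlS3(
        type=OutputDlS3Type.DL_S3,  # assumed member name
        bucket="`myBucket-${C.vars.myVar}`",
        dest_path="`myKeyPrefix-${C.vars.myVar}`",  # key prefix, prepended at upload time
    )
    # Snake_case fields serialize back to their wire aliases:
    payload = out.model_dump(by_alias=True, exclude_none=True)
    assert payload["destPath"] == "`myKeyPrefix-${C.vars.myVar}`"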

cribl_control_plane/models/outputdynatracehttp.py

@@ -155,9 +155,9 @@ class OutputDynatraceHTTPPqControls(BaseModel):
 
 
 class OutputDynatraceHTTPTypedDict(TypedDict):
+    type: OutputDynatraceHTTPType
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputDynatraceHTTPType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -242,11 +242,11 @@ class OutputDynatraceHTTPTypedDict(TypedDict):
 
 
 class OutputDynatraceHTTP(BaseModel):
+    type: OutputDynatraceHTTPType
+
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputDynatraceHTTPType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputdynatraceotlp.py

@@ -169,11 +169,11 @@ class OutputDynatraceOtlpPqControls(BaseModel):
 
 
 class OutputDynatraceOtlpTypedDict(TypedDict):
+    type: OutputDynatraceOtlpType
     token_secret: str
     r"""Select or create a stored text secret"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputDynatraceOtlpType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -260,14 +260,14 @@ class OutputDynatraceOtlpTypedDict(TypedDict):
 
 
 class OutputDynatraceOtlp(BaseModel):
+    type: OutputDynatraceOtlpType
+
     token_secret: Annotated[str, pydantic.Field(alias="tokenSecret")]
     r"""Select or create a stored text secret"""
 
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputDynatraceOtlpType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputelasticcloud.py

@@ -160,13 +160,13 @@ class OutputElasticCloudPqControls(BaseModel):
 
 
 class OutputElasticCloudTypedDict(TypedDict):
+    type: OutputElasticCloudType
     url: str
     r"""Enter Cloud ID of the Elastic Cloud environment to send events to"""
     index: str
     r"""Data stream or index to send events to. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be overwritten by an event's __index field."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputElasticCloudType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -231,6 +231,8 @@ class OutputElasticCloudTypedDict(TypedDict):
 
 
 class OutputElasticCloud(BaseModel):
+    type: OutputElasticCloudType
+
     url: str
     r"""Enter Cloud ID of the Elastic Cloud environment to send events to"""
 
@@ -240,8 +242,6 @@ class OutputElasticCloud(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputElasticCloudType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputexabeam.py

@@ -54,6 +54,7 @@ class OutputExabeamDiskSpaceProtection(str, Enum):
 
 
 class OutputExabeamTypedDict(TypedDict):
+    type: OutputExabeamType
     bucket: str
     r"""Name of the destination bucket. A constant or a JavaScript expression that can only be evaluated at init time. Example of referencing a JavaScript Global Variable: `myBucket-${C.vars.myVar}`."""
     region: str
@@ -64,7 +65,6 @@ class OutputExabeamTypedDict(TypedDict):
     """
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputExabeamType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -126,6 +126,8 @@ class OutputExabeamTypedDict(TypedDict):
 
 
 class OutputExabeam(BaseModel):
+    type: OutputExabeamType
+
     bucket: str
     r"""Name of the destination bucket. A constant or a JavaScript expression that can only be evaluated at init time. Example of referencing a JavaScript Global Variable: `myBucket-${C.vars.myVar}`."""
 
@@ -140,8 +142,6 @@ class OutputExabeam(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputExabeamType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputgooglecloudlogging.py

@@ -103,6 +103,7 @@ class OutputGoogleCloudLoggingPqControls(BaseModel):
 
 
 class OutputGoogleCloudLoggingTypedDict(TypedDict):
+    type: OutputGoogleCloudLoggingType
     log_location_type: LogLocationType
     log_name_expression: str
     r"""JavaScript expression to compute the value of the log name."""
@@ -110,7 +111,6 @@ class OutputGoogleCloudLoggingTypedDict(TypedDict):
     r"""JavaScript expression to compute the value of the folder ID with which log entries should be associated."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputGoogleCloudLoggingType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -230,6 +230,8 @@ class OutputGoogleCloudLoggingTypedDict(TypedDict):
 
 
 class OutputGoogleCloudLogging(BaseModel):
+    type: OutputGoogleCloudLoggingType
+
     log_location_type: Annotated[
         LogLocationType, pydantic.Field(alias="logLocationType")
     ]
@@ -245,8 +247,6 @@ class OutputGoogleCloudLogging(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputGoogleCloudLoggingType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputgooglecloudstorage.py

@@ -109,13 +109,13 @@ class OutputGoogleCloudStorageKeyValueMetadatum(BaseModel):
 
 
 class OutputGoogleCloudStorageTypedDict(TypedDict):
+    type: OutputGoogleCloudStorageType
     bucket: str
     r"""Name of the destination bucket. This value can be a constant or a JavaScript expression that can only be evaluated at init time. Example of referencing a Global Variable: `myBucket-${C.vars.myVar}`."""
     region: str
     r"""Region where the bucket is located"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputGoogleCloudStorageType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -132,7 +132,7 @@ class OutputGoogleCloudStorageTypedDict(TypedDict):
     stage_path: NotRequired[str]
     r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
     dest_path: NotRequired[str]
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
     verify_permissions: NotRequired[bool]
     r"""Disable if you can access files within the bucket but not the bucket itself"""
     object_acl: NotRequired[OutputGoogleCloudStorageObjectACL]
@@ -215,6 +215,8 @@ class OutputGoogleCloudStorageTypedDict(TypedDict):
 
 
 class OutputGoogleCloudStorage(BaseModel):
+    type: OutputGoogleCloudStorageType
+
     bucket: str
     r"""Name of the destination bucket. This value can be a constant or a JavaScript expression that can only be evaluated at init time. Example of referencing a Global Variable: `myBucket-${C.vars.myVar}`."""
 
@@ -224,8 +226,6 @@ class OutputGoogleCloudStorage(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputGoogleCloudStorageType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
@@ -260,7 +260,7 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
 
     verify_permissions: Annotated[
         Optional[bool], pydantic.Field(alias="verifyPermissions")

cribl_control_plane/models/outputgrafanacloud.py

@@ -234,11 +234,11 @@ class OutputGrafanaCloudPqControls2(BaseModel):
 
 
 class OutputGrafanaCloudGrafanaCloud2TypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputGrafanaCloudType2
     prometheus_url: str
     r"""The remote_write endpoint to send Prometheus metrics to, such as https://prometheus-blocks-prod-us-central1.grafana.net/api/prom/push"""
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -284,6 +284,8 @@ class OutputGrafanaCloudGrafanaCloud2TypedDict(TypedDict):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
     safe_headers: NotRequired[List[str]]
     r"""List of headers that are safe to log in plain text"""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     response_retry_settings: NotRequired[
         List[OutputGrafanaCloudResponseRetrySetting2TypedDict]
     ]
@@ -314,14 +316,14 @@
 
 
 class OutputGrafanaCloudGrafanaCloud2(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputGrafanaCloudType2
 
     prometheus_url: Annotated[str, pydantic.Field(alias="prometheusUrl")]
     r"""The remote_write endpoint to send Prometheus metrics to, such as https://prometheus-blocks-prod-us-central1.grafana.net/api/prom/push"""
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
@@ -416,6 +418,11 @@
     ] = None
     r"""List of headers that are safe to log in plain text"""
 
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     response_retry_settings: Annotated[
         Optional[List[OutputGrafanaCloudResponseRetrySetting2]],
         pydantic.Field(alias="responseRetrySettings"),
@@ -703,11 +710,11 @@ class OutputGrafanaCloudPqControls1(BaseModel):
 
 
 class OutputGrafanaCloudGrafanaCloud1TypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this output"""
     type: OutputGrafanaCloudType1
     loki_url: str
     r"""The endpoint to send logs to, such as https://logs-prod-us-central1.grafana.net"""
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -753,6 +760,8 @@
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
     safe_headers: NotRequired[List[str]]
     r"""List of headers that are safe to log in plain text"""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     response_retry_settings: NotRequired[
         List[OutputGrafanaCloudResponseRetrySetting1TypedDict]
     ]
@@ -783,14 +792,14 @@
 
 
 class OutputGrafanaCloudGrafanaCloud1(BaseModel):
-    id: str
-    r"""Unique ID for this output"""
-
     type: OutputGrafanaCloudType1
 
     loki_url: Annotated[str, pydantic.Field(alias="lokiUrl")]
     r"""The endpoint to send logs to, such as https://logs-prod-us-central1.grafana.net"""
 
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
@@ -887,6 +896,11 @@
     ] = None
     r"""List of headers that are safe to log in plain text"""
 
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     response_retry_settings: Annotated[
         Optional[List[OutputGrafanaCloudResponseRetrySetting1]],
         pydantic.Field(alias="responseRetrySettings"),
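
Both Grafana Cloud variants above (the Loki-style logs variant and the Prometheus metrics variant) gain the same `send_structured_metadata` flag, defaulting to False and serialized as `sendStructuredMetadata`. A sketch for the logs variant, with the import path and enum member name assumed:

    from cribl_control_plane.models import (
        OutputGrafanaCloudGrafanaCloud1,
        OutputGrafanaCloudType1,
    )

    out = OutputGrafanaCloudGrafanaCloud1(
        type=OutputGrafanaCloudType1.GRAFANA_CLOUD,  # assumed member name
        loki_url="https://logs-prod-us-central1.grafana.net",
        send_structured_metadata=True,  # forward string key-value pairs from __structuredMetadata
    )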

cribl_control_plane/models/outputgraphite.py

@@ -58,11 +58,11 @@ class OutputGraphitePqControls(BaseModel):
 
 
 class OutputGraphiteTypedDict(TypedDict):
+    type: OutputGraphiteType
     host: str
     r"""The hostname of the destination."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputGraphiteType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -106,14 +106,14 @@
 
 
 class OutputGraphite(BaseModel):
+    type: OutputGraphiteType
+
     host: str
     r"""The hostname of the destination."""
 
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputGraphiteType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputhumiohec.py

@@ -138,9 +138,9 @@ class OutputHumioHecPqControls(BaseModel):
 
 
 class OutputHumioHecTypedDict(TypedDict):
+    type: OutputHumioHecType
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputHumioHecType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -210,11 +210,11 @@
 
 
 class OutputHumioHec(BaseModel):
+    type: OutputHumioHecType
+
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputHumioHecType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputkafka.py

@@ -37,6 +37,13 @@ class OutputKafkaCompression(str, Enum):
     LZ4 = "lz4"
 
 
+class OutputKafkaSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -137,6 +144,8 @@ class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -160,6 +169,11 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[OutputKafkaSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -325,13 +339,13 @@ class OutputKafkaPqControls(BaseModel):
 
 
 class OutputKafkaTypedDict(TypedDict):
+    type: OutputKafkaType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify hostname and port, e.g., mykafkabroker:9092, or just hostname, in which case @{product} will assign port 9092."""
     topic: str
     r"""The topic to publish events to. Can be overridden using the __topicOut field."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputKafkaType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -395,6 +409,8 @@
 
 
 class OutputKafka(BaseModel):
+    type: OutputKafkaType
+
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify hostname and port, e.g., mykafkabroker:9092, or just hostname, in which case @{product} will assign port 9092."""
 
@@ -404,8 +420,6 @@ class OutputKafka(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputKafkaType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
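
With the new `OutputKafkaSchemaType` enum, the schema registry block can now declare JSON-encoded payloads instead of the implicit Avro default. The registry model has no required fields in this excerpt, so it can be constructed directly; only the import path is an assumption:

    from cribl_control_plane.models import (
        OutputKafkaKafkaSchemaRegistryAuthentication,
        OutputKafkaSchemaType,
    )

    registry = OutputKafkaKafkaSchemaRegistryAuthentication(
        disabled=False,
        schema_registry_url="http://localhost:8081",
        schema_type=OutputKafkaSchemaType.JSON,  # new in 0.0.26; default is AVRO
    )

The field that attaches this block to `OutputKafka` is outside this excerpt.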

cribl_control_plane/models/outputkinesis.py

@@ -73,13 +73,13 @@ class OutputKinesisPqControls(BaseModel):
 
 
 class OutputKinesisTypedDict(TypedDict):
+    type: OutputKinesisType
     stream_name: str
     r"""Kinesis stream name to send events to."""
     region: str
     r"""Region where the Kinesis stream is located"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputKinesisType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -141,6 +141,8 @@ class OutputKinesisTypedDict(TypedDict):
 
 
 class OutputKinesis(BaseModel):
+    type: OutputKinesisType
+
     stream_name: Annotated[str, pydantic.Field(alias="streamName")]
     r"""Kinesis stream name to send events to."""
 
@@ -150,8 +152,6 @@ class OutputKinesis(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputKinesisType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 

cribl_control_plane/models/outputloki.py

@@ -198,6 +198,10 @@ class OutputLokiTypedDict(TypedDict):
     timeout_retry_settings: NotRequired[OutputLokiTimeoutRetrySettingsTypedDict]
     response_honor_retry_after_header: NotRequired[bool]
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+    enable_dynamic_headers: NotRequired[bool]
+    r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     on_backpressure: NotRequired[OutputLokiBackpressureBehavior]
     r"""How to handle events when all receivers are exerting backpressure"""
     total_memory_limit_kb: NotRequired[float]
@@ -335,6 +339,16 @@ class OutputLoki(BaseModel):
     ] = False
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
 
+    enable_dynamic_headers: Annotated[
+        Optional[bool], pydantic.Field(alias="enableDynamicHeaders")
+    ] = False
+    r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
+
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     on_backpressure: Annotated[
         Optional[OutputLokiBackpressureBehavior], pydantic.Field(alias="onBackpressure")
    ] = OutputLokiBackpressureBehavior.BLOCK
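
`OutputLoki` picks up the same structured-metadata flag plus `enable_dynamic_headers`, which batches events by their `__headers` field and sends each batch as a separate request; both default to False. Since OutputLoki's required fields (its endpoint, for one) sit outside this excerpt, the safest illustration is the wire-format fragment these fields produce, using the aliases shown above:

    # Config fragment as serialized on the wire (pydantic aliases from the diff):
    loki_flags = {
        "enableDynamicHeaders": True,    # per-event headers from __headers
        "sendStructuredMetadata": True,  # string key-value pairs from __structuredMetadata
    }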

cribl_control_plane/models/outputminio.py

@@ -116,13 +116,13 @@ class OutputMinioKeyValueMetadatum(BaseModel):
 
 
 class OutputMinioTypedDict(TypedDict):
+    type: OutputMinioType
     endpoint: str
     r"""MinIO service url (e.g. http://minioHost:9000)"""
     bucket: str
     r"""Name of the destination MinIO bucket. This value can be a constant or a JavaScript expression that can only be evaluated at init time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputMinioType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -225,6 +225,8 @@ class OutputMinioTypedDict(TypedDict):
 
 
 class OutputMinio(BaseModel):
+    type: OutputMinioType
+
     endpoint: str
     r"""MinIO service url (e.g. http://minioHost:9000)"""
 
@@ -234,8 +236,6 @@ class OutputMinio(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputMinioType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 