cribl-control-plane 0.0.24__py3-none-any.whl → 0.0.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.
Files changed (115)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/destinations.py +10 -8
  3. cribl_control_plane/errors/healthstatus_error.py +1 -1
  4. cribl_control_plane/groups_sdk.py +10 -10
  5. cribl_control_plane/{healthinfo.py → health.py} +3 -1
  6. cribl_control_plane/models/__init__.py +21 -27
  7. cribl_control_plane/models/healthstatus.py +3 -3
  8. cribl_control_plane/models/input.py +4 -4
  9. cribl_control_plane/models/inputappscope.py +5 -5
  10. cribl_control_plane/models/inputcollection.py +2 -2
  11. cribl_control_plane/models/inputconfluentcloud.py +17 -3
  12. cribl_control_plane/models/inputcribl.py +5 -5
  13. cribl_control_plane/models/inputcriblhttp.py +3 -3
  14. cribl_control_plane/models/inputcribllakehttp.py +3 -3
  15. cribl_control_plane/models/inputcriblmetrics.py +5 -5
  16. cribl_control_plane/models/inputcribltcp.py +3 -3
  17. cribl_control_plane/models/inputdatadogagent.py +3 -3
  18. cribl_control_plane/models/inputedgeprometheus.py +3 -3
  19. cribl_control_plane/models/inputelastic.py +3 -3
  20. cribl_control_plane/models/inputeventhub.py +3 -3
  21. cribl_control_plane/models/inputfile.py +5 -5
  22. cribl_control_plane/models/inputfirehose.py +3 -3
  23. cribl_control_plane/models/inputgooglepubsub.py +16 -9
  24. cribl_control_plane/models/inputgrafana.py +20 -6
  25. cribl_control_plane/models/inputhttp.py +3 -3
  26. cribl_control_plane/models/inputhttpraw.py +3 -3
  27. cribl_control_plane/models/inputjournalfiles.py +3 -3
  28. cribl_control_plane/models/inputkafka.py +17 -3
  29. cribl_control_plane/models/inputkinesis.py +3 -3
  30. cribl_control_plane/models/inputkubeevents.py +5 -5
  31. cribl_control_plane/models/inputkubelogs.py +5 -5
  32. cribl_control_plane/models/inputkubemetrics.py +5 -5
  33. cribl_control_plane/models/inputloki.py +10 -3
  34. cribl_control_plane/models/inputmodeldriventelemetry.py +3 -3
  35. cribl_control_plane/models/inputmsk.py +17 -3
  36. cribl_control_plane/models/inputnetflow.py +3 -3
  37. cribl_control_plane/models/inputoffice365mgmt.py +3 -3
  38. cribl_control_plane/models/inputoffice365msgtrace.py +3 -3
  39. cribl_control_plane/models/inputoffice365service.py +3 -3
  40. cribl_control_plane/models/inputopentelemetry.py +3 -3
  41. cribl_control_plane/models/inputprometheus.py +3 -3
  42. cribl_control_plane/models/inputprometheusrw.py +3 -3
  43. cribl_control_plane/models/inputrawudp.py +3 -3
  44. cribl_control_plane/models/inputsnmp.py +3 -3
  45. cribl_control_plane/models/inputsplunk.py +3 -3
  46. cribl_control_plane/models/inputsplunkhec.py +3 -3
  47. cribl_control_plane/models/inputsplunksearch.py +3 -3
  48. cribl_control_plane/models/inputsqs.py +3 -3
  49. cribl_control_plane/models/inputsystemmetrics.py +5 -5
  50. cribl_control_plane/models/inputsystemstate.py +5 -5
  51. cribl_control_plane/models/inputtcp.py +3 -3
  52. cribl_control_plane/models/inputtcpjson.py +3 -3
  53. cribl_control_plane/models/inputwef.py +3 -3
  54. cribl_control_plane/models/inputwindowsmetrics.py +5 -5
  55. cribl_control_plane/models/inputwiz.py +3 -3
  56. cribl_control_plane/models/inputzscalerhec.py +3 -3
  57. cribl_control_plane/models/output.py +14 -14
  58. cribl_control_plane/models/outputazureblob.py +3 -3
  59. cribl_control_plane/models/outputazuredataexplorer.py +3 -3
  60. cribl_control_plane/models/outputazureeventhub.py +3 -3
  61. cribl_control_plane/models/outputclickhouse.py +3 -3
  62. cribl_control_plane/models/outputcloudwatch.py +3 -3
  63. cribl_control_plane/models/outputconfluentcloud.py +17 -3
  64. cribl_control_plane/models/outputcriblhttp.py +5 -5
  65. cribl_control_plane/models/outputcribllake.py +5 -5
  66. cribl_control_plane/models/outputcribltcp.py +5 -5
  67. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +3 -3
  68. cribl_control_plane/models/outputdatadog.py +5 -5
  69. cribl_control_plane/models/outputdataset.py +5 -5
  70. cribl_control_plane/models/outputdevnull.py +5 -5
  71. cribl_control_plane/models/outputdiskspool.py +5 -5
  72. cribl_control_plane/models/outputdls3.py +5 -5
  73. cribl_control_plane/models/outputdynatracehttp.py +3 -3
  74. cribl_control_plane/models/outputdynatraceotlp.py +3 -3
  75. cribl_control_plane/models/outputelasticcloud.py +3 -3
  76. cribl_control_plane/models/outputexabeam.py +3 -3
  77. cribl_control_plane/models/outputgooglecloudlogging.py +3 -3
  78. cribl_control_plane/models/outputgooglecloudstorage.py +5 -5
  79. cribl_control_plane/models/outputgrafanacloud.py +24 -10
  80. cribl_control_plane/models/outputgraphite.py +3 -3
  81. cribl_control_plane/models/outputhumiohec.py +3 -3
  82. cribl_control_plane/models/outputkafka.py +17 -3
  83. cribl_control_plane/models/outputkinesis.py +3 -3
  84. cribl_control_plane/models/outputloki.py +14 -0
  85. cribl_control_plane/models/outputminio.py +3 -3
  86. cribl_control_plane/models/outputmsk.py +17 -3
  87. cribl_control_plane/models/outputnewrelic.py +5 -5
  88. cribl_control_plane/models/outputnewrelicevents.py +3 -3
  89. cribl_control_plane/models/outputring.py +5 -5
  90. cribl_control_plane/models/outputs3.py +5 -5
  91. cribl_control_plane/models/outputsecuritylake.py +3 -3
  92. cribl_control_plane/models/outputsentinel.py +3 -3
  93. cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
  94. cribl_control_plane/models/outputservicenow.py +3 -3
  95. cribl_control_plane/models/outputsns.py +3 -3
  96. cribl_control_plane/models/outputsplunk.py +3 -3
  97. cribl_control_plane/models/outputsplunkhec.py +5 -5
  98. cribl_control_plane/models/outputsqs.py +3 -3
  99. cribl_control_plane/models/outputstatsd.py +3 -3
  100. cribl_control_plane/models/outputstatsdext.py +3 -3
  101. cribl_control_plane/models/outputsyslog.py +5 -5
  102. cribl_control_plane/models/outputtcpjson.py +5 -5
  103. cribl_control_plane/models/outputwebhook.py +5 -5
  104. cribl_control_plane/models/outputxsiam.py +5 -5
  105. cribl_control_plane/nodes.py +100 -90
  106. cribl_control_plane/pipelines.py +20 -20
  107. cribl_control_plane/sdk.py +6 -6
  108. cribl_control_plane/sources.py +2 -0
  109. cribl_control_plane/versioning.py +14 -14
  110. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/METADATA +24 -28
  111. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/RECORD +112 -115
  112. cribl_control_plane/deployments.py +0 -185
  113. cribl_control_plane/models/restartresponse.py +0 -26
  114. cribl_control_plane/models/updateworkersrestartop.py +0 -24
  115. {cribl_control_plane-0.0.24.dist-info → cribl_control_plane-0.0.26.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputcribllakehttp.py
@@ -175,11 +175,11 @@ class InputCriblLakeHTTPMetadatum(BaseModel):
 
 
 class InputCriblLakeHTTPTypedDict(TypedDict):
+    type: InputCriblLakeHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputCriblLakeHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -227,14 +227,14 @@ class InputCriblLakeHTTPTypedDict(TypedDict):
 
 
 class InputCriblLakeHTTP(BaseModel):
+    type: InputCriblLakeHTTPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputCriblLakeHTTPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
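The same shape change repeats across nearly every Source model in this release: the `type` discriminator moves from optional to required, so payloads that previously omitted it no longer validate. A minimal sketch of the breaking effect (it assumes only that the package is installed and that these are standard pydantic v2 models, which the `Annotated`/`pydantic.Field` usage suggests):

from pydantic import ValidationError
from cribl_control_plane.models.inputcribllakehttp import InputCriblLakeHTTP

# Validated under 0.0.24, where `type` was Optional and defaulted to None;
# fails under 0.0.26 because the discriminator is now a required field.
try:
    InputCriblLakeHTTP.model_validate({"port": 10300})
except ValidationError as err:
    print(err)  # reports the missing required "type" field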
cribl_control_plane/models/inputcriblmetrics.py
@@ -97,9 +97,9 @@ class InputCriblmetricsMetadatum(BaseModel):
 
 
 class InputCriblmetricsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputCriblmetricsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -124,11 +124,11 @@ class InputCriblmetricsTypedDict(TypedDict):
 
 
 class InputCriblmetrics(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputCriblmetricsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
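On several models (this one, inputfile, inputkubeevents, and the other inputkube*/system/windows metrics files, per the files-changed list) `id` moves the opposite way: required in 0.0.24, optional in 0.0.26. A sketch; the "criblmetrics" type value is an assumption about the generated enum, not confirmed by this diff:

from cribl_control_plane.models.inputcriblmetrics import InputCriblmetrics

# 0.0.24 required an explicit id; 0.0.26 lets it default to None.
src = InputCriblmetrics.model_validate({"type": "criblmetrics"})  # assumed enum value
print(src.id)  # None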
cribl_control_plane/models/inputcribltcp.py
@@ -171,11 +171,11 @@ class InputCriblTCPMetadatum(BaseModel):
 
 
 class InputCriblTCPTypedDict(TypedDict):
+    type: InputCriblTCPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputCriblTCPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -211,14 +211,14 @@ class InputCriblTCPTypedDict(TypedDict):
 
 
 class InputCriblTCP(BaseModel):
+    type: InputCriblTCPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputCriblTCPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputdatadogagent.py
@@ -188,11 +188,11 @@ class InputDatadogAgentProxyMode(BaseModel):
 
 
 class InputDatadogAgentTypedDict(TypedDict):
+    type: InputDatadogAgentType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputDatadogAgentType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -241,14 +241,14 @@ class InputDatadogAgentTypedDict(TypedDict):
 
 
 class InputDatadogAgent(BaseModel):
+    type: InputDatadogAgentType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputDatadogAgentType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputedgeprometheus.py
@@ -248,9 +248,9 @@ class PodFilter(BaseModel):
 
 
 class InputEdgePrometheusTypedDict(TypedDict):
+    type: InputEdgePrometheusType
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputEdgePrometheusType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -338,11 +338,11 @@ class InputEdgePrometheusTypedDict(TypedDict):
 
 
 class InputEdgePrometheus(BaseModel):
+    type: InputEdgePrometheusType
+
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputEdgePrometheusType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputelastic.py
@@ -246,11 +246,11 @@ class InputElasticProxyMode(BaseModel):
 
 
 class InputElasticTypedDict(TypedDict):
+    type: InputElasticType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputElasticType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -312,14 +312,14 @@ class InputElasticTypedDict(TypedDict):
 
 
 class InputElastic(BaseModel):
+    type: InputElasticType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputElasticType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputeventhub.py
@@ -132,13 +132,13 @@ class InputEventhubMetadatum(BaseModel):
 
 
 class InputEventhubTypedDict(TypedDict):
+    type: InputEventhubType
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to (example: yourdomain.servicebus.windows.net:9093). The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
     topics: List[str]
     r"""The name of the Event Hub (Kafka topic) to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Event Hubs Source to only a single topic."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputEventhubType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -210,6 +210,8 @@ class InputEventhubTypedDict(TypedDict):
 
 
 class InputEventhub(BaseModel):
+    type: InputEventhubType
+
     brokers: List[str]
     r"""List of Event Hubs Kafka brokers to connect to (example: yourdomain.servicebus.windows.net:9093). The hostname can be found in the host portion of the primary or secondary connection string in Shared Access Policies."""
 
@@ -219,8 +221,6 @@ class InputEventhub(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputEventhubType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputfile.py
@@ -104,9 +104,9 @@ class InputFileMetadatum(BaseModel):
 
 
 class InputFileTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputFileType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -158,11 +158,11 @@ class InputFileTypedDict(TypedDict):
 
 
 class InputFile(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputFileType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputfirehose.py
@@ -171,11 +171,11 @@ class InputFirehoseMetadatum(BaseModel):
 
 
 class InputFirehoseTypedDict(TypedDict):
+    type: InputFirehoseType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputFirehoseType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -223,14 +223,14 @@ class InputFirehoseTypedDict(TypedDict):
 
 
 class InputFirehose(BaseModel):
+    type: InputFirehoseType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputFirehoseType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputgooglepubsub.py
@@ -105,13 +105,11 @@ class InputGooglePubsubMetadatum(BaseModel):
 
 
 class InputGooglePubsubTypedDict(TypedDict):
-    topic_name: str
-    r"""ID of the topic to receive events from"""
+    type: InputGooglePubsubType
     subscription_name: str
-    r"""ID of the subscription to use when receiving events"""
+    r"""ID of the subscription to use when receiving events. When Monitor subscription is enabled, the fully qualified subscription name must be entered. Example: projects/myProject/subscriptions/mySubscription"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputGooglePubsubType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -126,6 +124,10 @@ class InputGooglePubsubTypedDict(TypedDict):
     connections: NotRequired[List[InputGooglePubsubConnectionTypedDict]]
     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
     pq: NotRequired[InputGooglePubsubPqTypedDict]
+    topic_name: NotRequired[str]
+    r"""ID of the topic to receive events from. When Monitor subscription is enabled, any value may be entered."""
+    monitor_subscription: NotRequired[bool]
+    r"""Use when the subscription is not created by this Source and topic is not known"""
     create_topic: NotRequired[bool]
     r"""Create topic if it does not exist"""
     create_subscription: NotRequired[bool]
@@ -152,17 +154,14 @@
 
 
 class InputGooglePubsub(BaseModel):
-    topic_name: Annotated[str, pydantic.Field(alias="topicName")]
-    r"""ID of the topic to receive events from"""
+    type: InputGooglePubsubType
 
     subscription_name: Annotated[str, pydantic.Field(alias="subscriptionName")]
-    r"""ID of the subscription to use when receiving events"""
+    r"""ID of the subscription to use when receiving events. When Monitor subscription is enabled, the fully qualified subscription name must be entered. Example: projects/myProject/subscriptions/mySubscription"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputGooglePubsubType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -187,6 +186,14 @@ class InputGooglePubsub(BaseModel):
 
     pq: Optional[InputGooglePubsubPq] = None
 
+    topic_name: Annotated[Optional[str], pydantic.Field(alias="topicName")] = "cribl"
+    r"""ID of the topic to receive events from. When Monitor subscription is enabled, any value may be entered."""
+
+    monitor_subscription: Annotated[
+        Optional[bool], pydantic.Field(alias="monitorSubscription")
+    ] = False
+    r"""Use when the subscription is not created by this Source and topic is not known"""
+
     create_topic: Annotated[Optional[bool], pydantic.Field(alias="createTopic")] = False
     r"""Create topic if it does not exist"""
 
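Beyond the shared type/id reshuffle, the Pub/Sub Source gains a monitor-subscription mode: `topic_name` is demoted to optional (default "cribl") and the new `monitor_subscription` flag covers subscriptions that this Source did not create. A sketch; the "google_pubsub" type value is an assumption about the generated enum, while the alias keys are the ones shown in this diff:

from cribl_control_plane.models.inputgooglepubsub import InputGooglePubsub

# With monitorSubscription enabled, the fully qualified subscription name is
# required and topicName can stay at its "cribl" default.
src = InputGooglePubsub.model_validate({
    "type": "google_pubsub",  # assumed enum value
    "subscriptionName": "projects/myProject/subscriptions/mySubscription",
    "monitorSubscription": True,
})
print(src.topic_name)  # "cribl"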
cribl_control_plane/models/inputgrafana.py
@@ -429,11 +429,11 @@ class InputGrafanaMetadatum2(BaseModel):
 
 
 class InputGrafanaGrafana2TypedDict(TypedDict):
+    type: InputGrafanaType2
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputGrafanaType2]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -477,6 +477,8 @@ class InputGrafanaGrafana2TypedDict(TypedDict):
     r"""Absolute path on which to listen for Grafana Agent's Remote Write requests. Defaults to /api/prom/push, which will expand as: 'http://<your-upstream-URL>:<your-port>/api/prom/push'. Either this field or 'Logs API endpoint' must be configured."""
     loki_api: NotRequired[str]
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your-upstream-URL>:<your-port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
+    extract_structured_metadata: NotRequired[bool]
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
     prometheus_auth: NotRequired[InputGrafanaPrometheusAuth2TypedDict]
     loki_auth: NotRequired[InputGrafanaLokiAuth2TypedDict]
     metadata: NotRequired[List[InputGrafanaMetadatum2TypedDict]]
@@ -485,14 +487,14 @@ class InputGrafanaGrafana2TypedDict(TypedDict):
 
 
 class InputGrafanaGrafana2(BaseModel):
+    type: InputGrafanaType2
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputGrafanaType2] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -587,6 +589,11 @@ class InputGrafanaGrafana2(BaseModel):
     )
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your-upstream-URL>:<your-port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
 
+    extract_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="extractStructuredMetadata")
+    ] = False
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
+
     prometheus_auth: Annotated[
         Optional[InputGrafanaPrometheusAuth2], pydantic.Field(alias="prometheusAuth")
     ] = None
@@ -1022,11 +1029,11 @@ class InputGrafanaMetadatum1(BaseModel):
 
 
 class InputGrafanaGrafana1TypedDict(TypedDict):
+    type: InputGrafanaType1
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputGrafanaType1]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -1070,6 +1077,8 @@ class InputGrafanaGrafana1TypedDict(TypedDict):
     r"""Absolute path on which to listen for Grafana Agent's Remote Write requests. Defaults to /api/prom/push, which will expand as: 'http://<your-upstream-URL>:<your-port>/api/prom/push'. Either this field or 'Logs API endpoint' must be configured."""
     loki_api: NotRequired[str]
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your-upstream-URL>:<your-port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
+    extract_structured_metadata: NotRequired[bool]
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
     prometheus_auth: NotRequired[InputGrafanaPrometheusAuth1TypedDict]
     loki_auth: NotRequired[InputGrafanaLokiAuth1TypedDict]
     metadata: NotRequired[List[InputGrafanaMetadatum1TypedDict]]
@@ -1078,14 +1087,14 @@ class InputGrafanaGrafana1TypedDict(TypedDict):
 
 
 class InputGrafanaGrafana1(BaseModel):
+    type: InputGrafanaType1
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputGrafanaType1] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
@@ -1180,6 +1189,11 @@ class InputGrafanaGrafana1(BaseModel):
     )
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your-upstream-URL>:<your-port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
 
+    extract_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="extractStructuredMetadata")
+    ] = False
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
+
     prometheus_auth: Annotated[
         Optional[InputGrafanaPrometheusAuth1], pydantic.Field(alias="prometheusAuth")
     ] = None
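Both Grafana variants add an opt-in `extract_structured_metadata` flag (alias extractStructuredMetadata, default False) for the Loki 3.5.3+ structured-metadata format. A sketch; the "grafana" type value and the port are illustrative assumptions, while the alias is confirmed by this diff:

from cribl_control_plane.models.inputgrafana import InputGrafanaGrafana1

src = InputGrafanaGrafana1.model_validate({
    "type": "grafana",  # assumed enum value
    "port": 3100,       # illustrative port
    "extractStructuredMetadata": True,  # default False keeps legacy Loki parsing
})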
cribl_control_plane/models/inputhttp.py
@@ -202,11 +202,11 @@ class InputHTTPAuthTokensExt(BaseModel):
 
 
 class InputHTTPTypedDict(TypedDict):
+    type: InputHTTPType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputHTTPType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -263,14 +263,14 @@ class InputHTTPTypedDict(TypedDict):
 
 
 class InputHTTP(BaseModel):
+    type: InputHTTPType
+
     port: float
     r"""Port to listen on"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputHTTPType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputhttpraw.py
@@ -202,11 +202,11 @@ class InputHTTPRawAuthTokensExt(BaseModel):
 
 
 class InputHTTPRawTypedDict(TypedDict):
+    type: InputHTTPRawType
     port: float
     r"""Port to listen on"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputHTTPRawType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -264,14 +264,14 @@ class InputHTTPRawTypedDict(TypedDict):
 
 
 class InputHTTPRaw(BaseModel):
+    type: InputHTTPRawType
+
     port: float
     r"""Port to listen on"""
 
    id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputHTTPRawType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputjournalfiles.py
@@ -112,13 +112,13 @@ class InputJournalFilesMetadatum(BaseModel):
 
 
 class InputJournalFilesTypedDict(TypedDict):
+    type: InputJournalFilesType
     path: str
     r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
     journals: List[str]
     r"""The full path of discovered journals are matched against this wildcard list."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputJournalFilesType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -147,6 +147,8 @@
 
 
 class InputJournalFiles(BaseModel):
+    type: InputJournalFilesType
+
     path: str
     r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
 
@@ -156,8 +158,6 @@ class InputJournalFiles(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputJournalFilesType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputkafka.py
@@ -83,6 +83,13 @@ class InputKafkaPq(BaseModel):
     r"""Codec to use to compress the persisted data"""
 
 
+class InputKafkaSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -183,6 +190,8 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -202,6 +211,11 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputKafkaSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -332,13 +346,13 @@ class InputKafkaMetadatum(BaseModel):
 
 
 class InputKafkaTypedDict(TypedDict):
+    type: InputKafkaType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
     topics: List[str]
     r"""Topic to subscribe to. Warning: To optimize performance, Cribl suggests subscribing each Kafka Source to a single topic only."""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputKafkaType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -412,6 +426,8 @@
 
 
 class InputKafka(BaseModel):
+    type: InputKafkaType
+
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify the hostname and port (such as mykafkabroker:9092) or just the hostname (in which case @{product} will assign port 9092)."""
 
@@ -421,8 +437,6 @@ class InputKafka(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputKafkaType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
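The Kafka Source (and, per the files-changed list, its Confluent Cloud and MSK siblings plus the matching Destinations) adds a schema-format switch to the Schema Registry settings: a new InputKafkaSchemaType enum with AVRO (the default, preserving current behavior) and JSON. A minimal sketch using only names and aliases confirmed by this diff:

from cribl_control_plane.models.inputkafka import (
    InputKafkaKafkaSchemaRegistryAuthentication,
    InputKafkaSchemaType,
)

# Only schemaType needs to be set; the registry URL and timeouts keep
# their defaults, and the string value is coerced into the str Enum.
registry = InputKafkaKafkaSchemaRegistryAuthentication.model_validate(
    {"schemaType": "json"}
)
assert registry.schema_type is InputKafkaSchemaType.JSON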
cribl_control_plane/models/inputkinesis.py
@@ -135,13 +135,13 @@ class InputKinesisMetadatum(BaseModel):
 
 
 class InputKinesisTypedDict(TypedDict):
+    type: InputKinesisType
     stream_name: str
     r"""Kinesis Data Stream to read data from"""
     region: str
     r"""Region where the Kinesis stream is located"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
-    type: NotRequired[InputKinesisType]
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -202,6 +202,8 @@
 
 
 class InputKinesis(BaseModel):
+    type: InputKinesisType
+
     stream_name: Annotated[str, pydantic.Field(alias="streamName")]
     r"""Kinesis Data Stream to read data from"""
 
@@ -211,8 +213,6 @@ class InputKinesis(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this input"""
 
-    type: Optional[InputKinesisType] = None
-
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None
cribl_control_plane/models/inputkubeevents.py
@@ -112,9 +112,9 @@ class InputKubeEventsMetadatum(BaseModel):
 
 
 class InputKubeEventsTypedDict(TypedDict):
-    id: str
-    r"""Unique ID for this input"""
     type: InputKubeEventsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
     disabled: NotRequired[bool]
     pipeline: NotRequired[str]
     r"""Pipeline to process data from this Source before sending it through the Routes"""
@@ -137,11 +137,11 @@ class InputKubeEventsTypedDict(TypedDict):
 
 
 class InputKubeEvents(BaseModel):
-    id: str
-    r"""Unique ID for this input"""
-
     type: InputKubeEventsType
 
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
     disabled: Optional[bool] = False
 
     pipeline: Optional[str] = None