cribl-control-plane 0.0.24__py3-none-any.whl → 0.0.25__py3-none-any.whl

This diff compares the contents of the two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.

Note: this release of cribl-control-plane has been flagged as potentially problematic.

@@ -3,10 +3,10 @@
 import importlib.metadata
 
 __title__: str = "cribl-control-plane"
-__version__: str = "0.0.24"
-__openapi_doc_version__: str = "4.14.0-alpha.1754581489015-f79f2920"
+__version__: str = "0.0.25"
+__openapi_doc_version__: str = "4.14.0-alpha.1754945439857-0a86c294"
 __gen_version__: str = "2.660.0"
-__user_agent__: str = "speakeasy-sdk/python 0.0.24 2.660.0 4.14.0-alpha.1754581489015-f79f2920 cribl-control-plane"
+__user_agent__: str = "speakeasy-sdk/python 0.0.25 2.660.0 4.14.0-alpha.1754945439857-0a86c294 cribl-control-plane"
 
 try:
     if __package__ is not None:
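
To confirm which build is installed after the upgrade, the distribution can be queried by the name in the title above; a minimal check (not part of the diff):

import importlib.metadata

print(importlib.metadata.version("cribl-control-plane"))  # expected: 0.0.25
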
@@ -444,6 +444,7 @@ if TYPE_CHECKING:
         InputConfluentCloudPq,
         InputConfluentCloudPqTypedDict,
         InputConfluentCloudSASLMechanism,
+        InputConfluentCloudSchemaType,
         InputConfluentCloudTLSSettingsClientSide,
         InputConfluentCloudTLSSettingsClientSideTypedDict,
         InputConfluentCloudType,
@@ -852,6 +853,7 @@ if TYPE_CHECKING:
         InputKafkaPq,
         InputKafkaPqTypedDict,
         InputKafkaSASLMechanism,
+        InputKafkaSchemaType,
         InputKafkaTLSSettingsClientSide,
         InputKafkaTLSSettingsClientSideTypedDict,
         InputKafkaType,
@@ -1003,6 +1005,7 @@ if TYPE_CHECKING:
         InputMskMode,
         InputMskPq,
         InputMskPqTypedDict,
+        InputMskSchemaType,
         InputMskSignatureVersion,
         InputMskTLSSettingsClientSide,
         InputMskTLSSettingsClientSideTypedDict,
@@ -1806,6 +1809,7 @@ if TYPE_CHECKING:
         OutputConfluentCloudQueueFullBehavior,
         OutputConfluentCloudRecordDataFormat,
         OutputConfluentCloudSASLMechanism,
+        OutputConfluentCloudSchemaType,
         OutputConfluentCloudTLSSettingsClientSide,
         OutputConfluentCloudTLSSettingsClientSideTypedDict,
         OutputConfluentCloudType,
@@ -2309,6 +2313,7 @@ if TYPE_CHECKING:
         OutputKafkaQueueFullBehavior,
         OutputKafkaRecordDataFormat,
         OutputKafkaSASLMechanism,
+        OutputKafkaSchemaType,
         OutputKafkaTLSSettingsClientSide,
         OutputKafkaTLSSettingsClientSideTypedDict,
         OutputKafkaType,
@@ -2391,6 +2396,7 @@ if TYPE_CHECKING:
         OutputMskPqControlsTypedDict,
         OutputMskQueueFullBehavior,
         OutputMskRecordDataFormat,
+        OutputMskSchemaType,
         OutputMskSignatureVersion,
         OutputMskTLSSettingsClientSide,
         OutputMskTLSSettingsClientSideTypedDict,
@@ -3428,6 +3434,7 @@ __all__ = [
     "InputConfluentCloudPq",
     "InputConfluentCloudPqTypedDict",
     "InputConfluentCloudSASLMechanism",
+    "InputConfluentCloudSchemaType",
     "InputConfluentCloudTLSSettingsClientSide",
     "InputConfluentCloudTLSSettingsClientSideTypedDict",
     "InputConfluentCloudType",
@@ -3771,6 +3778,7 @@ __all__ = [
     "InputKafkaPq",
     "InputKafkaPqTypedDict",
     "InputKafkaSASLMechanism",
+    "InputKafkaSchemaType",
     "InputKafkaTLSSettingsClientSide",
     "InputKafkaTLSSettingsClientSideTypedDict",
     "InputKafkaType",
@@ -3904,6 +3912,7 @@ __all__ = [
     "InputMskMode",
     "InputMskPq",
     "InputMskPqTypedDict",
+    "InputMskSchemaType",
     "InputMskSignatureVersion",
     "InputMskTLSSettingsClientSide",
     "InputMskTLSSettingsClientSideTypedDict",
@@ -4603,6 +4612,7 @@ __all__ = [
     "OutputConfluentCloudQueueFullBehavior",
     "OutputConfluentCloudRecordDataFormat",
     "OutputConfluentCloudSASLMechanism",
+    "OutputConfluentCloudSchemaType",
     "OutputConfluentCloudTLSSettingsClientSide",
     "OutputConfluentCloudTLSSettingsClientSideTypedDict",
     "OutputConfluentCloudType",
@@ -5038,6 +5048,7 @@ __all__ = [
     "OutputKafkaQueueFullBehavior",
     "OutputKafkaRecordDataFormat",
     "OutputKafkaSASLMechanism",
+    "OutputKafkaSchemaType",
     "OutputKafkaTLSSettingsClientSide",
     "OutputKafkaTLSSettingsClientSideTypedDict",
     "OutputKafkaType",
@@ -5112,6 +5123,7 @@ __all__ = [
     "OutputMskPqControlsTypedDict",
     "OutputMskQueueFullBehavior",
     "OutputMskRecordDataFormat",
+    "OutputMskSchemaType",
     "OutputMskSignatureVersion",
     "OutputMskTLSSettingsClientSide",
     "OutputMskTLSSettingsClientSideTypedDict",
@@ -6048,6 +6060,7 @@ _dynamic_imports: dict[str, str] = {
     "InputConfluentCloudPq": ".inputconfluentcloud",
     "InputConfluentCloudPqTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudSASLMechanism": ".inputconfluentcloud",
+    "InputConfluentCloudSchemaType": ".inputconfluentcloud",
     "InputConfluentCloudTLSSettingsClientSide": ".inputconfluentcloud",
     "InputConfluentCloudTLSSettingsClientSideTypedDict": ".inputconfluentcloud",
     "InputConfluentCloudType": ".inputconfluentcloud",
@@ -6416,6 +6429,7 @@ _dynamic_imports: dict[str, str] = {
     "InputKafkaPq": ".inputkafka",
     "InputKafkaPqTypedDict": ".inputkafka",
     "InputKafkaSASLMechanism": ".inputkafka",
+    "InputKafkaSchemaType": ".inputkafka",
     "InputKafkaTLSSettingsClientSide": ".inputkafka",
     "InputKafkaTLSSettingsClientSideTypedDict": ".inputkafka",
     "InputKafkaType": ".inputkafka",
@@ -6551,6 +6565,7 @@ _dynamic_imports: dict[str, str] = {
     "InputMskMode": ".inputmsk",
     "InputMskPq": ".inputmsk",
     "InputMskPqTypedDict": ".inputmsk",
+    "InputMskSchemaType": ".inputmsk",
     "InputMskSignatureVersion": ".inputmsk",
     "InputMskTLSSettingsClientSide": ".inputmsk",
     "InputMskTLSSettingsClientSideTypedDict": ".inputmsk",
@@ -7290,6 +7305,7 @@ _dynamic_imports: dict[str, str] = {
     "OutputConfluentCloudQueueFullBehavior": ".outputconfluentcloud",
     "OutputConfluentCloudRecordDataFormat": ".outputconfluentcloud",
     "OutputConfluentCloudSASLMechanism": ".outputconfluentcloud",
+    "OutputConfluentCloudSchemaType": ".outputconfluentcloud",
     "OutputConfluentCloudTLSSettingsClientSide": ".outputconfluentcloud",
     "OutputConfluentCloudTLSSettingsClientSideTypedDict": ".outputconfluentcloud",
     "OutputConfluentCloudType": ".outputconfluentcloud",
@@ -7749,6 +7765,7 @@ _dynamic_imports: dict[str, str] = {
     "OutputKafkaQueueFullBehavior": ".outputkafka",
     "OutputKafkaRecordDataFormat": ".outputkafka",
     "OutputKafkaSASLMechanism": ".outputkafka",
+    "OutputKafkaSchemaType": ".outputkafka",
     "OutputKafkaTLSSettingsClientSide": ".outputkafka",
     "OutputKafkaTLSSettingsClientSideTypedDict": ".outputkafka",
     "OutputKafkaType": ".outputkafka",
@@ -7823,6 +7840,7 @@ _dynamic_imports: dict[str, str] = {
     "OutputMskPqControlsTypedDict": ".outputmsk",
     "OutputMskQueueFullBehavior": ".outputmsk",
     "OutputMskRecordDataFormat": ".outputmsk",
+    "OutputMskSchemaType": ".outputmsk",
     "OutputMskSignatureVersion": ".outputmsk",
     "OutputMskTLSSettingsClientSide": ".outputmsk",
     "OutputMskTLSSettingsClientSideTypedDict": ".outputmsk",
@@ -91,8 +91,8 @@ InputTypedDict = TypeAliasType(
         InputSnmpTypedDict,
         InputCriblTCPTypedDict,
         InputNetflowTypedDict,
-        InputGooglePubsubTypedDict,
         InputTcpjsonTypedDict,
+        InputGooglePubsubTypedDict,
         InputOffice365ServiceTypedDict,
         InputTCPTypedDict,
         InputWizTypedDict,
@@ -117,8 +117,8 @@ InputTypedDict = TypeAliasType(
         InputElasticTypedDict,
         InputSplunkHecTypedDict,
         InputOffice365MsgTraceTypedDict,
-        InputLokiTypedDict,
         InputPrometheusRwTypedDict,
+        InputLokiTypedDict,
         InputCrowdstrikeTypedDict,
         InputPrometheusTypedDict,
         InputEdgePrometheusTypedDict,
@@ -156,8 +156,8 @@ Input = TypeAliasType(
         InputSnmp,
         InputCriblTCP,
         InputNetflow,
-        InputGooglePubsub,
         InputTcpjson,
+        InputGooglePubsub,
         InputOffice365Service,
         InputTCP,
         InputWiz,
@@ -182,8 +182,8 @@ Input = TypeAliasType(
         InputElastic,
         InputSplunkHec,
         InputOffice365MsgTrace,
-        InputLoki,
         InputPrometheusRw,
+        InputLoki,
         InputCrowdstrike,
         InputPrometheus,
         InputEdgePrometheus,
@@ -162,6 +162,13 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     ] = None
 
 
+class InputConfluentCloudSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -262,6 +269,8 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -283,6 +292,11 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
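
The new schemaType setting defaults to Avro, so existing configurations keep their current behavior; JSON becomes selectable. A hedged usage sketch follows (not part of the diff; it assumes these names are importable from cribl_control_plane.models, as the __all__ and _dynamic_imports changes above suggest, and shows only keys visible in this diff):

# Hypothetical fragment: select JSON (rather than the default Avro) decoding
# for a Confluent Cloud Source's schema registry.
from cribl_control_plane.models import (  # import path assumed
    InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict,
    InputConfluentCloudSchemaType,
)

registry_auth: InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict = {
    "schema_registry_url": "https://registry.example.com:8081",
    "schema_type": InputConfluentCloudSchemaType.JSON,  # omit to keep AVRO
}

The same SchemaType enum and schemaType field are added below for the Kafka and MSK Sources and for the Confluent Cloud, Kafka, and MSK Destinations.
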
@@ -105,10 +105,8 @@ class InputGooglePubsubMetadatum(BaseModel):
 
 
 class InputGooglePubsubTypedDict(TypedDict):
-    topic_name: str
-    r"""ID of the topic to receive events from"""
     subscription_name: str
-    r"""ID of the subscription to use when receiving events"""
+    r"""ID of the subscription to use when receiving events. When Monitor subscription is enabled, the fully qualified subscription name must be entered. Example: projects/myProject/subscriptions/mySubscription"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
     type: NotRequired[InputGooglePubsubType]
@@ -126,6 +124,10 @@ class InputGooglePubsubTypedDict(TypedDict):
     connections: NotRequired[List[InputGooglePubsubConnectionTypedDict]]
     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
     pq: NotRequired[InputGooglePubsubPqTypedDict]
+    topic_name: NotRequired[str]
+    r"""ID of the topic to receive events from. When Monitor subscription is enabled, any value may be entered."""
+    monitor_subscription: NotRequired[bool]
+    r"""Use when the subscription is not created by this Source and topic is not known"""
     create_topic: NotRequired[bool]
     r"""Create topic if it does not exist"""
     create_subscription: NotRequired[bool]
@@ -152,11 +154,8 @@
 
 
 class InputGooglePubsub(BaseModel):
-    topic_name: Annotated[str, pydantic.Field(alias="topicName")]
-    r"""ID of the topic to receive events from"""
-
     subscription_name: Annotated[str, pydantic.Field(alias="subscriptionName")]
-    r"""ID of the subscription to use when receiving events"""
+    r"""ID of the subscription to use when receiving events. When Monitor subscription is enabled, the fully qualified subscription name must be entered. Example: projects/myProject/subscriptions/mySubscription"""
 
     id: Optional[str] = None
     r"""Unique ID for this input"""
@@ -187,6 +186,14 @@ class InputGooglePubsub(BaseModel):
 
     pq: Optional[InputGooglePubsubPq] = None
 
+    topic_name: Annotated[Optional[str], pydantic.Field(alias="topicName")] = "cribl"
+    r"""ID of the topic to receive events from. When Monitor subscription is enabled, any value may be entered."""
+
+    monitor_subscription: Annotated[
+        Optional[bool], pydantic.Field(alias="monitorSubscription")
+    ] = False
+    r"""Use when the subscription is not created by this Source and topic is not known"""
+
     create_topic: Annotated[Optional[bool], pydantic.Field(alias="createTopic")] = False
     r"""Create topic if it does not exist"""
 
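
Taken together, the Pub/Sub changes make topic_name optional (default "cribl") and add a monitor_subscription toggle for subscriptions that the Source neither creates nor knows the topic of. A hedged sketch (not part of the diff; import path assumed, and it assumes subscription_name is now the model's only required argument, as the hunks above indicate):

# Hypothetical fragment: a Pub/Sub Source that monitors an existing subscription
# instead of creating the topic/subscription itself.
from cribl_control_plane.models import InputGooglePubsub  # import path assumed

source = InputGooglePubsub(
    # Fully qualified name is required when Monitor subscription is enabled.
    subscription_name="projects/myProject/subscriptions/mySubscription",
    monitor_subscription=True,
    # topic_name can be left unset; it now defaults to "cribl".
)
print(source.model_dump(by_alias=True, exclude_none=True))
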
@@ -477,6 +477,8 @@ class InputGrafanaGrafana2TypedDict(TypedDict):
     r"""Absolute path on which to listen for Grafana Agent's Remote Write requests. Defaults to /api/prom/push, which will expand as: 'http://<your‑upstream‑URL>:<your‑port>/api/prom/push'. Either this field or 'Logs API endpoint' must be configured."""
     loki_api: NotRequired[str]
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
+    extract_structured_metadata: NotRequired[bool]
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
     prometheus_auth: NotRequired[InputGrafanaPrometheusAuth2TypedDict]
     loki_auth: NotRequired[InputGrafanaLokiAuth2TypedDict]
     metadata: NotRequired[List[InputGrafanaMetadatum2TypedDict]]
@@ -587,6 +589,11 @@ class InputGrafanaGrafana2(BaseModel):
     )
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
 
+    extract_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="extractStructuredMetadata")
+    ] = False
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
+
     prometheus_auth: Annotated[
         Optional[InputGrafanaPrometheusAuth2], pydantic.Field(alias="prometheusAuth")
     ] = None
@@ -1070,6 +1077,8 @@ class InputGrafanaGrafana1TypedDict(TypedDict):
     r"""Absolute path on which to listen for Grafana Agent's Remote Write requests. Defaults to /api/prom/push, which will expand as: 'http://<your‑upstream‑URL>:<your‑port>/api/prom/push'. Either this field or 'Logs API endpoint' must be configured."""
     loki_api: NotRequired[str]
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
+    extract_structured_metadata: NotRequired[bool]
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
     prometheus_auth: NotRequired[InputGrafanaPrometheusAuth1TypedDict]
     loki_auth: NotRequired[InputGrafanaLokiAuth1TypedDict]
     metadata: NotRequired[List[InputGrafanaMetadatum1TypedDict]]
@@ -1180,6 +1189,11 @@ class InputGrafanaGrafana1(BaseModel):
     )
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'. Either this field or 'Remote Write API endpoint' must be configured."""
 
+    extract_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="extractStructuredMetadata")
+    ] = False
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
+
     prometheus_auth: Annotated[
         Optional[InputGrafanaPrometheusAuth1], pydantic.Field(alias="prometheusAuth")
     ] = None
@@ -83,6 +83,13 @@ class InputKafkaPq(BaseModel):
     r"""Codec to use to compress the persisted data"""
 
 
+class InputKafkaSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -183,6 +190,8 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -202,6 +211,11 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputKafkaSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -258,6 +258,8 @@ class InputLokiTypedDict(TypedDict):
     r"""Messages from matched IP addresses will be ignored. This takes precedence over the allowlist."""
     loki_api: NotRequired[str]
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'."""
+    extract_structured_metadata: NotRequired[bool]
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
     auth_type: NotRequired[InputLokiAuthenticationType]
     r"""Loki logs authentication type"""
     metadata: NotRequired[List[InputLokiMetadatumTypedDict]]
@@ -387,6 +389,11 @@ class InputLoki(BaseModel):
     )
     r"""Absolute path on which to listen for Loki logs requests. Defaults to /loki/api/v1/push, which will (in this example) expand as: 'http://<your‑upstream‑URL>:<your‑port>/loki/api/v1/push'."""
 
+    extract_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="extractStructuredMetadata")
+    ] = False
+    r"""Extract structured metadata from the Loki 3.5.3+ format and place it in the __structuredMetadata field. When disabled, uses legacy Loki parsing for backward compatibility."""
+
     auth_type: Annotated[
         Optional[InputLokiAuthenticationType], pydantic.Field(alias="authType")
     ] = InputLokiAuthenticationType.NONE
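
A hedged config fragment for the new Loki flag (not part of the diff; import path assumed, keys limited to those visible above; the same field is added to both Grafana Source variants earlier in the diff):

# Hypothetical fragment: opt in to Loki 3.5.3+ structured-metadata extraction;
# extracted pairs land in the event's __structuredMetadata field.
from cribl_control_plane.models import InputLokiTypedDict  # import path assumed

loki_source: InputLokiTypedDict = {  # partial fragment; other keys omitted
    "loki_api": "/loki/api/v1/push",
    "extract_structured_metadata": True,  # default False keeps legacy parsing
}
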
@@ -96,6 +96,13 @@ class InputMskMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
 
 
+class InputMskSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -196,6 +203,8 @@ class InputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputMskSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -215,6 +224,11 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[InputMskSchemaType], pydantic.Field(alias="schemaType")
+    ] = InputMskSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -90,25 +90,25 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputSnmpTypedDict,
         OutputNetflowTypedDict,
+        OutputSnmpTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
+        OutputStatsdExtTypedDict,
         OutputGraphiteTypedDict,
         OutputStatsdTypedDict,
-        OutputStatsdExtTypedDict,
         OutputGooglePubsubTypedDict,
         OutputCriblTCPTypedDict,
-        OutputSplunkTypedDict,
         OutputSnsTypedDict,
+        OutputSplunkTypedDict,
         OutputCloudwatchTypedDict,
         OutputSyslogTypedDict,
         OutputAzureEventhubTypedDict,
         OutputWavefrontTypedDict,
         OutputSignalfxTypedDict,
         OutputHoneycombTypedDict,
-        OutputTcpjsonTypedDict,
         OutputSumoLogicTypedDict,
+        OutputTcpjsonTypedDict,
         OutputHumioHecTypedDict,
         OutputElasticCloudTypedDict,
         OutputCrowdstrikeNextGenSiemTypedDict,
@@ -125,15 +125,15 @@ OutputTypedDict = TypeAliasType(
         OutputXsiamTypedDict,
         OutputFilesystemTypedDict,
         OutputDatasetTypedDict,
-        OutputLokiTypedDict,
         OutputSplunkHecTypedDict,
         OutputDynatraceHTTPTypedDict,
         OutputServiceNowTypedDict,
+        OutputLokiTypedDict,
         OutputDynatraceOtlpTypedDict,
-        OutputElasticTypedDict,
         OutputGoogleChronicleTypedDict,
-        OutputCriblLakeTypedDict,
+        OutputElasticTypedDict,
         OutputDatadogTypedDict,
+        OutputCriblLakeTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -161,25 +161,25 @@ Output = TypeAliasType(
         OutputDevnull,
         OutputDefault,
         OutputRouter,
-        OutputSnmp,
         OutputNetflow,
+        OutputSnmp,
         OutputDiskSpool,
         OutputRing,
+        OutputStatsdExt,
         OutputGraphite,
         OutputStatsd,
-        OutputStatsdExt,
         OutputGooglePubsub,
         OutputCriblTCP,
-        OutputSplunk,
         OutputSns,
+        OutputSplunk,
         OutputCloudwatch,
         OutputSyslog,
         OutputAzureEventhub,
         OutputWavefront,
         OutputSignalfx,
         OutputHoneycomb,
-        OutputTcpjson,
         OutputSumoLogic,
+        OutputTcpjson,
         OutputHumioHec,
         OutputElasticCloud,
         OutputCrowdstrikeNextGenSiem,
@@ -196,15 +196,15 @@ Output = TypeAliasType(
         OutputXsiam,
         OutputFilesystem,
         OutputDataset,
-        OutputLoki,
         OutputSplunkHec,
         OutputDynatraceHTTP,
         OutputServiceNow,
+        OutputLoki,
         OutputDynatraceOtlp,
-        OutputElastic,
         OutputGoogleChronicle,
-        OutputCriblLake,
+        OutputElastic,
         OutputDatadog,
+        OutputCriblLake,
         OutputPrometheus,
         OutputMsk,
         OutputSentinelOneAiSiem,
@@ -114,6 +114,13 @@ class OutputConfluentCloudCompression(str, Enum):
     LZ4 = "lz4"
 
 
+class OutputConfluentCloudSchemaType(str, Enum):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class OutputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""
 
@@ -214,6 +221,8 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[OutputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -239,6 +248,11 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
+    schema_type: Annotated[
+        Optional[OutputConfluentCloudSchemaType], pydantic.Field(alias="schemaType")
+    ] = OutputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -161,7 +161,7 @@ class OutputDlS3TypedDict(TypedDict):
     add_id_to_stage_path: NotRequired[bool]
     r"""Add the Output ID value to staging location"""
     dest_path: NotRequired[str]
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
     object_acl: NotRequired[OutputDlS3ObjectACL]
     r"""Object ACL to assign to uploaded objects"""
     storage_class: NotRequired[OutputDlS3StorageClass]
@@ -328,7 +328,7 @@ class OutputDlS3(BaseModel):
     r"""Add the Output ID value to staging location"""
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
 
     object_acl: Annotated[
         Optional[OutputDlS3ObjectACL], pydantic.Field(alias="objectACL")
@@ -132,7 +132,7 @@ class OutputGoogleCloudStorageTypedDict(TypedDict):
     stage_path: NotRequired[str]
     r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
     dest_path: NotRequired[str]
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
     verify_permissions: NotRequired[bool]
     r"""Disable if you can access files within the bucket but not the bucket itself"""
     object_acl: NotRequired[OutputGoogleCloudStorageObjectACL]
@@ -260,7 +260,7 @@ class OutputGoogleCloudStorage(BaseModel):
     r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
 
     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Prefix to append to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
+    r"""Prefix to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myKeyPrefix-${C.vars.myVar}`"""
 
     verify_permissions: Annotated[
         Optional[bool], pydantic.Field(alias="verifyPermissions")
@@ -284,6 +284,8 @@ class OutputGrafanaCloudGrafanaCloud2TypedDict(TypedDict):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
     safe_headers: NotRequired[List[str]]
     r"""List of headers that are safe to log in plain text"""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     response_retry_settings: NotRequired[
         List[OutputGrafanaCloudResponseRetrySetting2TypedDict]
     ]
@@ -416,6 +418,11 @@ class OutputGrafanaCloudGrafanaCloud2(BaseModel):
     ] = None
     r"""List of headers that are safe to log in plain text"""
 
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     response_retry_settings: Annotated[
         Optional[List[OutputGrafanaCloudResponseRetrySetting2]],
         pydantic.Field(alias="responseRetrySettings"),
@@ -753,6 +760,8 @@ class OutputGrafanaCloudGrafanaCloud1TypedDict(TypedDict):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
     safe_headers: NotRequired[List[str]]
     r"""List of headers that are safe to log in plain text"""
+    send_structured_metadata: NotRequired[bool]
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     response_retry_settings: NotRequired[
         List[OutputGrafanaCloudResponseRetrySetting1TypedDict]
     ]
@@ -887,6 +896,11 @@ class OutputGrafanaCloudGrafanaCloud1(BaseModel):
     ] = None
     r"""List of headers that are safe to log in plain text"""
 
+    send_structured_metadata: Annotated[
+        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
+    ] = False
+    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
+
     response_retry_settings: Annotated[
         Optional[List[OutputGrafanaCloudResponseRetrySetting1]],
         pydantic.Field(alias="responseRetrySettings"),
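
On the Destination side, the counterpart flag forwards those values. A hedged fragment for the first Grafana Cloud variant (not part of the diff; import path assumed, keys limited to those visible above):

# Hypothetical fragment: forward __structuredMetadata values (string key-value
# pairs) as Loki structured metadata on each log line.
from cribl_control_plane.models import (  # import path assumed
    OutputGrafanaCloudGrafanaCloud1TypedDict,
)

grafana_dest: OutputGrafanaCloudGrafanaCloud1TypedDict = {  # partial fragment
    "send_structured_metadata": True,  # default False
}
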