cribl-control-plane 0.0.50rc1__py3-none-any.whl → 0.0.50rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of cribl-control-plane has been flagged as potentially problematic.
@@ -103,13 +103,6 @@ class InputKafkaPq(BaseModel):
     ] = None


-class InputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -220,8 +213,6 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputKafkaSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -241,14 +232,6 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

-    schema_type: Annotated[
-        Annotated[
-            Optional[InputKafkaSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = InputKafkaSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -116,13 +116,6 @@ class InputMskMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


-class InputMskSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class InputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -233,8 +226,6 @@ class InputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputMskSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -254,14 +245,6 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

-    schema_type: Annotated[
-        Annotated[
-            Optional[InputMskSchemaType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = InputMskSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -4,14 +4,13 @@ from __future__ import annotations
 from .jobstatus import JobStatus, JobStatusTypedDict
 from .runnablejob import RunnableJob, RunnableJobTypedDict
 from cribl_control_plane.types import BaseModel
-from typing import Dict, Optional
+from typing import Optional
 from typing_extensions import NotRequired, TypedDict


 class JobInfoTypedDict(TypedDict):
     args: RunnableJobTypedDict
     id: str
-    stats: Dict[str, float]
     status: JobStatusTypedDict
     keep: NotRequired[bool]

@@ -21,8 +20,6 @@ class JobInfo(BaseModel):

     id: str

-    stats: Dict[str, float]
-
     status: JobStatus

     keep: Optional[bool] = None
@@ -5,7 +5,7 @@ from .hbcriblinfo import HBCriblInfo, HBCriblInfoTypedDict
 from .heartbeatmetadata import HeartbeatMetadata, HeartbeatMetadataTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
-from typing import Dict, List, Optional, Union
+from typing import List, Optional, Union
 from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict


@@ -19,6 +19,7 @@ class NodeProvidedInfoTags(BaseModel):

 class NodeProvidedInfoAwsTypedDict(TypedDict):
     enabled: bool
+    instance_id: str
     region: str
     type: str
     zone: str
@@ -28,6 +29,8 @@ class NodeProvidedInfoAwsTypedDict(TypedDict):
 class NodeProvidedInfoAws(BaseModel):
     enabled: bool

+    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
+
     region: str

     type: str
@@ -125,7 +128,6 @@ class NodeProvidedInfoTypedDict(TypedDict):
     architecture: str
     cpus: float
     cribl: HBCriblInfoTypedDict
-    env: Dict[str, str]
     free_disk_space: float
     hostname: str
     node: str
@@ -150,8 +152,6 @@ class NodeProvidedInfo(BaseModel):

     cribl: HBCriblInfo

-    env: Dict[str, str]
-
     free_disk_space: Annotated[float, pydantic.Field(alias="freeDiskSpace")]

     hostname: str
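Note: the NodeProvidedInfoAws change above adds instance_id with a pydantic alias, so the heartbeat payload presumably carries it as instanceId on the wire. A minimal, self-contained sketch of how such an aliased field behaves in pydantic v2; the AwsInfoStub class is a stand-in for illustration, not the package's model:

# Stand-in sketch (not the package's actual class) of the aliased instance_id field.
import pydantic
from typing_extensions import Annotated


class AwsInfoStub(pydantic.BaseModel):
    enabled: bool
    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]


# Wire payloads use the camelCase alias; the model exposes snake_case.
info = AwsInfoStub.model_validate({"enabled": True, "instanceId": "i-0abc123def456"})
print(info.instance_id)                # i-0abc123def456
print(info.model_dump(by_alias=True))  # {'enabled': True, 'instanceId': 'i-0abc123def456'}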
@@ -18,6 +18,7 @@ from .outputcrowdstrikenextgensiem import (
     OutputCrowdstrikeNextGenSiem,
     OutputCrowdstrikeNextGenSiemTypedDict,
 )
+from .outputdatabricks import OutputDatabricks, OutputDatabricksTypedDict
 from .outputdatadog import OutputDatadog, OutputDatadogTypedDict
 from .outputdataset import OutputDataset, OutputDatasetTypedDict
 from .outputdefault import OutputDefault, OutputDefaultTypedDict
@@ -80,8 +81,10 @@ from .outputtcpjson import OutputTcpjson, OutputTcpjsonTypedDict
 from .outputwavefront import OutputWavefront, OutputWavefrontTypedDict
 from .outputwebhook import OutputWebhook, OutputWebhookTypedDict
 from .outputxsiam import OutputXsiam, OutputXsiamTypedDict
+from cribl_control_plane.utils import get_discriminator
+from pydantic import Discriminator, Tag
 from typing import Union
-from typing_extensions import TypeAliasType
+from typing_extensions import Annotated, TypeAliasType


 OutputTypedDict = TypeAliasType(
@@ -90,31 +93,32 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputSnmpTypedDict,
         OutputNetflowTypedDict,
+        OutputSnmpTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
-        OutputStatsdExtTypedDict,
         OutputGraphiteTypedDict,
+        OutputStatsdExtTypedDict,
         OutputStatsdTypedDict,
         OutputGooglePubsubTypedDict,
-        OutputCriblTCPTypedDict,
         OutputSplunkTypedDict,
+        OutputCriblTCPTypedDict,
         OutputSnsTypedDict,
         OutputCloudwatchTypedDict,
         OutputAzureEventhubTypedDict,
-        OutputWavefrontTypedDict,
         OutputSignalfxTypedDict,
+        OutputWavefrontTypedDict,
         OutputHoneycombTypedDict,
-        OutputSumoLogicTypedDict,
-        OutputCrowdstrikeNextGenSiemTypedDict,
         OutputHumioHecTypedDict,
         OutputTcpjsonTypedDict,
+        OutputSumoLogicTypedDict,
+        OutputCrowdstrikeNextGenSiemTypedDict,
         OutputElasticCloudTypedDict,
-        OutputKinesisTypedDict,
-        OutputConfluentCloudTypedDict,
-        OutputKafkaTypedDict,
         OutputExabeamTypedDict,
+        OutputKafkaTypedDict,
+        OutputConfluentCloudTypedDict,
+        OutputKinesisTypedDict,
+        OutputDatabricksTypedDict,
         OutputNewrelicEventsTypedDict,
         OutputAzureLogsTypedDict,
         OutputSplunkLbTypedDict,
@@ -130,10 +134,10 @@ OutputTypedDict = TypeAliasType(
         OutputDynatraceHTTPTypedDict,
         OutputServiceNowTypedDict,
         OutputDynatraceOtlpTypedDict,
-        OutputElasticTypedDict,
         OutputGoogleChronicleTypedDict,
-        OutputCriblLakeTypedDict,
+        OutputElasticTypedDict,
         OutputDatadogTypedDict,
+        OutputCriblLakeTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -155,72 +159,73 @@ OutputTypedDict = TypeAliasType(
 )


-Output = TypeAliasType(
-    "Output",
+Output = Annotated[
     Union[
-        OutputDevnull,
-        OutputDefault,
-        OutputRouter,
-        OutputSnmp,
-        OutputNetflow,
-        OutputDiskSpool,
-        OutputRing,
-        OutputStatsdExt,
-        OutputGraphite,
-        OutputStatsd,
-        OutputGooglePubsub,
-        OutputCriblTCP,
-        OutputSplunk,
-        OutputSns,
-        OutputCloudwatch,
-        OutputAzureEventhub,
-        OutputWavefront,
-        OutputSignalfx,
-        OutputHoneycomb,
-        OutputSumoLogic,
-        OutputCrowdstrikeNextGenSiem,
-        OutputHumioHec,
-        OutputTcpjson,
-        OutputElasticCloud,
-        OutputKinesis,
-        OutputConfluentCloud,
-        OutputKafka,
-        OutputExabeam,
-        OutputNewrelicEvents,
-        OutputAzureLogs,
-        OutputSplunkLb,
-        OutputSyslog,
-        OutputSqs,
-        OutputNewrelic,
-        OutputCriblHTTP,
-        OutputXsiam,
-        OutputFilesystem,
-        OutputDataset,
-        OutputLoki,
-        OutputSplunkHec,
-        OutputDynatraceHTTP,
-        OutputServiceNow,
-        OutputDynatraceOtlp,
-        OutputElastic,
-        OutputGoogleChronicle,
-        OutputCriblLake,
-        OutputDatadog,
-        OutputPrometheus,
-        OutputMsk,
-        OutputSentinelOneAiSiem,
-        OutputSentinel,
-        OutputInfluxdb,
-        OutputGoogleCloudStorage,
-        OutputAzureBlob,
-        OutputOpenTelemetry,
-        OutputMinio,
-        OutputClickHouse,
-        OutputSecurityLake,
-        OutputDlS3,
-        OutputS3,
-        OutputWebhook,
-        OutputAzureDataExplorer,
-        OutputGoogleCloudLogging,
-        OutputGrafanaCloud,
+        Annotated[OutputDefault, Tag("default")],
+        Annotated[OutputWebhook, Tag("webhook")],
+        Annotated[OutputSentinel, Tag("sentinel")],
+        Annotated[OutputDevnull, Tag("devnull")],
+        Annotated[OutputSyslog, Tag("syslog")],
+        Annotated[OutputSplunk, Tag("splunk")],
+        Annotated[OutputSplunkLb, Tag("splunk_lb")],
+        Annotated[OutputSplunkHec, Tag("splunk_hec")],
+        Annotated[OutputTcpjson, Tag("tcpjson")],
+        Annotated[OutputWavefront, Tag("wavefront")],
+        Annotated[OutputSignalfx, Tag("signalfx")],
+        Annotated[OutputFilesystem, Tag("filesystem")],
+        Annotated[OutputS3, Tag("s3")],
+        Annotated[OutputAzureBlob, Tag("azure_blob")],
+        Annotated[OutputAzureDataExplorer, Tag("azure_data_explorer")],
+        Annotated[OutputAzureLogs, Tag("azure_logs")],
+        Annotated[OutputKinesis, Tag("kinesis")],
+        Annotated[OutputHoneycomb, Tag("honeycomb")],
+        Annotated[OutputAzureEventhub, Tag("azure_eventhub")],
+        Annotated[OutputGoogleChronicle, Tag("google_chronicle")],
+        Annotated[OutputGoogleCloudStorage, Tag("google_cloud_storage")],
+        Annotated[OutputGoogleCloudLogging, Tag("google_cloud_logging")],
+        Annotated[OutputGooglePubsub, Tag("google_pubsub")],
+        Annotated[OutputExabeam, Tag("exabeam")],
+        Annotated[OutputKafka, Tag("kafka")],
+        Annotated[OutputConfluentCloud, Tag("confluent_cloud")],
+        Annotated[OutputMsk, Tag("msk")],
+        Annotated[OutputElastic, Tag("elastic")],
+        Annotated[OutputElasticCloud, Tag("elastic_cloud")],
+        Annotated[OutputNewrelic, Tag("newrelic")],
+        Annotated[OutputNewrelicEvents, Tag("newrelic_events")],
+        Annotated[OutputInfluxdb, Tag("influxdb")],
+        Annotated[OutputCloudwatch, Tag("cloudwatch")],
+        Annotated[OutputMinio, Tag("minio")],
+        Annotated[OutputStatsd, Tag("statsd")],
+        Annotated[OutputStatsdExt, Tag("statsd_ext")],
+        Annotated[OutputGraphite, Tag("graphite")],
+        Annotated[OutputRouter, Tag("router")],
+        Annotated[OutputSns, Tag("sns")],
+        Annotated[OutputSqs, Tag("sqs")],
+        Annotated[OutputSnmp, Tag("snmp")],
+        Annotated[OutputSumoLogic, Tag("sumo_logic")],
+        Annotated[OutputDatadog, Tag("datadog")],
+        Annotated[OutputGrafanaCloud, Tag("grafana_cloud")],
+        Annotated[OutputLoki, Tag("loki")],
+        Annotated[OutputPrometheus, Tag("prometheus")],
+        Annotated[OutputRing, Tag("ring")],
+        Annotated[OutputOpenTelemetry, Tag("open_telemetry")],
+        Annotated[OutputServiceNow, Tag("service_now")],
+        Annotated[OutputDataset, Tag("dataset")],
+        Annotated[OutputCriblTCP, Tag("cribl_tcp")],
+        Annotated[OutputCriblHTTP, Tag("cribl_http")],
+        Annotated[OutputHumioHec, Tag("humio_hec")],
+        Annotated[OutputCrowdstrikeNextGenSiem, Tag("crowdstrike_next_gen_siem")],
+        Annotated[OutputDlS3, Tag("dl_s3")],
+        Annotated[OutputSecurityLake, Tag("security_lake")],
+        Annotated[OutputCriblLake, Tag("cribl_lake")],
+        Annotated[OutputDiskSpool, Tag("disk_spool")],
+        Annotated[OutputClickHouse, Tag("click_house")],
+        Annotated[OutputXsiam, Tag("xsiam")],
+        Annotated[OutputNetflow, Tag("netflow")],
+        Annotated[OutputDynatraceHTTP, Tag("dynatrace_http")],
+        Annotated[OutputDynatraceOtlp, Tag("dynatrace_otlp")],
+        Annotated[OutputSentinelOneAiSiem, Tag("sentinel_one_ai_siem")],
+        Annotated[OutputDatabricks, Tag("databricks")],
     ],
-)
+    Discriminator(lambda m: get_discriminator(m, "type", "type")),
+]
@@ -123,13 +123,6 @@ class OutputConfluentCloudCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     LZ4 = "lz4"


-class OutputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
-
-
 class OutputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -240,8 +233,6 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputConfluentCloudSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -267,15 +258,6 @@ class OutputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

-    schema_type: Annotated[
-        Annotated[
-            Optional[OutputConfluentCloudSchemaType],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="schemaType"),
-    ] = OutputConfluentCloudSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
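Note: all of the schema_type removals in this release follow the same shape: a str Enum using the SDK's OpenEnumMeta, wrapped in PlainValidator(validate_open_enum(False)), is dropped from the schema-registry models. A rough, self-contained reconstruction of what that open-enum field pattern did; validate_open_enum here is a guess at the helper's behavior (accept known members, pass unknown strings through), not the package's implementation:

# Reconstruction sketch of the removed "open enum" field pattern, built on plain
# pydantic v2. SchemaTypeStub, RegistryAuthStub, and this validate_open_enum are
# illustrative stand-ins only.
from enum import Enum
from typing import Any, Callable, Optional, Union

import pydantic
from pydantic import PlainValidator
from typing_extensions import Annotated


class SchemaTypeStub(str, Enum):
    AVRO = "avro"
    JSON = "json"


def validate_open_enum(int_enum: bool) -> Callable[[Any], Any]:
    # The int_enum flag is unused in this stub; the real helper's signature is
    # only known from the call site in the deleted code.
    def validate(value: Any) -> Union[SchemaTypeStub, Any]:
        try:
            return SchemaTypeStub(value)   # known member -> enum value
        except ValueError:
            return value                   # unknown value -> passed through as-is

    return validate


class RegistryAuthStub(pydantic.BaseModel):
    schema_type: Annotated[
        Annotated[Optional[SchemaTypeStub], PlainValidator(validate_open_enum(False))],
        pydantic.Field(alias="schemaType"),
    ] = SchemaTypeStub.AVRO


print(RegistryAuthStub.model_validate({"schemaType": "json"}).schema_type)      # SchemaTypeStub.JSON
print(RegistryAuthStub.model_validate({"schemaType": "protobuf"}).schema_type)  # protobuf (unknown value kept)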