cribl-control-plane 0.1.0b2-py3-none-any.whl → 0.2.0a1-py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Potentially problematic release: this version of cribl-control-plane might be problematic.
- cribl_control_plane/_hooks/clientcredentials.py +91 -41
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/errors/apierror.py +1 -1
- cribl_control_plane/errors/criblcontrolplaneerror.py +1 -1
- cribl_control_plane/errors/error.py +1 -1
- cribl_control_plane/errors/healthstatus_error.py +1 -1
- cribl_control_plane/errors/no_response_error.py +1 -1
- cribl_control_plane/errors/responsevalidationerror.py +1 -1
- cribl_control_plane/groups_sdk.py +4 -4
- cribl_control_plane/httpclient.py +0 -1
- cribl_control_plane/lakedatasets.py +12 -12
- cribl_control_plane/models/__init__.py +106 -42
- cribl_control_plane/models/appmode.py +14 -0
- cribl_control_plane/models/configgroup.py +2 -17
- cribl_control_plane/models/cribllakedatasetupdate.py +81 -0
- cribl_control_plane/models/gitinfo.py +14 -3
- cribl_control_plane/models/hbcriblinfo.py +3 -14
- cribl_control_plane/models/heartbeatmetadata.py +0 -3
- cribl_control_plane/models/inputconfluentcloud.py +18 -0
- cribl_control_plane/models/inputkafka.py +17 -0
- cribl_control_plane/models/inputmsk.py +17 -0
- cribl_control_plane/models/inputsqs.py +8 -10
- cribl_control_plane/models/nodeprovidedinfo.py +0 -3
- cribl_control_plane/models/output.py +25 -25
- cribl_control_plane/models/outputchronicle.py +431 -0
- cribl_control_plane/models/outputconfluentcloud.py +18 -0
- cribl_control_plane/models/outputgooglechronicle.py +5 -4
- cribl_control_plane/models/outputgooglecloudlogging.py +9 -4
- cribl_control_plane/models/outputkafka.py +17 -0
- cribl_control_plane/models/outputmsk.py +17 -0
- cribl_control_plane/models/outputsqs.py +8 -10
- cribl_control_plane/models/routecloneconf.py +13 -0
- cribl_control_plane/models/routeconf.py +4 -3
- cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +9 -5
- {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/METADATA +1 -8
- {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/RECORD +37 -34
- cribl_control_plane/models/outputdatabricks.py +0 -282
- {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/heartbeatmetadata.py

@@ -17,7 +17,6 @@ class HeartbeatMetadataTags(BaseModel):

 class HeartbeatMetadataAwsTypedDict(TypedDict):
     enabled: bool
-    instance_id: str
     region: str
     type: str
     zone: str
@@ -27,8 +26,6 @@ class HeartbeatMetadataAwsTypedDict(TypedDict):
 class HeartbeatMetadataAws(BaseModel):
     enabled: bool

-    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
-
     region: str

     type: str
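Both heartbeat hunks above (and the matching nodeprovidedinfo.py hunks further down) drop instance_id, a breaking change for any caller that populated the field. A minimal sketch of the 0.2.0a1 shape, assuming the models are re-exported from cribl_control_plane.models as is usual for this generated SDK:

    from cribl_control_plane.models import HeartbeatMetadataAws  # assumed re-export

    # 0.1.0b2 carried instance_id (wire alias "instanceId"); 0.2.0a1 removes the
    # field from both the TypedDict and the pydantic model.
    aws = HeartbeatMetadataAws(
        enabled=True,
        region="us-east-1",  # illustrative values; field names come from the diff
        type="t3.large",
        zone="us-east-1a",
    )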
cribl_control_plane/models/inputconfluentcloud.py

@@ -187,6 +187,13 @@ class InputConfluentCloudTLSSettingsClientSide(BaseModel):
     ] = None


+class InputConfluentCloudSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputConfluentCloudAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -297,6 +304,8 @@ class InputConfluentCloudKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputConfluentCloudSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -318,6 +327,15 @@ class InputConfluentCloudKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Annotated[
+            Optional[InputConfluentCloudSchemaType],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = InputConfluentCloudSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
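The same schema_type field lands with identical shape in inputkafka.py and inputmsk.py below, and (per the file list) in the matching output models. A usage sketch, assuming the generated pydantic models accept Python field names as well as wire aliases (populate_by_name is the norm in this SDK):

    from cribl_control_plane.models import (  # assumed re-exports
        InputConfluentCloudKafkaSchemaRegistryAuthentication,
        InputConfluentCloudSchemaType,
    )

    # schema_type defaults to AVRO; JSON is the only other declared value.
    auth = InputConfluentCloudKafkaSchemaRegistryAuthentication(
        schema_registry_url="https://registry.example.com:8081",
        schema_type=InputConfluentCloudSchemaType.JSON,
    )

Because the enum is declared with utils.OpenEnumMeta and validated through PlainValidator(validate_open_enum(False)), an unrecognized registry value coming back from the API should deserialize as an open-enum value rather than raising, which is the usual contract for open enums in Speakeasy-generated SDKs.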
cribl_control_plane/models/inputkafka.py

@@ -103,6 +103,13 @@ class InputKafkaPq(BaseModel):
     ] = None


+class InputKafkaSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputKafkaAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -213,6 +220,8 @@ class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputKafkaSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -232,6 +241,14 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Annotated[
+            Optional[InputKafkaSchemaType], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = InputKafkaSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/inputmsk.py

@@ -116,6 +116,13 @@ class InputMskMetadatum(BaseModel):
     r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


+class InputMskSchemaType(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""The schema format used to encode and decode event data"""
+
+    AVRO = "avro"
+    JSON = "json"
+
+
 class InputMskAuthTypedDict(TypedDict):
     r"""Credentials to use when authenticating with the schema registry using basic HTTP authentication"""

@@ -226,6 +233,8 @@ class InputMskKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
+    schema_type: NotRequired[InputMskSchemaType]
+    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -245,6 +254,14 @@ class InputMskKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""

+    schema_type: Annotated[
+        Annotated[
+            Optional[InputMskSchemaType], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="schemaType"),
+    ] = InputMskSchemaType.AVRO
+    r"""The schema format used to encode and decode event data"""
+
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
cribl_control_plane/models/inputsqs.py

@@ -142,6 +142,8 @@ class InputSqsTypedDict(TypedDict):
     type: InputSqsType
     queue_name: str
     r"""The name, URL, or ARN of the SQS queue to read events from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can only be evaluated at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""
+    queue_type: InputSqsQueueType
+    r"""The queue type used (or created)"""
     id: NotRequired[str]
     r"""Unique ID for this input"""
     disabled: NotRequired[bool]
@@ -158,8 +160,6 @@ class InputSqsTypedDict(TypedDict):
     connections: NotRequired[List[InputSqsConnectionTypedDict]]
     r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
     pq: NotRequired[InputSqsPqTypedDict]
-    queue_type: NotRequired[InputSqsQueueType]
-    r"""The queue type used (or created)"""
     aws_account_id: NotRequired[str]
     r"""SQS queue owner's AWS account ID. Leave empty if SQS queue is in same AWS account."""
     create_queue: NotRequired[bool]
@@ -207,6 +207,12 @@ class InputSqs(BaseModel):
     queue_name: Annotated[str, pydantic.Field(alias="queueName")]
     r"""The name, URL, or ARN of the SQS queue to read events from. When a non-AWS URL is specified, format must be: '{url}/myQueueName'. Example: 'https://host:port/myQueueName'. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can only be evaluated at init time. Example referencing a Global Variable: `https://host:port/myQueue-${C.vars.myVar}`."""

+    queue_type: Annotated[
+        Annotated[InputSqsQueueType, PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="queueType"),
+    ]
+    r"""The queue type used (or created)"""
+
     id: Optional[str] = None
     r"""Unique ID for this input"""

@@ -234,14 +240,6 @@ class InputSqs(BaseModel):

     pq: Optional[InputSqsPq] = None

-    queue_type: Annotated[
-        Annotated[
-            Optional[InputSqsQueueType], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="queueType"),
-    ] = InputSqsQueueType.STANDARD
-    r"""The queue type used (or created)"""
-
     aws_account_id: Annotated[Optional[str], pydantic.Field(alias="awsAccountId")] = (
         None
     )
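Net effect of the four inputsqs.py hunks: queue_type moves from the optional block, where it defaulted to InputSqsQueueType.STANDARD, into the required block, so constructing an InputSqs without it now fails validation. This is a breaking change hidden in a small diff. A hedged sketch, assuming keyword construction and the usual model re-exports:

    from cribl_control_plane.models import InputSqs, InputSqsQueueType  # assumed

    src = InputSqs(
        type="sqs",                                 # coerced to InputSqsType
        queue_name="`https://host:port/myQueue`",   # JS expression, per the docstring
        queue_type=InputSqsQueueType.STANDARD,      # required in 0.2.0a1; was the default
    )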
cribl_control_plane/models/nodeprovidedinfo.py

@@ -19,7 +19,6 @@ class NodeProvidedInfoTags(BaseModel):

 class NodeProvidedInfoAwsTypedDict(TypedDict):
     enabled: bool
-    instance_id: str
     region: str
     type: str
     zone: str
@@ -29,8 +28,6 @@ class NodeProvidedInfoAwsTypedDict(TypedDict):
 class NodeProvidedInfoAws(BaseModel):
     enabled: bool

-    instance_id: Annotated[str, pydantic.Field(alias="instanceId")]
-
     region: str

     type: str
cribl_control_plane/models/output.py

@@ -8,6 +8,7 @@ from .outputazuredataexplorer import (
 )
 from .outputazureeventhub import OutputAzureEventhub, OutputAzureEventhubTypedDict
 from .outputazurelogs import OutputAzureLogs, OutputAzureLogsTypedDict
+from .outputchronicle import OutputChronicle, OutputChronicleTypedDict
 from .outputclickhouse import OutputClickHouse, OutputClickHouseTypedDict
 from .outputcloudwatch import OutputCloudwatch, OutputCloudwatchTypedDict
 from .outputconfluentcloud import OutputConfluentCloud, OutputConfluentCloudTypedDict
@@ -18,7 +19,6 @@ from .outputcrowdstrikenextgensiem import (
     OutputCrowdstrikeNextGenSiem,
     OutputCrowdstrikeNextGenSiemTypedDict,
 )
-from .outputdatabricks import OutputDatabricks, OutputDatabricksTypedDict
 from .outputdatadog import OutputDatadog, OutputDatadogTypedDict
 from .outputdataset import OutputDataset, OutputDatasetTypedDict
 from .outputdefault import OutputDefault, OutputDefaultTypedDict
@@ -91,32 +91,31 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputNetflowTypedDict,
         OutputSnmpTypedDict,
+        OutputNetflowTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
-        OutputGraphiteTypedDict,
         OutputStatsdExtTypedDict,
+        OutputGraphiteTypedDict,
         OutputStatsdTypedDict,
         OutputGooglePubsubTypedDict,
-        OutputSplunkTypedDict,
         OutputCriblTCPTypedDict,
+        OutputSplunkTypedDict,
         OutputSnsTypedDict,
         OutputCloudwatchTypedDict,
         OutputAzureEventhubTypedDict,
-        OutputSignalfxTypedDict,
         OutputWavefrontTypedDict,
+        OutputSignalfxTypedDict,
         OutputHoneycombTypedDict,
-        OutputHumioHecTypedDict,
-        OutputTcpjsonTypedDict,
         OutputSumoLogicTypedDict,
         OutputCrowdstrikeNextGenSiemTypedDict,
+        OutputHumioHecTypedDict,
+        OutputTcpjsonTypedDict,
         OutputElasticCloudTypedDict,
-        OutputExabeamTypedDict,
-        OutputKafkaTypedDict,
-        OutputConfluentCloudTypedDict,
         OutputKinesisTypedDict,
-
+        OutputConfluentCloudTypedDict,
+        OutputKafkaTypedDict,
+        OutputExabeamTypedDict,
         OutputNewrelicEventsTypedDict,
         OutputAzureLogsTypedDict,
         OutputSplunkLbTypedDict,
@@ -131,11 +130,12 @@ OutputTypedDict = TypeAliasType(
         OutputSplunkHecTypedDict,
         OutputDynatraceHTTPTypedDict,
         OutputServiceNowTypedDict,
+        OutputChronicleTypedDict,
         OutputDynatraceOtlpTypedDict,
-        OutputGoogleChronicleTypedDict,
         OutputElasticTypedDict,
-
+        OutputGoogleChronicleTypedDict,
         OutputCriblLakeTypedDict,
+        OutputDatadogTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -163,32 +163,31 @@ Output = TypeAliasType(
         OutputDevnull,
         OutputDefault,
         OutputRouter,
-        OutputNetflow,
         OutputSnmp,
+        OutputNetflow,
         OutputDiskSpool,
         OutputRing,
-        OutputGraphite,
         OutputStatsdExt,
+        OutputGraphite,
         OutputStatsd,
         OutputGooglePubsub,
-        OutputSplunk,
         OutputCriblTCP,
+        OutputSplunk,
         OutputSns,
         OutputCloudwatch,
         OutputAzureEventhub,
-        OutputSignalfx,
         OutputWavefront,
+        OutputSignalfx,
         OutputHoneycomb,
-        OutputHumioHec,
-        OutputTcpjson,
         OutputSumoLogic,
         OutputCrowdstrikeNextGenSiem,
+        OutputHumioHec,
+        OutputTcpjson,
         OutputElasticCloud,
-        OutputExabeam,
-        OutputKafka,
-        OutputConfluentCloud,
         OutputKinesis,
-
+        OutputConfluentCloud,
+        OutputKafka,
+        OutputExabeam,
         OutputNewrelicEvents,
         OutputAzureLogs,
         OutputSplunkLb,
@@ -203,11 +202,12 @@ Output = TypeAliasType(
         OutputSplunkHec,
         OutputDynatraceHTTP,
         OutputServiceNow,
+        OutputChronicle,
         OutputDynatraceOtlp,
-        OutputGoogleChronicle,
         OutputElastic,
-
+        OutputGoogleChronicle,
         OutputCriblLake,
+        OutputDatadog,
         OutputPrometheus,
         OutputMsk,
         OutputSentinelOneAiSiem,
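Most of the output.py churn re-sorts the OutputTypedDict/Output unions, but the substantive changes are: OutputChronicle (the new 431-line model, distinct from the pre-existing OutputGoogleChronicle) enters the unions, OutputDatadog now appears in the shown union lists (its import already existed), and OutputDatabricks is gone along with its module. A hedged migration sketch, assuming the union members are re-exported from cribl_control_plane.models:

    from cribl_control_plane.models import Output, OutputChronicle  # assumed re-exports

    def describe_destination(output: Output) -> str:
        # 0.1.0b2 code matching OutputDatabricks no longer imports, since
        # outputdatabricks.py was deleted (-282 lines).
        if isinstance(output, OutputChronicle):
            return "chronicle"
        return type(output).__name__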