cribl-control-plane 0.0.41__py3-none-any.whl → 0.0.43__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cribl-control-plane might be problematic.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/commits.py +2 -2
- cribl_control_plane/models/__init__.py +378 -0
- cribl_control_plane/models/createroutesappendbyidop.py +2 -2
- cribl_control_plane/models/createversionrevertop.py +2 -2
- cribl_control_plane/models/deletepipelinebyidop.py +2 -2
- cribl_control_plane/models/getpipelinebyidop.py +2 -2
- cribl_control_plane/models/getroutesbyidop.py +2 -2
- cribl_control_plane/models/input.py +2 -2
- cribl_control_plane/models/inputappscope.py +13 -0
- cribl_control_plane/models/inputazureblob.py +13 -0
- cribl_control_plane/models/inputcollection.py +13 -0
- cribl_control_plane/models/inputconfluentcloud.py +13 -0
- cribl_control_plane/models/inputcribl.py +13 -0
- cribl_control_plane/models/inputcriblhttp.py +13 -0
- cribl_control_plane/models/inputcribllakehttp.py +38 -4
- cribl_control_plane/models/inputcriblmetrics.py +13 -0
- cribl_control_plane/models/inputcribltcp.py +13 -0
- cribl_control_plane/models/inputcrowdstrike.py +13 -0
- cribl_control_plane/models/inputdatadogagent.py +13 -0
- cribl_control_plane/models/inputdatagen.py +13 -0
- cribl_control_plane/models/inputedgeprometheus.py +13 -0
- cribl_control_plane/models/inputelastic.py +13 -0
- cribl_control_plane/models/inputeventhub.py +13 -0
- cribl_control_plane/models/inputexec.py +13 -0
- cribl_control_plane/models/inputfile.py +13 -0
- cribl_control_plane/models/inputfirehose.py +13 -0
- cribl_control_plane/models/inputgooglepubsub.py +13 -0
- cribl_control_plane/models/inputgrafana.py +26 -14
- cribl_control_plane/models/inputhttp.py +13 -0
- cribl_control_plane/models/inputhttpraw.py +13 -0
- cribl_control_plane/models/inputjournalfiles.py +13 -0
- cribl_control_plane/models/inputkafka.py +13 -0
- cribl_control_plane/models/inputkinesis.py +13 -0
- cribl_control_plane/models/inputkubeevents.py +13 -0
- cribl_control_plane/models/inputkubelogs.py +13 -0
- cribl_control_plane/models/inputkubemetrics.py +13 -0
- cribl_control_plane/models/inputloki.py +13 -7
- cribl_control_plane/models/inputmetrics.py +13 -0
- cribl_control_plane/models/inputmodeldriventelemetry.py +14 -0
- cribl_control_plane/models/inputmsk.py +13 -0
- cribl_control_plane/models/inputnetflow.py +13 -0
- cribl_control_plane/models/inputoffice365mgmt.py +13 -0
- cribl_control_plane/models/inputoffice365msgtrace.py +13 -0
- cribl_control_plane/models/inputoffice365service.py +13 -0
- cribl_control_plane/models/inputopentelemetry.py +13 -0
- cribl_control_plane/models/inputprometheus.py +13 -0
- cribl_control_plane/models/inputprometheusrw.py +13 -0
- cribl_control_plane/models/inputrawudp.py +13 -0
- cribl_control_plane/models/inputs3.py +13 -0
- cribl_control_plane/models/inputs3inventory.py +13 -0
- cribl_control_plane/models/inputsecuritylake.py +13 -0
- cribl_control_plane/models/inputsnmp.py +13 -0
- cribl_control_plane/models/inputsplunk.py +13 -0
- cribl_control_plane/models/inputsplunkhec.py +13 -0
- cribl_control_plane/models/inputsplunksearch.py +13 -0
- cribl_control_plane/models/inputsqs.py +13 -0
- cribl_control_plane/models/inputsyslog.py +26 -0
- cribl_control_plane/models/inputsystemmetrics.py +13 -0
- cribl_control_plane/models/inputsystemstate.py +13 -0
- cribl_control_plane/models/inputtcp.py +13 -0
- cribl_control_plane/models/inputtcpjson.py +13 -0
- cribl_control_plane/models/inputwef.py +13 -0
- cribl_control_plane/models/inputwindowsmetrics.py +13 -0
- cribl_control_plane/models/inputwineventlogs.py +13 -0
- cribl_control_plane/models/inputwiz.py +13 -0
- cribl_control_plane/models/inputwizwebhook.py +13 -0
- cribl_control_plane/models/inputzscalerhec.py +13 -0
- cribl_control_plane/models/output.py +14 -14
- cribl_control_plane/models/outputgooglechronicle.py +2 -2
- cribl_control_plane/models/outputgrafanacloud.py +0 -14
- cribl_control_plane/models/outputloki.py +0 -7
- cribl_control_plane/models/updatepipelinebyidop.py +6 -6
- cribl_control_plane/models/updateroutesbyidop.py +2 -2
- cribl_control_plane/pipelines.py +18 -18
- cribl_control_plane/routes_sdk.py +22 -22
- {cribl_control_plane-0.0.41.dist-info → cribl_control_plane-0.0.43.dist-info}/METADATA +6 -6
- {cribl_control_plane-0.0.41.dist-info → cribl_control_plane-0.0.43.dist-info}/RECORD +79 -79
- {cribl_control_plane-0.0.41.dist-info → cribl_control_plane-0.0.43.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputsyslog.py

@@ -37,6 +37,14 @@ class InputSyslogCompression2(str, Enum):
     GZIP = "gzip"
 
 
+class InputSyslogPqControls2TypedDict(TypedDict):
+    pass
+
+
+class InputSyslogPqControls2(BaseModel):
+    pass
+
+
 class InputSyslogPq2TypedDict(TypedDict):
     mode: NotRequired[InputSyslogMode2]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputSyslogPq2TypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputSyslogCompression2]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputSyslogPqControls2TypedDict]
 
 
 class InputSyslogPq2(BaseModel):
@@ -82,6 +91,10 @@ class InputSyslogPq2(BaseModel):
     compress: Optional[InputSyslogCompression2] = InputSyslogCompression2.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputSyslogPqControls2], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputSyslogMinimumTLSVersion2(str, Enum):
     TL_SV1 = "TLSv1"
@@ -395,6 +408,14 @@ class InputSyslogCompression1(str, Enum):
     GZIP = "gzip"
 
 
+class InputSyslogPqControls1TypedDict(TypedDict):
+    pass
+
+
+class InputSyslogPqControls1(BaseModel):
+    pass
+
+
 class InputSyslogPq1TypedDict(TypedDict):
     mode: NotRequired[InputSyslogMode1]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -410,6 +431,7 @@ class InputSyslogPq1TypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputSyslogCompression1]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputSyslogPqControls1TypedDict]
 
 
 class InputSyslogPq1(BaseModel):
@@ -440,6 +462,10 @@ class InputSyslogPq1(BaseModel):
     compress: Optional[InputSyslogCompression1] = InputSyslogCompression1.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputSyslogPqControls1], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputSyslogMinimumTLSVersion1(str, Enum):
     TL_SV1 = "TLSv1"
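The same pair of empty `pqControls` placeholder classes and the optional `pq_controls` field is added to every input model that follows. A minimal sketch of how the new field surfaces to SDK users, using only the generated names visible in the inputsyslog.py hunks above and assuming a pydantic v2 runtime with the model's other persistent-queue fields left at their generated defaults:

    # Illustrative sketch only; names and module path come from the diff above.
    from cribl_control_plane.models.inputsyslog import (
        InputSyslogPq2,
        InputSyslogPqControls2,
    )

    # The new field is optional and defaults to None; it is populated via its
    # wire-format alias "pqControls".
    pq = InputSyslogPq2(pqControls=InputSyslogPqControls2())

    # Dumping by alias shows the key the API payload would carry.
    print(pq.model_dump(by_alias=True, exclude_none=True))

Because the `PqControls` models are empty (`pass`) in this release, the field mainly reserves a place in the schema; passing it is never required.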
cribl_control_plane/models/inputsystemmetrics.py

@@ -37,6 +37,14 @@ class InputSystemMetricsCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputSystemMetricsPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputSystemMetricsPqControls(BaseModel):
+    pass
+
+
 class InputSystemMetricsPqTypedDict(TypedDict):
     mode: NotRequired[InputSystemMetricsPqMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputSystemMetricsPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputSystemMetricsCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputSystemMetricsPqControlsTypedDict]
 
 
 class InputSystemMetricsPq(BaseModel):
@@ -84,6 +93,10 @@ class InputSystemMetricsPq(BaseModel):
     )
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputSystemMetricsPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputSystemMetricsHostMode(str, Enum):
     r"""Select level of detail for host metrics"""
cribl_control_plane/models/inputsystemstate.py

@@ -37,6 +37,14 @@ class InputSystemStateCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputSystemStatePqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputSystemStatePqControls(BaseModel):
+    pass
+
+
 class InputSystemStatePqTypedDict(TypedDict):
     mode: NotRequired[InputSystemStateMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputSystemStatePqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputSystemStateCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputSystemStatePqControlsTypedDict]
 
 
 class InputSystemStatePq(BaseModel):
@@ -82,6 +91,10 @@ class InputSystemStatePq(BaseModel):
     compress: Optional[InputSystemStateCompression] = InputSystemStateCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputSystemStatePqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputSystemStateMetadatumTypedDict(TypedDict):
     name: str
cribl_control_plane/models/inputtcp.py

@@ -37,6 +37,14 @@ class InputTCPCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputTCPPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputTCPPqControls(BaseModel):
+    pass
+
+
 class InputTCPPqTypedDict(TypedDict):
     mode: NotRequired[InputTCPMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputTCPPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputTCPCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputTCPPqControlsTypedDict]
 
 
 class InputTCPPq(BaseModel):
@@ -82,6 +91,10 @@ class InputTCPPq(BaseModel):
     compress: Optional[InputTCPCompression] = InputTCPCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputTCPPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputTCPMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
cribl_control_plane/models/inputtcpjson.py

@@ -37,6 +37,14 @@ class InputTcpjsonCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputTcpjsonPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputTcpjsonPqControls(BaseModel):
+    pass
+
+
 class InputTcpjsonPqTypedDict(TypedDict):
     mode: NotRequired[InputTcpjsonMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputTcpjsonPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputTcpjsonCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputTcpjsonPqControlsTypedDict]
 
 
 class InputTcpjsonPq(BaseModel):
@@ -82,6 +91,10 @@ class InputTcpjsonPq(BaseModel):
     compress: Optional[InputTcpjsonCompression] = InputTcpjsonCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputTcpjsonPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputTcpjsonMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
cribl_control_plane/models/inputwef.py

@@ -37,6 +37,14 @@ class InputWefCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputWefPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputWefPqControls(BaseModel):
+    pass
+
+
 class InputWefPqTypedDict(TypedDict):
     mode: NotRequired[InputWefMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWefPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputWefCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputWefPqControlsTypedDict]
 
 
 class InputWefPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWefPq(BaseModel):
     compress: Optional[InputWefCompression] = InputWefCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputWefPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputWefAuthenticationMethod(str, Enum):
     r"""How to authenticate incoming client connections"""
cribl_control_plane/models/inputwindowsmetrics.py

@@ -37,6 +37,14 @@ class InputWindowsMetricsCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputWindowsMetricsPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputWindowsMetricsPqControls(BaseModel):
+    pass
+
+
 class InputWindowsMetricsPqTypedDict(TypedDict):
     mode: NotRequired[InputWindowsMetricsPqMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWindowsMetricsPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputWindowsMetricsCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputWindowsMetricsPqControlsTypedDict]
 
 
 class InputWindowsMetricsPq(BaseModel):
@@ -84,6 +93,10 @@ class InputWindowsMetricsPq(BaseModel):
     )
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputWindowsMetricsPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputWindowsMetricsHostMode(str, Enum):
     r"""Select level of detail for host metrics"""
cribl_control_plane/models/inputwineventlogs.py

@@ -37,6 +37,14 @@ class InputWinEventLogsCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputWinEventLogsPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputWinEventLogsPqControls(BaseModel):
+    pass
+
+
 class InputWinEventLogsPqTypedDict(TypedDict):
     mode: NotRequired[InputWinEventLogsMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWinEventLogsPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputWinEventLogsCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputWinEventLogsPqControlsTypedDict]
 
 
 class InputWinEventLogsPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWinEventLogsPq(BaseModel):
     compress: Optional[InputWinEventLogsCompression] = InputWinEventLogsCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputWinEventLogsPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class ReadMode(str, Enum):
     r"""Read all stored and future event logs, or only future events"""
cribl_control_plane/models/inputwiz.py

@@ -37,6 +37,14 @@ class InputWizCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputWizPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputWizPqControls(BaseModel):
+    pass
+
+
 class InputWizPqTypedDict(TypedDict):
     mode: NotRequired[InputWizMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWizPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputWizCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputWizPqControlsTypedDict]
 
 
 class InputWizPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWizPq(BaseModel):
     compress: Optional[InputWizCompression] = InputWizCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputWizPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputWizContentConfigTypedDict(TypedDict):
     content_type: str
cribl_control_plane/models/inputwizwebhook.py

@@ -37,6 +37,14 @@ class InputWizWebhookCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputWizWebhookPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputWizWebhookPqControls(BaseModel):
+    pass
+
+
 class InputWizWebhookPqTypedDict(TypedDict):
     mode: NotRequired[InputWizWebhookMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWizWebhookPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputWizWebhookCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputWizWebhookPqControlsTypedDict]
 
 
 class InputWizWebhookPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWizWebhookPq(BaseModel):
     compress: Optional[InputWizWebhookCompression] = InputWizWebhookCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputWizWebhookPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputWizWebhookMinimumTLSVersion(str, Enum):
     TL_SV1 = "TLSv1"
cribl_control_plane/models/inputzscalerhec.py

@@ -37,6 +37,14 @@ class InputZscalerHecCompression(str, Enum):
     GZIP = "gzip"
 
 
+class InputZscalerHecPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputZscalerHecPqControls(BaseModel):
+    pass
+
+
 class InputZscalerHecPqTypedDict(TypedDict):
     mode: NotRequired[InputZscalerHecMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputZscalerHecPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputZscalerHecCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputZscalerHecPqControlsTypedDict]
 
 
 class InputZscalerHecPq(BaseModel):
@@ -82,6 +91,10 @@ class InputZscalerHecPq(BaseModel):
     compress: Optional[InputZscalerHecCompression] = InputZscalerHecCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputZscalerHecPqControls], pydantic.Field(alias="pqControls")
+    ] = None
+
 
 class InputZscalerHecAuthenticationMethod(str, Enum):
     r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
cribl_control_plane/models/output.py

@@ -90,25 +90,25 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputNetflowTypedDict,
         OutputSnmpTypedDict,
+        OutputNetflowTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
-        OutputStatsdExtTypedDict,
         OutputGraphiteTypedDict,
         OutputStatsdTypedDict,
+        OutputStatsdExtTypedDict,
         OutputGooglePubsubTypedDict,
         OutputCriblTCPTypedDict,
-        OutputSnsTypedDict,
         OutputSplunkTypedDict,
+        OutputSnsTypedDict,
         OutputCloudwatchTypedDict,
         OutputSyslogTypedDict,
         OutputAzureEventhubTypedDict,
         OutputWavefrontTypedDict,
         OutputSignalfxTypedDict,
         OutputHoneycombTypedDict,
-        OutputSumoLogicTypedDict,
         OutputTcpjsonTypedDict,
+        OutputSumoLogicTypedDict,
         OutputHumioHecTypedDict,
         OutputElasticCloudTypedDict,
         OutputCrowdstrikeNextGenSiemTypedDict,
@@ -125,15 +125,15 @@ OutputTypedDict = TypeAliasType(
         OutputXsiamTypedDict,
         OutputFilesystemTypedDict,
         OutputDatasetTypedDict,
+        OutputLokiTypedDict,
         OutputSplunkHecTypedDict,
         OutputDynatraceHTTPTypedDict,
         OutputServiceNowTypedDict,
-        OutputLokiTypedDict,
         OutputDynatraceOtlpTypedDict,
-        OutputGoogleChronicleTypedDict,
         OutputElasticTypedDict,
-
+        OutputGoogleChronicleTypedDict,
         OutputCriblLakeTypedDict,
+        OutputDatadogTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -161,25 +161,25 @@ Output = TypeAliasType(
         OutputDevnull,
         OutputDefault,
         OutputRouter,
-        OutputNetflow,
         OutputSnmp,
+        OutputNetflow,
         OutputDiskSpool,
         OutputRing,
-        OutputStatsdExt,
         OutputGraphite,
         OutputStatsd,
+        OutputStatsdExt,
         OutputGooglePubsub,
         OutputCriblTCP,
-        OutputSns,
         OutputSplunk,
+        OutputSns,
         OutputCloudwatch,
         OutputSyslog,
         OutputAzureEventhub,
         OutputWavefront,
         OutputSignalfx,
         OutputHoneycomb,
-        OutputSumoLogic,
         OutputTcpjson,
+        OutputSumoLogic,
         OutputHumioHec,
         OutputElasticCloud,
         OutputCrowdstrikeNextGenSiem,
@@ -196,15 +196,15 @@ Output = TypeAliasType(
         OutputXsiam,
         OutputFilesystem,
         OutputDataset,
+        OutputLoki,
         OutputSplunkHec,
         OutputDynatraceHTTP,
         OutputServiceNow,
-        OutputLoki,
         OutputDynatraceOtlp,
-        OutputGoogleChronicle,
         OutputElastic,
-
+        OutputGoogleChronicle,
         OutputCriblLake,
+        OutputDatadog,
         OutputPrometheus,
         OutputMsk,
         OutputSentinelOneAiSiem,
cribl_control_plane/models/outputgooglechronicle.py

@@ -227,7 +227,7 @@ class OutputGoogleChronicleTypedDict(TypedDict):
     log_text_field: NotRequired[str]
     r"""Name of the event field that contains the log text to send. If not specified, Stream sends a JSON representation of the whole event."""
     customer_id: NotRequired[str]
-    r"""
+    r"""A unique identifier (UUID) for your Google SecOps instance. This is provided by your Google representative and is required for API V2 authentication."""
     namespace: NotRequired[str]
     r"""User-configured environment namespace to identify the data domain the logs originated from. Use namespace as a tag to identify the appropriate data domain for indexing and enrichment functionality. Can be overwritten by event field __namespace."""
     custom_labels: NotRequired[List[CustomLabelTypedDict]]
@@ -388,7 +388,7 @@ class OutputGoogleChronicle(BaseModel):
     r"""Name of the event field that contains the log text to send. If not specified, Stream sends a JSON representation of the whole event."""
 
     customer_id: Annotated[Optional[str], pydantic.Field(alias="customerId")] = None
-    r"""
+    r"""A unique identifier (UUID) for your Google SecOps instance. This is provided by your Google representative and is required for API V2 authentication."""
 
     namespace: Optional[str] = None
     r"""User-configured environment namespace to identify the data domain the logs originated from. Use namespace as a tag to identify the appropriate data domain for indexing and enrichment functionality. Can be overwritten by event field __namespace."""
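A small sketch of the newly documented `customer_id` field, using only the keys and module path visible in the hunks above; a complete Google Chronicle destination needs its other required fields as well, and the UUID below is a placeholder:

    # Partial config fragment; a type checker will flag the omitted required keys.
    from cribl_control_plane.models.outputgooglechronicle import (
        OutputGoogleChronicleTypedDict,
    )

    chronicle_fragment: OutputGoogleChronicleTypedDict = {
        # Placeholder UUID for your Google SecOps instance; per the new docstring
        # this value is required for API V2 authentication.
        "customer_id": "00000000-0000-0000-0000-000000000000",
        "namespace": "production",
    }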
cribl_control_plane/models/outputgrafanacloud.py

@@ -284,8 +284,6 @@ class OutputGrafanaCloudGrafanaCloud2TypedDict(TypedDict):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
     safe_headers: NotRequired[List[str]]
     r"""List of headers that are safe to log in plain text"""
-    send_structured_metadata: NotRequired[bool]
-    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     response_retry_settings: NotRequired[
         List[OutputGrafanaCloudResponseRetrySetting2TypedDict]
     ]
@@ -418,11 +416,6 @@ class OutputGrafanaCloudGrafanaCloud2(BaseModel):
     ] = None
     r"""List of headers that are safe to log in plain text"""
 
-    send_structured_metadata: Annotated[
-        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
-    ] = False
-    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
-
     response_retry_settings: Annotated[
         Optional[List[OutputGrafanaCloudResponseRetrySetting2]],
         pydantic.Field(alias="responseRetrySettings"),
@@ -760,8 +753,6 @@ class OutputGrafanaCloudGrafanaCloud1TypedDict(TypedDict):
     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
     safe_headers: NotRequired[List[str]]
     r"""List of headers that are safe to log in plain text"""
-    send_structured_metadata: NotRequired[bool]
-    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     response_retry_settings: NotRequired[
         List[OutputGrafanaCloudResponseRetrySetting1TypedDict]
     ]
@@ -896,11 +887,6 @@ class OutputGrafanaCloudGrafanaCloud1(BaseModel):
     ] = None
     r"""List of headers that are safe to log in plain text"""
 
-    send_structured_metadata: Annotated[
-        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
-    ] = False
-    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
-
     response_retry_settings: Annotated[
         Optional[List[OutputGrafanaCloudResponseRetrySetting1]],
         pydantic.Field(alias="responseRetrySettings"),
cribl_control_plane/models/outputloki.py

@@ -200,8 +200,6 @@ class OutputLokiTypedDict(TypedDict):
     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
     enable_dynamic_headers: NotRequired[bool]
     r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
-    send_structured_metadata: NotRequired[bool]
-    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
     on_backpressure: NotRequired[OutputLokiBackpressureBehavior]
     r"""How to handle events when all receivers are exerting backpressure"""
     total_memory_limit_kb: NotRequired[float]
@@ -344,11 +342,6 @@ class OutputLoki(BaseModel):
     ] = False
     r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
 
-    send_structured_metadata: Annotated[
-        Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
-    ] = False
-    r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
-
     on_backpressure: Annotated[
         Optional[OutputLokiBackpressureBehavior], pydantic.Field(alias="onBackpressure")
     ] = OutputLokiBackpressureBehavior.BLOCK
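Both the Grafana Cloud and Loki destination models drop `send_structured_metadata` in 0.0.43. A hedged migration sketch using only the Loki model named above; whether an unexpected key raises or is silently ignored depends on the generated model's config, which this diff does not show:

    from cribl_control_plane.models.outputloki import OutputLoki

    # The field no longer exists on the generated model in 0.0.43.
    assert "send_structured_metadata" not in OutputLoki.model_fields

    # Configs serialized against 0.0.41 may still carry the old wire key;
    # dropping it before validation is the conservative choice.
    legacy_config = {"sendStructuredMetadata": True}  # plus the destination's other fields
    legacy_config.pop("sendStructuredMetadata", None)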
cribl_control_plane/models/updatepipelinebyidop.py

@@ -11,9 +11,9 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class UpdatePipelineByIDRequestTypedDict(TypedDict):
     id_param: str
-    r"""
+    r"""The <code>id</code> of the Pipeline to update."""
     pipeline: PipelineTypedDict
-    r"""Pipeline object
+    r"""Pipeline object"""
 
 
 class UpdatePipelineByIDRequest(BaseModel):
@@ -22,16 +22,16 @@ class UpdatePipelineByIDRequest(BaseModel):
         pydantic.Field(alias="id"),
         FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
     ]
-    r"""
+    r"""The <code>id</code> of the Pipeline to update."""
 
     pipeline: Annotated[
         Pipeline, FieldMetadata(request=RequestMetadata(media_type="application/json"))
     ]
-    r"""Pipeline object
+    r"""Pipeline object"""
 
 
 class UpdatePipelineByIDResponseTypedDict(TypedDict):
-    r"""a list of
+    r"""a list of Pipeline objects"""
 
     count: NotRequired[int]
     r"""number of items present in the items array"""
@@ -39,7 +39,7 @@ class UpdatePipelineByIDResponseTypedDict(TypedDict):
 
 
 class UpdatePipelineByIDResponse(BaseModel):
-    r"""a list of
+    r"""a list of Pipeline objects"""
 
     count: Optional[int] = None
     r"""number of items present in the items array"""
cribl_control_plane/models/updateroutesbyidop.py

@@ -11,7 +11,7 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class UpdateRoutesByIDRequestTypedDict(TypedDict):
     id_param: str
-    r"""
+    r"""The <code>id</code> of the Routing table that contains the Route to update. The supported value is <code>default</code>."""
     routes: RoutesTypedDict
     r"""Routes object"""
 
@@ -22,7 +22,7 @@ class UpdateRoutesByIDRequest(BaseModel):
         pydantic.Field(alias="id"),
         FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
     ]
-    r"""
+    r"""The <code>id</code> of the Routing table that contains the Route to update. The supported value is <code>default</code>."""
 
     routes: Annotated[
         Routes, FieldMetadata(request=RequestMetadata(media_type="application/json"))
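The completed docstrings above describe the request shape for the update operations. A sketch of the Routes variant built only from the fields shown in this hunk; the `routes` payload is a placeholder, since `RoutesTypedDict` itself is not part of this diff:

    from cribl_control_plane.models.updateroutesbyidop import (
        UpdateRoutesByIDRequestTypedDict,
    )

    request: UpdateRoutesByIDRequestTypedDict = {
        # Per the new docstring, "default" is the only supported Routing table id.
        "id_param": "default",
        # Placeholder: a real call needs a complete Routes object here, and a
        # type checker will ask for RoutesTypedDict's required keys.
        "routes": {},
    }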