cribl-control-plane 0.0.13 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cribl-control-plane might be problematic.
- cribl_control_plane/__init__.py +17 -0
- cribl_control_plane/_hooks/__init__.py +5 -0
- cribl_control_plane/_hooks/clientcredentials.py +211 -0
- cribl_control_plane/_hooks/registration.py +13 -0
- cribl_control_plane/_hooks/sdkhooks.py +81 -0
- cribl_control_plane/_hooks/types.py +112 -0
- cribl_control_plane/_version.py +15 -0
- cribl_control_plane/auth_sdk.py +184 -0
- cribl_control_plane/basesdk.py +358 -0
- cribl_control_plane/errors/__init__.py +60 -0
- cribl_control_plane/errors/apierror.py +38 -0
- cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
- cribl_control_plane/errors/error.py +24 -0
- cribl_control_plane/errors/healthstatus_error.py +38 -0
- cribl_control_plane/errors/no_response_error.py +13 -0
- cribl_control_plane/errors/responsevalidationerror.py +25 -0
- cribl_control_plane/health.py +166 -0
- cribl_control_plane/httpclient.py +126 -0
- cribl_control_plane/models/__init__.py +7305 -0
- cribl_control_plane/models/addhectokenrequest.py +34 -0
- cribl_control_plane/models/authtoken.py +13 -0
- cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
- cribl_control_plane/models/createinputop.py +24 -0
- cribl_control_plane/models/createoutputop.py +24 -0
- cribl_control_plane/models/createoutputtestbyidop.py +46 -0
- cribl_control_plane/models/criblevent.py +14 -0
- cribl_control_plane/models/deleteinputbyidop.py +37 -0
- cribl_control_plane/models/deleteoutputbyidop.py +37 -0
- cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
- cribl_control_plane/models/getinputbyidop.py +37 -0
- cribl_control_plane/models/getoutputbyidop.py +37 -0
- cribl_control_plane/models/getoutputpqbyidop.py +36 -0
- cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
- cribl_control_plane/models/healthstatus.py +36 -0
- cribl_control_plane/models/input.py +199 -0
- cribl_control_plane/models/inputappscope.py +448 -0
- cribl_control_plane/models/inputazureblob.py +308 -0
- cribl_control_plane/models/inputcollection.py +208 -0
- cribl_control_plane/models/inputconfluentcloud.py +585 -0
- cribl_control_plane/models/inputcribl.py +165 -0
- cribl_control_plane/models/inputcriblhttp.py +341 -0
- cribl_control_plane/models/inputcribllakehttp.py +342 -0
- cribl_control_plane/models/inputcriblmetrics.py +175 -0
- cribl_control_plane/models/inputcribltcp.py +299 -0
- cribl_control_plane/models/inputcrowdstrike.py +410 -0
- cribl_control_plane/models/inputdatadogagent.py +364 -0
- cribl_control_plane/models/inputdatagen.py +180 -0
- cribl_control_plane/models/inputedgeprometheus.py +551 -0
- cribl_control_plane/models/inputelastic.py +494 -0
- cribl_control_plane/models/inputeventhub.py +360 -0
- cribl_control_plane/models/inputexec.py +213 -0
- cribl_control_plane/models/inputfile.py +259 -0
- cribl_control_plane/models/inputfirehose.py +341 -0
- cribl_control_plane/models/inputgooglepubsub.py +247 -0
- cribl_control_plane/models/inputgrafana_union.py +1247 -0
- cribl_control_plane/models/inputhttp.py +403 -0
- cribl_control_plane/models/inputhttpraw.py +407 -0
- cribl_control_plane/models/inputjournalfiles.py +208 -0
- cribl_control_plane/models/inputkafka.py +581 -0
- cribl_control_plane/models/inputkinesis.py +363 -0
- cribl_control_plane/models/inputkubeevents.py +182 -0
- cribl_control_plane/models/inputkubelogs.py +256 -0
- cribl_control_plane/models/inputkubemetrics.py +233 -0
- cribl_control_plane/models/inputloki.py +468 -0
- cribl_control_plane/models/inputmetrics.py +290 -0
- cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
- cribl_control_plane/models/inputmsk.py +654 -0
- cribl_control_plane/models/inputnetflow.py +224 -0
- cribl_control_plane/models/inputoffice365mgmt.py +384 -0
- cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
- cribl_control_plane/models/inputoffice365service.py +377 -0
- cribl_control_plane/models/inputopentelemetry.py +516 -0
- cribl_control_plane/models/inputprometheus.py +464 -0
- cribl_control_plane/models/inputprometheusrw.py +470 -0
- cribl_control_plane/models/inputrawudp.py +207 -0
- cribl_control_plane/models/inputs3.py +416 -0
- cribl_control_plane/models/inputs3inventory.py +440 -0
- cribl_control_plane/models/inputsecuritylake.py +425 -0
- cribl_control_plane/models/inputsnmp.py +274 -0
- cribl_control_plane/models/inputsplunk.py +387 -0
- cribl_control_plane/models/inputsplunkhec.py +478 -0
- cribl_control_plane/models/inputsplunksearch.py +537 -0
- cribl_control_plane/models/inputsqs.py +320 -0
- cribl_control_plane/models/inputsyslog_union.py +759 -0
- cribl_control_plane/models/inputsystemmetrics.py +533 -0
- cribl_control_plane/models/inputsystemstate.py +417 -0
- cribl_control_plane/models/inputtcp.py +359 -0
- cribl_control_plane/models/inputtcpjson.py +334 -0
- cribl_control_plane/models/inputwef.py +498 -0
- cribl_control_plane/models/inputwindowsmetrics.py +457 -0
- cribl_control_plane/models/inputwineventlogs.py +222 -0
- cribl_control_plane/models/inputwiz.py +334 -0
- cribl_control_plane/models/inputzscalerhec.py +439 -0
- cribl_control_plane/models/listinputop.py +24 -0
- cribl_control_plane/models/listoutputop.py +24 -0
- cribl_control_plane/models/logininfo.py +16 -0
- cribl_control_plane/models/output.py +229 -0
- cribl_control_plane/models/outputazureblob.py +471 -0
- cribl_control_plane/models/outputazuredataexplorer.py +660 -0
- cribl_control_plane/models/outputazureeventhub.py +321 -0
- cribl_control_plane/models/outputazurelogs.py +386 -0
- cribl_control_plane/models/outputclickhouse.py +650 -0
- cribl_control_plane/models/outputcloudwatch.py +273 -0
- cribl_control_plane/models/outputconfluentcloud.py +591 -0
- cribl_control_plane/models/outputcriblhttp.py +494 -0
- cribl_control_plane/models/outputcribllake.py +396 -0
- cribl_control_plane/models/outputcribltcp.py +387 -0
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
- cribl_control_plane/models/outputdatadog.py +472 -0
- cribl_control_plane/models/outputdataset.py +437 -0
- cribl_control_plane/models/outputdefault.py +55 -0
- cribl_control_plane/models/outputdevnull.py +50 -0
- cribl_control_plane/models/outputdiskspool.py +89 -0
- cribl_control_plane/models/outputdls3.py +560 -0
- cribl_control_plane/models/outputdynatracehttp.py +454 -0
- cribl_control_plane/models/outputdynatraceotlp.py +486 -0
- cribl_control_plane/models/outputelastic.py +494 -0
- cribl_control_plane/models/outputelasticcloud.py +407 -0
- cribl_control_plane/models/outputexabeam.py +297 -0
- cribl_control_plane/models/outputfilesystem.py +357 -0
- cribl_control_plane/models/outputgooglechronicle.py +486 -0
- cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
- cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
- cribl_control_plane/models/outputgooglepubsub.py +274 -0
- cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
- cribl_control_plane/models/outputgraphite.py +225 -0
- cribl_control_plane/models/outputhoneycomb.py +369 -0
- cribl_control_plane/models/outputhumiohec.py +389 -0
- cribl_control_plane/models/outputinfluxdb.py +523 -0
- cribl_control_plane/models/outputkafka.py +581 -0
- cribl_control_plane/models/outputkinesis.py +312 -0
- cribl_control_plane/models/outputloki.py +425 -0
- cribl_control_plane/models/outputminio.py +512 -0
- cribl_control_plane/models/outputmsk.py +654 -0
- cribl_control_plane/models/outputnetflow.py +80 -0
- cribl_control_plane/models/outputnewrelic.py +424 -0
- cribl_control_plane/models/outputnewrelicevents.py +401 -0
- cribl_control_plane/models/outputopentelemetry.py +669 -0
- cribl_control_plane/models/outputprometheus.py +485 -0
- cribl_control_plane/models/outputring.py +121 -0
- cribl_control_plane/models/outputrouter.py +83 -0
- cribl_control_plane/models/outputs3.py +556 -0
- cribl_control_plane/models/outputsamplesresponse.py +14 -0
- cribl_control_plane/models/outputsecuritylake.py +505 -0
- cribl_control_plane/models/outputsentinel.py +488 -0
- cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
- cribl_control_plane/models/outputservicenow.py +543 -0
- cribl_control_plane/models/outputsignalfx.py +369 -0
- cribl_control_plane/models/outputsnmp.py +80 -0
- cribl_control_plane/models/outputsns.py +274 -0
- cribl_control_plane/models/outputsplunk.py +383 -0
- cribl_control_plane/models/outputsplunkhec.py +434 -0
- cribl_control_plane/models/outputsplunklb.py +558 -0
- cribl_control_plane/models/outputsqs.py +328 -0
- cribl_control_plane/models/outputstatsd.py +224 -0
- cribl_control_plane/models/outputstatsdext.py +225 -0
- cribl_control_plane/models/outputsumologic.py +378 -0
- cribl_control_plane/models/outputsyslog.py +415 -0
- cribl_control_plane/models/outputtcpjson.py +413 -0
- cribl_control_plane/models/outputtestrequest.py +15 -0
- cribl_control_plane/models/outputtestresponse.py +29 -0
- cribl_control_plane/models/outputwavefront.py +369 -0
- cribl_control_plane/models/outputwebhook.py +689 -0
- cribl_control_plane/models/outputxsiam.py +415 -0
- cribl_control_plane/models/schemeclientoauth.py +24 -0
- cribl_control_plane/models/security.py +36 -0
- cribl_control_plane/models/updatehectokenrequest.py +31 -0
- cribl_control_plane/models/updateinputbyidop.py +44 -0
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
- cribl_control_plane/models/updateoutputbyidop.py +44 -0
- cribl_control_plane/outputs.py +1615 -0
- cribl_control_plane/py.typed +1 -0
- cribl_control_plane/sdk.py +164 -0
- cribl_control_plane/sdkconfiguration.py +36 -0
- cribl_control_plane/sources.py +1355 -0
- cribl_control_plane/types/__init__.py +21 -0
- cribl_control_plane/types/basemodel.py +39 -0
- cribl_control_plane/utils/__init__.py +187 -0
- cribl_control_plane/utils/annotations.py +55 -0
- cribl_control_plane/utils/datetimes.py +23 -0
- cribl_control_plane/utils/enums.py +74 -0
- cribl_control_plane/utils/eventstreaming.py +238 -0
- cribl_control_plane/utils/forms.py +223 -0
- cribl_control_plane/utils/headers.py +136 -0
- cribl_control_plane/utils/logger.py +27 -0
- cribl_control_plane/utils/metadata.py +118 -0
- cribl_control_plane/utils/queryparams.py +205 -0
- cribl_control_plane/utils/requestbodies.py +66 -0
- cribl_control_plane/utils/retries.py +217 -0
- cribl_control_plane/utils/security.py +207 -0
- cribl_control_plane/utils/serializers.py +249 -0
- cribl_control_plane/utils/unmarshal_json_response.py +24 -0
- cribl_control_plane/utils/url.py +155 -0
- cribl_control_plane/utils/values.py +137 -0
- cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
- cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
- cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/outputcribllake.py
@@ -0,0 +1,396 @@
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

from __future__ import annotations
from cribl_control_plane import utils
from cribl_control_plane.types import BaseModel
from cribl_control_plane.utils import validate_open_enum
from enum import Enum
import pydantic
from pydantic.functional_validators import PlainValidator
from typing import List, Optional
from typing_extensions import Annotated, NotRequired, TypedDict


class OutputCriblLakeType(str, Enum, metaclass=utils.OpenEnumMeta):
    CRIBL_LAKE = "cribl_lake"


class OutputCriblLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Signature version to use for signing S3 requests"""

    V2 = "v2"
    V4 = "v4"


class OutputCriblLakeObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Object ACL to assign to uploaded objects"""

    PRIVATE = "private"
    PUBLIC_READ = "public-read"
    PUBLIC_READ_WRITE = "public-read-write"
    AUTHENTICATED_READ = "authenticated-read"
    AWS_EXEC_READ = "aws-exec-read"
    BUCKET_OWNER_READ = "bucket-owner-read"
    BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"


class OutputCriblLakeStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Storage class to select for uploaded objects"""

    STANDARD = "STANDARD"
    REDUCED_REDUNDANCY = "REDUCED_REDUNDANCY"
    STANDARD_IA = "STANDARD_IA"
    ONEZONE_IA = "ONEZONE_IA"
    INTELLIGENT_TIERING = "INTELLIGENT_TIERING"
    GLACIER = "GLACIER"
    GLACIER_IR = "GLACIER_IR"
    DEEP_ARCHIVE = "DEEP_ARCHIVE"


class OutputCriblLakeServerSideEncryptionForUploadedObjects(
    str, Enum, metaclass=utils.OpenEnumMeta
):
    AES256 = "AES256"
    AWS_KMS = "aws:kms"


class OutputCriblLakeBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""How to handle events when all receivers are exerting backpressure"""

    BLOCK = "block"
    DROP = "drop"


class OutputCriblLakeDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

    BLOCK = "block"
    DROP = "drop"


class AwsAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
    AUTO = "auto"
    AUTO_RPC = "auto_rpc"
    MANUAL = "manual"


class OutputCriblLakeFormat(str, Enum, metaclass=utils.OpenEnumMeta):
    JSON = "json"
    PARQUET = "parquet"
    DDSS = "ddss"


class OutputCriblLakeTypedDict(TypedDict):
    id: str
    r"""Unique ID for this output"""
    type: OutputCriblLakeType
    pipeline: NotRequired[str]
    r"""Pipeline to process data before sending out to this output"""
    system_fields: NotRequired[List[str]]
    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
    environment: NotRequired[str]
    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
    streamtags: NotRequired[List[str]]
    r"""Tags for filtering and grouping in @{product}"""
    bucket: NotRequired[str]
    r"""Name of the destination S3 bucket. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""
    region: NotRequired[str]
    r"""Region where the S3 bucket is located"""
    aws_secret_key: NotRequired[str]
    r"""Secret key. This value can be a constant or a JavaScript expression. Example: `${C.env.SOME_SECRET}`)"""
    endpoint: NotRequired[str]
    r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""
    signature_version: NotRequired[OutputCriblLakeSignatureVersion]
    r"""Signature version to use for signing S3 requests"""
    reuse_connections: NotRequired[bool]
    r"""Reuse connections between requests, which can improve performance"""
    reject_unauthorized: NotRequired[bool]
    r"""Reject certificates that cannot be verified against a valid CA, such as self-signed certificates"""
    enable_assume_role: NotRequired[bool]
    r"""Use Assume Role credentials to access S3"""
    assume_role_arn: NotRequired[str]
    r"""Amazon Resource Name (ARN) of the role to assume"""
    assume_role_external_id: NotRequired[str]
    r"""External ID to use when assuming role"""
    duration_seconds: NotRequired[float]
    r"""Duration of the assumed role's session, in seconds. Minimum is 900 (15 minutes), default is 3600 (1 hour), and maximum is 43200 (12 hours)."""
    stage_path: NotRequired[str]
    r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
    add_id_to_stage_path: NotRequired[bool]
    r"""Add the Output ID value to staging location"""
    dest_path: NotRequired[str]
    r"""Lake dataset to send the data to."""
    object_acl: NotRequired[OutputCriblLakeObjectACL]
    r"""Object ACL to assign to uploaded objects"""
    storage_class: NotRequired[OutputCriblLakeStorageClass]
    r"""Storage class to select for uploaded objects"""
    server_side_encryption: NotRequired[
        OutputCriblLakeServerSideEncryptionForUploadedObjects
    ]
    kms_key_id: NotRequired[str]
    r"""ID or ARN of the KMS customer-managed key to use for encryption"""
    remove_empty_dirs: NotRequired[bool]
    r"""Remove empty staging directories after moving files"""
    base_file_name: NotRequired[str]
    r"""JavaScript expression to define the output filename prefix (can be constant)"""
    file_name_suffix: NotRequired[str]
    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
    max_file_size_mb: NotRequired[float]
    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
    max_open_files: NotRequired[float]
    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
    header_line: NotRequired[str]
    r"""If set, this line will be written to the beginning of each output file"""
    write_high_water_mark: NotRequired[float]
    r"""Buffer size used to write to a file"""
    on_backpressure: NotRequired[OutputCriblLakeBackpressureBehavior]
    r"""How to handle events when all receivers are exerting backpressure"""
    deadletter_enabled: NotRequired[bool]
    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
    on_disk_full_backpressure: NotRequired[OutputCriblLakeDiskSpaceProtection]
    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
    max_file_open_time_sec: NotRequired[float]
    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
    max_file_idle_time_sec: NotRequired[float]
    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
    verify_permissions: NotRequired[bool]
    r"""Disable if you can access files within the bucket but not the bucket itself"""
    max_closing_files_to_backpressure: NotRequired[float]
    r"""Maximum number of files that can be waiting for upload before backpressure is applied"""
    aws_authentication_method: NotRequired[AwsAuthenticationMethod]
    format_: NotRequired[OutputCriblLakeFormat]
    max_concurrent_file_parts: NotRequired[float]
    r"""Maximum number of parts to upload in parallel per file. Minimum part size is 5MB."""
    description: NotRequired[str]
    empty_dir_cleanup_sec: NotRequired[float]
    r"""How frequently, in seconds, to clean up empty directories"""
    deadletter_path: NotRequired[str]
    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
    max_retry_num: NotRequired[float]
    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""


class OutputCriblLake(BaseModel):
    id: str
    r"""Unique ID for this output"""

    type: Annotated[OutputCriblLakeType, PlainValidator(validate_open_enum(False))]

    pipeline: Optional[str] = None
    r"""Pipeline to process data before sending out to this output"""

    system_fields: Annotated[
        Optional[List[str]], pydantic.Field(alias="systemFields")
    ] = None
    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""

    environment: Optional[str] = None
    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""

    streamtags: Optional[List[str]] = None
    r"""Tags for filtering and grouping in @{product}"""

    bucket: Optional[str] = None
    r"""Name of the destination S3 bucket. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""

    region: Optional[str] = None
    r"""Region where the S3 bucket is located"""

    aws_secret_key: Annotated[Optional[str], pydantic.Field(alias="awsSecretKey")] = (
        None
    )
    r"""Secret key. This value can be a constant or a JavaScript expression. Example: `${C.env.SOME_SECRET}`)"""

    endpoint: Optional[str] = None
    r"""S3 service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to S3-compatible endpoint."""

    signature_version: Annotated[
        Annotated[
            Optional[OutputCriblLakeSignatureVersion],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="signatureVersion"),
    ] = OutputCriblLakeSignatureVersion.V4
    r"""Signature version to use for signing S3 requests"""

    reuse_connections: Annotated[
        Optional[bool], pydantic.Field(alias="reuseConnections")
    ] = True
    r"""Reuse connections between requests, which can improve performance"""

    reject_unauthorized: Annotated[
        Optional[bool], pydantic.Field(alias="rejectUnauthorized")
    ] = True
    r"""Reject certificates that cannot be verified against a valid CA, such as self-signed certificates"""

    enable_assume_role: Annotated[
        Optional[bool], pydantic.Field(alias="enableAssumeRole")
    ] = False
    r"""Use Assume Role credentials to access S3"""

    assume_role_arn: Annotated[Optional[str], pydantic.Field(alias="assumeRoleArn")] = (
        None
    )
    r"""Amazon Resource Name (ARN) of the role to assume"""

    assume_role_external_id: Annotated[
        Optional[str], pydantic.Field(alias="assumeRoleExternalId")
    ] = None
    r"""External ID to use when assuming role"""

    duration_seconds: Annotated[
        Optional[float], pydantic.Field(alias="durationSeconds")
    ] = 3600
    r"""Duration of the assumed role's session, in seconds. Minimum is 900 (15 minutes), default is 3600 (1 hour), and maximum is 43200 (12 hours)."""

    stage_path: Annotated[Optional[str], pydantic.Field(alias="stagePath")] = (
        "$CRIBL_HOME/state/outputs/staging"
    )
    r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""

    add_id_to_stage_path: Annotated[
        Optional[bool], pydantic.Field(alias="addIdToStagePath")
    ] = True
    r"""Add the Output ID value to staging location"""

    dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = None
    r"""Lake dataset to send the data to."""

    object_acl: Annotated[
        Annotated[
            Optional[OutputCriblLakeObjectACL],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="objectACL"),
    ] = OutputCriblLakeObjectACL.PRIVATE
    r"""Object ACL to assign to uploaded objects"""

    storage_class: Annotated[
        Annotated[
            Optional[OutputCriblLakeStorageClass],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="storageClass"),
    ] = None
    r"""Storage class to select for uploaded objects"""

    server_side_encryption: Annotated[
        Annotated[
            Optional[OutputCriblLakeServerSideEncryptionForUploadedObjects],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="serverSideEncryption"),
    ] = None

    kms_key_id: Annotated[Optional[str], pydantic.Field(alias="kmsKeyId")] = None
    r"""ID or ARN of the KMS customer-managed key to use for encryption"""

    remove_empty_dirs: Annotated[
        Optional[bool], pydantic.Field(alias="removeEmptyDirs")
    ] = True
    r"""Remove empty staging directories after moving files"""

    base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
        "`CriblOut`"
    )
    r"""JavaScript expression to define the output filename prefix (can be constant)"""

    file_name_suffix: Annotated[
        Optional[str], pydantic.Field(alias="fileNameSuffix")
    ] = '`.${C.env["CRIBL_WORKER_ID"]}.${__format}${__compression === "gzip" ? ".gz" : ""}`'
    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""

    max_file_size_mb: Annotated[
        Optional[float], pydantic.Field(alias="maxFileSizeMB")
    ] = 64
    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""

    max_open_files: Annotated[Optional[float], pydantic.Field(alias="maxOpenFiles")] = (
        100
    )
    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""

    header_line: Annotated[Optional[str], pydantic.Field(alias="headerLine")] = ""
    r"""If set, this line will be written to the beginning of each output file"""

    write_high_water_mark: Annotated[
        Optional[float], pydantic.Field(alias="writeHighWaterMark")
    ] = 64
    r"""Buffer size used to write to a file"""

    on_backpressure: Annotated[
        Annotated[
            Optional[OutputCriblLakeBackpressureBehavior],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="onBackpressure"),
    ] = OutputCriblLakeBackpressureBehavior.BLOCK
    r"""How to handle events when all receivers are exerting backpressure"""

    deadletter_enabled: Annotated[
        Optional[bool], pydantic.Field(alias="deadletterEnabled")
    ] = False
    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""

    on_disk_full_backpressure: Annotated[
        Annotated[
            Optional[OutputCriblLakeDiskSpaceProtection],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="onDiskFullBackpressure"),
    ] = OutputCriblLakeDiskSpaceProtection.BLOCK
    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""

    max_file_open_time_sec: Annotated[
        Optional[float], pydantic.Field(alias="maxFileOpenTimeSec")
    ] = 300
    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""

    max_file_idle_time_sec: Annotated[
        Optional[float], pydantic.Field(alias="maxFileIdleTimeSec")
    ] = 300
    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""

    verify_permissions: Annotated[
        Optional[bool], pydantic.Field(alias="verifyPermissions")
    ] = True
    r"""Disable if you can access files within the bucket but not the bucket itself"""

    max_closing_files_to_backpressure: Annotated[
        Optional[float], pydantic.Field(alias="maxClosingFilesToBackpressure")
    ] = 100
    r"""Maximum number of files that can be waiting for upload before backpressure is applied"""

    aws_authentication_method: Annotated[
        Annotated[
            Optional[AwsAuthenticationMethod], PlainValidator(validate_open_enum(False))
        ],
        pydantic.Field(alias="awsAuthenticationMethod"),
    ] = AwsAuthenticationMethod.AUTO

    format_: Annotated[
        Annotated[
            Optional[OutputCriblLakeFormat], PlainValidator(validate_open_enum(False))
        ],
        pydantic.Field(alias="format"),
    ] = None

    max_concurrent_file_parts: Annotated[
        Optional[float], pydantic.Field(alias="maxConcurrentFileParts")
    ] = 1
    r"""Maximum number of parts to upload in parallel per file. Minimum part size is 5MB."""

    description: Optional[str] = None

    empty_dir_cleanup_sec: Annotated[
        Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
    ] = 300
    r"""How frequently, in seconds, to clean up empty directories"""

    deadletter_path: Annotated[
        Optional[str], pydantic.Field(alias="deadletterPath")
    ] = "$CRIBL_HOME/state/outputs/dead-letter"
    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""

    max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""