cribl-control-plane 0.0.13__py3-none-any.whl

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.

Potentially problematic release: this version of cribl-control-plane may be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
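The listing reflects a typical Speakeasy-generated Python SDK layout: `sdk.py` exposes the client entry point; `sources.py`, `outputs.py`, `health.py`, and `auth_sdk.py` carry the API operations; `_hooks/clientcredentials.py` implements OAuth client-credentials token handling; and `models/` contains one Pydantic module per operation and per source/destination type. For orientation, a client might be constructed as in the sketch below. The class name `CriblControlPlane`, the `server_url`/`security` parameters, the `bearer_auth` field, and the `list_input` method are assumptions inferred from this file layout and Speakeasy conventions, not verified against this release.

# Hypothetical usage sketch only: CriblControlPlane, Security, bearer_auth,
# and list_input are inferred from the generated file layout and Speakeasy
# conventions, not verified against cribl-control-plane 0.0.13.
from cribl_control_plane import CriblControlPlane
from cribl_control_plane.models import Security

client = CriblControlPlane(
    server_url="https://example-instance.cribl.cloud/api/v1",  # placeholder URL
    security=Security(bearer_auth="<YOUR_TOKEN>"),  # OAuth client credentials also appear supported via _hooks
)

# models/listinputop.py suggests a list operation for sources (inputs)
res = client.sources.list_input()
print(res)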
cribl_control_plane/models/outputsecuritylake.py
@@ -0,0 +1,505 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class OutputSecurityLakeType(str, Enum, metaclass=utils.OpenEnumMeta):
+    SECURITY_LAKE = "security_lake"
+
+
+class OutputSecurityLakeAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""AWS authentication method. Choose Auto to use IAM roles."""
+
+    AUTO = "auto"
+    MANUAL = "manual"
+    SECRET = "secret"
+
+
+class OutputSecurityLakeSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Signature version to use for signing Amazon Security Lake requests"""
+
+    V2 = "v2"
+    V4 = "v4"
+
+
+class OutputSecurityLakeObjectACL(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Object ACL to assign to uploaded objects"""
+
+    PRIVATE = "private"
+    PUBLIC_READ = "public-read"
+    PUBLIC_READ_WRITE = "public-read-write"
+    AUTHENTICATED_READ = "authenticated-read"
+    AWS_EXEC_READ = "aws-exec-read"
+    BUCKET_OWNER_READ = "bucket-owner-read"
+    BUCKET_OWNER_FULL_CONTROL = "bucket-owner-full-control"
+
+
+class OutputSecurityLakeStorageClass(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Storage class to select for uploaded objects"""
+
+    STANDARD = "STANDARD"
+    REDUCED_REDUNDANCY = "REDUCED_REDUNDANCY"
+    STANDARD_IA = "STANDARD_IA"
+    ONEZONE_IA = "ONEZONE_IA"
+    INTELLIGENT_TIERING = "INTELLIGENT_TIERING"
+    GLACIER = "GLACIER"
+    GLACIER_IR = "GLACIER_IR"
+    DEEP_ARCHIVE = "DEEP_ARCHIVE"
+
+
+class OutputSecurityLakeServerSideEncryptionForUploadedObjects(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
+    AES256 = "AES256"
+    AWS_KMS = "aws:kms"
+
+
+class OutputSecurityLakeBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""How to handle events when all receivers are exerting backpressure"""
+
+    BLOCK = "block"
+    DROP = "drop"
+
+
+class OutputSecurityLakeDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
+
+    BLOCK = "block"
+    DROP = "drop"
+
+
+class OutputSecurityLakeParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Determines which data types are supported and how they are represented"""
+
+    PARQUET_1_0 = "PARQUET_1_0"
+    PARQUET_2_4 = "PARQUET_2_4"
+    PARQUET_2_6 = "PARQUET_2_6"
+
+
+class OutputSecurityLakeDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+
+    DATA_PAGE_V1 = "DATA_PAGE_V1"
+    DATA_PAGE_V2 = "DATA_PAGE_V2"
+
+
+class OutputSecurityLakeKeyValueMetadatumTypedDict(TypedDict):
+    value: str
+    key: NotRequired[str]
+
+
+class OutputSecurityLakeKeyValueMetadatum(BaseModel):
+    value: str
+
+    key: Optional[str] = ""
+
+
+class OutputSecurityLakeTypedDict(TypedDict):
+    bucket: str
+    r"""Name of the destination S3 bucket. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""
+    region: str
+    r"""Region where the Amazon Security Lake is located."""
+    assume_role_arn: str
+    r"""Amazon Resource Name (ARN) of the role to assume"""
+    account_id: str
+    r"""ID of the AWS account whose data the Destination will write to Security Lake. This should have been configured when creating the Amazon Security Lake custom source."""
+    custom_source: str
+    r"""Name of the custom source configured in Amazon Security Lake"""
+    id: NotRequired[str]
+    r"""Unique ID for this output"""
+    type: NotRequired[OutputSecurityLakeType]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data before sending out to this output"""
+    system_fields: NotRequired[List[str]]
+    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards. These fields are added as dimensions and labels to generated metrics and logs, respectively."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    aws_secret_key: NotRequired[str]
+    aws_authentication_method: NotRequired[OutputSecurityLakeAuthenticationMethod]
+    r"""AWS authentication method. Choose Auto to use IAM roles."""
+    endpoint: NotRequired[str]
+    r"""Amazon Security Lake service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to Amazon Security Lake-compatible endpoint."""
+    signature_version: NotRequired[OutputSecurityLakeSignatureVersion]
+    r"""Signature version to use for signing Amazon Security Lake requests"""
+    reuse_connections: NotRequired[bool]
+    r"""Reuse connections between requests, which can improve performance"""
+    reject_unauthorized: NotRequired[bool]
+    r"""Reject certificates that cannot be verified against a valid CA, such as self-signed certificates"""
+    enable_assume_role: NotRequired[bool]
+    r"""Use Assume Role credentials to access S3"""
+    assume_role_external_id: NotRequired[str]
+    r"""External ID to use when assuming role"""
+    duration_seconds: NotRequired[float]
+    r"""Duration of the assumed role's session, in seconds. Minimum is 900 (15 minutes), default is 3600 (1 hour), and maximum is 43200 (12 hours)."""
+    stage_path: NotRequired[str]
+    r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
+    add_id_to_stage_path: NotRequired[bool]
+    r"""Add the Output ID value to staging location"""
+    object_acl: NotRequired[OutputSecurityLakeObjectACL]
+    r"""Object ACL to assign to uploaded objects"""
+    storage_class: NotRequired[OutputSecurityLakeStorageClass]
+    r"""Storage class to select for uploaded objects"""
+    server_side_encryption: NotRequired[
+        OutputSecurityLakeServerSideEncryptionForUploadedObjects
+    ]
+    kms_key_id: NotRequired[str]
+    r"""ID or ARN of the KMS customer-managed key to use for encryption"""
+    remove_empty_dirs: NotRequired[bool]
+    r"""Remove empty staging directories after moving files"""
+    base_file_name: NotRequired[str]
+    r"""JavaScript expression to define the output filename prefix (can be constant)"""
+    max_file_size_mb: NotRequired[float]
+    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
+    max_open_files: NotRequired[float]
+    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
+    header_line: NotRequired[str]
+    r"""If set, this line will be written to the beginning of each output file"""
+    write_high_water_mark: NotRequired[float]
+    r"""Buffer size used to write to a file"""
+    on_backpressure: NotRequired[OutputSecurityLakeBackpressureBehavior]
+    r"""How to handle events when all receivers are exerting backpressure"""
+    deadletter_enabled: NotRequired[bool]
+    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
+    on_disk_full_backpressure: NotRequired[OutputSecurityLakeDiskSpaceProtection]
+    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
+    max_file_open_time_sec: NotRequired[float]
+    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
+    max_file_idle_time_sec: NotRequired[float]
+    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
+    max_concurrent_file_parts: NotRequired[float]
+    r"""Maximum number of parts to upload in parallel per file. Minimum part size is 5MB."""
+    verify_permissions: NotRequired[bool]
+    r"""Disable if you can access files within the bucket but not the bucket itself"""
+    max_closing_files_to_backpressure: NotRequired[float]
+    r"""Maximum number of files that can be waiting for upload before backpressure is applied"""
+    automatic_schema: NotRequired[bool]
+    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
+    parquet_version: NotRequired[OutputSecurityLakeParquetVersion]
+    r"""Determines which data types are supported and how they are represented"""
+    parquet_data_page_version: NotRequired[OutputSecurityLakeDataPageVersion]
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+    parquet_row_group_length: NotRequired[float]
+    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
+    parquet_page_size: NotRequired[str]
+    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
+    should_log_invalid_rows: NotRequired[bool]
+    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
+    key_value_metadata: NotRequired[List[OutputSecurityLakeKeyValueMetadatumTypedDict]]
+    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
+    enable_statistics: NotRequired[bool]
+    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
+    enable_write_page_index: NotRequired[bool]
+    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
+    enable_page_checksum: NotRequired[bool]
+    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
+    description: NotRequired[str]
+    aws_api_key: NotRequired[str]
+    r"""This value can be a constant or a JavaScript expression (`${C.env.SOME_ACCESS_KEY}`)"""
+    aws_secret: NotRequired[str]
+    r"""Select or create a stored secret that references your access key and secret key"""
+    empty_dir_cleanup_sec: NotRequired[float]
+    r"""How frequently, in seconds, to clean up empty directories"""
+    parquet_schema: NotRequired[str]
+    r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
+    deadletter_path: NotRequired[str]
+    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
+    max_retry_num: NotRequired[float]
+    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
+
+
+class OutputSecurityLake(BaseModel):
+    bucket: str
+    r"""Name of the destination S3 bucket. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myBucket-${C.vars.myVar}`"""
+
+    region: str
+    r"""Region where the Amazon Security Lake is located."""
+
+    assume_role_arn: Annotated[str, pydantic.Field(alias="assumeRoleArn")]
+    r"""Amazon Resource Name (ARN) of the role to assume"""
+
+    account_id: Annotated[str, pydantic.Field(alias="accountId")]
+    r"""ID of the AWS account whose data the Destination will write to Security Lake. This should have been configured when creating the Amazon Security Lake custom source."""
+
+    custom_source: Annotated[str, pydantic.Field(alias="customSource")]
+    r"""Name of the custom source configured in Amazon Security Lake"""
+
+    id: Optional[str] = None
+    r"""Unique ID for this output"""
+
+    type: Annotated[
+        Optional[OutputSecurityLakeType], PlainValidator(validate_open_enum(False))
+    ] = None
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data before sending out to this output"""
+
+    system_fields: Annotated[
+        Optional[List[str]], pydantic.Field(alias="systemFields")
+    ] = None
+    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards. These fields are added as dimensions and labels to generated metrics and logs, respectively."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    aws_secret_key: Annotated[Optional[str], pydantic.Field(alias="awsSecretKey")] = (
+        None
+    )
+
+    aws_authentication_method: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="awsAuthenticationMethod"),
+    ] = OutputSecurityLakeAuthenticationMethod.AUTO
+    r"""AWS authentication method. Choose Auto to use IAM roles."""
+
+    endpoint: Optional[str] = None
+    r"""Amazon Security Lake service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to Amazon Security Lake-compatible endpoint."""
+
+    signature_version: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="signatureVersion"),
+    ] = OutputSecurityLakeSignatureVersion.V4
+    r"""Signature version to use for signing Amazon Security Lake requests"""
+
+    reuse_connections: Annotated[
+        Optional[bool], pydantic.Field(alias="reuseConnections")
+    ] = True
+    r"""Reuse connections between requests, which can improve performance"""
+
+    reject_unauthorized: Annotated[
+        Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+    ] = True
+    r"""Reject certificates that cannot be verified against a valid CA, such as self-signed certificates"""
+
+    enable_assume_role: Annotated[
+        Optional[bool], pydantic.Field(alias="enableAssumeRole")
+    ] = False
+    r"""Use Assume Role credentials to access S3"""
+
+    assume_role_external_id: Annotated[
+        Optional[str], pydantic.Field(alias="assumeRoleExternalId")
+    ] = None
+    r"""External ID to use when assuming role"""
+
+    duration_seconds: Annotated[
+        Optional[float], pydantic.Field(alias="durationSeconds")
+    ] = 3600
+    r"""Duration of the assumed role's session, in seconds. Minimum is 900 (15 minutes), default is 3600 (1 hour), and maximum is 43200 (12 hours)."""
+
+    stage_path: Annotated[Optional[str], pydantic.Field(alias="stagePath")] = (
+        "$CRIBL_HOME/state/outputs/staging"
+    )
+    r"""Filesystem location in which to buffer files, before compressing and moving to final destination. Use performant and stable storage."""
+
+    add_id_to_stage_path: Annotated[
+        Optional[bool], pydantic.Field(alias="addIdToStagePath")
+    ] = True
+    r"""Add the Output ID value to staging location"""
+
+    object_acl: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeObjectACL],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="objectACL"),
+    ] = OutputSecurityLakeObjectACL.PRIVATE
+    r"""Object ACL to assign to uploaded objects"""
+
+    storage_class: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeStorageClass],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="storageClass"),
+    ] = None
+    r"""Storage class to select for uploaded objects"""
+
+    server_side_encryption: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeServerSideEncryptionForUploadedObjects],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="serverSideEncryption"),
+    ] = None
+
+    kms_key_id: Annotated[Optional[str], pydantic.Field(alias="kmsKeyId")] = None
+    r"""ID or ARN of the KMS customer-managed key to use for encryption"""
+
+    remove_empty_dirs: Annotated[
+        Optional[bool], pydantic.Field(alias="removeEmptyDirs")
+    ] = True
+    r"""Remove empty staging directories after moving files"""
+
+    base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
+        "`CriblOut`"
+    )
+    r"""JavaScript expression to define the output filename prefix (can be constant)"""
+
+    max_file_size_mb: Annotated[
+        Optional[float], pydantic.Field(alias="maxFileSizeMB")
+    ] = 32
+    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
+
+    max_open_files: Annotated[Optional[float], pydantic.Field(alias="maxOpenFiles")] = (
+        100
+    )
+    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
+
+    header_line: Annotated[Optional[str], pydantic.Field(alias="headerLine")] = ""
+    r"""If set, this line will be written to the beginning of each output file"""
+
+    write_high_water_mark: Annotated[
+        Optional[float], pydantic.Field(alias="writeHighWaterMark")
+    ] = 64
+    r"""Buffer size used to write to a file"""
+
+    on_backpressure: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="onBackpressure"),
+    ] = OutputSecurityLakeBackpressureBehavior.BLOCK
+    r"""How to handle events when all receivers are exerting backpressure"""
+
+    deadletter_enabled: Annotated[
+        Optional[bool], pydantic.Field(alias="deadletterEnabled")
+    ] = False
+    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
+
+    on_disk_full_backpressure: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeDiskSpaceProtection],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="onDiskFullBackpressure"),
+    ] = OutputSecurityLakeDiskSpaceProtection.BLOCK
+    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
+
+    max_file_open_time_sec: Annotated[
+        Optional[float], pydantic.Field(alias="maxFileOpenTimeSec")
+    ] = 300
+    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
+
+    max_file_idle_time_sec: Annotated[
+        Optional[float], pydantic.Field(alias="maxFileIdleTimeSec")
+    ] = 30
+    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
+
+    max_concurrent_file_parts: Annotated[
+        Optional[float], pydantic.Field(alias="maxConcurrentFileParts")
+    ] = 4
+    r"""Maximum number of parts to upload in parallel per file. Minimum part size is 5MB."""
+
+    verify_permissions: Annotated[
+        Optional[bool], pydantic.Field(alias="verifyPermissions")
+    ] = True
+    r"""Disable if you can access files within the bucket but not the bucket itself"""
+
+    max_closing_files_to_backpressure: Annotated[
+        Optional[float], pydantic.Field(alias="maxClosingFilesToBackpressure")
+    ] = 100
+    r"""Maximum number of files that can be waiting for upload before backpressure is applied"""
+
+    automatic_schema: Annotated[
+        Optional[bool], pydantic.Field(alias="automaticSchema")
+    ] = False
+    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
+
+    parquet_version: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeParquetVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="parquetVersion"),
+    ] = OutputSecurityLakeParquetVersion.PARQUET_2_6
+    r"""Determines which data types are supported and how they are represented"""
+
+    parquet_data_page_version: Annotated[
+        Annotated[
+            Optional[OutputSecurityLakeDataPageVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="parquetDataPageVersion"),
+    ] = OutputSecurityLakeDataPageVersion.DATA_PAGE_V2
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+
+    parquet_row_group_length: Annotated[
+        Optional[float], pydantic.Field(alias="parquetRowGroupLength")
+    ] = 10000
+    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
+
+    parquet_page_size: Annotated[
+        Optional[str], pydantic.Field(alias="parquetPageSize")
+    ] = "1MB"
+    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
+
+    should_log_invalid_rows: Annotated[
+        Optional[bool], pydantic.Field(alias="shouldLogInvalidRows")
+    ] = None
+    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
+
+    key_value_metadata: Annotated[
+        Optional[List[OutputSecurityLakeKeyValueMetadatum]],
+        pydantic.Field(alias="keyValueMetadata"),
+    ] = None
+    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
+
+    enable_statistics: Annotated[
+        Optional[bool], pydantic.Field(alias="enableStatistics")
+    ] = True
+    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
+
+    enable_write_page_index: Annotated[
+        Optional[bool], pydantic.Field(alias="enableWritePageIndex")
+    ] = True
+    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
+
+    enable_page_checksum: Annotated[
+        Optional[bool], pydantic.Field(alias="enablePageChecksum")
+    ] = False
+    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
+
+    description: Optional[str] = None
+
+    aws_api_key: Annotated[Optional[str], pydantic.Field(alias="awsApiKey")] = None
+    r"""This value can be a constant or a JavaScript expression (`${C.env.SOME_ACCESS_KEY}`)"""
+
+    aws_secret: Annotated[Optional[str], pydantic.Field(alias="awsSecret")] = None
+    r"""Select or create a stored secret that references your access key and secret key"""
+
+    empty_dir_cleanup_sec: Annotated[
+        Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
+    ] = 300
+    r"""How frequently, in seconds, to clean up empty directories"""
+
+    parquet_schema: Annotated[Optional[str], pydantic.Field(alias="parquetSchema")] = (
+        None
+    )
+    r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
+
+    deadletter_path: Annotated[
+        Optional[str], pydantic.Field(alias="deadletterPath")
+    ] = "$CRIBL_HOME/state/outputs/dead-letter"
+    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
+
+    max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
+    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""