cribl-control-plane 0.0.13 (py3-none-any.whl)

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.

Potentially problematic release.

This version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/outputdataset.py
@@ -0,0 +1,437 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class OutputDatasetType(str, Enum, metaclass=utils.OpenEnumMeta):
+     DATASET = "dataset"
+
+
+ class OutputDatasetSeverity(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Default value for event severity. If the `sev` or `__severity` fields are set on an event, the first one matching will override this value."""
+
+     FINEST = "finest"
+     FINER = "finer"
+     FINE = "fine"
+     INFO = "info"
+     WARNING = "warning"
+     ERROR = "error"
+     FATAL = "fatal"
+
+
+ class OutputDatasetResponseRetrySettingTypedDict(TypedDict):
+     http_status: float
+     r"""The HTTP response status code that will trigger retries"""
+     initial_backoff: NotRequired[float]
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+     backoff_rate: NotRequired[float]
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+     max_backoff: NotRequired[float]
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatasetResponseRetrySetting(BaseModel):
+     http_status: Annotated[float, pydantic.Field(alias="httpStatus")]
+     r"""The HTTP response status code that will trigger retries"""
+
+     initial_backoff: Annotated[
+         Optional[float], pydantic.Field(alias="initialBackoff")
+     ] = 1000
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+
+     backoff_rate: Annotated[Optional[float], pydantic.Field(alias="backoffRate")] = 2
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+
+     max_backoff: Annotated[Optional[float], pydantic.Field(alias="maxBackoff")] = 10000
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatasetTimeoutRetrySettingsTypedDict(TypedDict):
+     timeout_retry: NotRequired[bool]
+     initial_backoff: NotRequired[float]
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+     backoff_rate: NotRequired[float]
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+     max_backoff: NotRequired[float]
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatasetTimeoutRetrySettings(BaseModel):
+     timeout_retry: Annotated[Optional[bool], pydantic.Field(alias="timeoutRetry")] = (
+         False
+     )
+
+     initial_backoff: Annotated[
+         Optional[float], pydantic.Field(alias="initialBackoff")
+     ] = 1000
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+
+     backoff_rate: Annotated[Optional[float], pydantic.Field(alias="backoffRate")] = 2
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+
+     max_backoff: Annotated[Optional[float], pydantic.Field(alias="maxBackoff")] = 10000
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class DataSetSite(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""DataSet site to which events should be sent"""
+
+     US = "us"
+     EU = "eu"
+     CUSTOM = "custom"
+
+
+ class OutputDatasetExtraHTTPHeaderTypedDict(TypedDict):
+     value: str
+     name: NotRequired[str]
+
+
+ class OutputDatasetExtraHTTPHeader(BaseModel):
+     value: str
+
+     name: Optional[str] = None
+
+
+ class OutputDatasetFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
+
+     PAYLOAD = "payload"
+     PAYLOAD_AND_HEADERS = "payloadAndHeaders"
+     NONE = "none"
+
+
+ class OutputDatasetBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""How to handle events when all receivers are exerting backpressure"""
+
+     BLOCK = "block"
+     DROP = "drop"
+     QUEUE = "queue"
+
+
+ class OutputDatasetAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Enter API key directly, or select a stored secret"""
+
+     MANUAL = "manual"
+     SECRET = "secret"
+
+
+ class OutputDatasetCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Codec to use to compress the persisted data"""
+
+     NONE = "none"
+     GZIP = "gzip"
+
+
+ class OutputDatasetQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+
+     BLOCK = "block"
+     DROP = "drop"
+
+
+ class OutputDatasetMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+     ERROR = "error"
+     BACKPRESSURE = "backpressure"
+     ALWAYS = "always"
+
+
+ class OutputDatasetPqControlsTypedDict(TypedDict):
+     pass
+
+
+ class OutputDatasetPqControls(BaseModel):
+     pass
+
+
+ class OutputDatasetTypedDict(TypedDict):
+     id: str
+     r"""Unique ID for this output"""
+     type: OutputDatasetType
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data before sending out to this output"""
+     system_fields: NotRequired[List[str]]
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+     message_field: NotRequired[str]
+     r"""Name of the event field that contains the message or attributes to send. If not specified, all of the event's non-internal fields will be sent as attributes."""
+     exclude_fields: NotRequired[List[str]]
+     r"""Fields to exclude from the event if the Message field is either unspecified or refers to an object. Ignored if the Message field is a string. If empty, we send all non-internal fields."""
+     server_host_field: NotRequired[str]
+     r"""Name of the event field that contains the `serverHost` identifier. If not specified, defaults to `cribl_<outputId>`."""
+     timestamp_field: NotRequired[str]
+     r"""Name of the event field that contains the timestamp. If not specified, defaults to `ts`, `_time`, or `Date.now()`, in that order."""
+     default_severity: NotRequired[OutputDatasetSeverity]
+     r"""Default value for event severity. If the `sev` or `__severity` fields are set on an event, the first one matching will override this value."""
+     response_retry_settings: NotRequired[
+         List[OutputDatasetResponseRetrySettingTypedDict]
+     ]
+     r"""Automatically retry after unsuccessful response status codes, such as 429 (Too Many Requests) or 503 (Service Unavailable)"""
+     timeout_retry_settings: NotRequired[OutputDatasetTimeoutRetrySettingsTypedDict]
+     response_honor_retry_after_header: NotRequired[bool]
+     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+     site: NotRequired[DataSetSite]
+     r"""DataSet site to which events should be sent"""
+     concurrency: NotRequired[float]
+     r"""Maximum number of ongoing requests before blocking"""
+     max_payload_size_kb: NotRequired[float]
+     r"""Maximum size, in KB, of the request body"""
+     max_payload_events: NotRequired[float]
+     r"""Maximum number of events to include in the request body. Default is 0 (unlimited)."""
+     compress: NotRequired[bool]
+     r"""Compress the payload body before sending"""
+     reject_unauthorized: NotRequired[bool]
+     r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's).
+     Enabled by default. When this setting is also present in TLS Settings (Client Side),
+     that value will take precedence.
+     """
+     timeout_sec: NotRequired[float]
+     r"""Amount of time, in seconds, to wait for a request to complete before canceling it"""
+     flush_period_sec: NotRequired[float]
+     r"""Maximum time between requests. Small values could cause the payload size to be smaller than the configured Body size limit."""
+     extra_http_headers: NotRequired[List[OutputDatasetExtraHTTPHeaderTypedDict]]
+     r"""Headers to add to all events"""
+     use_round_robin_dns: NotRequired[bool]
+     r"""Enable round-robin DNS lookup. When a DNS server returns multiple addresses, @{product} will cycle through them in the order returned. For optimal performance, consider enabling this setting for non-load balanced destinations."""
+     failed_request_logging_mode: NotRequired[OutputDatasetFailedRequestLoggingMode]
+     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
+     safe_headers: NotRequired[List[str]]
+     r"""List of headers that are safe to log in plain text"""
+     on_backpressure: NotRequired[OutputDatasetBackpressureBehavior]
+     r"""How to handle events when all receivers are exerting backpressure"""
+     auth_type: NotRequired[OutputDatasetAuthenticationMethod]
+     r"""Enter API key directly, or select a stored secret"""
+     total_memory_limit_kb: NotRequired[float]
+     r"""Maximum total size of the batches waiting to be sent. If left blank, defaults to 5 times the max body size (if set). If 0, no limit is enforced."""
+     description: NotRequired[str]
+     custom_url: NotRequired[str]
+     pq_max_file_size: NotRequired[str]
+     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
+     pq_max_size: NotRequired[str]
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+     pq_path: NotRequired[str]
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
+     pq_compress: NotRequired[OutputDatasetCompression]
+     r"""Codec to use to compress the persisted data"""
+     pq_on_backpressure: NotRequired[OutputDatasetQueueFullBehavior]
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+     pq_mode: NotRequired[OutputDatasetMode]
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+     pq_controls: NotRequired[OutputDatasetPqControlsTypedDict]
+     api_key: NotRequired[str]
+     r"""A 'Log Write Access' API key for the DataSet account"""
+     text_secret: NotRequired[str]
+     r"""Select or create a stored text secret"""
+
+
+ class OutputDataset(BaseModel):
+     id: str
+     r"""Unique ID for this output"""
+
+     type: Annotated[OutputDatasetType, PlainValidator(validate_open_enum(False))]
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data before sending out to this output"""
+
+     system_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="systemFields")
+     ] = None
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
+
+     message_field: Annotated[Optional[str], pydantic.Field(alias="messageField")] = None
+     r"""Name of the event field that contains the message or attributes to send. If not specified, all of the event's non-internal fields will be sent as attributes."""
+
+     exclude_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="excludeFields")
+     ] = None
+     r"""Fields to exclude from the event if the Message field is either unspecified or refers to an object. Ignored if the Message field is a string. If empty, we send all non-internal fields."""
+
+     server_host_field: Annotated[
+         Optional[str], pydantic.Field(alias="serverHostField")
+     ] = None
+     r"""Name of the event field that contains the `serverHost` identifier. If not specified, defaults to `cribl_<outputId>`."""
+
+     timestamp_field: Annotated[
+         Optional[str], pydantic.Field(alias="timestampField")
+     ] = None
+     r"""Name of the event field that contains the timestamp. If not specified, defaults to `ts`, `_time`, or `Date.now()`, in that order."""
+
+     default_severity: Annotated[
+         Annotated[
+             Optional[OutputDatasetSeverity], PlainValidator(validate_open_enum(False))
+         ],
+         pydantic.Field(alias="defaultSeverity"),
+     ] = OutputDatasetSeverity.INFO
+     r"""Default value for event severity. If the `sev` or `__severity` fields are set on an event, the first one matching will override this value."""
+
+     response_retry_settings: Annotated[
+         Optional[List[OutputDatasetResponseRetrySetting]],
+         pydantic.Field(alias="responseRetrySettings"),
+     ] = None
+     r"""Automatically retry after unsuccessful response status codes, such as 429 (Too Many Requests) or 503 (Service Unavailable)"""
+
+     timeout_retry_settings: Annotated[
+         Optional[OutputDatasetTimeoutRetrySettings],
+         pydantic.Field(alias="timeoutRetrySettings"),
+     ] = None
+
+     response_honor_retry_after_header: Annotated[
+         Optional[bool], pydantic.Field(alias="responseHonorRetryAfterHeader")
+     ] = False
+     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+
+     site: Annotated[
+         Optional[DataSetSite], PlainValidator(validate_open_enum(False))
+     ] = DataSetSite.US
+     r"""DataSet site to which events should be sent"""
+
+     concurrency: Optional[float] = 5
+     r"""Maximum number of ongoing requests before blocking"""
+
+     max_payload_size_kb: Annotated[
+         Optional[float], pydantic.Field(alias="maxPayloadSizeKB")
+     ] = 4096
+     r"""Maximum size, in KB, of the request body"""
+
+     max_payload_events: Annotated[
+         Optional[float], pydantic.Field(alias="maxPayloadEvents")
+     ] = 0
+     r"""Maximum number of events to include in the request body. Default is 0 (unlimited)."""
+
+     compress: Optional[bool] = True
+     r"""Compress the payload body before sending"""
+
+     reject_unauthorized: Annotated[
+         Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+     ] = True
+     r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's).
+     Enabled by default. When this setting is also present in TLS Settings (Client Side),
+     that value will take precedence.
+     """
+
+     timeout_sec: Annotated[Optional[float], pydantic.Field(alias="timeoutSec")] = 30
+     r"""Amount of time, in seconds, to wait for a request to complete before canceling it"""
+
+     flush_period_sec: Annotated[
+         Optional[float], pydantic.Field(alias="flushPeriodSec")
+     ] = 1
+     r"""Maximum time between requests. Small values could cause the payload size to be smaller than the configured Body size limit."""
+
+     extra_http_headers: Annotated[
+         Optional[List[OutputDatasetExtraHTTPHeader]],
+         pydantic.Field(alias="extraHttpHeaders"),
+     ] = None
+     r"""Headers to add to all events"""
+
+     use_round_robin_dns: Annotated[
+         Optional[bool], pydantic.Field(alias="useRoundRobinDns")
+     ] = False
+     r"""Enable round-robin DNS lookup. When a DNS server returns multiple addresses, @{product} will cycle through them in the order returned. For optimal performance, consider enabling this setting for non-load balanced destinations."""
+
+     failed_request_logging_mode: Annotated[
+         Annotated[
+             Optional[OutputDatasetFailedRequestLoggingMode],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="failedRequestLoggingMode"),
+     ] = OutputDatasetFailedRequestLoggingMode.NONE
+     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
+
+     safe_headers: Annotated[
+         Optional[List[str]], pydantic.Field(alias="safeHeaders")
+     ] = None
+     r"""List of headers that are safe to log in plain text"""
+
+     on_backpressure: Annotated[
+         Annotated[
+             Optional[OutputDatasetBackpressureBehavior],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="onBackpressure"),
+     ] = OutputDatasetBackpressureBehavior.BLOCK
+     r"""How to handle events when all receivers are exerting backpressure"""
+
+     auth_type: Annotated[
+         Annotated[
+             Optional[OutputDatasetAuthenticationMethod],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="authType"),
+     ] = OutputDatasetAuthenticationMethod.MANUAL
+     r"""Enter API key directly, or select a stored secret"""
+
+     total_memory_limit_kb: Annotated[
+         Optional[float], pydantic.Field(alias="totalMemoryLimitKB")
+     ] = None
+     r"""Maximum total size of the batches waiting to be sent. If left blank, defaults to 5 times the max body size (if set). If 0, no limit is enforced."""
+
+     description: Optional[str] = None
+
+     custom_url: Annotated[Optional[str], pydantic.Field(alias="customUrl")] = None
+
+     pq_max_file_size: Annotated[
+         Optional[str], pydantic.Field(alias="pqMaxFileSize")
+     ] = "1 MB"
+     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
+
+     pq_max_size: Annotated[Optional[str], pydantic.Field(alias="pqMaxSize")] = "5GB"
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+     pq_path: Annotated[Optional[str], pydantic.Field(alias="pqPath")] = (
+         "$CRIBL_HOME/state/queues"
+     )
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
+
+     pq_compress: Annotated[
+         Annotated[
+             Optional[OutputDatasetCompression],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqCompress"),
+     ] = OutputDatasetCompression.NONE
+     r"""Codec to use to compress the persisted data"""
+
+     pq_on_backpressure: Annotated[
+         Annotated[
+             Optional[OutputDatasetQueueFullBehavior],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqOnBackpressure"),
+     ] = OutputDatasetQueueFullBehavior.BLOCK
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+
+     pq_mode: Annotated[
+         Annotated[
+             Optional[OutputDatasetMode], PlainValidator(validate_open_enum(False))
+         ],
+         pydantic.Field(alias="pqMode"),
+     ] = OutputDatasetMode.ERROR
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+     pq_controls: Annotated[
+         Optional[OutputDatasetPqControls], pydantic.Field(alias="pqControls")
+     ] = None
+
+     api_key: Annotated[Optional[str], pydantic.Field(alias="apiKey")] = None
+     r"""A 'Log Write Access' API key for the DataSet account"""
+
+     text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
+     r"""Select or create a stored text secret"""
cribl_control_plane/models/outputdefault.py
@@ -0,0 +1,55 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class OutputDefaultType(str, Enum, metaclass=utils.OpenEnumMeta):
+     DEFAULT = "default"
+
+
+ class OutputDefaultTypedDict(TypedDict):
+     type: OutputDefaultType
+     default_id: str
+     r"""ID of the default output. This will be used whenever a nonexistent/deleted output is referenced."""
+     id: NotRequired[str]
+     r"""Unique ID for this output"""
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data before sending out to this output"""
+     system_fields: NotRequired[List[str]]
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+
+
+ class OutputDefault(BaseModel):
+     type: Annotated[OutputDefaultType, PlainValidator(validate_open_enum(False))]
+
+     default_id: Annotated[str, pydantic.Field(alias="defaultId")]
+     r"""ID of the default output. This will be used whenever a nonexistent/deleted output is referenced."""
+
+     id: Optional[str] = None
+     r"""Unique ID for this output"""
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data before sending out to this output"""
+
+     system_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="systemFields")
+     ] = None
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
cribl_control_plane/models/outputdevnull.py
@@ -0,0 +1,50 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class OutputDevnullType(str, Enum, metaclass=utils.OpenEnumMeta):
+     DEVNULL = "devnull"
+
+
+ class OutputDevnullTypedDict(TypedDict):
+     id: str
+     r"""Unique ID for this output"""
+     type: OutputDevnullType
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data before sending out to this output"""
+     system_fields: NotRequired[List[str]]
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+
+
+ class OutputDevnull(BaseModel):
+     id: str
+     r"""Unique ID for this output"""
+
+     type: Annotated[OutputDevnullType, PlainValidator(validate_open_enum(False))]
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data before sending out to this output"""
+
+     system_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="systemFields")
+     ] = None
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
cribl_control_plane/models/outputdiskspool.py
@@ -0,0 +1,89 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class OutputDiskSpoolType(str, Enum, metaclass=utils.OpenEnumMeta):
+     DISK_SPOOL = "disk_spool"
+
+
+ class OutputDiskSpoolCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Data compression format. Default is gzip."""
+
+     NONE = "none"
+     GZIP = "gzip"
+
+
+ class OutputDiskSpoolTypedDict(TypedDict):
+     id: str
+     r"""Unique ID for this output"""
+     type: OutputDiskSpoolType
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data before sending out to this output"""
+     system_fields: NotRequired[List[str]]
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+     time_window: NotRequired[str]
+     r"""Time period for grouping spooled events. Default is 10m."""
+     max_data_size: NotRequired[str]
+     r"""Maximum disk space that can be consumed before older buckets are deleted. Examples: 420MB, 4GB. Default is 1GB."""
+     max_data_time: NotRequired[str]
+     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""
+     compress: NotRequired[OutputDiskSpoolCompression]
+     r"""Data compression format. Default is gzip."""
+     partition_expr: NotRequired[str]
+     r"""JavaScript expression defining how files are partitioned and organized within the time-buckets. If blank, the event's __partition property is used and otherwise, events go directly into the time-bucket directory."""
+     description: NotRequired[str]
+
+
+ class OutputDiskSpool(BaseModel):
+     id: str
+     r"""Unique ID for this output"""
+
+     type: Annotated[OutputDiskSpoolType, PlainValidator(validate_open_enum(False))]
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data before sending out to this output"""
+
+     system_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="systemFields")
+     ] = None
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
+
+     time_window: Annotated[Optional[str], pydantic.Field(alias="timeWindow")] = "10m"
+     r"""Time period for grouping spooled events. Default is 10m."""
+
+     max_data_size: Annotated[Optional[str], pydantic.Field(alias="maxDataSize")] = "1GB"
+     r"""Maximum disk space that can be consumed before older buckets are deleted. Examples: 420MB, 4GB. Default is 1GB."""
+
+     max_data_time: Annotated[Optional[str], pydantic.Field(alias="maxDataTime")] = "24h"
+     r"""Maximum amount of time to retain data before older buckets are deleted. Examples: 2h, 4d. Default is 24h."""
+
+     compress: Annotated[
+         Optional[OutputDiskSpoolCompression], PlainValidator(validate_open_enum(False))
+     ] = OutputDiskSpoolCompression.GZIP
+     r"""Data compression format. Default is gzip."""
+
+     partition_expr: Annotated[Optional[str], pydantic.Field(alias="partitionExpr")] = (
+         None
+     )
+     r"""JavaScript expression defining how files are partitioned and organized within the time-buckets. If blank, the event's __partition property is used and otherwise, events go directly into the time-bucket directory."""
+
+     description: Optional[str] = None
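
As a final sketch, the generated defaults mirror the field docstrings, so a disk-spool config built from only the required fields is already fully populated; same hypothetical re-export and pydantic v2 assumptions as the earlier examples.

from cribl_control_plane.models import (
    OutputDiskSpool,
    OutputDiskSpoolCompression,
    OutputDiskSpoolType,
)

spool = OutputDiskSpool(id="spool-out", type=OutputDiskSpoolType.DISK_SPOOL)

# Optional fields fall back to the declared defaults.
assert spool.time_window == "10m"
assert spool.max_data_size == "1GB"
assert spool.max_data_time == "24h"
assert spool.compress == OutputDiskSpoolCompression.GZIP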