cribl-control-plane 0.0.13__py3-none-any.whl

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/outputdatadog.py
@@ -0,0 +1,472 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane import utils
+ from cribl_control_plane.types import BaseModel
+ from cribl_control_plane.utils import validate_open_enum
+ from enum import Enum
+ import pydantic
+ from pydantic.functional_validators import PlainValidator
+ from typing import List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class OutputDatadogType(str, Enum, metaclass=utils.OpenEnumMeta):
+     DATADOG = "datadog"
+
+
+ class SendLogsAs(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""The content type to use when sending logs"""
+
+     TEXT = "text"
+     JSON = "json"
+
+
+ class OutputDatadogSeverity(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Default value for message severity. When you send logs as JSON objects, the event's '__severity' field (if set) will override this value."""
+
+     EMERGENCY = "emergency"
+     ALERT = "alert"
+     CRITICAL = "critical"
+     ERROR = "error"
+     WARNING = "warning"
+     NOTICE = "notice"
+     INFO = "info"
+     DEBUG = "debug"
+
+
+ class DatadogSite(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Datadog site to which events should be sent"""
+
+     US = "us"
+     US3 = "us3"
+     US5 = "us5"
+     EU = "eu"
+     FED1 = "fed1"
+     AP1 = "ap1"
+     CUSTOM = "custom"
+
+
+ class OutputDatadogExtraHTTPHeaderTypedDict(TypedDict):
+     value: str
+     name: NotRequired[str]
+
+
+ class OutputDatadogExtraHTTPHeader(BaseModel):
+     value: str
+
+     name: Optional[str] = None
+
+
+ class OutputDatadogFailedRequestLoggingMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
+
+     PAYLOAD = "payload"
+     PAYLOAD_AND_HEADERS = "payloadAndHeaders"
+     NONE = "none"
+
+
+ class OutputDatadogResponseRetrySettingTypedDict(TypedDict):
+     http_status: float
+     r"""The HTTP response status code that will trigger retries"""
+     initial_backoff: NotRequired[float]
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+     backoff_rate: NotRequired[float]
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+     max_backoff: NotRequired[float]
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatadogResponseRetrySetting(BaseModel):
+     http_status: Annotated[float, pydantic.Field(alias="httpStatus")]
+     r"""The HTTP response status code that will trigger retries"""
+
+     initial_backoff: Annotated[
+         Optional[float], pydantic.Field(alias="initialBackoff")
+     ] = 1000
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+
+     backoff_rate: Annotated[Optional[float], pydantic.Field(alias="backoffRate")] = 2
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+
+     max_backoff: Annotated[Optional[float], pydantic.Field(alias="maxBackoff")] = 10000
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatadogTimeoutRetrySettingsTypedDict(TypedDict):
+     timeout_retry: NotRequired[bool]
+     initial_backoff: NotRequired[float]
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+     backoff_rate: NotRequired[float]
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+     max_backoff: NotRequired[float]
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatadogTimeoutRetrySettings(BaseModel):
+     timeout_retry: Annotated[Optional[bool], pydantic.Field(alias="timeoutRetry")] = (
+         False
+     )
+
+     initial_backoff: Annotated[
+         Optional[float], pydantic.Field(alias="initialBackoff")
+     ] = 1000
+     r"""How long, in milliseconds, Cribl Stream should wait before initiating backoff. Maximum interval is 600,000 ms (10 minutes)."""
+
+     backoff_rate: Annotated[Optional[float], pydantic.Field(alias="backoffRate")] = 2
+     r"""Base for exponential backoff. A value of 2 (default) means Cribl Stream will retry after 2 seconds, then 4 seconds, then 8 seconds, etc."""
+
+     max_backoff: Annotated[Optional[float], pydantic.Field(alias="maxBackoff")] = 10000
+     r"""The maximum backoff interval, in milliseconds, Cribl Stream should apply. Default (and minimum) is 10,000 ms (10 seconds); maximum is 180,000 ms (180 seconds)."""
+
+
+ class OutputDatadogBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""How to handle events when all receivers are exerting backpressure"""
+
+     BLOCK = "block"
+     DROP = "drop"
+     QUEUE = "queue"
+
+
+ class OutputDatadogAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Enter API key directly, or select a stored secret"""
+
+     MANUAL = "manual"
+     SECRET = "secret"
+
+
+ class OutputDatadogCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""Codec to use to compress the persisted data"""
+
+     NONE = "none"
+     GZIP = "gzip"
+
+
+ class OutputDatadogQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+
+     BLOCK = "block"
+     DROP = "drop"
+
+
+ class OutputDatadogMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+     ERROR = "error"
+     BACKPRESSURE = "backpressure"
+     ALWAYS = "always"
+
+
+ class OutputDatadogPqControlsTypedDict(TypedDict):
+     pass
+
+
+ class OutputDatadogPqControls(BaseModel):
+     pass
+
+
+ class OutputDatadogTypedDict(TypedDict):
+     id: str
+     r"""Unique ID for this output"""
+     type: OutputDatadogType
+     pipeline: NotRequired[str]
+     r"""Pipeline to process data before sending out to this output"""
+     system_fields: NotRequired[List[str]]
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+     content_type: NotRequired[SendLogsAs]
+     r"""The content type to use when sending logs"""
+     message: NotRequired[str]
+     r"""Name of the event field that contains the message to send. If not specified, Stream sends a JSON representation of the whole event."""
+     source: NotRequired[str]
+     r"""Name of the source to send with logs. When you send logs as JSON objects, the event's 'source' field (if set) will override this value."""
+     host: NotRequired[str]
+     r"""Name of the host to send with logs. When you send logs as JSON objects, the event's 'host' field (if set) will override this value."""
+     service: NotRequired[str]
+     r"""Name of the service to send with logs. When you send logs as JSON objects, the event's '__service' field (if set) will override this value."""
+     tags: NotRequired[List[str]]
+     r"""List of tags to send with logs, such as 'env:prod' and 'env_staging:east'"""
+     batch_by_tags: NotRequired[bool]
+     r"""Batch events by API key and the ddtags field on the event. When disabled, batches events only by API key. If incoming events have high cardinality in the ddtags field, disabling this setting may improve Destination performance."""
+     allow_api_key_from_events: NotRequired[bool]
+     r"""Allow API key to be set from the event's '__agent_api_key' field"""
+     severity: NotRequired[OutputDatadogSeverity]
+     r"""Default value for message severity. When you send logs as JSON objects, the event's '__severity' field (if set) will override this value."""
+     site: NotRequired[DatadogSite]
+     r"""Datadog site to which events should be sent"""
+     send_counters_as_count: NotRequired[bool]
+     r"""If not enabled, Datadog will transform 'counter' metrics to 'gauge'. [Learn more about Datadog metrics types.](https://docs.datadoghq.com/metrics/types/?tab=count)"""
+     concurrency: NotRequired[float]
+     r"""Maximum number of ongoing requests before blocking"""
+     max_payload_size_kb: NotRequired[float]
+     r"""Maximum size, in KB, of the request body"""
+     max_payload_events: NotRequired[float]
+     r"""Maximum number of events to include in the request body. Default is 0 (unlimited)."""
+     compress: NotRequired[bool]
+     r"""Compress the payload body before sending"""
+     reject_unauthorized: NotRequired[bool]
+     r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's).
+     Enabled by default. When this setting is also present in TLS Settings (Client Side),
+     that value will take precedence.
+     """
+     timeout_sec: NotRequired[float]
+     r"""Amount of time, in seconds, to wait for a request to complete before canceling it"""
+     flush_period_sec: NotRequired[float]
+     r"""Maximum time between requests. Small values could cause the payload size to be smaller than the configured Body size limit."""
+     extra_http_headers: NotRequired[List[OutputDatadogExtraHTTPHeaderTypedDict]]
+     r"""Headers to add to all events"""
+     use_round_robin_dns: NotRequired[bool]
+     r"""Enable round-robin DNS lookup. When a DNS server returns multiple addresses, @{product} will cycle through them in the order returned. For optimal performance, consider enabling this setting for non-load balanced destinations."""
+     failed_request_logging_mode: NotRequired[OutputDatadogFailedRequestLoggingMode]
+     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
+     safe_headers: NotRequired[List[str]]
+     r"""List of headers that are safe to log in plain text"""
+     response_retry_settings: NotRequired[
+         List[OutputDatadogResponseRetrySettingTypedDict]
+     ]
+     r"""Automatically retry after unsuccessful response status codes, such as 429 (Too Many Requests) or 503 (Service Unavailable)"""
+     timeout_retry_settings: NotRequired[OutputDatadogTimeoutRetrySettingsTypedDict]
+     response_honor_retry_after_header: NotRequired[bool]
+     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+     on_backpressure: NotRequired[OutputDatadogBackpressureBehavior]
+     r"""How to handle events when all receivers are exerting backpressure"""
+     auth_type: NotRequired[OutputDatadogAuthenticationMethod]
+     r"""Enter API key directly, or select a stored secret"""
+     total_memory_limit_kb: NotRequired[float]
+     r"""Maximum total size of the batches waiting to be sent. If left blank, defaults to 5 times the max body size (if set). If 0, no limit is enforced."""
+     description: NotRequired[str]
+     custom_url: NotRequired[str]
+     pq_max_file_size: NotRequired[str]
+     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
+     pq_max_size: NotRequired[str]
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+     pq_path: NotRequired[str]
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
+     pq_compress: NotRequired[OutputDatadogCompression]
+     r"""Codec to use to compress the persisted data"""
+     pq_on_backpressure: NotRequired[OutputDatadogQueueFullBehavior]
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+     pq_mode: NotRequired[OutputDatadogMode]
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+     pq_controls: NotRequired[OutputDatadogPqControlsTypedDict]
+     api_key: NotRequired[str]
+     r"""Organization's API key in Datadog"""
+     text_secret: NotRequired[str]
+     r"""Select or create a stored text secret"""
+
+
+ class OutputDatadog(BaseModel):
+     id: str
+     r"""Unique ID for this output"""
+
+     type: Annotated[OutputDatadogType, PlainValidator(validate_open_enum(False))]
+
+     pipeline: Optional[str] = None
+     r"""Pipeline to process data before sending out to this output"""
+
+     system_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="systemFields")
+     ] = None
+     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
+
+     content_type: Annotated[
+         Annotated[Optional[SendLogsAs], PlainValidator(validate_open_enum(False))],
+         pydantic.Field(alias="contentType"),
+     ] = SendLogsAs.JSON
+     r"""The content type to use when sending logs"""
+
+     message: Optional[str] = None
+     r"""Name of the event field that contains the message to send. If not specified, Stream sends a JSON representation of the whole event."""
+
+     source: Optional[str] = None
+     r"""Name of the source to send with logs. When you send logs as JSON objects, the event's 'source' field (if set) will override this value."""
+
+     host: Optional[str] = None
+     r"""Name of the host to send with logs. When you send logs as JSON objects, the event's 'host' field (if set) will override this value."""
+
+     service: Optional[str] = None
+     r"""Name of the service to send with logs. When you send logs as JSON objects, the event's '__service' field (if set) will override this value."""
+
+     tags: Optional[List[str]] = None
+     r"""List of tags to send with logs, such as 'env:prod' and 'env_staging:east'"""
+
+     batch_by_tags: Annotated[Optional[bool], pydantic.Field(alias="batchByTags")] = True
+     r"""Batch events by API key and the ddtags field on the event. When disabled, batches events only by API key. If incoming events have high cardinality in the ddtags field, disabling this setting may improve Destination performance."""
+
+     allow_api_key_from_events: Annotated[
+         Optional[bool], pydantic.Field(alias="allowApiKeyFromEvents")
+     ] = False
+     r"""Allow API key to be set from the event's '__agent_api_key' field"""
+
+     severity: Annotated[
+         Optional[OutputDatadogSeverity], PlainValidator(validate_open_enum(False))
+     ] = None
+     r"""Default value for message severity. When you send logs as JSON objects, the event's '__severity' field (if set) will override this value."""
+
+     site: Annotated[
+         Optional[DatadogSite], PlainValidator(validate_open_enum(False))
+     ] = DatadogSite.US
+     r"""Datadog site to which events should be sent"""
+
+     send_counters_as_count: Annotated[
+         Optional[bool], pydantic.Field(alias="sendCountersAsCount")
+     ] = False
+     r"""If not enabled, Datadog will transform 'counter' metrics to 'gauge'. [Learn more about Datadog metrics types.](https://docs.datadoghq.com/metrics/types/?tab=count)"""
+
+     concurrency: Optional[float] = 5
+     r"""Maximum number of ongoing requests before blocking"""
+
+     max_payload_size_kb: Annotated[
+         Optional[float], pydantic.Field(alias="maxPayloadSizeKB")
+     ] = 4096
+     r"""Maximum size, in KB, of the request body"""
+
+     max_payload_events: Annotated[
+         Optional[float], pydantic.Field(alias="maxPayloadEvents")
+     ] = 0
+     r"""Maximum number of events to include in the request body. Default is 0 (unlimited)."""
+
+     compress: Optional[bool] = True
+     r"""Compress the payload body before sending"""
+
+     reject_unauthorized: Annotated[
+         Optional[bool], pydantic.Field(alias="rejectUnauthorized")
+     ] = True
+     r"""Reject certificates not authorized by a CA in the CA certificate path or by another trusted CA (such as the system's).
+     Enabled by default. When this setting is also present in TLS Settings (Client Side),
+     that value will take precedence.
+     """
+
+     timeout_sec: Annotated[Optional[float], pydantic.Field(alias="timeoutSec")] = 30
+     r"""Amount of time, in seconds, to wait for a request to complete before canceling it"""
+
+     flush_period_sec: Annotated[
+         Optional[float], pydantic.Field(alias="flushPeriodSec")
+     ] = 1
+     r"""Maximum time between requests. Small values could cause the payload size to be smaller than the configured Body size limit."""
+
+     extra_http_headers: Annotated[
+         Optional[List[OutputDatadogExtraHTTPHeader]],
+         pydantic.Field(alias="extraHttpHeaders"),
+     ] = None
+     r"""Headers to add to all events"""
+
+     use_round_robin_dns: Annotated[
+         Optional[bool], pydantic.Field(alias="useRoundRobinDns")
+     ] = False
+     r"""Enable round-robin DNS lookup. When a DNS server returns multiple addresses, @{product} will cycle through them in the order returned. For optimal performance, consider enabling this setting for non-load balanced destinations."""
+
+     failed_request_logging_mode: Annotated[
+         Annotated[
+             Optional[OutputDatadogFailedRequestLoggingMode],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="failedRequestLoggingMode"),
+     ] = OutputDatadogFailedRequestLoggingMode.NONE
+     r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
+
+     safe_headers: Annotated[
+         Optional[List[str]], pydantic.Field(alias="safeHeaders")
+     ] = None
+     r"""List of headers that are safe to log in plain text"""
+
+     response_retry_settings: Annotated[
+         Optional[List[OutputDatadogResponseRetrySetting]],
+         pydantic.Field(alias="responseRetrySettings"),
+     ] = None
+     r"""Automatically retry after unsuccessful response status codes, such as 429 (Too Many Requests) or 503 (Service Unavailable)"""
+
+     timeout_retry_settings: Annotated[
+         Optional[OutputDatadogTimeoutRetrySettings],
+         pydantic.Field(alias="timeoutRetrySettings"),
+     ] = None
+
+     response_honor_retry_after_header: Annotated[
+         Optional[bool], pydantic.Field(alias="responseHonorRetryAfterHeader")
+     ] = False
+     r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
+
+     on_backpressure: Annotated[
+         Annotated[
+             Optional[OutputDatadogBackpressureBehavior],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="onBackpressure"),
+     ] = OutputDatadogBackpressureBehavior.BLOCK
+     r"""How to handle events when all receivers are exerting backpressure"""
+
+     auth_type: Annotated[
+         Annotated[
+             Optional[OutputDatadogAuthenticationMethod],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="authType"),
+     ] = OutputDatadogAuthenticationMethod.MANUAL
+     r"""Enter API key directly, or select a stored secret"""
+
+     total_memory_limit_kb: Annotated[
+         Optional[float], pydantic.Field(alias="totalMemoryLimitKB")
+     ] = None
+     r"""Maximum total size of the batches waiting to be sent. If left blank, defaults to 5 times the max body size (if set). If 0, no limit is enforced."""
+
+     description: Optional[str] = None
+
+     custom_url: Annotated[Optional[str], pydantic.Field(alias="customUrl")] = None
+
+     pq_max_file_size: Annotated[
+         Optional[str], pydantic.Field(alias="pqMaxFileSize")
+     ] = "1 MB"
+     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
+
+     pq_max_size: Annotated[Optional[str], pydantic.Field(alias="pqMaxSize")] = "5GB"
+     r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+     pq_path: Annotated[Optional[str], pydantic.Field(alias="pqPath")] = (
+         "$CRIBL_HOME/state/queues"
+     )
+     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
+
+     pq_compress: Annotated[
+         Annotated[
+             Optional[OutputDatadogCompression],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqCompress"),
+     ] = OutputDatadogCompression.NONE
+     r"""Codec to use to compress the persisted data"""
+
+     pq_on_backpressure: Annotated[
+         Annotated[
+             Optional[OutputDatadogQueueFullBehavior],
+             PlainValidator(validate_open_enum(False)),
+         ],
+         pydantic.Field(alias="pqOnBackpressure"),
+     ] = OutputDatadogQueueFullBehavior.BLOCK
+     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
+
+     pq_mode: Annotated[
+         Annotated[
+             Optional[OutputDatadogMode], PlainValidator(validate_open_enum(False))
+         ],
+         pydantic.Field(alias="pqMode"),
+     ] = OutputDatadogMode.ERROR
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+     pq_controls: Annotated[
+         Optional[OutputDatadogPqControls], pydantic.Field(alias="pqControls")
+     ] = None
+
+     api_key: Annotated[Optional[str], pydantic.Field(alias="apiKey")] = None
+     r"""Organization's API key in Datadog"""
+
+     text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
+     r"""Select or create a stored text secret"""