cribl_control_plane-0.0.13-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/inputmetrics.py
@@ -0,0 +1,290 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import Any, List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class InputMetricsType(str, Enum, metaclass=utils.OpenEnumMeta):
+    METRICS = "metrics"
+
+
+class InputMetricsConnectionTypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputMetricsConnection(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputMetricsMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputMetricsCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputMetricsPqTypedDict(TypedDict):
+    mode: NotRequired[InputMetricsMode]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputMetricsCompression]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputMetricsPq(BaseModel):
+    mode: Annotated[
+        Optional[InputMetricsMode], PlainValidator(validate_open_enum(False))
+    ] = InputMetricsMode.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputMetricsCompression], PlainValidator(validate_open_enum(False))
+    ] = InputMetricsCompression.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputMetricsMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputMetricsMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputMetricsTLSSettingsServerSideTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    priv_key_path: NotRequired[str]
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+    passphrase: NotRequired[str]
+    r"""Passphrase to use to decrypt private key"""
+    cert_path: NotRequired[str]
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+    ca_path: NotRequired[str]
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
+    min_version: NotRequired[InputMetricsMinimumTLSVersion]
+    max_version: NotRequired[InputMetricsMaximumTLSVersion]
+
+
+class InputMetricsTLSSettingsServerSide(BaseModel):
+    disabled: Optional[bool] = True
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    passphrase: Optional[str] = None
+    r"""Passphrase to use to decrypt private key"""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
+    min_version: Annotated[
+        Annotated[
+            Optional[InputMetricsMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
+    ] = None
+
+    max_version: Annotated[
+        Annotated[
+            Optional[InputMetricsMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
+    ] = None
+
+
+class InputMetricsMetadatumTypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputMetricsMetadatum(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputMetricsTypedDict(TypedDict):
+    type: InputMetricsType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputMetricsConnectionTypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputMetricsPqTypedDict]
+    host: NotRequired[str]
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+    udp_port: NotRequired[float]
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+    tcp_port: NotRequired[float]
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+    max_buffer_size: NotRequired[float]
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+    ip_whitelist_regex: NotRequired[str]
+    r"""Regex matching IP addresses that are allowed to send data"""
+    enable_proxy_header: NotRequired[bool]
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+    tls: NotRequired[InputMetricsTLSSettingsServerSideTypedDict]
+    metadata: NotRequired[List[InputMetricsMetadatumTypedDict]]
+    r"""Fields to add to events from this input"""
+    udp_socket_rx_buf_size: NotRequired[float]
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+    description: NotRequired[str]
+
+
+class InputMetrics(BaseModel):
+    type: Annotated[InputMetricsType, PlainValidator(validate_open_enum(False))]
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputMetricsConnection]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputMetricsPq] = None
+
+    host: Optional[str] = "0.0.0.0"
+    r"""Address to bind on. For IPv4 (all addresses), use the default '0.0.0.0'. For IPv6, enter '::' (all addresses) or specify an IP address."""
+
+    udp_port: Annotated[Optional[float], pydantic.Field(alias="udpPort")] = None
+    r"""Enter UDP port number to listen on. Not required if listening on TCP."""
+
+    tcp_port: Annotated[Optional[float], pydantic.Field(alias="tcpPort")] = None
+    r"""Enter TCP port number to listen on. Not required if listening on UDP."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""Maximum number of events to buffer when downstream is blocking. Only applies to UDP."""
+
+    ip_whitelist_regex: Annotated[
+        Optional[str], pydantic.Field(alias="ipWhitelistRegex")
+    ] = "/.*/"
+    r"""Regex matching IP addresses that are allowed to send data"""
+
+    enable_proxy_header: Annotated[
+        Optional[bool], pydantic.Field(alias="enableProxyHeader")
+    ] = False
+    r"""Enable if the connection is proxied by a device that supports Proxy Protocol V1 or V2"""
+
+    tls: Optional[InputMetricsTLSSettingsServerSide] = None
+
+    metadata: Optional[List[InputMetricsMetadatum]] = None
+    r"""Fields to add to events from this input"""
+
+    udp_socket_rx_buf_size: Annotated[
+        Optional[float], pydantic.Field(alias="udpSocketRxBufSize")
+    ] = None
+    r"""Optionally, set the SO_RCVBUF socket option for the UDP socket. This value tells the operating system how many bytes can be buffered in the kernel before events are dropped. Leave blank to use the OS default. Caution: Increasing this value will affect OS memory utilization."""
+
+    description: Optional[str] = None
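
The hunk above adds the generated InputMetrics Pydantic model (the Metrics Source). A minimal usage sketch follows, assuming the SDK's BaseModel behaves like a standard Pydantic v2 model (so model_dump is available); the Source ID and Destination name are hypothetical placeholders, not values from this diff.

    from cribl_control_plane.models.inputmetrics import (
        InputMetrics,
        InputMetricsConnection,
        InputMetricsType,
    )

    # Build a Metrics Source definition; only non-aliased fields are set directly.
    source = InputMetrics(
        type=InputMetricsType.METRICS,
        id="metrics-in",  # hypothetical Source ID
        connections=[InputMetricsConnection(output="default")],  # hypothetical Destination
    )

    # Serialize using the camelCase aliases (sendToRoutes, udpPort, ...) defined above.
    print(source.model_dump(by_alias=True, exclude_none=True))

Fields left unset fall back to the defaults declared in the model (host 0.0.0.0, maxBufferSize 1000, ipWhitelistRegex /.*/).
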
cribl_control_plane/models/inputmodeldriventelemetry.py
@@ -0,0 +1,274 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import Any, List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class InputModelDrivenTelemetryType(str, Enum, metaclass=utils.OpenEnumMeta):
+    MODEL_DRIVEN_TELEMETRY = "model_driven_telemetry"
+
+
+class InputModelDrivenTelemetryConnectionTypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputModelDrivenTelemetryConnection(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputModelDrivenTelemetryMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputModelDrivenTelemetryCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputModelDrivenTelemetryPqTypedDict(TypedDict):
+    mode: NotRequired[InputModelDrivenTelemetryMode]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputModelDrivenTelemetryCompression]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputModelDrivenTelemetryPq(BaseModel):
+    mode: Annotated[
+        Optional[InputModelDrivenTelemetryMode],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputModelDrivenTelemetryMode.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputModelDrivenTelemetryCompression],
+        PlainValidator(validate_open_enum(False)),
+    ] = InputModelDrivenTelemetryCompression.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputModelDrivenTelemetryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputModelDrivenTelemetryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
+    TL_SV1 = "TLSv1"
+    TL_SV1_1 = "TLSv1.1"
+    TL_SV1_2 = "TLSv1.2"
+    TL_SV1_3 = "TLSv1.3"
+
+
+class InputModelDrivenTelemetryTLSSettingsServerSideTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    certificate_name: NotRequired[str]
+    r"""The name of the predefined certificate"""
+    priv_key_path: NotRequired[str]
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+    cert_path: NotRequired[str]
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+    ca_path: NotRequired[str]
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+    request_cert: NotRequired[bool]
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+    reject_unauthorized: NotRequired[Any]
+    common_name_regex: NotRequired[Any]
+    min_version: NotRequired[InputModelDrivenTelemetryMinimumTLSVersion]
+    max_version: NotRequired[InputModelDrivenTelemetryMaximumTLSVersion]
+
+
+class InputModelDrivenTelemetryTLSSettingsServerSide(BaseModel):
+    disabled: Optional[bool] = True
+
+    certificate_name: Annotated[
+        Optional[str], pydantic.Field(alias="certificateName")
+    ] = None
+    r"""The name of the predefined certificate"""
+
+    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
+    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
+
+    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
+    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
+    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
+
+    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
+    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
+
+    reject_unauthorized: Annotated[
+        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
+    ] = None
+
+    common_name_regex: Annotated[
+        Optional[Any], pydantic.Field(alias="commonNameRegex")
+    ] = None
+
+    min_version: Annotated[
+        Annotated[
+            Optional[InputModelDrivenTelemetryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
+    ] = None
+
+    max_version: Annotated[
+        Annotated[
+            Optional[InputModelDrivenTelemetryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
+    ] = None
+
+
+class InputModelDrivenTelemetryMetadatumTypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputModelDrivenTelemetryMetadatum(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputModelDrivenTelemetryTypedDict(TypedDict):
+    type: InputModelDrivenTelemetryType
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputModelDrivenTelemetryConnectionTypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputModelDrivenTelemetryPqTypedDict]
+    host: NotRequired[str]
+    r"""Address to bind on. Defaults to 0.0.0.0 (all addresses)."""
+    port: NotRequired[float]
+    r"""Port to listen on"""
+    tls: NotRequired[InputModelDrivenTelemetryTLSSettingsServerSideTypedDict]
+    metadata: NotRequired[List[InputModelDrivenTelemetryMetadatumTypedDict]]
+    r"""Fields to add to events from this input"""
+    max_active_cxn: NotRequired[float]
+    r"""Maximum number of active connections allowed per Worker Process. Use 0 for unlimited."""
+    shutdown_timeout_ms: NotRequired[float]
+    r"""Time in milliseconds to allow the server to shutdown gracefully before forcing shutdown. Defaults to 5000."""
+    description: NotRequired[str]
+
+
+class InputModelDrivenTelemetry(BaseModel):
+    type: Annotated[
+        InputModelDrivenTelemetryType, PlainValidator(validate_open_enum(False))
+    ]
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputModelDrivenTelemetryConnection]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputModelDrivenTelemetryPq] = None
+
+    host: Optional[str] = "0.0.0.0"
+    r"""Address to bind on. Defaults to 0.0.0.0 (all addresses)."""
+
+    port: Optional[float] = 57000
+    r"""Port to listen on"""
+
+    tls: Optional[InputModelDrivenTelemetryTLSSettingsServerSide] = None
+
+    metadata: Optional[List[InputModelDrivenTelemetryMetadatum]] = None
+    r"""Fields to add to events from this input"""
+
+    max_active_cxn: Annotated[Optional[float], pydantic.Field(alias="maxActiveCxn")] = (
+        1000
+    )
+    r"""Maximum number of active connections allowed per Worker Process. Use 0 for unlimited."""
+
+    shutdown_timeout_ms: Annotated[
+        Optional[float], pydantic.Field(alias="shutdownTimeoutMs")
+    ] = 5000
+    r"""Time in milliseconds to allow the server to shutdown gracefully before forcing shutdown. Defaults to 5000."""
+
+    description: Optional[str] = None
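
The InputModelDrivenTelemetry model added above can be constructed the same way. A minimal sketch under the same Pydantic v2 assumption; the ID is a hypothetical placeholder, and 57000 simply restates the model's own default port.

    from cribl_control_plane.models.inputmodeldriventelemetry import (
        InputModelDrivenTelemetry,
        InputModelDrivenTelemetryType,
    )

    mdt_source = InputModelDrivenTelemetry(
        type=InputModelDrivenTelemetryType.MODEL_DRIVEN_TELEMETRY,
        id="mdt-in",  # hypothetical Source ID
        port=57000,   # matches the model default shown in the hunk
    )

    # Unset optional fields fall back to the defaults above; aliased fields
    # (maxActiveCxn, shutdownTimeoutMs, ...) serialize as camelCase.
    print(mdt_source.model_dump(by_alias=True, exclude_none=True))
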