cribl-control-plane 0.0.13__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/inputhttpraw.py (new file)
@@ -0,0 +1,407 @@
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

from __future__ import annotations
from cribl_control_plane import utils
from cribl_control_plane.types import BaseModel
from cribl_control_plane.utils import validate_open_enum
from enum import Enum
import pydantic
from pydantic.functional_validators import PlainValidator
from typing import Any, List, Optional
from typing_extensions import Annotated, NotRequired, TypedDict


class InputHTTPRawType(str, Enum, metaclass=utils.OpenEnumMeta):
    HTTP_RAW = "http_raw"


class InputHTTPRawConnectionTypedDict(TypedDict):
    output: str
    pipeline: NotRequired[str]


class InputHTTPRawConnection(BaseModel):
    output: str

    pipeline: Optional[str] = None


class InputHTTPRawMode(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

    SMART = "smart"
    ALWAYS = "always"


class InputHTTPRawCompression(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Codec to use to compress the persisted data"""

    NONE = "none"
    GZIP = "gzip"


class InputHTTPRawPqTypedDict(TypedDict):
    mode: NotRequired[InputHTTPRawMode]
    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
    max_buffer_size: NotRequired[float]
    r"""The maximum number of events to hold in memory before writing the events to disk"""
    commit_frequency: NotRequired[float]
    r"""The number of events to send downstream before committing that Stream has read them"""
    max_file_size: NotRequired[str]
    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
    max_size: NotRequired[str]
    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
    path: NotRequired[str]
    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
    compress: NotRequired[InputHTTPRawCompression]
    r"""Codec to use to compress the persisted data"""


class InputHTTPRawPq(BaseModel):
    mode: Annotated[
        Optional[InputHTTPRawMode], PlainValidator(validate_open_enum(False))
    ] = InputHTTPRawMode.ALWAYS
    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

    max_buffer_size: Annotated[
        Optional[float], pydantic.Field(alias="maxBufferSize")
    ] = 1000
    r"""The maximum number of events to hold in memory before writing the events to disk"""

    commit_frequency: Annotated[
        Optional[float], pydantic.Field(alias="commitFrequency")
    ] = 42
    r"""The number of events to send downstream before committing that Stream has read them"""

    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
        "1 MB"
    )
    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""

    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""

    path: Optional[str] = "$CRIBL_HOME/state/queues"
    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

    compress: Annotated[
        Optional[InputHTTPRawCompression], PlainValidator(validate_open_enum(False))
    ] = InputHTTPRawCompression.NONE
    r"""Codec to use to compress the persisted data"""


class InputHTTPRawMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
    TL_SV1 = "TLSv1"
    TL_SV1_1 = "TLSv1.1"
    TL_SV1_2 = "TLSv1.2"
    TL_SV1_3 = "TLSv1.3"


class InputHTTPRawMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
    TL_SV1 = "TLSv1"
    TL_SV1_1 = "TLSv1.1"
    TL_SV1_2 = "TLSv1.2"
    TL_SV1_3 = "TLSv1.3"


class InputHTTPRawTLSSettingsServerSideTypedDict(TypedDict):
    disabled: NotRequired[bool]
    certificate_name: NotRequired[str]
    r"""The name of the predefined certificate"""
    priv_key_path: NotRequired[str]
    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""
    passphrase: NotRequired[str]
    r"""Passphrase to use to decrypt private key"""
    cert_path: NotRequired[str]
    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""
    ca_path: NotRequired[str]
    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""
    request_cert: NotRequired[bool]
    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""
    reject_unauthorized: NotRequired[Any]
    common_name_regex: NotRequired[Any]
    min_version: NotRequired[InputHTTPRawMinimumTLSVersion]
    max_version: NotRequired[InputHTTPRawMaximumTLSVersion]


class InputHTTPRawTLSSettingsServerSide(BaseModel):
    disabled: Optional[bool] = True

    certificate_name: Annotated[
        Optional[str], pydantic.Field(alias="certificateName")
    ] = None
    r"""The name of the predefined certificate"""

    priv_key_path: Annotated[Optional[str], pydantic.Field(alias="privKeyPath")] = None
    r"""Path on server containing the private key to use. PEM format. Can reference $ENV_VARS."""

    passphrase: Optional[str] = None
    r"""Passphrase to use to decrypt private key"""

    cert_path: Annotated[Optional[str], pydantic.Field(alias="certPath")] = None
    r"""Path on server containing certificates to use. PEM format. Can reference $ENV_VARS."""

    ca_path: Annotated[Optional[str], pydantic.Field(alias="caPath")] = None
    r"""Path on server containing CA certificates to use. PEM format. Can reference $ENV_VARS."""

    request_cert: Annotated[Optional[bool], pydantic.Field(alias="requestCert")] = False
    r"""Require clients to present their certificates. Used to perform client authentication using SSL certs."""

    reject_unauthorized: Annotated[
        Optional[Any], pydantic.Field(alias="rejectUnauthorized")
    ] = None

    common_name_regex: Annotated[
        Optional[Any], pydantic.Field(alias="commonNameRegex")
    ] = None

    min_version: Annotated[
        Annotated[
            Optional[InputHTTPRawMinimumTLSVersion],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="minVersion"),
    ] = None

    max_version: Annotated[
        Annotated[
            Optional[InputHTTPRawMaximumTLSVersion],
            PlainValidator(validate_open_enum(False)),
        ],
        pydantic.Field(alias="maxVersion"),
    ] = None


class InputHTTPRawMetadatumTypedDict(TypedDict):
    name: str
    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


class InputHTTPRawMetadatum(BaseModel):
    name: str

    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


class InputHTTPRawAuthTokensExtMetadatumTypedDict(TypedDict):
    name: str
    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


class InputHTTPRawAuthTokensExtMetadatum(BaseModel):
    name: str

    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


class InputHTTPRawAuthTokensExtTypedDict(TypedDict):
    token: str
    r"""Shared secret to be provided by any client (Authorization: <token>)"""
    description: NotRequired[str]
    metadata: NotRequired[List[InputHTTPRawAuthTokensExtMetadatumTypedDict]]
    r"""Fields to add to events referencing this token"""


class InputHTTPRawAuthTokensExt(BaseModel):
    token: str
    r"""Shared secret to be provided by any client (Authorization: <token>)"""

    description: Optional[str] = None

    metadata: Optional[List[InputHTTPRawAuthTokensExtMetadatum]] = None
    r"""Fields to add to events referencing this token"""


class InputHTTPRawTypedDict(TypedDict):
    type: InputHTTPRawType
    port: float
    r"""Port to listen on"""
    id: NotRequired[str]
    r"""Unique ID for this input"""
    disabled: NotRequired[bool]
    pipeline: NotRequired[str]
    r"""Pipeline to process data from this Source before sending it through the Routes"""
    send_to_routes: NotRequired[bool]
    r"""Select whether to send data to Routes, or directly to Destinations."""
    environment: NotRequired[str]
    r"""Optionally, enable this config only on a specified Git branch. If empty, the config will be enabled everywhere."""
    pq_enabled: NotRequired[bool]
    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
    streamtags: NotRequired[List[str]]
    r"""Tags for filtering and grouping in @{product}"""
    connections: NotRequired[List[InputHTTPRawConnectionTypedDict]]
    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
    pq: NotRequired[InputHTTPRawPqTypedDict]
    host: NotRequired[str]
    r"""Address to bind on. Defaults to 0.0.0.0 (all addresses)."""
    auth_tokens: NotRequired[List[str]]
    r"""Shared secrets to be provided by any client (Authorization: <token>). If empty, unauthorized access is permitted."""
    tls: NotRequired[InputHTTPRawTLSSettingsServerSideTypedDict]
    max_active_req: NotRequired[float]
    r"""Maximum number of active requests allowed per Worker Process. Set to 0 for unlimited. Caution: Increasing the limit above the default value, or setting it to unlimited, may degrade performance and reduce throughput."""
    max_requests_per_socket: NotRequired[int]
    r"""Maximum number of requests per socket before @{product} instructs the client to close the connection. Default is 0 (unlimited)."""
    enable_proxy_header: NotRequired[bool]
    r"""Extract the client IP and port from PROXY protocol v1/v2. When enabled, the X-Forwarded-For header is ignored. Disable to use the X-Forwarded-For header for client IP extraction."""
    capture_headers: NotRequired[bool]
    r"""Add request headers to events, in the __headers field"""
    activity_log_sample_rate: NotRequired[float]
    r"""How often request activity is logged at the `info` level. A value of 1 would log every request, 10 every 10th request, etc."""
    request_timeout: NotRequired[float]
    r"""How long to wait for an incoming request to complete before aborting it. Use 0 to disable."""
    socket_timeout: NotRequired[float]
    r"""How long @{product} should wait before assuming that an inactive socket has timed out. To wait forever, set to 0."""
    keep_alive_timeout: NotRequired[float]
    r"""After the last response is sent, @{product} will wait this long for additional data before closing the socket connection. Minimum 1 second, maximum 600 seconds (10 minutes)."""
    enable_health_check: NotRequired[bool]
    r"""Expose the /cribl_health endpoint, which returns 200 OK when this Source is healthy"""
    ip_allowlist_regex: NotRequired[str]
    r"""Messages from matched IP addresses will be processed, unless also matched by the denylist"""
    ip_denylist_regex: NotRequired[str]
    r"""Messages from matched IP addresses will be ignored. This takes precedence over the allowlist."""
    breaker_rulesets: NotRequired[List[str]]
    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
    stale_channel_flush_ms: NotRequired[float]
    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
    metadata: NotRequired[List[InputHTTPRawMetadatumTypedDict]]
    r"""Fields to add to events from this input"""
    allowed_paths: NotRequired[List[str]]
    r"""List of URI paths accepted by this input; wildcards are supported, e.g., /api/v*/hook. Defaults to allow all."""
    allowed_methods: NotRequired[List[str]]
    r"""List of HTTP methods accepted by this input. Wildcards are supported (such as P*, GET). Defaults to allow all."""
    auth_tokens_ext: NotRequired[List[InputHTTPRawAuthTokensExtTypedDict]]
    r"""Shared secrets to be provided by any client (Authorization: <token>). If empty, unauthorized access is permitted."""
    description: NotRequired[str]


class InputHTTPRaw(BaseModel):
    type: Annotated[InputHTTPRawType, PlainValidator(validate_open_enum(False))]

    port: float
    r"""Port to listen on"""

    id: Optional[str] = None
    r"""Unique ID for this input"""

    disabled: Optional[bool] = False

    pipeline: Optional[str] = None
    r"""Pipeline to process data from this Source before sending it through the Routes"""

    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
        True
    )
    r"""Select whether to send data to Routes, or directly to Destinations."""

    environment: Optional[str] = None
    r"""Optionally, enable this config only on a specified Git branch. If empty, the config will be enabled everywhere."""

    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""

    streamtags: Optional[List[str]] = None
    r"""Tags for filtering and grouping in @{product}"""

    connections: Optional[List[InputHTTPRawConnection]] = None
    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""

    pq: Optional[InputHTTPRawPq] = None

    host: Optional[str] = "0.0.0.0"
    r"""Address to bind on. Defaults to 0.0.0.0 (all addresses)."""

    auth_tokens: Annotated[Optional[List[str]], pydantic.Field(alias="authTokens")] = (
        None
    )
    r"""Shared secrets to be provided by any client (Authorization: <token>). If empty, unauthorized access is permitted."""

    tls: Optional[InputHTTPRawTLSSettingsServerSide] = None

    max_active_req: Annotated[Optional[float], pydantic.Field(alias="maxActiveReq")] = (
        256
    )
    r"""Maximum number of active requests allowed per Worker Process. Set to 0 for unlimited. Caution: Increasing the limit above the default value, or setting it to unlimited, may degrade performance and reduce throughput."""

    max_requests_per_socket: Annotated[
        Optional[int], pydantic.Field(alias="maxRequestsPerSocket")
    ] = 0
    r"""Maximum number of requests per socket before @{product} instructs the client to close the connection. Default is 0 (unlimited)."""

    enable_proxy_header: Annotated[
        Optional[bool], pydantic.Field(alias="enableProxyHeader")
    ] = False
    r"""Extract the client IP and port from PROXY protocol v1/v2. When enabled, the X-Forwarded-For header is ignored. Disable to use the X-Forwarded-For header for client IP extraction."""

    capture_headers: Annotated[
        Optional[bool], pydantic.Field(alias="captureHeaders")
    ] = False
    r"""Add request headers to events, in the __headers field"""

    activity_log_sample_rate: Annotated[
        Optional[float], pydantic.Field(alias="activityLogSampleRate")
    ] = 100
    r"""How often request activity is logged at the `info` level. A value of 1 would log every request, 10 every 10th request, etc."""

    request_timeout: Annotated[
        Optional[float], pydantic.Field(alias="requestTimeout")
    ] = 0
    r"""How long to wait for an incoming request to complete before aborting it. Use 0 to disable."""

    socket_timeout: Annotated[
        Optional[float], pydantic.Field(alias="socketTimeout")
    ] = 0
    r"""How long @{product} should wait before assuming that an inactive socket has timed out. To wait forever, set to 0."""

    keep_alive_timeout: Annotated[
        Optional[float], pydantic.Field(alias="keepAliveTimeout")
    ] = 5
    r"""After the last response is sent, @{product} will wait this long for additional data before closing the socket connection. Minimum 1 second, maximum 600 seconds (10 minutes)."""

    enable_health_check: Annotated[
        Optional[bool], pydantic.Field(alias="enableHealthCheck")
    ] = False
    r"""Expose the /cribl_health endpoint, which returns 200 OK when this Source is healthy"""

    ip_allowlist_regex: Annotated[
        Optional[str], pydantic.Field(alias="ipAllowlistRegex")
    ] = "/.*/"
    r"""Messages from matched IP addresses will be processed, unless also matched by the denylist"""

    ip_denylist_regex: Annotated[
        Optional[str], pydantic.Field(alias="ipDenylistRegex")
    ] = "/^$/"
    r"""Messages from matched IP addresses will be ignored. This takes precedence over the allowlist."""

    breaker_rulesets: Annotated[
        Optional[List[str]], pydantic.Field(alias="breakerRulesets")
    ] = None
    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""

    stale_channel_flush_ms: Annotated[
        Optional[float], pydantic.Field(alias="staleChannelFlushMs")
    ] = 10000
    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""

    metadata: Optional[List[InputHTTPRawMetadatum]] = None
    r"""Fields to add to events from this input"""

    allowed_paths: Annotated[
        Optional[List[str]], pydantic.Field(alias="allowedPaths")
    ] = None
    r"""List of URI paths accepted by this input; wildcards are supported, e.g., /api/v*/hook. Defaults to allow all."""

    allowed_methods: Annotated[
        Optional[List[str]], pydantic.Field(alias="allowedMethods")
    ] = None
    r"""List of HTTP methods accepted by this input. Wildcards are supported (such as P*, GET). Defaults to allow all."""

    auth_tokens_ext: Annotated[
        Optional[List[InputHTTPRawAuthTokensExt]], pydantic.Field(alias="authTokensExt")
    ] = None
    r"""Shared secrets to be provided by any client (Authorization: <token>). If empty, unauthorized access is permitted."""

    description: Optional[str] = None
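For orientation, here is a minimal, hypothetical usage sketch of the generated InputHTTPRaw model above. The class and field names come from the diff; the cribl_control_plane.models import path and the pydantic v2 model_dump call are assumptions about the generated SDK, not something this diff confirms.

# Hypothetical sketch (not part of the package diff).
# Assumes the models package re-exports these names, per the generated
# cribl_control_plane/models/__init__.py listed above.
from cribl_control_plane.models import InputHTTPRaw, InputHTTPRawType

# `type` and `port` are the only required fields; everything else falls back
# to the generated defaults (host="0.0.0.0", maxActiveReq=256, and so on).
source = InputHTTPRaw(type=InputHTTPRawType.HTTP_RAW, port=9000)

# Serialize with the camelCase wire aliases declared via pydantic.Field(alias=...).
print(source.model_dump(by_alias=True, exclude_none=True))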
cribl_control_plane/models/inputjournalfiles.py (new file)
@@ -0,0 +1,208 @@
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""

from __future__ import annotations
from cribl_control_plane import utils
from cribl_control_plane.types import BaseModel
from cribl_control_plane.utils import validate_open_enum
from enum import Enum
import pydantic
from pydantic.functional_validators import PlainValidator
from typing import List, Optional
from typing_extensions import Annotated, NotRequired, TypedDict


class InputJournalFilesType(str, Enum, metaclass=utils.OpenEnumMeta):
    JOURNAL_FILES = "journal_files"


class InputJournalFilesConnectionTypedDict(TypedDict):
    output: str
    pipeline: NotRequired[str]


class InputJournalFilesConnection(BaseModel):
    output: str

    pipeline: Optional[str] = None


class InputJournalFilesMode(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

    SMART = "smart"
    ALWAYS = "always"


class InputJournalFilesCompression(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Codec to use to compress the persisted data"""

    NONE = "none"
    GZIP = "gzip"


class InputJournalFilesPqTypedDict(TypedDict):
    mode: NotRequired[InputJournalFilesMode]
    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
    max_buffer_size: NotRequired[float]
    r"""The maximum number of events to hold in memory before writing the events to disk"""
    commit_frequency: NotRequired[float]
    r"""The number of events to send downstream before committing that Stream has read them"""
    max_file_size: NotRequired[str]
    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
    max_size: NotRequired[str]
    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
    path: NotRequired[str]
    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
    compress: NotRequired[InputJournalFilesCompression]
    r"""Codec to use to compress the persisted data"""


class InputJournalFilesPq(BaseModel):
    mode: Annotated[
        Optional[InputJournalFilesMode], PlainValidator(validate_open_enum(False))
    ] = InputJournalFilesMode.ALWAYS
    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""

    max_buffer_size: Annotated[
        Optional[float], pydantic.Field(alias="maxBufferSize")
    ] = 1000
    r"""The maximum number of events to hold in memory before writing the events to disk"""

    commit_frequency: Annotated[
        Optional[float], pydantic.Field(alias="commitFrequency")
    ] = 42
    r"""The number of events to send downstream before committing that Stream has read them"""

    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
        "1 MB"
    )
    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""

    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""

    path: Optional[str] = "$CRIBL_HOME/state/queues"
    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""

    compress: Annotated[
        Optional[InputJournalFilesCompression],
        PlainValidator(validate_open_enum(False)),
    ] = InputJournalFilesCompression.NONE
    r"""Codec to use to compress the persisted data"""


class InputJournalFilesRuleTypedDict(TypedDict):
    filter_: str
    r"""JavaScript expression applied to Journal objects. Return 'true' to include it."""
    description: NotRequired[str]
    r"""Optional description of this rule's purpose"""


class InputJournalFilesRule(BaseModel):
    filter_: Annotated[str, pydantic.Field(alias="filter")]
    r"""JavaScript expression applied to Journal objects. Return 'true' to include it."""

    description: Optional[str] = None
    r"""Optional description of this rule's purpose"""


class InputJournalFilesMetadatumTypedDict(TypedDict):
    name: str
    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


class InputJournalFilesMetadatum(BaseModel):
    name: str

    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""


class InputJournalFilesTypedDict(TypedDict):
    type: InputJournalFilesType
    path: str
    r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""
    journals: List[str]
    r"""The full paths of discovered journals are matched against this wildcard list."""
    id: NotRequired[str]
    r"""Unique ID for this input"""
    disabled: NotRequired[bool]
    pipeline: NotRequired[str]
    r"""Pipeline to process data from this Source before sending it through the Routes"""
    send_to_routes: NotRequired[bool]
    r"""Select whether to send data to Routes, or directly to Destinations."""
    environment: NotRequired[str]
    r"""Optionally, enable this config only on a specified Git branch. If empty, the config will be enabled everywhere."""
    pq_enabled: NotRequired[bool]
    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
    streamtags: NotRequired[List[str]]
    r"""Tags for filtering and grouping in @{product}"""
    connections: NotRequired[List[InputJournalFilesConnectionTypedDict]]
    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
    pq: NotRequired[InputJournalFilesPqTypedDict]
    interval: NotRequired[float]
    r"""Time, in seconds, between scans for journals."""
    rules: NotRequired[List[InputJournalFilesRuleTypedDict]]
    r"""Add rules to decide which journal objects to allow. Events are generated if no rules are given or if all the rules' expressions evaluate to true."""
    current_boot: NotRequired[bool]
    r"""Skip log messages that are not part of the current boot session."""
    max_age_dur: NotRequired[str]
    r"""The maximum log message age, in duration form (e.g., 60s, 4h, 3d, 1w). If left empty, no maximum-age filter is applied."""
    metadata: NotRequired[List[InputJournalFilesMetadatumTypedDict]]
    r"""Fields to add to events from this input"""
    description: NotRequired[str]


class InputJournalFiles(BaseModel):
    type: Annotated[InputJournalFilesType, PlainValidator(validate_open_enum(False))]

    path: str
    r"""Directory path to search for journals. Environment variables will be resolved, e.g. $CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID."""

    journals: List[str]
    r"""The full paths of discovered journals are matched against this wildcard list."""

    id: Optional[str] = None
    r"""Unique ID for this input"""

    disabled: Optional[bool] = False

    pipeline: Optional[str] = None
    r"""Pipeline to process data from this Source before sending it through the Routes"""

    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
        True
    )
    r"""Select whether to send data to Routes, or directly to Destinations."""

    environment: Optional[str] = None
    r"""Optionally, enable this config only on a specified Git branch. If empty, the config will be enabled everywhere."""

    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""

    streamtags: Optional[List[str]] = None
    r"""Tags for filtering and grouping in @{product}"""

    connections: Optional[List[InputJournalFilesConnection]] = None
    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""

    pq: Optional[InputJournalFilesPq] = None

    interval: Optional[float] = 10
    r"""Time, in seconds, between scans for journals."""

    rules: Optional[List[InputJournalFilesRule]] = None
    r"""Add rules to decide which journal objects to allow. Events are generated if no rules are given or if all the rules' expressions evaluate to true."""

    current_boot: Annotated[Optional[bool], pydantic.Field(alias="currentBoot")] = False
    r"""Skip log messages that are not part of the current boot session."""

    max_age_dur: Annotated[Optional[str], pydantic.Field(alias="maxAgeDur")] = None
    r"""The maximum log message age, in duration form (e.g., 60s, 4h, 3d, 1w). If left empty, no maximum-age filter is applied."""

    metadata: Optional[List[InputJournalFilesMetadatum]] = None
    r"""Fields to add to events from this input"""

    description: Optional[str] = None
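Similarly, a minimal, hypothetical sketch for the InputJournalFiles model defined above. Field names, required fields, and defaults come from the diff; the import path is assumed, and only fields without pydantic aliases are passed by name, since the diff does not show whether the generated BaseModel enables populate-by-name validation.

# Hypothetical sketch (not part of the package diff).
from cribl_control_plane.models import InputJournalFiles, InputJournalFilesType

# `type`, `path`, and `journals` are the required fields per the model above.
src = InputJournalFiles(
    type=InputJournalFilesType.JOURNAL_FILES,
    path="$CRIBL_EDGE_FS_ROOT/var/log/journal/$MACHINE_ID",
    journals=["*"],  # wildcard list matched against discovered journal paths
    description="journald intake for the current host",
)

# interval defaults to 10 (seconds between scans); currentBoot defaults to False.
print(src.model_dump(by_alias=True, exclude_none=True))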