cribl-control-plane 0.0.13__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Note: this version of cribl-control-plane has been flagged as potentially problematic.

Files changed (197)
  1. cribl_control_plane/__init__.py +17 -0
  2. cribl_control_plane/_hooks/__init__.py +5 -0
  3. cribl_control_plane/_hooks/clientcredentials.py +211 -0
  4. cribl_control_plane/_hooks/registration.py +13 -0
  5. cribl_control_plane/_hooks/sdkhooks.py +81 -0
  6. cribl_control_plane/_hooks/types.py +112 -0
  7. cribl_control_plane/_version.py +15 -0
  8. cribl_control_plane/auth_sdk.py +184 -0
  9. cribl_control_plane/basesdk.py +358 -0
  10. cribl_control_plane/errors/__init__.py +60 -0
  11. cribl_control_plane/errors/apierror.py +38 -0
  12. cribl_control_plane/errors/criblcontrolplaneerror.py +26 -0
  13. cribl_control_plane/errors/error.py +24 -0
  14. cribl_control_plane/errors/healthstatus_error.py +38 -0
  15. cribl_control_plane/errors/no_response_error.py +13 -0
  16. cribl_control_plane/errors/responsevalidationerror.py +25 -0
  17. cribl_control_plane/health.py +166 -0
  18. cribl_control_plane/httpclient.py +126 -0
  19. cribl_control_plane/models/__init__.py +7305 -0
  20. cribl_control_plane/models/addhectokenrequest.py +34 -0
  21. cribl_control_plane/models/authtoken.py +13 -0
  22. cribl_control_plane/models/createinputhectokenbyidop.py +45 -0
  23. cribl_control_plane/models/createinputop.py +24 -0
  24. cribl_control_plane/models/createoutputop.py +24 -0
  25. cribl_control_plane/models/createoutputtestbyidop.py +46 -0
  26. cribl_control_plane/models/criblevent.py +14 -0
  27. cribl_control_plane/models/deleteinputbyidop.py +37 -0
  28. cribl_control_plane/models/deleteoutputbyidop.py +37 -0
  29. cribl_control_plane/models/deleteoutputpqbyidop.py +36 -0
  30. cribl_control_plane/models/getinputbyidop.py +37 -0
  31. cribl_control_plane/models/getoutputbyidop.py +37 -0
  32. cribl_control_plane/models/getoutputpqbyidop.py +36 -0
  33. cribl_control_plane/models/getoutputsamplesbyidop.py +37 -0
  34. cribl_control_plane/models/healthstatus.py +36 -0
  35. cribl_control_plane/models/input.py +199 -0
  36. cribl_control_plane/models/inputappscope.py +448 -0
  37. cribl_control_plane/models/inputazureblob.py +308 -0
  38. cribl_control_plane/models/inputcollection.py +208 -0
  39. cribl_control_plane/models/inputconfluentcloud.py +585 -0
  40. cribl_control_plane/models/inputcribl.py +165 -0
  41. cribl_control_plane/models/inputcriblhttp.py +341 -0
  42. cribl_control_plane/models/inputcribllakehttp.py +342 -0
  43. cribl_control_plane/models/inputcriblmetrics.py +175 -0
  44. cribl_control_plane/models/inputcribltcp.py +299 -0
  45. cribl_control_plane/models/inputcrowdstrike.py +410 -0
  46. cribl_control_plane/models/inputdatadogagent.py +364 -0
  47. cribl_control_plane/models/inputdatagen.py +180 -0
  48. cribl_control_plane/models/inputedgeprometheus.py +551 -0
  49. cribl_control_plane/models/inputelastic.py +494 -0
  50. cribl_control_plane/models/inputeventhub.py +360 -0
  51. cribl_control_plane/models/inputexec.py +213 -0
  52. cribl_control_plane/models/inputfile.py +259 -0
  53. cribl_control_plane/models/inputfirehose.py +341 -0
  54. cribl_control_plane/models/inputgooglepubsub.py +247 -0
  55. cribl_control_plane/models/inputgrafana_union.py +1247 -0
  56. cribl_control_plane/models/inputhttp.py +403 -0
  57. cribl_control_plane/models/inputhttpraw.py +407 -0
  58. cribl_control_plane/models/inputjournalfiles.py +208 -0
  59. cribl_control_plane/models/inputkafka.py +581 -0
  60. cribl_control_plane/models/inputkinesis.py +363 -0
  61. cribl_control_plane/models/inputkubeevents.py +182 -0
  62. cribl_control_plane/models/inputkubelogs.py +256 -0
  63. cribl_control_plane/models/inputkubemetrics.py +233 -0
  64. cribl_control_plane/models/inputloki.py +468 -0
  65. cribl_control_plane/models/inputmetrics.py +290 -0
  66. cribl_control_plane/models/inputmodeldriventelemetry.py +274 -0
  67. cribl_control_plane/models/inputmsk.py +654 -0
  68. cribl_control_plane/models/inputnetflow.py +224 -0
  69. cribl_control_plane/models/inputoffice365mgmt.py +384 -0
  70. cribl_control_plane/models/inputoffice365msgtrace.py +449 -0
  71. cribl_control_plane/models/inputoffice365service.py +377 -0
  72. cribl_control_plane/models/inputopentelemetry.py +516 -0
  73. cribl_control_plane/models/inputprometheus.py +464 -0
  74. cribl_control_plane/models/inputprometheusrw.py +470 -0
  75. cribl_control_plane/models/inputrawudp.py +207 -0
  76. cribl_control_plane/models/inputs3.py +416 -0
  77. cribl_control_plane/models/inputs3inventory.py +440 -0
  78. cribl_control_plane/models/inputsecuritylake.py +425 -0
  79. cribl_control_plane/models/inputsnmp.py +274 -0
  80. cribl_control_plane/models/inputsplunk.py +387 -0
  81. cribl_control_plane/models/inputsplunkhec.py +478 -0
  82. cribl_control_plane/models/inputsplunksearch.py +537 -0
  83. cribl_control_plane/models/inputsqs.py +320 -0
  84. cribl_control_plane/models/inputsyslog_union.py +759 -0
  85. cribl_control_plane/models/inputsystemmetrics.py +533 -0
  86. cribl_control_plane/models/inputsystemstate.py +417 -0
  87. cribl_control_plane/models/inputtcp.py +359 -0
  88. cribl_control_plane/models/inputtcpjson.py +334 -0
  89. cribl_control_plane/models/inputwef.py +498 -0
  90. cribl_control_plane/models/inputwindowsmetrics.py +457 -0
  91. cribl_control_plane/models/inputwineventlogs.py +222 -0
  92. cribl_control_plane/models/inputwiz.py +334 -0
  93. cribl_control_plane/models/inputzscalerhec.py +439 -0
  94. cribl_control_plane/models/listinputop.py +24 -0
  95. cribl_control_plane/models/listoutputop.py +24 -0
  96. cribl_control_plane/models/logininfo.py +16 -0
  97. cribl_control_plane/models/output.py +229 -0
  98. cribl_control_plane/models/outputazureblob.py +471 -0
  99. cribl_control_plane/models/outputazuredataexplorer.py +660 -0
  100. cribl_control_plane/models/outputazureeventhub.py +321 -0
  101. cribl_control_plane/models/outputazurelogs.py +386 -0
  102. cribl_control_plane/models/outputclickhouse.py +650 -0
  103. cribl_control_plane/models/outputcloudwatch.py +273 -0
  104. cribl_control_plane/models/outputconfluentcloud.py +591 -0
  105. cribl_control_plane/models/outputcriblhttp.py +494 -0
  106. cribl_control_plane/models/outputcribllake.py +396 -0
  107. cribl_control_plane/models/outputcribltcp.py +387 -0
  108. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +410 -0
  109. cribl_control_plane/models/outputdatadog.py +472 -0
  110. cribl_control_plane/models/outputdataset.py +437 -0
  111. cribl_control_plane/models/outputdefault.py +55 -0
  112. cribl_control_plane/models/outputdevnull.py +50 -0
  113. cribl_control_plane/models/outputdiskspool.py +89 -0
  114. cribl_control_plane/models/outputdls3.py +560 -0
  115. cribl_control_plane/models/outputdynatracehttp.py +454 -0
  116. cribl_control_plane/models/outputdynatraceotlp.py +486 -0
  117. cribl_control_plane/models/outputelastic.py +494 -0
  118. cribl_control_plane/models/outputelasticcloud.py +407 -0
  119. cribl_control_plane/models/outputexabeam.py +297 -0
  120. cribl_control_plane/models/outputfilesystem.py +357 -0
  121. cribl_control_plane/models/outputgooglechronicle.py +486 -0
  122. cribl_control_plane/models/outputgooglecloudlogging.py +557 -0
  123. cribl_control_plane/models/outputgooglecloudstorage.py +499 -0
  124. cribl_control_plane/models/outputgooglepubsub.py +274 -0
  125. cribl_control_plane/models/outputgrafanacloud_union.py +1024 -0
  126. cribl_control_plane/models/outputgraphite.py +225 -0
  127. cribl_control_plane/models/outputhoneycomb.py +369 -0
  128. cribl_control_plane/models/outputhumiohec.py +389 -0
  129. cribl_control_plane/models/outputinfluxdb.py +523 -0
  130. cribl_control_plane/models/outputkafka.py +581 -0
  131. cribl_control_plane/models/outputkinesis.py +312 -0
  132. cribl_control_plane/models/outputloki.py +425 -0
  133. cribl_control_plane/models/outputminio.py +512 -0
  134. cribl_control_plane/models/outputmsk.py +654 -0
  135. cribl_control_plane/models/outputnetflow.py +80 -0
  136. cribl_control_plane/models/outputnewrelic.py +424 -0
  137. cribl_control_plane/models/outputnewrelicevents.py +401 -0
  138. cribl_control_plane/models/outputopentelemetry.py +669 -0
  139. cribl_control_plane/models/outputprometheus.py +485 -0
  140. cribl_control_plane/models/outputring.py +121 -0
  141. cribl_control_plane/models/outputrouter.py +83 -0
  142. cribl_control_plane/models/outputs3.py +556 -0
  143. cribl_control_plane/models/outputsamplesresponse.py +14 -0
  144. cribl_control_plane/models/outputsecuritylake.py +505 -0
  145. cribl_control_plane/models/outputsentinel.py +488 -0
  146. cribl_control_plane/models/outputsentineloneaisiem.py +505 -0
  147. cribl_control_plane/models/outputservicenow.py +543 -0
  148. cribl_control_plane/models/outputsignalfx.py +369 -0
  149. cribl_control_plane/models/outputsnmp.py +80 -0
  150. cribl_control_plane/models/outputsns.py +274 -0
  151. cribl_control_plane/models/outputsplunk.py +383 -0
  152. cribl_control_plane/models/outputsplunkhec.py +434 -0
  153. cribl_control_plane/models/outputsplunklb.py +558 -0
  154. cribl_control_plane/models/outputsqs.py +328 -0
  155. cribl_control_plane/models/outputstatsd.py +224 -0
  156. cribl_control_plane/models/outputstatsdext.py +225 -0
  157. cribl_control_plane/models/outputsumologic.py +378 -0
  158. cribl_control_plane/models/outputsyslog.py +415 -0
  159. cribl_control_plane/models/outputtcpjson.py +413 -0
  160. cribl_control_plane/models/outputtestrequest.py +15 -0
  161. cribl_control_plane/models/outputtestresponse.py +29 -0
  162. cribl_control_plane/models/outputwavefront.py +369 -0
  163. cribl_control_plane/models/outputwebhook.py +689 -0
  164. cribl_control_plane/models/outputxsiam.py +415 -0
  165. cribl_control_plane/models/schemeclientoauth.py +24 -0
  166. cribl_control_plane/models/security.py +36 -0
  167. cribl_control_plane/models/updatehectokenrequest.py +31 -0
  168. cribl_control_plane/models/updateinputbyidop.py +44 -0
  169. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +52 -0
  170. cribl_control_plane/models/updateoutputbyidop.py +44 -0
  171. cribl_control_plane/outputs.py +1615 -0
  172. cribl_control_plane/py.typed +1 -0
  173. cribl_control_plane/sdk.py +164 -0
  174. cribl_control_plane/sdkconfiguration.py +36 -0
  175. cribl_control_plane/sources.py +1355 -0
  176. cribl_control_plane/types/__init__.py +21 -0
  177. cribl_control_plane/types/basemodel.py +39 -0
  178. cribl_control_plane/utils/__init__.py +187 -0
  179. cribl_control_plane/utils/annotations.py +55 -0
  180. cribl_control_plane/utils/datetimes.py +23 -0
  181. cribl_control_plane/utils/enums.py +74 -0
  182. cribl_control_plane/utils/eventstreaming.py +238 -0
  183. cribl_control_plane/utils/forms.py +223 -0
  184. cribl_control_plane/utils/headers.py +136 -0
  185. cribl_control_plane/utils/logger.py +27 -0
  186. cribl_control_plane/utils/metadata.py +118 -0
  187. cribl_control_plane/utils/queryparams.py +205 -0
  188. cribl_control_plane/utils/requestbodies.py +66 -0
  189. cribl_control_plane/utils/retries.py +217 -0
  190. cribl_control_plane/utils/security.py +207 -0
  191. cribl_control_plane/utils/serializers.py +249 -0
  192. cribl_control_plane/utils/unmarshal_json_response.py +24 -0
  193. cribl_control_plane/utils/url.py +155 -0
  194. cribl_control_plane/utils/values.py +137 -0
  195. cribl_control_plane-0.0.13.dist-info/METADATA +489 -0
  196. cribl_control_plane-0.0.13.dist-info/RECORD +197 -0
  197. cribl_control_plane-0.0.13.dist-info/WHEEL +4 -0
cribl_control_plane/models/inputazureblob.py
@@ -0,0 +1,308 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class InputAzureBlobType(str, Enum, metaclass=utils.OpenEnumMeta):
+    AZURE_BLOB = "azure_blob"
+
+
+class InputAzureBlobConnectionTypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputAzureBlobConnection(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputAzureBlobMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputAzureBlobCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputAzureBlobPqTypedDict(TypedDict):
+    mode: NotRequired[InputAzureBlobMode]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputAzureBlobCompression]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputAzureBlobPq(BaseModel):
+    mode: Annotated[
+        Optional[InputAzureBlobMode], PlainValidator(validate_open_enum(False))
+    ] = InputAzureBlobMode.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputAzureBlobCompression], PlainValidator(validate_open_enum(False))
+    ] = InputAzureBlobCompression.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputAzureBlobMetadatumTypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputAzureBlobMetadatum(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputAzureBlobAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+    MANUAL = "manual"
+    SECRET = "secret"
+    CLIENT_SECRET = "clientSecret"
+    CLIENT_CERT = "clientCert"
+
+
+class InputAzureBlobCertificateTypedDict(TypedDict):
+    certificate_name: str
+    r"""The certificate you registered as credentials for your app in the Azure portal"""
+
+
+class InputAzureBlobCertificate(BaseModel):
+    certificate_name: Annotated[str, pydantic.Field(alias="certificateName")]
+    r"""The certificate you registered as credentials for your app in the Azure portal"""
+
+
+class InputAzureBlobTypedDict(TypedDict):
+    type: InputAzureBlobType
+    queue_name: str
+    r"""The storage account queue name blob notifications will be read from. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myQueue-${C.vars.myVar}`"""
+    id: NotRequired[str]
+    r"""Unique ID for this input"""
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+    send_to_routes: NotRequired[bool]
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputAzureBlobConnectionTypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputAzureBlobPqTypedDict]
+    file_filter: NotRequired[str]
+    r"""Regex matching file names to download and process. Defaults to: .*"""
+    visibility_timeout: NotRequired[float]
+    r"""The duration (in seconds) that the received messages are hidden from subsequent retrieve requests after being retrieved by a ReceiveMessage request."""
+    num_receivers: NotRequired[float]
+    r"""How many receiver processes to run. The higher the number, the better the throughput - at the expense of CPU overhead."""
+    max_messages: NotRequired[float]
+    r"""The maximum number of messages to return in a poll request. Azure storage queues never returns more messages than this value (however, fewer messages might be returned). Valid values: 1 to 32."""
+    service_period_secs: NotRequired[float]
+    r"""The duration (in seconds) which pollers should be validated and restarted if exited"""
+    skip_on_error: NotRequired[bool]
+    r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
+    metadata: NotRequired[List[InputAzureBlobMetadatumTypedDict]]
+    r"""Fields to add to events from this input"""
+    breaker_rulesets: NotRequired[List[str]]
+    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+    stale_channel_flush_ms: NotRequired[float]
+    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+    parquet_chunk_size_mb: NotRequired[float]
+    r"""Maximum file size for each Parquet chunk"""
+    parquet_chunk_download_timeout: NotRequired[float]
+    r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""
+    auth_type: NotRequired[InputAzureBlobAuthenticationMethod]
+    description: NotRequired[str]
+    connection_string: NotRequired[str]
+    r"""Enter your Azure Storage account connection string. If left blank, Stream will fall back to env.AZURE_STORAGE_CONNECTION_STRING."""
+    text_secret: NotRequired[str]
+    r"""Select or create a stored text secret"""
+    storage_account_name: NotRequired[str]
+    r"""The name of your Azure storage account"""
+    tenant_id: NotRequired[str]
+    r"""The service principal's tenant ID"""
+    client_id: NotRequired[str]
+    r"""The service principal's client ID"""
+    azure_cloud: NotRequired[str]
+    r"""The Azure cloud to use. Defaults to Azure Public Cloud."""
+    endpoint_suffix: NotRequired[str]
+    r"""Endpoint suffix for the service URL. Takes precedence over the Azure Cloud setting. Defaults to core.windows.net."""
+    client_text_secret: NotRequired[str]
+    r"""Select or create a stored text secret"""
+    certificate: NotRequired[InputAzureBlobCertificateTypedDict]
+
+
+class InputAzureBlob(BaseModel):
+    type: Annotated[InputAzureBlobType, PlainValidator(validate_open_enum(False))]
+
+    queue_name: Annotated[str, pydantic.Field(alias="queueName")]
+    r"""The storage account queue name blob notifications will be read from. Value must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at initialization time. Example referencing a Global Variable: `myQueue-${C.vars.myVar}`"""
+
+    id: Optional[str] = None
+    r"""Unique ID for this input"""
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process data from this Source before sending it through the Routes"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Select whether to send data to Routes, or directly to Destinations."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputAzureBlobConnection]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputAzureBlobPq] = None
+
+    file_filter: Annotated[Optional[str], pydantic.Field(alias="fileFilter")] = "/.*/"
+    r"""Regex matching file names to download and process. Defaults to: .*"""
+
+    visibility_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="visibilityTimeout")
+    ] = 600
+    r"""The duration (in seconds) that the received messages are hidden from subsequent retrieve requests after being retrieved by a ReceiveMessage request."""
+
+    num_receivers: Annotated[Optional[float], pydantic.Field(alias="numReceivers")] = 1
+    r"""How many receiver processes to run. The higher the number, the better the throughput - at the expense of CPU overhead."""
+
+    max_messages: Annotated[Optional[float], pydantic.Field(alias="maxMessages")] = 1
+    r"""The maximum number of messages to return in a poll request. Azure storage queues never returns more messages than this value (however, fewer messages might be returned). Valid values: 1 to 32."""
+
+    service_period_secs: Annotated[
+        Optional[float], pydantic.Field(alias="servicePeriodSecs")
+    ] = 5
+    r"""The duration (in seconds) which pollers should be validated and restarted if exited"""
+
+    skip_on_error: Annotated[Optional[bool], pydantic.Field(alias="skipOnError")] = (
+        False
+    )
+    r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
+
+    metadata: Optional[List[InputAzureBlobMetadatum]] = None
+    r"""Fields to add to events from this input"""
+
+    breaker_rulesets: Annotated[
+        Optional[List[str]], pydantic.Field(alias="breakerRulesets")
+    ] = None
+    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+
+    stale_channel_flush_ms: Annotated[
+        Optional[float], pydantic.Field(alias="staleChannelFlushMs")
+    ] = 10000
+    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+
+    parquet_chunk_size_mb: Annotated[
+        Optional[float], pydantic.Field(alias="parquetChunkSizeMB")
+    ] = 5
+    r"""Maximum file size for each Parquet chunk"""
+
+    parquet_chunk_download_timeout: Annotated[
+        Optional[float], pydantic.Field(alias="parquetChunkDownloadTimeout")
+    ] = 600
+    r"""The maximum time allowed for downloading a Parquet chunk. Processing will stop if a chunk cannot be downloaded within the time specified."""
+
+    auth_type: Annotated[
+        Annotated[
+            Optional[InputAzureBlobAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
+    ] = InputAzureBlobAuthenticationMethod.MANUAL
+
+    description: Optional[str] = None
+
+    connection_string: Annotated[
+        Optional[str], pydantic.Field(alias="connectionString")
+    ] = None
+    r"""Enter your Azure Storage account connection string. If left blank, Stream will fall back to env.AZURE_STORAGE_CONNECTION_STRING."""
+
+    text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
+    r"""Select or create a stored text secret"""
+
+    storage_account_name: Annotated[
+        Optional[str], pydantic.Field(alias="storageAccountName")
+    ] = None
+    r"""The name of your Azure storage account"""
+
+    tenant_id: Annotated[Optional[str], pydantic.Field(alias="tenantId")] = None
+    r"""The service principal's tenant ID"""
+
+    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
+    r"""The service principal's client ID"""
+
+    azure_cloud: Annotated[Optional[str], pydantic.Field(alias="azureCloud")] = None
+    r"""The Azure cloud to use. Defaults to Azure Public Cloud."""
+
+    endpoint_suffix: Annotated[
+        Optional[str], pydantic.Field(alias="endpointSuffix")
+    ] = None
+    r"""Endpoint suffix for the service URL. Takes precedence over the Azure Cloud setting. Defaults to core.windows.net."""
+
+    client_text_secret: Annotated[
+        Optional[str], pydantic.Field(alias="clientTextSecret")
+    ] = None
+    r"""Select or create a stored text secret"""
+
+    certificate: Optional[InputAzureBlobCertificate] = None
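
For orientation only, here is a minimal usage sketch that is not part of the package. The import path, field names, enums, and defaults come straight from the file above; the one assumption is that the generated BaseModel enables populate_by_name (as Speakeasy-generated SDKs typically do), so snake_case field names work alongside the camelCase wire aliases.

# Hypothetical usage sketch -- not shipped in the package.
from cribl_control_plane.models.inputazureblob import (
    InputAzureBlob,
    InputAzureBlobAuthenticationMethod,
    InputAzureBlobType,
)

# queue_name must be a JavaScript expression, so a constant is backtick-quoted.
source = InputAzureBlob(
    type=InputAzureBlobType.AZURE_BLOB,
    queue_name="`blob-notifications`",
    id="azure_blob_in",
    auth_type=InputAzureBlobAuthenticationMethod.CLIENT_SECRET,
    tenant_id="<tenant-uuid>",       # placeholder values
    client_id="<client-uuid>",
    client_text_secret="my-stored-secret",
)

# Standard Pydantic v2: by_alias restores the camelCase wire keys
# (queueName, authType, tenantId, ...) declared via pydantic.Field(alias=...).
print(source.model_dump(by_alias=True, exclude_none=True))
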
cribl_control_plane/models/inputcollection.py
@@ -0,0 +1,208 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class InputCollectionType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+
+
+class InputCollectionConnectionTypedDict(TypedDict):
+    output: str
+    pipeline: NotRequired[str]
+
+
+class InputCollectionConnection(BaseModel):
+    output: str
+
+    pipeline: Optional[str] = None
+
+
+class InputCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    SMART = "smart"
+    ALWAYS = "always"
+
+
+class InputCollectionCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Codec to use to compress the persisted data"""
+
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class InputCollectionPqTypedDict(TypedDict):
+    mode: NotRequired[InputCollectionMode]
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+    max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    commit_frequency: NotRequired[float]
+    r"""The number of events to send downstream before committing that Stream has read them"""
+    max_file_size: NotRequired[str]
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+    max_size: NotRequired[str]
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+    path: NotRequired[str]
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+    compress: NotRequired[InputCollectionCompression]
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputCollectionPq(BaseModel):
+    mode: Annotated[
+        Optional[InputCollectionMode], PlainValidator(validate_open_enum(False))
+    ] = InputCollectionMode.ALWAYS
+    r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
+
+    max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="maxBufferSize")
+    ] = 1000
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    commit_frequency: Annotated[
+        Optional[float], pydantic.Field(alias="commitFrequency")
+    ] = 42
+    r"""The number of events to send downstream before committing that Stream has read them"""
+
+    max_file_size: Annotated[Optional[str], pydantic.Field(alias="maxFileSize")] = (
+        "1 MB"
+    )
+    r"""The maximum size to store in each queue file before closing and optionally compressing. Enter a numeral with units of KB, MB, etc."""
+
+    max_size: Annotated[Optional[str], pydantic.Field(alias="maxSize")] = "5GB"
+    r"""The maximum disk space that the queue can consume (as an average per Worker Process) before queueing stops. Enter a numeral with units of KB, MB, etc."""
+
+    path: Optional[str] = "$CRIBL_HOME/state/queues"
+    r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
+
+    compress: Annotated[
+        Optional[InputCollectionCompression], PlainValidator(validate_open_enum(False))
+    ] = InputCollectionCompression.NONE
+    r"""Codec to use to compress the persisted data"""
+
+
+class InputCollectionPreprocessTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    command: NotRequired[str]
+    r"""Command to feed the data through (via stdin) and process its output (stdout)"""
+    args: NotRequired[List[str]]
+    r"""Arguments to be added to the custom command"""
+
+
+class InputCollectionPreprocess(BaseModel):
+    disabled: Optional[bool] = True
+
+    command: Optional[str] = None
+    r"""Command to feed the data through (via stdin) and process its output (stdout)"""
+
+    args: Optional[List[str]] = None
+    r"""Arguments to be added to the custom command"""
+
+
+class InputCollectionMetadatumTypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputCollectionMetadatum(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class InputCollectionTypedDict(TypedDict):
+    id: str
+    r"""Unique ID for this input"""
+    type: NotRequired[InputCollectionType]
+    disabled: NotRequired[bool]
+    pipeline: NotRequired[str]
+    r"""Pipeline to process results"""
+    send_to_routes: NotRequired[bool]
+    r"""Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination."""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    pq_enabled: NotRequired[bool]
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    connections: NotRequired[List[InputCollectionConnectionTypedDict]]
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+    pq: NotRequired[InputCollectionPqTypedDict]
+    breaker_rulesets: NotRequired[List[str]]
+    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+    stale_channel_flush_ms: NotRequired[float]
+    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+    preprocess: NotRequired[InputCollectionPreprocessTypedDict]
+    throttle_rate_per_sec: NotRequired[str]
+    r"""Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling."""
+    metadata: NotRequired[List[InputCollectionMetadatumTypedDict]]
+    r"""Fields to add to events from this input"""
+    output: NotRequired[str]
+    r"""Destination to send results to"""
+
+
+class InputCollection(BaseModel):
+    id: str
+    r"""Unique ID for this input"""
+
+    type: Annotated[
+        Optional[InputCollectionType], PlainValidator(validate_open_enum(False))
+    ] = InputCollectionType.COLLECTION
+
+    disabled: Optional[bool] = False
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process results"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination."""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    pq_enabled: Annotated[Optional[bool], pydantic.Field(alias="pqEnabled")] = False
+    r"""Use a disk queue to minimize data loss when connected services block. See [Cribl Docs](https://docs.cribl.io/stream/persistent-queues) for PQ defaults (Cribl-managed Cloud Workers) and configuration options (on-prem and hybrid Workers)."""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    connections: Optional[List[InputCollectionConnection]] = None
+    r"""Direct connections to Destinations, and optionally via a Pipeline or a Pack"""
+
+    pq: Optional[InputCollectionPq] = None
+
+    breaker_rulesets: Annotated[
+        Optional[List[str]], pydantic.Field(alias="breakerRulesets")
+    ] = None
+    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+
+    stale_channel_flush_ms: Annotated[
+        Optional[float], pydantic.Field(alias="staleChannelFlushMs")
+    ] = 10000
+    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+
+    preprocess: Optional[InputCollectionPreprocess] = None
+
+    throttle_rate_per_sec: Annotated[
+        Optional[str], pydantic.Field(alias="throttleRatePerSec")
+    ] = "0"
+    r"""Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling."""
+
+    metadata: Optional[List[InputCollectionMetadatum]] = None
+    r"""Fields to add to events from this input"""
+
+    output: Optional[str] = None
+    r"""Destination to send results to"""