cribl-control-plane 0.1.0b2__py3-none-any.whl → 0.2.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of cribl-control-plane might be problematic.

Files changed (38)
  1. cribl_control_plane/_hooks/clientcredentials.py +91 -41
  2. cribl_control_plane/_version.py +4 -4
  3. cribl_control_plane/errors/apierror.py +1 -1
  4. cribl_control_plane/errors/criblcontrolplaneerror.py +1 -1
  5. cribl_control_plane/errors/error.py +1 -1
  6. cribl_control_plane/errors/healthstatus_error.py +1 -1
  7. cribl_control_plane/errors/no_response_error.py +1 -1
  8. cribl_control_plane/errors/responsevalidationerror.py +1 -1
  9. cribl_control_plane/groups_sdk.py +4 -4
  10. cribl_control_plane/httpclient.py +0 -1
  11. cribl_control_plane/lakedatasets.py +12 -12
  12. cribl_control_plane/models/__init__.py +106 -42
  13. cribl_control_plane/models/appmode.py +14 -0
  14. cribl_control_plane/models/configgroup.py +2 -17
  15. cribl_control_plane/models/cribllakedatasetupdate.py +81 -0
  16. cribl_control_plane/models/gitinfo.py +14 -3
  17. cribl_control_plane/models/hbcriblinfo.py +3 -14
  18. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  19. cribl_control_plane/models/inputconfluentcloud.py +18 -0
  20. cribl_control_plane/models/inputkafka.py +17 -0
  21. cribl_control_plane/models/inputmsk.py +17 -0
  22. cribl_control_plane/models/inputsqs.py +8 -10
  23. cribl_control_plane/models/nodeprovidedinfo.py +0 -3
  24. cribl_control_plane/models/output.py +25 -25
  25. cribl_control_plane/models/outputchronicle.py +431 -0
  26. cribl_control_plane/models/outputconfluentcloud.py +18 -0
  27. cribl_control_plane/models/outputgooglechronicle.py +5 -4
  28. cribl_control_plane/models/outputgooglecloudlogging.py +9 -4
  29. cribl_control_plane/models/outputkafka.py +17 -0
  30. cribl_control_plane/models/outputmsk.py +17 -0
  31. cribl_control_plane/models/outputsqs.py +8 -10
  32. cribl_control_plane/models/routecloneconf.py +13 -0
  33. cribl_control_plane/models/routeconf.py +4 -3
  34. cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +9 -5
  35. {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/METADATA +1 -8
  36. {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/RECORD +37 -34
  37. cribl_control_plane/models/outputdatabricks.py +0 -282
  38. {cribl_control_plane-0.1.0b2.dist-info → cribl_control_plane-0.2.0a1.dist-info}/WHEEL +0 -0
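Of note in this list: the Databricks destination model (cribl_control_plane/models/outputdatabricks.py, +0 -282, shown in full below) is removed outright, while a new outputchronicle.py model is added. Code that imports the removed module will fail at import time on 0.2.0a1. A minimal sketch of a version-tolerant import guard follows; the guard pattern is a suggestion for consumers of this package, not part of the SDK itself:

    # Hedged sketch: tolerate the removal of outputdatabricks.py when moving
    # from cribl-control-plane 0.1.0b2 to 0.2.0a1. The try/except fallback is
    # an assumption about how a consumer might adapt, not SDK-provided code.
    try:
        # Present in 0.1.0b2; removed in 0.2.0a1 (see file list above)
        from cribl_control_plane.models.outputdatabricks import OutputDatabricks
        HAS_DATABRICKS_OUTPUT = True
    except ImportError:
        OutputDatabricks = None  # type: ignore[assignment, misc]
        HAS_DATABRICKS_OUTPUT = False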
cribl_control_plane/models/outputdatabricks.py (removed)
@@ -1,282 +0,0 @@
- """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
- from __future__ import annotations
- from cribl_control_plane import utils
- from cribl_control_plane.types import BaseModel
- from cribl_control_plane.utils import validate_open_enum
- from enum import Enum
- import pydantic
- from pydantic.functional_validators import PlainValidator
- from typing import List, Optional
- from typing_extensions import Annotated, NotRequired, TypedDict
-
-
- class OutputDatabricksType(str, Enum):
-     DATABRICKS = "databricks"
-
-
- class OutputDatabricksDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
-     r"""Format of the output data"""
-
-     JSON = "json"
-     RAW = "raw"
-     PARQUET = "parquet"
-
-
- class OutputDatabricksBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
-     r"""How to handle events when all receivers are exerting backpressure"""
-
-     BLOCK = "block"
-     DROP = "drop"
-
-
- class OutputDatabricksDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
-     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-
-     BLOCK = "block"
-     DROP = "drop"
-
-
- class OutputDatabricksAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-     r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
-
-     MANUAL = "manual"
-     SECRET = "secret"
-
-
- class OutputDatabricksTypedDict(TypedDict):
-     type: OutputDatabricksType
-     login_url: str
-     r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
-     client_id: str
-     r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
-     id: NotRequired[str]
-     r"""Unique ID for this output"""
-     pipeline: NotRequired[str]
-     r"""Pipeline to process data before sending out to this output"""
-     system_fields: NotRequired[List[str]]
-     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
-     environment: NotRequired[str]
-     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
-     streamtags: NotRequired[List[str]]
-     r"""Tags for filtering and grouping in @{product}"""
-     dest_path: NotRequired[str]
-     r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
-     stage_path: NotRequired[str]
-     r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
-     add_id_to_stage_path: NotRequired[bool]
-     r"""Add the Output ID value to staging location"""
-     remove_empty_dirs: NotRequired[bool]
-     r"""Remove empty staging directories after moving files"""
-     partition_expr: NotRequired[str]
-     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
-     format_: NotRequired[OutputDatabricksDataFormat]
-     r"""Format of the output data"""
-     base_file_name: NotRequired[str]
-     r"""JavaScript expression to define the output filename prefix (can be constant)"""
-     file_name_suffix: NotRequired[str]
-     r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
-     max_file_size_mb: NotRequired[float]
-     r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
-     max_file_open_time_sec: NotRequired[float]
-     r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
-     max_file_idle_time_sec: NotRequired[float]
-     r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
-     max_open_files: NotRequired[float]
-     r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
-     header_line: NotRequired[str]
-     r"""If set, this line will be written to the beginning of each output file"""
-     write_high_water_mark: NotRequired[float]
-     r"""Buffer size used to write to a file"""
-     on_backpressure: NotRequired[OutputDatabricksBackpressureBehavior]
-     r"""How to handle events when all receivers are exerting backpressure"""
-     deadletter_enabled: NotRequired[bool]
-     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
-     on_disk_full_backpressure: NotRequired[OutputDatabricksDiskSpaceProtection]
-     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-     unity_auth_method: NotRequired[OutputDatabricksAuthenticationMethod]
-     r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
-     scope: NotRequired[str]
-     r"""OAuth scope for Unity Catalog authentication"""
-     token_timeout_secs: NotRequired[float]
-     r"""How often the OAuth token should be refreshed"""
-     default_catalog: NotRequired[str]
-     r"""Name of the catalog to use for the output"""
-     default_schema: NotRequired[str]
-     r"""Name of the catalog schema to use for the output"""
-     events_volume_name: NotRequired[str]
-     r"""Name of the events volume in Databricks"""
-     over_write_files: NotRequired[bool]
-     r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
-     description: NotRequired[str]
-     client_secret: NotRequired[str]
-     r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
-     client_text_secret: NotRequired[str]
-     r"""Select or create a stored text secret"""
-
-
- class OutputDatabricks(BaseModel):
-     type: OutputDatabricksType
-
-     login_url: Annotated[str, pydantic.Field(alias="loginUrl")]
-     r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
-
-     client_id: Annotated[str, pydantic.Field(alias="clientId")]
-     r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
-
-     id: Optional[str] = None
-     r"""Unique ID for this output"""
-
-     pipeline: Optional[str] = None
-     r"""Pipeline to process data before sending out to this output"""
-
-     system_fields: Annotated[
-         Optional[List[str]], pydantic.Field(alias="systemFields")
-     ] = None
-     r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
-
-     environment: Optional[str] = None
-     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
-
-     streamtags: Optional[List[str]] = None
-     r"""Tags for filtering and grouping in @{product}"""
-
-     dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-     r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
-
-     stage_path: Annotated[Optional[str], pydantic.Field(alias="stagePath")] = (
-         "$CRIBL_HOME/state/outputs/staging"
-     )
-     r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
-
-     add_id_to_stage_path: Annotated[
-         Optional[bool], pydantic.Field(alias="addIdToStagePath")
-     ] = True
-     r"""Add the Output ID value to staging location"""
-
-     remove_empty_dirs: Annotated[
-         Optional[bool], pydantic.Field(alias="removeEmptyDirs")
-     ] = True
-     r"""Remove empty staging directories after moving files"""
-
-     partition_expr: Annotated[Optional[str], pydantic.Field(alias="partitionExpr")] = (
-         "C.Time.strftime(_time ? _time : Date.now()/1000, '%Y/%m/%d')"
-     )
-     r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
-
-     format_: Annotated[
-         Annotated[
-             Optional[OutputDatabricksDataFormat],
-             PlainValidator(validate_open_enum(False)),
-         ],
-         pydantic.Field(alias="format"),
-     ] = OutputDatabricksDataFormat.JSON
-     r"""Format of the output data"""
-
-     base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
-         "`CriblOut`"
-     )
-     r"""JavaScript expression to define the output filename prefix (can be constant)"""
-
-     file_name_suffix: Annotated[
-         Optional[str], pydantic.Field(alias="fileNameSuffix")
-     ] = '`.${C.env["CRIBL_WORKER_ID"]}.${__format}${__compression === "gzip" ? ".gz" : ""}`'
-     r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
-
-     max_file_size_mb: Annotated[
-         Optional[float], pydantic.Field(alias="maxFileSizeMB")
-     ] = 32
-     r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
-
-     max_file_open_time_sec: Annotated[
-         Optional[float], pydantic.Field(alias="maxFileOpenTimeSec")
-     ] = 300
-     r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
-
-     max_file_idle_time_sec: Annotated[
-         Optional[float], pydantic.Field(alias="maxFileIdleTimeSec")
-     ] = 30
-     r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
-
-     max_open_files: Annotated[Optional[float], pydantic.Field(alias="maxOpenFiles")] = (
-         100
-     )
-     r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
-
-     header_line: Annotated[Optional[str], pydantic.Field(alias="headerLine")] = ""
-     r"""If set, this line will be written to the beginning of each output file"""
-
-     write_high_water_mark: Annotated[
-         Optional[float], pydantic.Field(alias="writeHighWaterMark")
-     ] = 64
-     r"""Buffer size used to write to a file"""
-
-     on_backpressure: Annotated[
-         Annotated[
-             Optional[OutputDatabricksBackpressureBehavior],
-             PlainValidator(validate_open_enum(False)),
-         ],
-         pydantic.Field(alias="onBackpressure"),
-     ] = OutputDatabricksBackpressureBehavior.BLOCK
-     r"""How to handle events when all receivers are exerting backpressure"""
-
-     deadletter_enabled: Annotated[
-         Optional[bool], pydantic.Field(alias="deadletterEnabled")
-     ] = False
-     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
-
-     on_disk_full_backpressure: Annotated[
-         Annotated[
-             Optional[OutputDatabricksDiskSpaceProtection],
-             PlainValidator(validate_open_enum(False)),
-         ],
-         pydantic.Field(alias="onDiskFullBackpressure"),
-     ] = OutputDatabricksDiskSpaceProtection.BLOCK
-     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-
-     unity_auth_method: Annotated[
-         Annotated[
-             Optional[OutputDatabricksAuthenticationMethod],
-             PlainValidator(validate_open_enum(False)),
-         ],
-         pydantic.Field(alias="unityAuthMethod"),
-     ] = OutputDatabricksAuthenticationMethod.MANUAL
-     r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
-
-     scope: Optional[str] = "all-apis"
-     r"""OAuth scope for Unity Catalog authentication"""
-
-     token_timeout_secs: Annotated[
-         Optional[float], pydantic.Field(alias="tokenTimeoutSecs")
-     ] = 3600
-     r"""How often the OAuth token should be refreshed"""
-
-     default_catalog: Annotated[
-         Optional[str], pydantic.Field(alias="defaultCatalog")
-     ] = "main"
-     r"""Name of the catalog to use for the output"""
-
-     default_schema: Annotated[Optional[str], pydantic.Field(alias="defaultSchema")] = (
-         "external"
-     )
-     r"""Name of the catalog schema to use for the output"""
-
-     events_volume_name: Annotated[
-         Optional[str], pydantic.Field(alias="eventsVolumeName")
-     ] = "events"
-     r"""Name of the events volume in Databricks"""
-
-     over_write_files: Annotated[
-         Optional[bool], pydantic.Field(alias="overWriteFiles")
-     ] = False
-     r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
-
-     description: Optional[str] = None
-
-     client_secret: Annotated[Optional[str], pydantic.Field(alias="clientSecret")] = None
-     r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
-
-     client_text_secret: Annotated[
-         Optional[str], pydantic.Field(alias="clientTextSecret")
-     ] = None
-     r"""Select or create a stored text secret"""