cribl-control-plane 0.2.0b8-py3-none-any.whl → 0.3.0b1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release: this version of cribl-control-plane might be problematic.

cribl_control_plane/_version.py
@@ -3,10 +3,10 @@
 import importlib.metadata
 
 __title__: str = "cribl-control-plane"
-__version__: str = "0.2.0b8"
-__openapi_doc_version__: str = "4.15.0-alpha.1761174335818-8b3b1a8b"
+__version__: str = "0.3.0b1"
+__openapi_doc_version__: str = "4.15.0-alpha.1761347346026-24da0953"
 __gen_version__: str = "2.723.11"
-__user_agent__: str = "speakeasy-sdk/python 0.2.0b8 2.723.11 4.15.0-alpha.1761174335818-8b3b1a8b cribl-control-plane"
+__user_agent__: str = "speakeasy-sdk/python 0.3.0b1 2.723.11 4.15.0-alpha.1761347346026-24da0953 cribl-control-plane"
 
 try:
     if __package__ is not None:
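
The version bump is mechanical: __version__, __openapi_doc_version__, and the derived __user_agent__ move together. A small sketch of how a caller can confirm the installed version at runtime without touching this private module:

import importlib.metadata

# Resolves the installed distribution's version from its metadata;
# prints "0.3.0b1" after upgrading to the new wheel.
print(importlib.metadata.version("cribl-control-plane"))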
cribl_control_plane/models/__init__.py
@@ -1838,6 +1838,7 @@ if TYPE_CHECKING:
     from .nodeskippedupgradestatus import NodeSkippedUpgradeStatus
     from .nodeupgradestate import NodeUpgradeState
     from .nodeupgradestatus import NodeUpgradeStatus, NodeUpgradeStatusTypedDict
+    from .outpostnodeinfo import OutpostNodeInfo, OutpostNodeInfoTypedDict
     from .output import Output, OutputTypedDict
     from .outputazureblob import (
         BlobAccessTier,
@@ -2098,10 +2099,15 @@ if TYPE_CHECKING:
     )
     from .outputdatabricks import (
         OutputDatabricks,
-        OutputDatabricksAuthenticationMethod,
         OutputDatabricksBackpressureBehavior,
+        OutputDatabricksCompression,
+        OutputDatabricksCompressionLevel,
         OutputDatabricksDataFormat,
+        OutputDatabricksDataPageVersion,
         OutputDatabricksDiskSpaceProtection,
+        OutputDatabricksKeyValueMetadatum,
+        OutputDatabricksKeyValueMetadatumTypedDict,
+        OutputDatabricksParquetVersion,
         OutputDatabricksType,
         OutputDatabricksTypedDict,
     )
@@ -4954,6 +4960,8 @@ __all__ = [
    "OldModeTypedDict",
    "Os",
    "OsTypedDict",
+    "OutpostNodeInfo",
+    "OutpostNodeInfoTypedDict",
    "Output",
    "OutputAzureBlob",
    "OutputAzureBlobAuthenticationMethod",
@@ -5173,10 +5181,15 @@ __all__ = [
    "OutputCrowdstrikeNextGenSiemType",
    "OutputCrowdstrikeNextGenSiemTypedDict",
    "OutputDatabricks",
-    "OutputDatabricksAuthenticationMethod",
    "OutputDatabricksBackpressureBehavior",
+    "OutputDatabricksCompression",
+    "OutputDatabricksCompressionLevel",
    "OutputDatabricksDataFormat",
+    "OutputDatabricksDataPageVersion",
    "OutputDatabricksDiskSpaceProtection",
+    "OutputDatabricksKeyValueMetadatum",
+    "OutputDatabricksKeyValueMetadatumTypedDict",
+    "OutputDatabricksParquetVersion",
    "OutputDatabricksType",
    "OutputDatabricksTypedDict",
    "OutputDatadog",
@@ -7909,6 +7922,8 @@ _dynamic_imports: dict[str, str] = {
    "NodeUpgradeState": ".nodeupgradestate",
    "NodeUpgradeStatus": ".nodeupgradestatus",
    "NodeUpgradeStatusTypedDict": ".nodeupgradestatus",
+    "OutpostNodeInfo": ".outpostnodeinfo",
+    "OutpostNodeInfoTypedDict": ".outpostnodeinfo",
    "Output": ".output",
    "OutputTypedDict": ".output",
    "BlobAccessTier": ".outputazureblob",
@@ -8145,10 +8160,15 @@ _dynamic_imports: dict[str, str] = {
    "OutputCrowdstrikeNextGenSiemType": ".outputcrowdstrikenextgensiem",
    "OutputCrowdstrikeNextGenSiemTypedDict": ".outputcrowdstrikenextgensiem",
    "OutputDatabricks": ".outputdatabricks",
-    "OutputDatabricksAuthenticationMethod": ".outputdatabricks",
    "OutputDatabricksBackpressureBehavior": ".outputdatabricks",
+    "OutputDatabricksCompression": ".outputdatabricks",
+    "OutputDatabricksCompressionLevel": ".outputdatabricks",
    "OutputDatabricksDataFormat": ".outputdatabricks",
+    "OutputDatabricksDataPageVersion": ".outputdatabricks",
    "OutputDatabricksDiskSpaceProtection": ".outputdatabricks",
+    "OutputDatabricksKeyValueMetadatum": ".outputdatabricks",
+    "OutputDatabricksKeyValueMetadatumTypedDict": ".outputdatabricks",
+    "OutputDatabricksParquetVersion": ".outputdatabricks",
    "OutputDatabricksType": ".outputdatabricks",
    "OutputDatabricksTypedDict": ".outputdatabricks",
    "DatadogSite": ".outputdatadog",
cribl_control_plane/models/nodeprovidedinfo.py
@@ -3,6 +3,7 @@
 from __future__ import annotations
 from .hbcriblinfo import HBCriblInfo, HBCriblInfoTypedDict
 from .heartbeatmetadata import HeartbeatMetadata, HeartbeatMetadataTypedDict
+from .outpostnodeinfo import OutpostNodeInfo, OutpostNodeInfoTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
 from typing import Dict, List, Optional, Union
@@ -144,6 +145,7 @@ class NodeProvidedInfoTypedDict(TypedDict):
     local_time: NotRequired[float]
     metadata: NotRequired[HeartbeatMetadataTypedDict]
     os: NotRequired[OsTypedDict]
+    outpost: NotRequired[OutpostNodeInfoTypedDict]
 
 
 class NodeProvidedInfo(BaseModel):
@@ -188,3 +190,5 @@ class NodeProvidedInfo(BaseModel):
     metadata: Optional[HeartbeatMetadata] = None
 
     os: Optional[Os] = None
+
+    outpost: Optional[OutpostNodeInfo] = None
cribl_control_plane/models/outpostnodeinfo.py (new file)
@@ -0,0 +1,16 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from typing_extensions import TypedDict
+
+
+class OutpostNodeInfoTypedDict(TypedDict):
+    guid: str
+    host: str
+
+
+class OutpostNodeInfo(BaseModel):
+    guid: str
+
+    host: str
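
OutpostNodeInfo is a plain two-field model with required string fields guid and host, and NodeProvidedInfo gains an optional outpost slot for it. A hedged usage sketch with placeholder values, assuming the SDK's BaseModel behaves like a standard pydantic v2 model:

from cribl_control_plane.models import OutpostNodeInfo

# Placeholder values; both fields are required strings in the new model.
outpost = OutpostNodeInfo(guid="0f1e2d3c", host="outpost-01.example.com")
print(outpost.model_dump())
# {'guid': '0f1e2d3c', 'host': 'outpost-01.example.com'}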
cribl_control_plane/models/output.py
@@ -94,32 +94,31 @@ OutputTypedDict = TypeAliasType(
         OutputDevnullTypedDict,
         OutputDefaultTypedDict,
         OutputRouterTypedDict,
-        OutputNetflowTypedDict,
         OutputSnmpTypedDict,
+        OutputNetflowTypedDict,
         OutputDiskSpoolTypedDict,
         OutputRingTypedDict,
-        OutputGraphiteTypedDict,
         OutputStatsdExtTypedDict,
+        OutputGraphiteTypedDict,
         OutputStatsdTypedDict,
         OutputGooglePubsubTypedDict,
-        OutputSplunkTypedDict,
         OutputCriblTCPTypedDict,
+        OutputSplunkTypedDict,
         OutputSnsTypedDict,
         OutputCloudwatchTypedDict,
         OutputAzureEventhubTypedDict,
-        OutputSignalfxTypedDict,
         OutputWavefrontTypedDict,
+        OutputSignalfxTypedDict,
         OutputHoneycombTypedDict,
-        OutputHumioHecTypedDict,
-        OutputTcpjsonTypedDict,
         OutputSumoLogicTypedDict,
         OutputCrowdstrikeNextGenSiemTypedDict,
+        OutputHumioHecTypedDict,
+        OutputTcpjsonTypedDict,
         OutputElasticCloudTypedDict,
-        OutputExabeamTypedDict,
-        OutputKafkaTypedDict,
-        OutputConfluentCloudTypedDict,
         OutputKinesisTypedDict,
-        OutputDatabricksTypedDict,
+        OutputConfluentCloudTypedDict,
+        OutputKafkaTypedDict,
+        OutputExabeamTypedDict,
         OutputNewrelicEventsTypedDict,
         OutputAzureLogsTypedDict,
         OutputSplunkLbTypedDict,
@@ -140,6 +139,7 @@ OutputTypedDict = TypeAliasType(
         OutputElasticTypedDict,
         OutputDatadogTypedDict,
         OutputCriblLakeTypedDict,
+        OutputDatabricksTypedDict,
         OutputPrometheusTypedDict,
         OutputMskTypedDict,
         OutputSentinelOneAiSiemTypedDict,
@@ -151,8 +151,8 @@ OutputTypedDict = TypeAliasType(
         OutputMinioTypedDict,
         OutputClickHouseTypedDict,
         OutputSecurityLakeTypedDict,
-        OutputDlS3TypedDict,
         OutputS3TypedDict,
+        OutputDlS3TypedDict,
         OutputWebhookTypedDict,
         OutputAzureDataExplorerTypedDict,
         OutputGoogleCloudLoggingTypedDict,
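
The output.py hunks only reorder members of the OutputTypedDict union (moving OutputDatabricksTypedDict further down); nothing is added or removed. Since pydantic v2's smart union mode selects the member whose fields match rather than relying on declaration order, validation of existing configs should be unaffected. A hedged sketch, assuming the parallel Output model union in the same file is reordered identically and that a devnull destination needs only id and type:

from pydantic import TypeAdapter
from cribl_control_plane import models

# Hypothetical minimal payload for the simplest union member.
raw = {"id": "sink", "type": "devnull"}

# Validate against the Output union; smart-mode matching picks the member
# whose required fields fit the payload, regardless of member order.
output = TypeAdapter(models.Output).validate_python(raw)
print(type(output).__name__)  # expected: OutputDevnull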
cribl_control_plane/models/outputdatabricks.py
@@ -44,21 +44,63 @@ class OutputDatabricksDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
     DROP = "drop"
 
 
-class OutputDatabricksAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
+class OutputDatabricksCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Data compression format to apply to HTTP content before it is delivered"""
 
-    # Manual
-    MANUAL = "manual"
-    # Secret Key pair
-    SECRET = "secret"
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class OutputDatabricksCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Compression level to apply before moving files to final destination"""
+
+    # Best Speed
+    BEST_SPEED = "best_speed"
+    # Normal
+    NORMAL = "normal"
+    # Best Compression
+    BEST_COMPRESSION = "best_compression"
+
+
+class OutputDatabricksParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Determines which data types are supported and how they are represented"""
+
+    # 1.0
+    PARQUET_1_0 = "PARQUET_1_0"
+    # 2.4
+    PARQUET_2_4 = "PARQUET_2_4"
+    # 2.6
+    PARQUET_2_6 = "PARQUET_2_6"
+
+
+class OutputDatabricksDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+
+    # V1
+    DATA_PAGE_V1 = "DATA_PAGE_V1"
+    # V2
+    DATA_PAGE_V2 = "DATA_PAGE_V2"
+
+
+class OutputDatabricksKeyValueMetadatumTypedDict(TypedDict):
+    value: str
+    key: NotRequired[str]
+
+
+class OutputDatabricksKeyValueMetadatum(BaseModel):
+    value: str
+
+    key: Optional[str] = ""
 
 
 class OutputDatabricksTypedDict(TypedDict):
     type: OutputDatabricksType
-    login_url: str
-    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
+    workspace_id: str
+    r"""Databricks workspace ID"""
     client_id: str
-    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
+    r"""OAuth client ID for Unity Catalog authentication"""
+    client_secret: str
+    r"""OAuth client secret for Unity Catalog authentication"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
     pipeline: NotRequired[str]
@@ -103,35 +145,58 @@ class OutputDatabricksTypedDict(TypedDict):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
     on_disk_full_backpressure: NotRequired[OutputDatabricksDiskSpaceProtection]
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-    unity_auth_method: NotRequired[OutputDatabricksAuthenticationMethod]
-    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
     scope: NotRequired[str]
     r"""OAuth scope for Unity Catalog authentication"""
-    token_timeout_secs: NotRequired[float]
-    r"""How often the OAuth token should be refreshed"""
-    default_catalog: NotRequired[str]
+    catalog: NotRequired[str]
     r"""Name of the catalog to use for the output"""
-    default_schema: NotRequired[str]
+    schema_: NotRequired[str]
     r"""Name of the catalog schema to use for the output"""
     events_volume_name: NotRequired[str]
     r"""Name of the events volume in Databricks"""
-    over_write_files: NotRequired[bool]
-    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
     description: NotRequired[str]
-    client_secret: NotRequired[str]
-    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
-    client_text_secret: NotRequired[str]
-    r"""Select or create a stored text secret"""
+    compress: NotRequired[OutputDatabricksCompression]
+    r"""Data compression format to apply to HTTP content before it is delivered"""
+    compression_level: NotRequired[OutputDatabricksCompressionLevel]
+    r"""Compression level to apply before moving files to final destination"""
+    automatic_schema: NotRequired[bool]
+    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
+    parquet_version: NotRequired[OutputDatabricksParquetVersion]
+    r"""Determines which data types are supported and how they are represented"""
+    parquet_data_page_version: NotRequired[OutputDatabricksDataPageVersion]
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+    parquet_row_group_length: NotRequired[float]
+    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
+    parquet_page_size: NotRequired[str]
+    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
+    should_log_invalid_rows: NotRequired[bool]
+    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
+    key_value_metadata: NotRequired[List[OutputDatabricksKeyValueMetadatumTypedDict]]
+    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
+    enable_statistics: NotRequired[bool]
+    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
+    enable_write_page_index: NotRequired[bool]
+    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
+    enable_page_checksum: NotRequired[bool]
+    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
+    empty_dir_cleanup_sec: NotRequired[float]
+    r"""How frequently, in seconds, to clean up empty directories"""
+    deadletter_path: NotRequired[str]
+    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
+    max_retry_num: NotRequired[float]
+    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
 
 
 class OutputDatabricks(BaseModel):
     type: OutputDatabricksType
 
-    login_url: Annotated[str, pydantic.Field(alias="loginUrl")]
-    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
+    workspace_id: Annotated[str, pydantic.Field(alias="workspaceId")]
+    r"""Databricks workspace ID"""
 
     client_id: Annotated[str, pydantic.Field(alias="clientId")]
-    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
+    r"""OAuth client ID for Unity Catalog authentication"""
+
+    client_secret: Annotated[str, pydantic.Field(alias="clientSecret")]
+    r"""OAuth client secret for Unity Catalog authentication"""
 
     id: Optional[str] = None
     r"""Unique ID for this output"""
@@ -243,31 +308,13 @@ class OutputDatabricks(BaseModel):
     ] = OutputDatabricksDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
 
-    unity_auth_method: Annotated[
-        Annotated[
-            Optional[OutputDatabricksAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="unityAuthMethod"),
-    ] = OutputDatabricksAuthenticationMethod.MANUAL
-    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
-
     scope: Optional[str] = "all-apis"
     r"""OAuth scope for Unity Catalog authentication"""
 
-    token_timeout_secs: Annotated[
-        Optional[float], pydantic.Field(alias="tokenTimeoutSecs")
-    ] = 3600
-    r"""How often the OAuth token should be refreshed"""
-
-    default_catalog: Annotated[
-        Optional[str], pydantic.Field(alias="defaultCatalog")
-    ] = "main"
+    catalog: Optional[str] = "main"
     r"""Name of the catalog to use for the output"""
 
-    default_schema: Annotated[Optional[str], pydantic.Field(alias="defaultSchema")] = (
-        "external"
-    )
+    schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = "external"
     r"""Name of the catalog schema to use for the output"""
 
     events_volume_name: Annotated[
@@ -275,17 +322,90 @@ class OutputDatabricks(BaseModel):
     ] = "events"
     r"""Name of the events volume in Databricks"""
 
-    over_write_files: Annotated[
-        Optional[bool], pydantic.Field(alias="overWriteFiles")
+    description: Optional[str] = None
+
+    compress: Annotated[
+        Optional[OutputDatabricksCompression], PlainValidator(validate_open_enum(False))
+    ] = OutputDatabricksCompression.GZIP
+    r"""Data compression format to apply to HTTP content before it is delivered"""
+
+    compression_level: Annotated[
+        Annotated[
+            Optional[OutputDatabricksCompressionLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="compressionLevel"),
+    ] = OutputDatabricksCompressionLevel.BEST_SPEED
+    r"""Compression level to apply before moving files to final destination"""
+
+    automatic_schema: Annotated[
+        Optional[bool], pydantic.Field(alias="automaticSchema")
     ] = False
-    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
+    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
 
-    description: Optional[str] = None
+    parquet_version: Annotated[
+        Annotated[
+            Optional[OutputDatabricksParquetVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="parquetVersion"),
+    ] = OutputDatabricksParquetVersion.PARQUET_2_6
+    r"""Determines which data types are supported and how they are represented"""
 
-    client_secret: Annotated[Optional[str], pydantic.Field(alias="clientSecret")] = None
-    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
+    parquet_data_page_version: Annotated[
+        Annotated[
+            Optional[OutputDatabricksDataPageVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="parquetDataPageVersion"),
+    ] = OutputDatabricksDataPageVersion.DATA_PAGE_V2
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+
+    parquet_row_group_length: Annotated[
+        Optional[float], pydantic.Field(alias="parquetRowGroupLength")
+    ] = 10000
+    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
+
+    parquet_page_size: Annotated[
+        Optional[str], pydantic.Field(alias="parquetPageSize")
+    ] = "1MB"
+    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
+
+    should_log_invalid_rows: Annotated[
+        Optional[bool], pydantic.Field(alias="shouldLogInvalidRows")
+    ] = None
+    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
 
-    client_text_secret: Annotated[
-        Optional[str], pydantic.Field(alias="clientTextSecret")
+    key_value_metadata: Annotated[
+        Optional[List[OutputDatabricksKeyValueMetadatum]],
+        pydantic.Field(alias="keyValueMetadata"),
    ] = None
-    r"""Select or create a stored text secret"""
+    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
+
+    enable_statistics: Annotated[
+        Optional[bool], pydantic.Field(alias="enableStatistics")
+    ] = True
+    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
+
+    enable_write_page_index: Annotated[
+        Optional[bool], pydantic.Field(alias="enableWritePageIndex")
+    ] = True
+    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
+
+    enable_page_checksum: Annotated[
+        Optional[bool], pydantic.Field(alias="enablePageChecksum")
+    ] = False
+    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
+
+    empty_dir_cleanup_sec: Annotated[
+        Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
+    ] = 300
+    r"""How frequently, in seconds, to clean up empty directories"""
+
+    deadletter_path: Annotated[
+        Optional[str], pydantic.Field(alias="deadletterPath")
+    ] = "$CRIBL_HOME/state/outputs/dead-letter"
+    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
+
+    max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
+    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
cribl_control_plane-0.3.0b1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cribl-control-plane
-Version: 0.2.0b8
+Version: 0.3.0b1
 Summary: Python Client SDK Generated by Speakeasy.
 Author: Speakeasy
 Requires-Python: >=3.9.2
cribl_control_plane-0.3.0b1.dist-info/RECORD
@@ -4,7 +4,7 @@ cribl_control_plane/_hooks/clientcredentials.py,sha256=CeI19FzRb2V6kiNPgSFGn0CgI
 cribl_control_plane/_hooks/registration.py,sha256=1QZB41w6If7I9dXiOSQx6dhSc6BPWrnI5Q5bMOr4iVA,624
 cribl_control_plane/_hooks/sdkhooks.py,sha256=ggXjME1_Rdv8CVCg1XHqB83eYtbxzKyhXyfQ36Yc1gA,2816
 cribl_control_plane/_hooks/types.py,sha256=Tw_C4zTZm01rW_89VDEUpvQ8KQr1WxN0Gu_-s_fYSPc,2998
-cribl_control_plane/_version.py,sha256=8OTMapRkZK7wrTdJzWe3PoQ7eh2YPiHQajP_04FUyhs,546
+cribl_control_plane/_version.py,sha256=jcJDs9NWxpFLIYWmcR0tLAJfsCOZzYycW9G_RiwQAQg,546
 cribl_control_plane/acl.py,sha256=8lvYOKAli4PzsQhOVaCU6YCwblPMh9jQo04L0r4HJuQ,9025
 cribl_control_plane/auth_sdk.py,sha256=3sjf1VoyWwfhSyuMDQLixgWISSf03BOZwmkiT8g5Ruw,626
 cribl_control_plane/basesdk.py,sha256=y4yIXSNVXLMd0sLS2htBFdTCI3gkPQbIWd-C671kg1I,12249
@@ -27,7 +27,7 @@ cribl_control_plane/health.py,sha256=N8pX8RHkJVtLFd4nZ8ypJPrzT_JezciEVry9s9qvCRc
 cribl_control_plane/hectokens.py,sha256=0EGgGGrM83m1YmTZwkN5S4xFkHQGnw1IZe3y6uMwmLw,19151
 cribl_control_plane/httpclient.py,sha256=dqTPONDBpRn4ktXfcetQiRXnG93f0pJkFhqsYFhLUac,3945
 cribl_control_plane/lakedatasets.py,sha256=VaacfDeQDMJKibABnkZibVMfOYxeh9ITcHKjM9QDqw8,46676
-cribl_control_plane/models/__init__.py,sha256=xztoMdonWnLoDZM0HsRCpHLUZZxdQ37cNOhOYtkGqFQ,391644
+cribl_control_plane/models/__init__.py,sha256=cfKxbznEeRWhg15aWLi0P8PBMY315Wq9fGcVGHyNWig,392581
 cribl_control_plane/models/addhectokenrequest.py,sha256=mzQLKrMWlwxNheqEs5SM_yrT-gyenfCWgHKhmb5oXFQ,800
 cribl_control_plane/models/authtoken.py,sha256=gtO4VNC8-vr6spO6HCtL_yFPm30sUdtn-_qajc_mLmI,528
 cribl_control_plane/models/branchinfo.py,sha256=jCX31O5TMG9jTjqigPvvUiBwpgPpVxHtSuhYrNykXiI,291
@@ -185,11 +185,12 @@ cribl_control_plane/models/lookupversions.py,sha256=PLk5hD1WPEIoePfJbhllePawNTa1
 cribl_control_plane/models/masterworkerentry.py,sha256=KT8bTu5t20ZwhybN8yz4MtG8CQZGpqv3I1JGjVItY7Q,2481
 cribl_control_plane/models/nodeactiveupgradestatus.py,sha256=knwgNh1octWr6oY-TadH0StJmzv0cktlJ4tc5pq_ChM,279
 cribl_control_plane/models/nodefailedupgradestatus.py,sha256=EE4tSjcWyQxASftW9xJCS8K5QjpLkjCl3YDIys4r7FA,267
-cribl_control_plane/models/nodeprovidedinfo.py,sha256=9oh01FeEJOR4h9Y3I22HMgW_kAaSRvo9bAq-sUNkr6Y,3797
+cribl_control_plane/models/nodeprovidedinfo.py,sha256=bhR6PGEH7KVf7q09O17JKJnGRIyXn9mn5_Va4Jw880U,3966
 cribl_control_plane/models/nodeskippedupgradestatus.py,sha256=EY-U3cUPwMa3H-X-hn5gdaEBmSAP3hB9gRPdiQZs5yU,294
 cribl_control_plane/models/nodeupgradestate.py,sha256=EerzMMQeFl-iHKHsJwEIxRroH6w97S7-em9YoY2-ASk,286
 cribl_control_plane/models/nodeupgradestatus.py,sha256=Ygdb7jTFOvD6M3Fjl3brliLCKbkdX3aCwkPYjTE4Dw0,1346
-cribl_control_plane/models/output.py,sha256=PCoSqMYzIbzz_eLIM1DKODoE715HiRYQV1myMPs2fN8,11067
+cribl_control_plane/models/outpostnodeinfo.py,sha256=CbYoOaUBIEa6viCvh5aCDhjG4Pe3mqyOMIjyk0rI_r4,334
+cribl_control_plane/models/output.py,sha256=ZpL7x04ug03fFFh7LLVKEN6p2RuhN87gYQp6rrbJEuc,11067
 cribl_control_plane/models/outputazureblob.py,sha256=3FwhgPC9rqLY9NMAgwQlwBjjI-6ANNzzg5t6SW2dZNQ,23414
 cribl_control_plane/models/outputazuredataexplorer.py,sha256=uRzlsyAsnvoBay7_WnD1EX-geJYleexLJQ8XTq24xHQ,32423
 cribl_control_plane/models/outputazureeventhub.py,sha256=65ybby3H6XlJvCGpdWbNUFfnXD7hkmHg0xe9NpCmUxI,15949
@@ -202,7 +203,7 @@ cribl_control_plane/models/outputcriblhttp.py,sha256=u-1dwQgnbJbT0uKKg_hTVm9EUcT
 cribl_control_plane/models/outputcribllake.py,sha256=9ybDwH-7_rdTctcS8yx_Hqhl95mT-3Lm3B2M6PjsWfM,18428
 cribl_control_plane/models/outputcribltcp.py,sha256=I_zKaSk9F3YsyCdoTqERhyrhEZtZq9_yFjpqYU0D61k,17651
 cribl_control_plane/models/outputcrowdstrikenextgensiem.py,sha256=Yz9YIVnJoCpmW1cGggJIQ_c2YLfXkH9cdeFmCfOGYVc,20081
-cribl_control_plane/models/outputdatabricks.py,sha256=EBnfJLzCmSrUpPzp5_f40rTbN68-r-A_LtuaY1nTVA0,14050
+cribl_control_plane/models/outputdatabricks.py,sha256=gN1tb4tLAfTmiEbe2bo-oTXLNcGmQL9uz9jfyHO3Ahk,19574
 cribl_control_plane/models/outputdatadog.py,sha256=Xr3HIFnuuTHgSEycO20az3PHnHb7QyYyoH1Y39zdWKM,23711
 cribl_control_plane/models/outputdataset.py,sha256=p68GTCPkL0-sMwt5A4-oqI7p2-vDwbWwRgh6aZzUMLU,21950
 cribl_control_plane/models/outputdefault.py,sha256=2tjMKYSksR-0qWLd_u3PPLXL0gZiSlUdj9JTPYeYMps,1952
@@ -326,6 +327,6 @@ cribl_control_plane/utils/url.py,sha256=BgGPgcTA6MRK4bF8fjP2dUopN3NzEzxWMXPBVg8N
 cribl_control_plane/utils/values.py,sha256=CcaCXEa3xHhkUDROyXZocN8f0bdITftv9Y0P9lTf0YM,3517
 cribl_control_plane/versions.py,sha256=4xdTYbM84Xyjr5qkixqNpgn2q6V8aXVYXkEPDU2Ele0,1156
 cribl_control_plane/versions_configs.py,sha256=5CKcfN4SzuyFgggrx6O8H_h3GhNyKSbfdVhSkVGZKi4,7284
-cribl_control_plane-0.2.0b8.dist-info/METADATA,sha256=Qtk3JSbPyxbiqX4k68B_lq_6uesZ78bfSnw9HKyXsUc,39955
-cribl_control_plane-0.2.0b8.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
-cribl_control_plane-0.2.0b8.dist-info/RECORD,,
+cribl_control_plane-0.3.0b1.dist-info/METADATA,sha256=cRfinV47ZaIm79rB63rJkCWiCic4IrZebNlfH8FPbFg,39955
+cribl_control_plane-0.3.0b1.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+cribl_control_plane-0.3.0b1.dist-info/RECORD,,