cribl-control-plane 0.2.0b8__py3-none-any.whl → 0.2.1rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

@@ -0,0 +1,95 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class ID(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Unique function identifier. Must always be 'eval'."""
+
+    EVAL = "eval"
+
+
+class AddTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class Add(BaseModel):
+    name: str
+
+    value: str
+
+
+class FunctionConfTypedDict(TypedDict):
+    add: List[AddTypedDict]
+    r"""Fields to add in this eval function"""
+
+
+class FunctionConf(BaseModel):
+    add: List[Add]
+    r"""Fields to add in this eval function"""
+
+
+class FunctionTypedDict(TypedDict):
+    id: ID
+    r"""Unique function identifier. Must always be 'eval'."""
+    filter_: str
+    r"""Filter expression controlling when this function runs"""
+    final: bool
+    r"""Marks whether this function is final in the chain. Must always be true."""
+    conf: FunctionConfTypedDict
+    description: NotRequired[str]
+    r"""Optional function description"""
+    disabled: NotRequired[bool]
+    r"""Indicates whether the function is disabled"""
+
+
+class Function(BaseModel):
+    id: Annotated[ID, PlainValidator(validate_open_enum(False))]
+    r"""Unique function identifier. Must always be 'eval'."""
+
+    filter_: Annotated[str, pydantic.Field(alias="filter")]
+    r"""Filter expression controlling when this function runs"""
+
+    final: bool
+    r"""Marks whether this function is final in the chain. Must always be true."""
+
+    conf: FunctionConf
+
+    description: Optional[str] = None
+    r"""Optional function description"""
+
+    disabled: Optional[bool] = None
+    r"""Indicates whether the function is disabled"""
+
+
+class MappingRulesetConfTypedDict(TypedDict):
+    functions: NotRequired[List[FunctionTypedDict]]
+    r"""List of functions to pass data through"""
+
+
+class MappingRulesetConf(BaseModel):
+    functions: Optional[List[Function]] = None
+    r"""List of functions to pass data through"""
+
+
+class MappingRulesetTypedDict(TypedDict):
+    id: str
+    conf: NotRequired[MappingRulesetConfTypedDict]
+    active: NotRequired[bool]
+
+
+class MappingRuleset(BaseModel):
+    id: str
+
+    conf: Optional[MappingRulesetConf] = None
+
+    active: Optional[bool] = None
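
For orientation, a minimal sketch of how the new mapping-ruleset models compose, assuming they are re-exported from cribl_control_plane.models (the exact re-export names may differ in the generated package):

```python
# Sketch only: assumes the new models are re-exported from cribl_control_plane.models.
from cribl_control_plane import models

ruleset = models.MappingRuleset(
    id="default-mappings",
    active=True,
    conf=models.MappingRulesetConf(
        functions=[
            models.Function(
                id=models.ID.EVAL,   # must always be "eval"
                filter_="true",      # serialized as "filter" via the pydantic alias
                final=True,          # must always be true
                conf=models.FunctionConf(
                    add=[models.Add(name="groupId", value="'default'")]
                ),
            )
        ]
    ),
)
```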
@@ -3,6 +3,7 @@
 from __future__ import annotations
 from .hbcriblinfo import HBCriblInfo, HBCriblInfoTypedDict
 from .heartbeatmetadata import HeartbeatMetadata, HeartbeatMetadataTypedDict
+from .outpostnodeinfo import OutpostNodeInfo, OutpostNodeInfoTypedDict
 from cribl_control_plane.types import BaseModel
 import pydantic
 from typing import Dict, List, Optional, Union
@@ -144,6 +145,7 @@ class NodeProvidedInfoTypedDict(TypedDict):
     local_time: NotRequired[float]
     metadata: NotRequired[HeartbeatMetadataTypedDict]
     os: NotRequired[OsTypedDict]
+    outpost: NotRequired[OutpostNodeInfoTypedDict]
 
 
 class NodeProvidedInfo(BaseModel):
@@ -188,3 +190,5 @@ class NodeProvidedInfo(BaseModel):
     metadata: Optional[HeartbeatMetadata] = None
 
     os: Optional[Os] = None
+
+    outpost: Optional[OutpostNodeInfo] = None
@@ -0,0 +1,16 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from typing_extensions import TypedDict
+
+
+class OutpostNodeInfoTypedDict(TypedDict):
+    guid: str
+    host: str
+
+
+class OutpostNodeInfo(BaseModel):
+    guid: str
+
+    host: str
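
The new outpost field is optional on NodeProvidedInfo, so existing heartbeat payloads keep validating; a node that reports through an outpost can now identify it. A minimal sketch, assuming the models are re-exported from cribl_control_plane.models:

```python
# Sketch only: import path is an assumption; field names match the diff above.
from cribl_control_plane import models

outpost = models.OutpostNodeInfo(guid="example-guid", host="outpost-1.internal")
# NodeProvidedInfo(..., outpost=outpost); the field defaults to None when omitted.
```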
@@ -94,32 +94,31 @@ OutputTypedDict = TypeAliasType(
     OutputDevnullTypedDict,
     OutputDefaultTypedDict,
     OutputRouterTypedDict,
-    OutputNetflowTypedDict,
     OutputSnmpTypedDict,
+    OutputNetflowTypedDict,
     OutputDiskSpoolTypedDict,
     OutputRingTypedDict,
-    OutputGraphiteTypedDict,
     OutputStatsdExtTypedDict,
+    OutputGraphiteTypedDict,
     OutputStatsdTypedDict,
     OutputGooglePubsubTypedDict,
-    OutputSplunkTypedDict,
     OutputCriblTCPTypedDict,
+    OutputSplunkTypedDict,
     OutputSnsTypedDict,
     OutputCloudwatchTypedDict,
     OutputAzureEventhubTypedDict,
-    OutputSignalfxTypedDict,
     OutputWavefrontTypedDict,
+    OutputSignalfxTypedDict,
     OutputHoneycombTypedDict,
-    OutputHumioHecTypedDict,
-    OutputTcpjsonTypedDict,
     OutputSumoLogicTypedDict,
     OutputCrowdstrikeNextGenSiemTypedDict,
+    OutputHumioHecTypedDict,
+    OutputTcpjsonTypedDict,
     OutputElasticCloudTypedDict,
-    OutputExabeamTypedDict,
-    OutputKafkaTypedDict,
-    OutputConfluentCloudTypedDict,
     OutputKinesisTypedDict,
-    OutputDatabricksTypedDict,
+    OutputConfluentCloudTypedDict,
+    OutputKafkaTypedDict,
+    OutputExabeamTypedDict,
     OutputNewrelicEventsTypedDict,
     OutputAzureLogsTypedDict,
     OutputSplunkLbTypedDict,
@@ -140,6 +139,7 @@ OutputTypedDict = TypeAliasType(
     OutputElasticTypedDict,
     OutputDatadogTypedDict,
     OutputCriblLakeTypedDict,
+    OutputDatabricksTypedDict,
     OutputPrometheusTypedDict,
     OutputMskTypedDict,
     OutputSentinelOneAiSiemTypedDict,
@@ -151,8 +151,8 @@ OutputTypedDict = TypeAliasType(
     OutputMinioTypedDict,
     OutputClickHouseTypedDict,
     OutputSecurityLakeTypedDict,
-    OutputDlS3TypedDict,
     OutputS3TypedDict,
+    OutputDlS3TypedDict,
     OutputWebhookTypedDict,
     OutputAzureDataExplorerTypedDict,
     OutputGoogleCloudLoggingTypedDict,
@@ -44,21 +44,63 @@ class OutputDatabricksDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMet
     DROP = "drop"
 
 
-class OutputDatabricksAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
+class OutputDatabricksCompression(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Data compression format to apply to HTTP content before it is delivered"""
 
-    # Manual
-    MANUAL = "manual"
-    # Secret Key pair
-    SECRET = "secret"
+    NONE = "none"
+    GZIP = "gzip"
+
+
+class OutputDatabricksCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Compression level to apply before moving files to final destination"""
+
+    # Best Speed
+    BEST_SPEED = "best_speed"
+    # Normal
+    NORMAL = "normal"
+    # Best Compression
+    BEST_COMPRESSION = "best_compression"
+
+
+class OutputDatabricksParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Determines which data types are supported and how they are represented"""
+
+    # 1.0
+    PARQUET_1_0 = "PARQUET_1_0"
+    # 2.4
+    PARQUET_2_4 = "PARQUET_2_4"
+    # 2.6
+    PARQUET_2_6 = "PARQUET_2_6"
+
+
+class OutputDatabricksDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+
+    # V1
+    DATA_PAGE_V1 = "DATA_PAGE_V1"
+    # V2
+    DATA_PAGE_V2 = "DATA_PAGE_V2"
+
+
+class OutputDatabricksKeyValueMetadatumTypedDict(TypedDict):
+    value: str
+    key: NotRequired[str]
+
+
+class OutputDatabricksKeyValueMetadatum(BaseModel):
+    value: str
+
+    key: Optional[str] = ""
 
 
 class OutputDatabricksTypedDict(TypedDict):
     type: OutputDatabricksType
-    login_url: str
-    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
+    workspace_id: str
+    r"""Databricks workspace ID"""
     client_id: str
-    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
+    r"""OAuth client ID for Unity Catalog authentication"""
+    client_secret: str
+    r"""OAuth client secret for Unity Catalog authentication"""
     id: NotRequired[str]
     r"""Unique ID for this output"""
     pipeline: NotRequired[str]
@@ -103,35 +145,58 @@ class OutputDatabricksTypedDict(TypedDict):
     r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
     on_disk_full_backpressure: NotRequired[OutputDatabricksDiskSpaceProtection]
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-    unity_auth_method: NotRequired[OutputDatabricksAuthenticationMethod]
-    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
     scope: NotRequired[str]
     r"""OAuth scope for Unity Catalog authentication"""
-    token_timeout_secs: NotRequired[float]
-    r"""How often the OAuth token should be refreshed"""
-    default_catalog: NotRequired[str]
+    catalog: NotRequired[str]
     r"""Name of the catalog to use for the output"""
-    default_schema: NotRequired[str]
+    schema_: NotRequired[str]
     r"""Name of the catalog schema to use for the output"""
     events_volume_name: NotRequired[str]
     r"""Name of the events volume in Databricks"""
-    over_write_files: NotRequired[bool]
-    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
     description: NotRequired[str]
-    client_secret: NotRequired[str]
-    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
-    client_text_secret: NotRequired[str]
-    r"""Select or create a stored text secret"""
+    compress: NotRequired[OutputDatabricksCompression]
+    r"""Data compression format to apply to HTTP content before it is delivered"""
+    compression_level: NotRequired[OutputDatabricksCompressionLevel]
+    r"""Compression level to apply before moving files to final destination"""
+    automatic_schema: NotRequired[bool]
+    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
+    parquet_version: NotRequired[OutputDatabricksParquetVersion]
+    r"""Determines which data types are supported and how they are represented"""
+    parquet_data_page_version: NotRequired[OutputDatabricksDataPageVersion]
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+    parquet_row_group_length: NotRequired[float]
+    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
+    parquet_page_size: NotRequired[str]
+    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
+    should_log_invalid_rows: NotRequired[bool]
+    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
+    key_value_metadata: NotRequired[List[OutputDatabricksKeyValueMetadatumTypedDict]]
+    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
+    enable_statistics: NotRequired[bool]
+    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
+    enable_write_page_index: NotRequired[bool]
+    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
+    enable_page_checksum: NotRequired[bool]
+    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
+    empty_dir_cleanup_sec: NotRequired[float]
+    r"""How frequently, in seconds, to clean up empty directories"""
+    deadletter_path: NotRequired[str]
+    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
+    max_retry_num: NotRequired[float]
+    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
 
 
 class OutputDatabricks(BaseModel):
     type: OutputDatabricksType
 
-    login_url: Annotated[str, pydantic.Field(alias="loginUrl")]
-    r"""URL for Unity Catalog OAuth token endpoint (example: 'https://your-workspace.cloud.databricks.com/oauth/token')"""
+    workspace_id: Annotated[str, pydantic.Field(alias="workspaceId")]
+    r"""Databricks workspace ID"""
 
     client_id: Annotated[str, pydantic.Field(alias="clientId")]
-    r"""JavaScript expression to compute the OAuth client ID for Unity Catalog authentication. Can be a constant."""
+    r"""OAuth client ID for Unity Catalog authentication"""
+
+    client_secret: Annotated[str, pydantic.Field(alias="clientSecret")]
+    r"""OAuth client secret for Unity Catalog authentication"""
 
     id: Optional[str] = None
     r"""Unique ID for this output"""
@@ -243,31 +308,13 @@ class OutputDatabricks(BaseModel):
     ] = OutputDatabricksDiskSpaceProtection.BLOCK
     r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
 
-    unity_auth_method: Annotated[
-        Annotated[
-            Optional[OutputDatabricksAuthenticationMethod],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="unityAuthMethod"),
-    ] = OutputDatabricksAuthenticationMethod.MANUAL
-    r"""Unity Catalog authentication method. Choose Manual to enter credentials directly, or Secret to use a stored secret."""
-
     scope: Optional[str] = "all-apis"
     r"""OAuth scope for Unity Catalog authentication"""
 
-    token_timeout_secs: Annotated[
-        Optional[float], pydantic.Field(alias="tokenTimeoutSecs")
-    ] = 3600
-    r"""How often the OAuth token should be refreshed"""
-
-    default_catalog: Annotated[
-        Optional[str], pydantic.Field(alias="defaultCatalog")
-    ] = "main"
+    catalog: Optional[str] = "main"
     r"""Name of the catalog to use for the output"""
 
-    default_schema: Annotated[Optional[str], pydantic.Field(alias="defaultSchema")] = (
-        "external"
-    )
+    schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = "external"
     r"""Name of the catalog schema to use for the output"""
 
     events_volume_name: Annotated[
@@ -275,17 +322,90 @@ class OutputDatabricks(BaseModel):
     ] = "events"
     r"""Name of the events volume in Databricks"""
 
-    over_write_files: Annotated[
-        Optional[bool], pydantic.Field(alias="overWriteFiles")
+    description: Optional[str] = None
+
+    compress: Annotated[
+        Optional[OutputDatabricksCompression], PlainValidator(validate_open_enum(False))
+    ] = OutputDatabricksCompression.GZIP
+    r"""Data compression format to apply to HTTP content before it is delivered"""
+
+    compression_level: Annotated[
+        Annotated[
+            Optional[OutputDatabricksCompressionLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="compressionLevel"),
+    ] = OutputDatabricksCompressionLevel.BEST_SPEED
+    r"""Compression level to apply before moving files to final destination"""
+
+    automatic_schema: Annotated[
+        Optional[bool], pydantic.Field(alias="automaticSchema")
     ] = False
-    r"""Uploaded files should be overwritten if they already exist. If disabled, upload will fail if a file already exists."""
+    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
 
-    description: Optional[str] = None
+    parquet_version: Annotated[
+        Annotated[
+            Optional[OutputDatabricksParquetVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="parquetVersion"),
+    ] = OutputDatabricksParquetVersion.PARQUET_2_6
+    r"""Determines which data types are supported and how they are represented"""
 
-    client_secret: Annotated[Optional[str], pydantic.Field(alias="clientSecret")] = None
-    r"""JavaScript expression to compute the OAuth client secret for Unity Catalog authentication. Can be a constant."""
+    parquet_data_page_version: Annotated[
+        Annotated[
+            Optional[OutputDatabricksDataPageVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="parquetDataPageVersion"),
+    ] = OutputDatabricksDataPageVersion.DATA_PAGE_V2
+    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
+
+    parquet_row_group_length: Annotated[
+        Optional[float], pydantic.Field(alias="parquetRowGroupLength")
+    ] = 10000
+    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
+
+    parquet_page_size: Annotated[
+        Optional[str], pydantic.Field(alias="parquetPageSize")
+    ] = "1MB"
+    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
+
+    should_log_invalid_rows: Annotated[
+        Optional[bool], pydantic.Field(alias="shouldLogInvalidRows")
+    ] = None
+    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
 
-    client_text_secret: Annotated[
-        Optional[str], pydantic.Field(alias="clientTextSecret")
+    key_value_metadata: Annotated[
+        Optional[List[OutputDatabricksKeyValueMetadatum]],
+        pydantic.Field(alias="keyValueMetadata"),
     ] = None
-    r"""Select or create a stored text secret"""
+    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
+
+    enable_statistics: Annotated[
+        Optional[bool], pydantic.Field(alias="enableStatistics")
+    ] = True
+    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
+
+    enable_write_page_index: Annotated[
+        Optional[bool], pydantic.Field(alias="enableWritePageIndex")
+    ] = True
+    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
+
+    enable_page_checksum: Annotated[
+        Optional[bool], pydantic.Field(alias="enablePageChecksum")
+    ] = False
+    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
+
+    empty_dir_cleanup_sec: Annotated[
+        Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
+    ] = 300
+    r"""How frequently, in seconds, to clean up empty directories"""
+
+    deadletter_path: Annotated[
+        Optional[str], pydantic.Field(alias="deadletterPath")
+    ] = "$CRIBL_HOME/state/outputs/dead-letter"
+    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
+
+    max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
+    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
@@ -26,7 +26,7 @@ class PipelineGroups(BaseModel):
     r"""Whether this group is disabled"""
 
 
-class ConfTypedDict(TypedDict):
+class PipelineConfTypedDict(TypedDict):
     async_func_timeout: NotRequired[int]
     r"""Time (in ms) to wait for an async function to complete processing of a data item"""
     output: NotRequired[str]
@@ -39,7 +39,7 @@ class ConfTypedDict(TypedDict):
     groups: NotRequired[Dict[str, PipelineGroupsTypedDict]]
 
 
-class Conf(BaseModel):
+class PipelineConf(BaseModel):
     async_func_timeout: Annotated[
         Optional[int], pydantic.Field(alias="asyncFuncTimeout")
     ] = None
@@ -61,10 +61,10 @@ class Conf(BaseModel):
 
 class PipelineTypedDict(TypedDict):
     id: str
-    conf: ConfTypedDict
+    conf: PipelineConfTypedDict
 
 
 class Pipeline(BaseModel):
     id: str
 
-    conf: Conf
+    conf: PipelineConf
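
The only change in this file is the rename of the generic Conf/ConfTypedDict pair to PipelineConf/PipelineConfTypedDict; code that referenced models.Conf directly needs the new name. A minimal before/after sketch, assuming the models are re-exported from cribl_control_plane.models:

```python
# Sketch only: field names follow the PipelineConf definition shown above.
from cribl_control_plane import models

# 0.2.0b8:  conf = models.Conf(async_func_timeout=1000, output="default")
# 0.2.1rc1:
conf = models.PipelineConf(async_func_timeout=1000, output="default")
pipeline = models.Pipeline(id="my-pipeline", conf=conf)
```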
@@ -0,0 +1,13 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from typing_extensions import TypedDict
+
+
+class RulesetIDTypedDict(TypedDict):
+    id: str
+
+
+class RulesetID(BaseModel):
+    id: str
@@ -0,0 +1,63 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .mappingruleset import MappingRuleset, MappingRulesetTypedDict
+from .productscore import ProductsCore
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import (
+    FieldMetadata,
+    PathParamMetadata,
+    RequestMetadata,
+    validate_open_enum,
+)
+import pydantic
+from pydantic.functional_validators import PlainValidator
+from typing import List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class UpdateAdminProductsMappingsByProductAndIDRequestTypedDict(TypedDict):
+    product: ProductsCore
+    r"""Name of the Cribl product to update the Mapping Ruleset for"""
+    id_param: str
+    r"""The <code>id</code> of the Mapping Ruleset to update."""
+    mapping_ruleset: MappingRulesetTypedDict
+    r"""MappingRuleset object"""
+
+
+class UpdateAdminProductsMappingsByProductAndIDRequest(BaseModel):
+    product: Annotated[
+        Annotated[ProductsCore, PlainValidator(validate_open_enum(False))],
+        FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
+    ]
+    r"""Name of the Cribl product to update the Mapping Ruleset for"""
+
+    id_param: Annotated[
+        str,
+        pydantic.Field(alias="id"),
+        FieldMetadata(path=PathParamMetadata(style="simple", explode=False)),
+    ]
+    r"""The <code>id</code> of the Mapping Ruleset to update."""
+
+    mapping_ruleset: Annotated[
+        MappingRuleset,
+        FieldMetadata(request=RequestMetadata(media_type="application/json")),
+    ]
+    r"""MappingRuleset object"""
+
+
+class UpdateAdminProductsMappingsByProductAndIDResponseTypedDict(TypedDict):
+    r"""A list containing the updated Mapping Ruleset objects"""
+
+    count: NotRequired[int]
+    r"""number of items present in the items array"""
+    items: NotRequired[List[MappingRulesetTypedDict]]
+
+
+class UpdateAdminProductsMappingsByProductAndIDResponse(BaseModel):
+    r"""A list containing the updated Mapping Ruleset objects"""
+
+    count: Optional[int] = None
+    r"""number of items present in the items array"""
+
+    items: Optional[List[MappingRuleset]] = None
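
These wrappers cover the new admin mappings update operation; the generated SDK method typically builds the request object from its keyword arguments, so callers mostly interact with the response. A small sketch of consuming it:

```python
# Sketch only: `resp` stands for an UpdateAdminProductsMappingsByProductAndIDResponse
# returned by the corresponding generated SDK method.
def updated_ruleset_ids(resp) -> list[str]:
    # items is Optional, so guard against None before iterating
    return [ruleset.id for ruleset in (resp.items or [])]
```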
@@ -176,7 +176,7 @@ class Pipelines(BaseSDK):
         self,
         *,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -205,7 +205,7 @@ class Pipelines(BaseSDK):
 
         request = models.Pipeline(
             id=id,
-            conf=utils.get_pydantic_model(conf, models.Conf),
+            conf=utils.get_pydantic_model(conf, models.PipelineConf),
         )
 
         req = self._build_request(
@@ -269,7 +269,7 @@ class Pipelines(BaseSDK):
         self,
         *,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -298,7 +298,7 @@ class Pipelines(BaseSDK):
 
         request = models.Pipeline(
             id=id,
-            conf=utils.get_pydantic_model(conf, models.Conf),
+            conf=utils.get_pydantic_model(conf, models.PipelineConf),
         )
 
         req = self._build_request_async(
@@ -537,7 +537,7 @@ class Pipelines(BaseSDK):
         *,
         id_param: str,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -569,7 +569,7 @@ class Pipelines(BaseSDK):
             id_param=id_param,
             pipeline=models.Pipeline(
                 id=id,
-                conf=utils.get_pydantic_model(conf, models.Conf),
+                conf=utils.get_pydantic_model(conf, models.PipelineConf),
             ),
         )
 
@@ -635,7 +635,7 @@ class Pipelines(BaseSDK):
         *,
         id_param: str,
         id: str,
-        conf: Union[models.Conf, models.ConfTypedDict],
+        conf: Union[models.PipelineConf, models.PipelineConfTypedDict],
         retries: OptionalNullable[utils.RetryConfig] = UNSET,
         server_url: Optional[str] = None,
         timeout_ms: Optional[int] = None,
@@ -667,7 +667,7 @@ class Pipelines(BaseSDK):
             id_param=id_param,
             pipeline=models.Pipeline(
                 id=id,
-                conf=utils.get_pydantic_model(conf, models.Conf),
+                conf=utils.get_pydantic_model(conf, models.PipelineConf),
             ),
         )
 
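
Callers of the Pipelines SDK only see the rename in the conf parameter: it now accepts models.PipelineConf or a PipelineConfTypedDict-shaped dict instead of models.Conf. A hedged usage sketch (the client class and method names are assumptions; they are not visible in this diff):

```python
# Sketch only: client construction and method name are assumptions for illustration.
from cribl_control_plane import CriblControlPlane, models

client = CriblControlPlane(server_url="https://main-instance.example.com/api/v1")
client.pipelines.create(  # hypothetical method name
    id="my-pipeline",
    conf=models.PipelineConf(async_func_timeout=1000, output="default"),
)
```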