cribl-control-plane 0.3.0b3__py3-none-any.whl → 0.3.0b12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (158)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/groups_sdk.py +2 -2
  3. cribl_control_plane/lakedatasets.py +28 -0
  4. cribl_control_plane/models/__init__.py +124 -5
  5. cribl_control_plane/models/cacheconnection.py +20 -0
  6. cribl_control_plane/models/configgroup.py +20 -1
  7. cribl_control_plane/models/configgroupcloud.py +11 -1
  8. cribl_control_plane/models/createconfiggroupbyproductop.py +13 -2
  9. cribl_control_plane/models/cribllakedataset.py +15 -1
  10. cribl_control_plane/models/cribllakedatasetupdate.py +15 -1
  11. cribl_control_plane/models/datasetmetadata.py +11 -1
  12. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +11 -0
  13. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +20 -0
  14. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +20 -0
  15. cribl_control_plane/models/getconfiggroupbyproductandidop.py +11 -0
  16. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +11 -0
  17. cribl_control_plane/models/getsummaryop.py +11 -0
  18. cribl_control_plane/models/groupcreaterequest.py +20 -1
  19. cribl_control_plane/models/hbcriblinfo.py +11 -1
  20. cribl_control_plane/models/healthserverstatus.py +20 -1
  21. cribl_control_plane/models/input.py +15 -15
  22. cribl_control_plane/models/inputappscope.py +76 -17
  23. cribl_control_plane/models/inputazureblob.py +29 -1
  24. cribl_control_plane/models/inputcollection.py +20 -1
  25. cribl_control_plane/models/inputconfluentcloud.py +188 -1
  26. cribl_control_plane/models/inputcribl.py +20 -1
  27. cribl_control_plane/models/inputcriblhttp.py +58 -17
  28. cribl_control_plane/models/inputcribllakehttp.py +58 -17
  29. cribl_control_plane/models/inputcriblmetrics.py +20 -1
  30. cribl_control_plane/models/inputcribltcp.py +58 -17
  31. cribl_control_plane/models/inputcrowdstrike.py +47 -1
  32. cribl_control_plane/models/inputdatadogagent.py +58 -17
  33. cribl_control_plane/models/inputdatagen.py +20 -1
  34. cribl_control_plane/models/inputedgeprometheus.py +138 -37
  35. cribl_control_plane/models/inputelastic.py +108 -27
  36. cribl_control_plane/models/inputeventhub.py +176 -1
  37. cribl_control_plane/models/inputexec.py +29 -1
  38. cribl_control_plane/models/inputfile.py +40 -7
  39. cribl_control_plane/models/inputfirehose.py +58 -17
  40. cribl_control_plane/models/inputgooglepubsub.py +29 -1
  41. cribl_control_plane/models/inputgrafana.py +149 -32
  42. cribl_control_plane/models/inputhttp.py +58 -17
  43. cribl_control_plane/models/inputhttpraw.py +58 -17
  44. cribl_control_plane/models/inputjournalfiles.py +20 -1
  45. cribl_control_plane/models/inputkafka.py +182 -1
  46. cribl_control_plane/models/inputkinesis.py +65 -1
  47. cribl_control_plane/models/inputkubeevents.py +20 -1
  48. cribl_control_plane/models/inputkubelogs.py +29 -1
  49. cribl_control_plane/models/inputkubemetrics.py +29 -1
  50. cribl_control_plane/models/inputloki.py +67 -17
  51. cribl_control_plane/models/inputmetrics.py +58 -17
  52. cribl_control_plane/models/inputmodeldriventelemetry.py +58 -17
  53. cribl_control_plane/models/inputmsk.py +74 -1
  54. cribl_control_plane/models/inputnetflow.py +20 -1
  55. cribl_control_plane/models/inputoffice365mgmt.py +56 -1
  56. cribl_control_plane/models/inputoffice365msgtrace.py +56 -1
  57. cribl_control_plane/models/inputoffice365service.py +56 -1
  58. cribl_control_plane/models/inputopentelemetry.py +84 -16
  59. cribl_control_plane/models/inputprometheus.py +131 -37
  60. cribl_control_plane/models/inputprometheusrw.py +67 -17
  61. cribl_control_plane/models/inputrawudp.py +20 -1
  62. cribl_control_plane/models/inputs3.py +38 -1
  63. cribl_control_plane/models/inputs3inventory.py +47 -1
  64. cribl_control_plane/models/inputsecuritylake.py +47 -1
  65. cribl_control_plane/models/inputsnmp.py +29 -1
  66. cribl_control_plane/models/inputsplunk.py +76 -17
  67. cribl_control_plane/models/inputsplunkhec.py +66 -16
  68. cribl_control_plane/models/inputsplunksearch.py +56 -1
  69. cribl_control_plane/models/inputsqs.py +47 -1
  70. cribl_control_plane/models/inputsyslog.py +113 -32
  71. cribl_control_plane/models/inputsystemmetrics.py +110 -9
  72. cribl_control_plane/models/inputsystemstate.py +29 -1
  73. cribl_control_plane/models/inputtcp.py +77 -17
  74. cribl_control_plane/models/inputtcpjson.py +67 -17
  75. cribl_control_plane/models/inputwef.py +65 -1
  76. cribl_control_plane/models/inputwindowsmetrics.py +101 -9
  77. cribl_control_plane/models/inputwineventlogs.py +52 -1
  78. cribl_control_plane/models/inputwiz.py +38 -1
  79. cribl_control_plane/models/inputwizwebhook.py +58 -17
  80. cribl_control_plane/models/inputzscalerhec.py +66 -16
  81. cribl_control_plane/models/jobinfo.py +10 -4
  82. cribl_control_plane/models/jobstatus.py +34 -3
  83. cribl_control_plane/models/lakedatasetmetrics.py +17 -0
  84. cribl_control_plane/models/listconfiggroupbyproductop.py +11 -0
  85. cribl_control_plane/models/masterworkerentry.py +11 -1
  86. cribl_control_plane/models/nodeupgradestatus.py +38 -0
  87. cribl_control_plane/models/output.py +21 -21
  88. cribl_control_plane/models/outputazureblob.py +90 -1
  89. cribl_control_plane/models/outputazuredataexplorer.py +430 -93
  90. cribl_control_plane/models/outputazureeventhub.py +267 -22
  91. cribl_control_plane/models/outputazurelogs.py +105 -22
  92. cribl_control_plane/models/outputchronicle.py +105 -22
  93. cribl_control_plane/models/outputclickhouse.py +141 -22
  94. cribl_control_plane/models/outputcloudwatch.py +96 -22
  95. cribl_control_plane/models/outputconfluentcloud.py +292 -23
  96. cribl_control_plane/models/outputcriblhttp.py +123 -22
  97. cribl_control_plane/models/outputcribllake.py +76 -1
  98. cribl_control_plane/models/outputcribltcp.py +123 -22
  99. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +117 -23
  100. cribl_control_plane/models/outputdatabricks.py +76 -5
  101. cribl_control_plane/models/outputdatadog.py +132 -22
  102. cribl_control_plane/models/outputdataset.py +123 -22
  103. cribl_control_plane/models/outputdiskspool.py +11 -1
  104. cribl_control_plane/models/outputdls3.py +117 -1
  105. cribl_control_plane/models/outputdynatracehttp.py +141 -22
  106. cribl_control_plane/models/outputdynatraceotlp.py +141 -22
  107. cribl_control_plane/models/outputelastic.py +148 -22
  108. cribl_control_plane/models/outputelasticcloud.py +130 -22
  109. cribl_control_plane/models/outputexabeam.py +47 -1
  110. cribl_control_plane/models/outputfilesystem.py +72 -1
  111. cribl_control_plane/models/outputgooglechronicle.py +148 -23
  112. cribl_control_plane/models/outputgooglecloudlogging.py +115 -23
  113. cribl_control_plane/models/outputgooglecloudstorage.py +108 -1
  114. cribl_control_plane/models/outputgooglepubsub.py +96 -22
  115. cribl_control_plane/models/outputgrafanacloud.py +244 -43
  116. cribl_control_plane/models/outputgraphite.py +96 -22
  117. cribl_control_plane/models/outputhoneycomb.py +105 -22
  118. cribl_control_plane/models/outputhumiohec.py +114 -22
  119. cribl_control_plane/models/outputinfluxdb.py +114 -22
  120. cribl_control_plane/models/outputkafka.py +283 -20
  121. cribl_control_plane/models/outputkinesis.py +121 -22
  122. cribl_control_plane/models/outputloki.py +112 -20
  123. cribl_control_plane/models/outputminio.py +117 -1
  124. cribl_control_plane/models/outputmsk.py +175 -20
  125. cribl_control_plane/models/outputnewrelic.py +123 -22
  126. cribl_control_plane/models/outputnewrelicevents.py +115 -23
  127. cribl_control_plane/models/outputopentelemetry.py +159 -22
  128. cribl_control_plane/models/outputprometheus.py +105 -22
  129. cribl_control_plane/models/outputring.py +29 -1
  130. cribl_control_plane/models/outputs3.py +117 -1
  131. cribl_control_plane/models/outputsecuritylake.py +85 -1
  132. cribl_control_plane/models/outputsentinel.py +123 -22
  133. cribl_control_plane/models/outputsentineloneaisiem.py +124 -23
  134. cribl_control_plane/models/outputservicenow.py +150 -22
  135. cribl_control_plane/models/outputsignalfx.py +105 -22
  136. cribl_control_plane/models/outputsns.py +103 -20
  137. cribl_control_plane/models/outputsplunk.py +141 -22
  138. cribl_control_plane/models/outputsplunkhec.py +198 -22
  139. cribl_control_plane/models/outputsplunklb.py +170 -22
  140. cribl_control_plane/models/outputsqs.py +112 -20
  141. cribl_control_plane/models/outputstatsd.py +96 -22
  142. cribl_control_plane/models/outputstatsdext.py +96 -22
  143. cribl_control_plane/models/outputsumologic.py +105 -22
  144. cribl_control_plane/models/outputsyslog.py +238 -99
  145. cribl_control_plane/models/outputtcpjson.py +132 -22
  146. cribl_control_plane/models/outputwavefront.py +105 -22
  147. cribl_control_plane/models/outputwebhook.py +141 -22
  148. cribl_control_plane/models/outputxsiam.py +103 -20
  149. cribl_control_plane/models/resourcepolicy.py +11 -0
  150. cribl_control_plane/models/runnablejobcollection.py +68 -9
  151. cribl_control_plane/models/runnablejobexecutor.py +32 -9
  152. cribl_control_plane/models/runnablejobscheduledsearch.py +23 -9
  153. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +11 -0
  154. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +11 -0
  155. cribl_control_plane/sdk.py +2 -2
  156. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/METADATA +25 -7
  157. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/RECORD +158 -157
  158. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/WHEEL +0 -0
@@ -1,11 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import utils
+ from cribl_control_plane import models, utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -122,6 +123,17 @@ class OutputXsiamURL(BaseModel):
  r"""Assign a weight (>0) to each endpoint to indicate its traffic-handling capability"""
 
 
+ class OutputXsiamMode(str, Enum, metaclass=utils.OpenEnumMeta):
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+ 
+     # Error
+     ERROR = "error"
+     # Always On
+     ALWAYS = "always"
+     # Backpressure
+     BACKPRESSURE = "backpressure"
+ 
+ 
  class OutputXsiamCompression(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Codec to use to compress the persisted data"""
 
@@ -140,17 +152,6 @@ class OutputXsiamQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
      DROP = "drop"
 
 
- class OutputXsiamMode(str, Enum, metaclass=utils.OpenEnumMeta):
-     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
- 
-     # Error
-     ERROR = "error"
-     # Backpressure
-     BACKPRESSURE = "backpressure"
-     # Always On
-     ALWAYS = "always"
- 
- 
  class OutputXsiamPqControlsTypedDict(TypedDict):
      pass
 
@@ -223,6 +224,16 @@ class OutputXsiamTypedDict(TypedDict):
      r"""XSIAM authentication token"""
      text_secret: NotRequired[str]
      r"""Select or create a stored text secret"""
+     pq_strict_ordering: NotRequired[bool]
+     r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+     pq_rate_per_sec: NotRequired[float]
+     r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+     pq_mode: NotRequired[OutputXsiamMode]
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+     pq_max_buffer_size: NotRequired[float]
+     r"""The maximum number of events to hold in memory before writing the events to disk"""
+     pq_max_backpressure_sec: NotRequired[float]
+     r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
      pq_max_file_size: NotRequired[str]
      r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
      pq_max_size: NotRequired[str]
@@ -233,8 +244,6 @@
      r"""Codec to use to compress the persisted data"""
      pq_on_backpressure: NotRequired[OutputXsiamQueueFullBehavior]
      r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
-     pq_mode: NotRequired[OutputXsiamMode]
-     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
      pq_controls: NotRequired[OutputXsiamPqControlsTypedDict]
 
 
@@ -387,6 +396,32 @@ class OutputXsiam(BaseModel):
      text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
      r"""Select or create a stored text secret"""
 
+     pq_strict_ordering: Annotated[
+         Optional[bool], pydantic.Field(alias="pqStrictOrdering")
+     ] = True
+     r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+ 
+     pq_rate_per_sec: Annotated[
+         Optional[float], pydantic.Field(alias="pqRatePerSec")
+     ] = 0
+     r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+ 
+     pq_mode: Annotated[
+         Annotated[Optional[OutputXsiamMode], PlainValidator(validate_open_enum(False))],
+         pydantic.Field(alias="pqMode"),
+     ] = OutputXsiamMode.ERROR
+     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+ 
+     pq_max_buffer_size: Annotated[
+         Optional[float], pydantic.Field(alias="pqMaxBufferSize")
+     ] = 42
+     r"""The maximum number of events to hold in memory before writing the events to disk"""
+ 
+     pq_max_backpressure_sec: Annotated[
+         Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
+     ] = 30
+     r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
+ 
      pq_max_file_size: Annotated[
          Optional[str], pydantic.Field(alias="pqMaxFileSize")
      ] = "1 MB"
@@ -417,12 +452,60 @@
      ] = OutputXsiamQueueFullBehavior.BLOCK
      r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-     pq_mode: Annotated[
-         Annotated[Optional[OutputXsiamMode], PlainValidator(validate_open_enum(False))],
-         pydantic.Field(alias="pqMode"),
-     ] = OutputXsiamMode.ERROR
-     r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
- 
      pq_controls: Annotated[
          Optional[OutputXsiamPqControls], pydantic.Field(alias="pqControls")
      ] = None
+ 
+     @field_serializer("failed_request_logging_mode")
+     def serialize_failed_request_logging_mode(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.OutputXsiamFailedRequestLoggingMode(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("auth_type")
+     def serialize_auth_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.OutputXsiamAuthenticationMethod(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("on_backpressure")
+     def serialize_on_backpressure(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.OutputXsiamBackpressureBehavior(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("pq_mode")
+     def serialize_pq_mode(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.OutputXsiamMode(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("pq_compress")
+     def serialize_pq_compress(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.OutputXsiamCompression(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("pq_on_backpressure")
+     def serialize_pq_on_backpressure(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.OutputXsiamQueueFullBehavior(value)
+             except ValueError:
+                 return value
+         return value
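Taken together, the outputxsiam.py changes move the PQ mode enum above its first use, add five persistent-queue tuning fields, and bolt on field_serializer hooks so open-enum values round-trip cleanly. A hedged sketch of the new knobs as they would appear in an OutputXsiamTypedDict payload; keys, defaults, and meanings are taken from the diff above, and everything else about the destination is omitted:

# Hedged sketch: the five PQ fields added to OutputXsiamTypedDict in this
# release, shown with the defaults declared on the OutputXsiam model above.
pq_settings = {
    "pq_strict_ordering": True,     # FIFO; disable to forward new events before the queue drains
    "pq_rate_per_sec": 0,           # events/sec throttle while draining; 0 disables throttling
    "pq_mode": "error",             # engage PQ only when the Destination is unavailable
    "pq_max_buffer_size": 42,       # events held in memory before spilling to disk
    "pq_max_backpressure_sec": 30,  # seconds to wait for backpressure to resolve first
}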
@@ -2,8 +2,10 @@
 
  from __future__ import annotations
  from .rbacresource import RbacResource
+ from cribl_control_plane import models
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -24,3 +26,12 @@ class ResourcePolicy(BaseModel):
      type: Annotated[RbacResource, PlainValidator(validate_open_enum(False))]
 
      id: Optional[str] = None
+ 
+     @field_serializer("type")
+     def serialize_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RbacResource(value)
+             except ValueError:
+                 return value
+         return value
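resourcepolicy.py is the smallest instance of the serializer pattern repeated across this release: fields validated with validate_open_enum accept unknown strings, so a field_serializer coerces known values back to the enum on dump and passes anything else through unchanged. A self-contained sketch of that pattern using plain pydantic v2 and a stand-in Mode enum (not the SDK's own classes):

from enum import Enum
from typing import Optional

from pydantic import BaseModel, field_serializer


class Mode(str, Enum):  # stand-in for a generated open enum
    ERROR = "error"
    ALWAYS = "always"


class Settings(BaseModel):
    mode: Optional[str] = None  # typed loosely so unknown values survive validation

    @field_serializer("mode")
    def serialize_mode(self, value):
        if isinstance(value, str):
            try:
                return Mode(value)  # known value: emit the enum member
            except ValueError:
                return value        # unknown value: pass through unchanged
        return value


print(Settings(mode="error").model_dump())     # {'mode': <Mode.ERROR: 'error'>}
print(Settings(mode="surprise").model_dump())  # {'mode': 'surprise'}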
@@ -1,11 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import utils
+ from cribl_control_plane import models, utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -175,13 +176,14 @@ class RunnableJobCollectionScheduleTypedDict(TypedDict):
 
      enabled: NotRequired[bool]
      r"""Enable to configure scheduling for this Collector"""
+     skippable: NotRequired[bool]
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+     resume_missed: NotRequired[bool]
+     r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
      cron_schedule: NotRequired[str]
      r"""A cron schedule on which to run this job"""
      max_concurrent_runs: NotRequired[float]
      r"""The maximum number of instances of this scheduled job that may be running at any time"""
-     skippable: NotRequired[bool]
-     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
-     resume_missed: NotRequired[Any]
      run: NotRequired[RunnableJobCollectionRunSettingsTypedDict]
 
 
@@ -191,6 +193,14 @@ class RunnableJobCollectionSchedule(BaseModel):
      enabled: Optional[bool] = None
      r"""Enable to configure scheduling for this Collector"""
 
+     skippable: Optional[bool] = True
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+ 
+     resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
+         False
+     )
+     r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+ 
      cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
          "*/5 * * * *"
      )
@@ -201,11 +211,6 @@
      ] = 1
      r"""The maximum number of instances of this scheduled job that may be running at any time"""
 
-     skippable: Optional[bool] = True
-     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
- 
-     resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
- 
      run: Optional[RunnableJobCollectionRunSettings] = None
 
 
@@ -330,6 +335,15 @@ class RunnableJobCollectionInput(BaseModel):
      output: Optional[str] = None
      r"""Destination to send results to"""
 
+     @field_serializer("type")
+     def serialize_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.InputType(value)
+             except ValueError:
+                 return value
+         return value
+ 
 
  class RunnableJobCollectionLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
      r"""Level at which to set task logging"""
@@ -392,6 +406,15 @@ class CaptureSettings(BaseModel):
          Optional[WhereToCapture], PlainValidator(validate_open_enum(True))
      ] = WhereToCapture.ZERO
 
+     @field_serializer("level")
+     def serialize_level(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.WhereToCapture(value)
+             except ValueError:
+                 return value
+         return value
+ 
 
  class RunnableJobCollectionRunTypedDict(TypedDict):
      reschedule_dropped_tasks: NotRequired[bool]
@@ -533,6 +556,33 @@ class RunnableJobCollectionRun(BaseModel):
 
      capture: Optional[CaptureSettings] = None
 
+     @field_serializer("log_level")
+     def serialize_log_level(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RunnableJobCollectionLogLevel(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("mode")
+     def serialize_mode(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RunnableJobCollectionMode(value)
+             except ValueError:
+                 return value
+         return value
+ 
+     @field_serializer("time_range_type")
+     def serialize_time_range_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.TimeRange(value)
+             except ValueError:
+                 return value
+         return value
+ 
 
  class RunnableJobCollectionTypedDict(TypedDict):
      collector: CollectorTypedDict
@@ -608,3 +658,12 @@
      r"""If enabled, tasks are created and run by the same Worker Node"""
 
      input: Optional[RunnableJobCollectionInput] = None
+ 
+     @field_serializer("type")
+     def serialize_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RunnableJobCollectionJobType(value)
+             except ValueError:
+                 return value
+         return value
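Across runnablejobcollection.py and the runnablejobexecutor.py and runnablejobscheduledsearch.py diffs that follow, the same two schedule changes repeat: resume_missed is promoted from an untyped Any to a documented bool (alias resumeMissed, default False), and skippable moves up next to it. A hedged sketch of a schedule payload using the keys from RunnableJobCollectionScheduleTypedDict above:

# Keys and defaults mirror the TypedDict in this diff; values are examples.
schedule = {
    "enabled": True,
    "skippable": True,       # may slip to its next run time under concurrency limits
    "resume_missed": False,  # don't replay runs missed during a Leader restart
    "cron_schedule": "*/5 * * * *",
    "max_concurrent_runs": 1,
}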
@@ -1,11 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import utils
+ from cribl_control_plane import models, utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -173,13 +174,14 @@ class RunnableJobExecutorScheduleTypedDict(TypedDict):
 
      enabled: NotRequired[bool]
      r"""Enable to configure scheduling for this Collector"""
+     skippable: NotRequired[bool]
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+     resume_missed: NotRequired[bool]
+     r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
      cron_schedule: NotRequired[str]
      r"""A cron schedule on which to run this job"""
      max_concurrent_runs: NotRequired[float]
      r"""The maximum number of instances of this scheduled job that may be running at any time"""
-     skippable: NotRequired[bool]
-     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
-     resume_missed: NotRequired[Any]
      run: NotRequired[RunnableJobExecutorRunSettingsTypedDict]
 
 
@@ -189,6 +191,14 @@ class RunnableJobExecutorSchedule(BaseModel):
      enabled: Optional[bool] = None
      r"""Enable to configure scheduling for this Collector"""
 
+     skippable: Optional[bool] = True
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+ 
+     resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
+         False
+     )
+     r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+ 
      cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
          "*/5 * * * *"
      )
@@ -199,11 +209,6 @@
      ] = 1
      r"""The maximum number of instances of this scheduled job that may be running at any time"""
 
-     skippable: Optional[bool] = True
-     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
- 
-     resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
- 
      run: Optional[RunnableJobExecutorRunSettings] = None
 
 
@@ -279,6 +284,15 @@ class RunnableJobExecutorRun(BaseModel):
      job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
      r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
 
+     @field_serializer("log_level")
+     def serialize_log_level(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RunnableJobExecutorLogLevel(value)
+             except ValueError:
+                 return value
+         return value
+ 
 
  class RunnableJobExecutorTypedDict(TypedDict):
      executor: ExecutorTypedDict
@@ -343,3 +357,12 @@
 
      streamtags: Optional[List[str]] = None
      r"""Tags for filtering and grouping in @{product}"""
+ 
+     @field_serializer("type")
+     def serialize_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RunnableJobExecutorJobType(value)
+             except ValueError:
+                 return value
+         return value
@@ -1,11 +1,12 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import utils
+ from cribl_control_plane import models, utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -174,13 +175,14 @@ class RunnableJobScheduledSearchScheduleTypedDict(TypedDict):
 
      enabled: NotRequired[bool]
      r"""Enable to configure scheduling for this Collector"""
+     skippable: NotRequired[bool]
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+     resume_missed: NotRequired[bool]
+     r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
      cron_schedule: NotRequired[str]
      r"""A cron schedule on which to run this job"""
      max_concurrent_runs: NotRequired[float]
      r"""The maximum number of instances of this scheduled job that may be running at any time"""
-     skippable: NotRequired[bool]
-     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
-     resume_missed: NotRequired[Any]
      run: NotRequired[RunnableJobScheduledSearchRunSettingsTypedDict]
 
 
@@ -190,6 +192,14 @@ class RunnableJobScheduledSearchSchedule(BaseModel):
      enabled: Optional[bool] = None
      r"""Enable to configure scheduling for this Collector"""
 
+     skippable: Optional[bool] = True
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+ 
+     resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
+         False
+     )
+     r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+ 
      cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
          "*/5 * * * *"
      )
@@ -200,11 +210,6 @@
      ] = 1
      r"""The maximum number of instances of this scheduled job that may be running at any time"""
 
-     skippable: Optional[bool] = True
-     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
- 
-     resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
- 
      run: Optional[RunnableJobScheduledSearchRunSettings] = None
 
 
@@ -270,3 +275,12 @@
 
      streamtags: Optional[List[str]] = None
      r"""Tags for filtering and grouping in @{product}"""
+ 
+     @field_serializer("type")
+     def serialize_type(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.RunnableJobScheduledSearchJobType(value)
+             except ValueError:
+                 return value
+         return value
@@ -3,6 +3,7 @@
  from __future__ import annotations
  from .configgroup import ConfigGroup, ConfigGroupTypedDict
  from .productscore import ProductsCore
+ from cribl_control_plane import models
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import (
      FieldMetadata,
@@ -11,6 +12,7 @@ from cribl_control_plane.utils import (
      validate_open_enum,
  )
  import pydantic
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -45,6 +47,15 @@ class UpdateConfigGroupByProductAndIDRequest(BaseModel):
      ]
      r"""ConfigGroup object"""
 
+     @field_serializer("product")
+     def serialize_product(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.ProductsCore(value)
+             except ValueError:
+                 return value
+         return value
+ 
 
  class UpdateConfigGroupByProductAndIDResponseTypedDict(TypedDict):
      r"""a list of ConfigGroup objects"""
@@ -4,6 +4,7 @@ from __future__ import annotations
  from .configgroup import ConfigGroup, ConfigGroupTypedDict
  from .deployrequest import DeployRequest, DeployRequestTypedDict
  from .productscore import ProductsCore
+ from cribl_control_plane import models
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import (
      FieldMetadata,
@@ -11,6 +12,7 @@ from cribl_control_plane.utils import (
      RequestMetadata,
      validate_open_enum,
  )
+ from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -43,6 +45,15 @@ class UpdateConfigGroupDeployByProductAndIDRequest(BaseModel):
      ]
      r"""DeployRequest object"""
 
+     @field_serializer("product")
+     def serialize_product(self, value):
+         if isinstance(value, str):
+             try:
+                 return models.ProductsCore(value)
+             except ValueError:
+                 return value
+         return value
+ 
 
  class UpdateConfigGroupDeployByProductAndIDResponseTypedDict(TypedDict):
      r"""a list of ConfigGroup objects"""
@@ -41,12 +41,12 @@ class CriblControlPlane(BaseSDK):
      routes: "RoutesSDK"
      r"""Actions related to Routes"""
      auth: "AuthSDK"
-     nodes: "Nodes"
      health: "Health"
      r"""Actions related to REST server health"""
      packs: "Packs"
      r"""Actions related to Packs"""
      versions: "Versions"
+     nodes: "Nodes"
      groups: "GroupsSDK"
      r"""Actions related to Groups"""
      _sub_sdk_map = {
@@ -56,10 +56,10 @@
          "pipelines": ("cribl_control_plane.pipelines", "Pipelines"),
          "routes": ("cribl_control_plane.routes_sdk", "RoutesSDK"),
          "auth": ("cribl_control_plane.auth_sdk", "AuthSDK"),
-         "nodes": ("cribl_control_plane.nodes", "Nodes"),
          "health": ("cribl_control_plane.health", "Health"),
          "packs": ("cribl_control_plane.packs", "Packs"),
          "versions": ("cribl_control_plane.versions", "Versions"),
+         "nodes": ("cribl_control_plane.nodes", "Nodes"),
          "groups": ("cribl_control_plane.groups_sdk", "GroupsSDK"),
      }
 
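The sdk.py change only moves the nodes entry after versions, in both the attribute declarations and _sub_sdk_map; since the map is keyed by name, the ordering is cosmetic. For readers unfamiliar with the pattern, a speculative, self-contained sketch of how a map like this typically drives lazy sub-SDK loading (the generated __getattr__ may differ):

import importlib


class LazySDK:
    # attribute name -> (module path, class name); stand-in entry only
    _sub_sdk_map = {
        "decoder": ("json", "JSONDecoder"),
    }

    def __getattr__(self, name):
        # Called only when normal lookup fails; import on first access.
        if name in self._sub_sdk_map:
            module_path, class_name = self._sub_sdk_map[name]
            value = getattr(importlib.import_module(module_path), class_name)()
            setattr(self, name, value)  # cache so later lookups skip __getattr__
            return value
        raise AttributeError(name)


sdk = LazySDK()
print(type(sdk.decoder).__name__)  # JSONDecoder, imported on first access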
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: cribl-control-plane
- Version: 0.3.0b3
+ Version: 0.3.0b12
  Summary: Python Client SDK Generated by Speakeasy.
  Author: Speakeasy
  Requires-Python: >=3.9.2
@@ -149,7 +149,10 @@ with CriblControlPlane(
      "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
      "migration_query_id": "<id>",
      "retention_in_days": 1466.58,
- }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+ }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, metrics={
+     "current_size_bytes": 6170.04,
+     "metrics_date": "<value>",
+ }, retention_period_in_days=456.37, search_config={
      "datatypes": [
          "<value 1>",
      ],
@@ -207,7 +210,10 @@ async def main():
      "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
      "migration_query_id": "<id>",
      "retention_in_days": 1466.58,
- }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+ }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, metrics={
+     "current_size_bytes": 6170.04,
+     "metrics_date": "<value>",
+ }, retention_period_in_days=456.37, search_config={
      "datatypes": [
          "<value 1>",
      ],
@@ -274,7 +280,10 @@ with CriblControlPlane(
      "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
      "migration_query_id": "<id>",
      "retention_in_days": 1466.58,
- }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+ }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, metrics={
+     "current_size_bytes": 6170.04,
+     "metrics_date": "<value>",
+ }, retention_period_in_days=456.37, search_config={
      "datatypes": [
          "<value 1>",
      ],
@@ -502,7 +511,10 @@
      "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
      "migration_query_id": "<id>",
      "retention_in_days": 1466.58,
- }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+ }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, metrics={
+     "current_size_bytes": 6170.04,
+     "metrics_date": "<value>",
+ }, retention_period_in_days=456.37, search_config={
      "datatypes": [
          "<value 1>",
      ],
@@ -558,7 +570,10 @@
      "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
      "migration_query_id": "<id>",
      "retention_in_days": 1466.58,
- }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+ }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, metrics={
+     "current_size_bytes": 6170.04,
+     "metrics_date": "<value>",
+ }, retention_period_in_days=456.37, search_config={
      "datatypes": [
          "<value 1>",
      ],
@@ -628,7 +643,10 @@
      "lakehouse_connection_type": models.LakehouseConnectionType.CACHE,
      "migration_query_id": "<id>",
      "retention_in_days": 1466.58,
- }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, retention_period_in_days=456.37, search_config={
+ }, deletion_started_at=8310.58, description="pleased toothbrush long brush smooth swiftly rightfully phooey chapel", format_=models.CriblLakeDatasetFormat.DDSS, http_da_used=True, metrics={
+     "current_size_bytes": 6170.04,
+     "metrics_date": "<value>",
+ }, retention_period_in_days=456.37, search_config={
      "datatypes": [
          "<value 1>",
      ],
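The METADATA diffs above all make the same edit: the README's Lake dataset examples now pass a metrics block, backed by the new models/lakedatasetmetrics.py in the file list. Pulled out of the larger snippets, the new argument is just:

# Keys as they appear in the updated README examples; "<value>" is the
# generator's placeholder and is left as-is here.
metrics = {
    "current_size_bytes": 6170.04,
    "metrics_date": "<value>",
}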