cribl-control-plane 0.3.0b3__py3-none-any.whl → 0.3.0b12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of cribl-control-plane has been flagged as potentially problematic.

Files changed (158)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/groups_sdk.py +2 -2
  3. cribl_control_plane/lakedatasets.py +28 -0
  4. cribl_control_plane/models/__init__.py +124 -5
  5. cribl_control_plane/models/cacheconnection.py +20 -0
  6. cribl_control_plane/models/configgroup.py +20 -1
  7. cribl_control_plane/models/configgroupcloud.py +11 -1
  8. cribl_control_plane/models/createconfiggroupbyproductop.py +13 -2
  9. cribl_control_plane/models/cribllakedataset.py +15 -1
  10. cribl_control_plane/models/cribllakedatasetupdate.py +15 -1
  11. cribl_control_plane/models/datasetmetadata.py +11 -1
  12. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +11 -0
  13. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +20 -0
  14. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +20 -0
  15. cribl_control_plane/models/getconfiggroupbyproductandidop.py +11 -0
  16. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +11 -0
  17. cribl_control_plane/models/getsummaryop.py +11 -0
  18. cribl_control_plane/models/groupcreaterequest.py +20 -1
  19. cribl_control_plane/models/hbcriblinfo.py +11 -1
  20. cribl_control_plane/models/healthserverstatus.py +20 -1
  21. cribl_control_plane/models/input.py +15 -15
  22. cribl_control_plane/models/inputappscope.py +76 -17
  23. cribl_control_plane/models/inputazureblob.py +29 -1
  24. cribl_control_plane/models/inputcollection.py +20 -1
  25. cribl_control_plane/models/inputconfluentcloud.py +188 -1
  26. cribl_control_plane/models/inputcribl.py +20 -1
  27. cribl_control_plane/models/inputcriblhttp.py +58 -17
  28. cribl_control_plane/models/inputcribllakehttp.py +58 -17
  29. cribl_control_plane/models/inputcriblmetrics.py +20 -1
  30. cribl_control_plane/models/inputcribltcp.py +58 -17
  31. cribl_control_plane/models/inputcrowdstrike.py +47 -1
  32. cribl_control_plane/models/inputdatadogagent.py +58 -17
  33. cribl_control_plane/models/inputdatagen.py +20 -1
  34. cribl_control_plane/models/inputedgeprometheus.py +138 -37
  35. cribl_control_plane/models/inputelastic.py +108 -27
  36. cribl_control_plane/models/inputeventhub.py +176 -1
  37. cribl_control_plane/models/inputexec.py +29 -1
  38. cribl_control_plane/models/inputfile.py +40 -7
  39. cribl_control_plane/models/inputfirehose.py +58 -17
  40. cribl_control_plane/models/inputgooglepubsub.py +29 -1
  41. cribl_control_plane/models/inputgrafana.py +149 -32
  42. cribl_control_plane/models/inputhttp.py +58 -17
  43. cribl_control_plane/models/inputhttpraw.py +58 -17
  44. cribl_control_plane/models/inputjournalfiles.py +20 -1
  45. cribl_control_plane/models/inputkafka.py +182 -1
  46. cribl_control_plane/models/inputkinesis.py +65 -1
  47. cribl_control_plane/models/inputkubeevents.py +20 -1
  48. cribl_control_plane/models/inputkubelogs.py +29 -1
  49. cribl_control_plane/models/inputkubemetrics.py +29 -1
  50. cribl_control_plane/models/inputloki.py +67 -17
  51. cribl_control_plane/models/inputmetrics.py +58 -17
  52. cribl_control_plane/models/inputmodeldriventelemetry.py +58 -17
  53. cribl_control_plane/models/inputmsk.py +74 -1
  54. cribl_control_plane/models/inputnetflow.py +20 -1
  55. cribl_control_plane/models/inputoffice365mgmt.py +56 -1
  56. cribl_control_plane/models/inputoffice365msgtrace.py +56 -1
  57. cribl_control_plane/models/inputoffice365service.py +56 -1
  58. cribl_control_plane/models/inputopentelemetry.py +84 -16
  59. cribl_control_plane/models/inputprometheus.py +131 -37
  60. cribl_control_plane/models/inputprometheusrw.py +67 -17
  61. cribl_control_plane/models/inputrawudp.py +20 -1
  62. cribl_control_plane/models/inputs3.py +38 -1
  63. cribl_control_plane/models/inputs3inventory.py +47 -1
  64. cribl_control_plane/models/inputsecuritylake.py +47 -1
  65. cribl_control_plane/models/inputsnmp.py +29 -1
  66. cribl_control_plane/models/inputsplunk.py +76 -17
  67. cribl_control_plane/models/inputsplunkhec.py +66 -16
  68. cribl_control_plane/models/inputsplunksearch.py +56 -1
  69. cribl_control_plane/models/inputsqs.py +47 -1
  70. cribl_control_plane/models/inputsyslog.py +113 -32
  71. cribl_control_plane/models/inputsystemmetrics.py +110 -9
  72. cribl_control_plane/models/inputsystemstate.py +29 -1
  73. cribl_control_plane/models/inputtcp.py +77 -17
  74. cribl_control_plane/models/inputtcpjson.py +67 -17
  75. cribl_control_plane/models/inputwef.py +65 -1
  76. cribl_control_plane/models/inputwindowsmetrics.py +101 -9
  77. cribl_control_plane/models/inputwineventlogs.py +52 -1
  78. cribl_control_plane/models/inputwiz.py +38 -1
  79. cribl_control_plane/models/inputwizwebhook.py +58 -17
  80. cribl_control_plane/models/inputzscalerhec.py +66 -16
  81. cribl_control_plane/models/jobinfo.py +10 -4
  82. cribl_control_plane/models/jobstatus.py +34 -3
  83. cribl_control_plane/models/lakedatasetmetrics.py +17 -0
  84. cribl_control_plane/models/listconfiggroupbyproductop.py +11 -0
  85. cribl_control_plane/models/masterworkerentry.py +11 -1
  86. cribl_control_plane/models/nodeupgradestatus.py +38 -0
  87. cribl_control_plane/models/output.py +21 -21
  88. cribl_control_plane/models/outputazureblob.py +90 -1
  89. cribl_control_plane/models/outputazuredataexplorer.py +430 -93
  90. cribl_control_plane/models/outputazureeventhub.py +267 -22
  91. cribl_control_plane/models/outputazurelogs.py +105 -22
  92. cribl_control_plane/models/outputchronicle.py +105 -22
  93. cribl_control_plane/models/outputclickhouse.py +141 -22
  94. cribl_control_plane/models/outputcloudwatch.py +96 -22
  95. cribl_control_plane/models/outputconfluentcloud.py +292 -23
  96. cribl_control_plane/models/outputcriblhttp.py +123 -22
  97. cribl_control_plane/models/outputcribllake.py +76 -1
  98. cribl_control_plane/models/outputcribltcp.py +123 -22
  99. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +117 -23
  100. cribl_control_plane/models/outputdatabricks.py +76 -5
  101. cribl_control_plane/models/outputdatadog.py +132 -22
  102. cribl_control_plane/models/outputdataset.py +123 -22
  103. cribl_control_plane/models/outputdiskspool.py +11 -1
  104. cribl_control_plane/models/outputdls3.py +117 -1
  105. cribl_control_plane/models/outputdynatracehttp.py +141 -22
  106. cribl_control_plane/models/outputdynatraceotlp.py +141 -22
  107. cribl_control_plane/models/outputelastic.py +148 -22
  108. cribl_control_plane/models/outputelasticcloud.py +130 -22
  109. cribl_control_plane/models/outputexabeam.py +47 -1
  110. cribl_control_plane/models/outputfilesystem.py +72 -1
  111. cribl_control_plane/models/outputgooglechronicle.py +148 -23
  112. cribl_control_plane/models/outputgooglecloudlogging.py +115 -23
  113. cribl_control_plane/models/outputgooglecloudstorage.py +108 -1
  114. cribl_control_plane/models/outputgooglepubsub.py +96 -22
  115. cribl_control_plane/models/outputgrafanacloud.py +244 -43
  116. cribl_control_plane/models/outputgraphite.py +96 -22
  117. cribl_control_plane/models/outputhoneycomb.py +105 -22
  118. cribl_control_plane/models/outputhumiohec.py +114 -22
  119. cribl_control_plane/models/outputinfluxdb.py +114 -22
  120. cribl_control_plane/models/outputkafka.py +283 -20
  121. cribl_control_plane/models/outputkinesis.py +121 -22
  122. cribl_control_plane/models/outputloki.py +112 -20
  123. cribl_control_plane/models/outputminio.py +117 -1
  124. cribl_control_plane/models/outputmsk.py +175 -20
  125. cribl_control_plane/models/outputnewrelic.py +123 -22
  126. cribl_control_plane/models/outputnewrelicevents.py +115 -23
  127. cribl_control_plane/models/outputopentelemetry.py +159 -22
  128. cribl_control_plane/models/outputprometheus.py +105 -22
  129. cribl_control_plane/models/outputring.py +29 -1
  130. cribl_control_plane/models/outputs3.py +117 -1
  131. cribl_control_plane/models/outputsecuritylake.py +85 -1
  132. cribl_control_plane/models/outputsentinel.py +123 -22
  133. cribl_control_plane/models/outputsentineloneaisiem.py +124 -23
  134. cribl_control_plane/models/outputservicenow.py +150 -22
  135. cribl_control_plane/models/outputsignalfx.py +105 -22
  136. cribl_control_plane/models/outputsns.py +103 -20
  137. cribl_control_plane/models/outputsplunk.py +141 -22
  138. cribl_control_plane/models/outputsplunkhec.py +198 -22
  139. cribl_control_plane/models/outputsplunklb.py +170 -22
  140. cribl_control_plane/models/outputsqs.py +112 -20
  141. cribl_control_plane/models/outputstatsd.py +96 -22
  142. cribl_control_plane/models/outputstatsdext.py +96 -22
  143. cribl_control_plane/models/outputsumologic.py +105 -22
  144. cribl_control_plane/models/outputsyslog.py +238 -99
  145. cribl_control_plane/models/outputtcpjson.py +132 -22
  146. cribl_control_plane/models/outputwavefront.py +105 -22
  147. cribl_control_plane/models/outputwebhook.py +141 -22
  148. cribl_control_plane/models/outputxsiam.py +103 -20
  149. cribl_control_plane/models/resourcepolicy.py +11 -0
  150. cribl_control_plane/models/runnablejobcollection.py +68 -9
  151. cribl_control_plane/models/runnablejobexecutor.py +32 -9
  152. cribl_control_plane/models/runnablejobscheduledsearch.py +23 -9
  153. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +11 -0
  154. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +11 -0
  155. cribl_control_plane/sdk.py +2 -2
  156. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/METADATA +25 -7
  157. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/RECORD +158 -157
  158. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/WHEEL +0 -0
cribl_control_plane/models/outputdatadog.py

@@ -1,11 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -158,6 +159,17 @@ class OutputDatadogAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta)
     SECRET = "secret"
 
 
+class OutputDatadogMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    # Error
+    ERROR = "error"
+    # Always On
+    ALWAYS = "always"
+    # Backpressure
+    BACKPRESSURE = "backpressure"
+
+
 class OutputDatadogCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
@@ -176,17 +188,6 @@ class OutputDatadogQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta)
     DROP = "drop"
 
 
-class OutputDatadogMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    # Error
-    ERROR = "error"
-    # Backpressure
-    BACKPRESSURE = "backpressure"
-    # Always On
-    ALWAYS = "always"
-
-
 class OutputDatadogPqControlsTypedDict(TypedDict):
     pass
 
@@ -269,6 +270,16 @@ class OutputDatadogTypedDict(TypedDict):
     r"""Maximum total size of the batches waiting to be sent. If left blank, defaults to 5 times the max body size (if set). If 0, no limit is enforced."""
     description: NotRequired[str]
     custom_url: NotRequired[str]
+    pq_strict_ordering: NotRequired[bool]
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+    pq_rate_per_sec: NotRequired[float]
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+    pq_mode: NotRequired[OutputDatadogMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+    pq_max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    pq_max_backpressure_sec: NotRequired[float]
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -279,8 +290,6 @@ class OutputDatadogTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputDatadogQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
-    pq_mode: NotRequired[OutputDatadogMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputDatadogPqControlsTypedDict]
     api_key: NotRequired[str]
     r"""Organization's API key in Datadog"""
@@ -452,6 +461,34 @@ class OutputDatadog(BaseModel):
 
     custom_url: Annotated[Optional[str], pydantic.Field(alias="customUrl")] = None
 
+    pq_strict_ordering: Annotated[
+        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
+    ] = True
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+
+    pq_rate_per_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqRatePerSec")
+    ] = 0
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+
+    pq_mode: Annotated[
+        Annotated[
+            Optional[OutputDatadogMode], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputDatadogMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    pq_max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
+    ] = 42
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    pq_max_backpressure_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
+    ] = 30
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
+
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -483,14 +520,6 @@ class OutputDatadog(BaseModel):
     ] = OutputDatadogQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    pq_mode: Annotated[
-        Annotated[
-            Optional[OutputDatadogMode], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputDatadogMode.ERROR
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
     pq_controls: Annotated[
         Optional[OutputDatadogPqControls], pydantic.Field(alias="pqControls")
     ] = None
@@ -500,3 +529,84 @@ class OutputDatadog(BaseModel):
 
     text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
     r"""Select or create a stored text secret"""
+
+    @field_serializer("content_type")
+    def serialize_content_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.SendLogsAs(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("severity")
+    def serialize_severity(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogSeverity(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("site")
+    def serialize_site(self, value):
+        if isinstance(value, str):
+            try:
+                return models.DatadogSite(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("failed_request_logging_mode")
+    def serialize_failed_request_logging_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogFailedRequestLoggingMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("on_backpressure")
+    def serialize_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogBackpressureBehavior(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("auth_type")
+    def serialize_auth_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogAuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_mode")
+    def serialize_pq_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_compress")
+    def serialize_pq_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogCompression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_on_backpressure")
+    def serialize_pq_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatadogQueueFullBehavior(value)
+            except ValueError:
+                return value
+        return value
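The hunks above repeat one pattern per enum field: open enums (utils.OpenEnumMeta plus validate_open_enum) accept unknown strings at validation time, and the new field_serializer hooks coerce known strings back to enum members at serialization time, letting unrecognized values pass through rather than raising. Below is a minimal, self-contained sketch of that round trip; it assumes only pydantic v2, and PqMode and lenient() are illustrative stand-ins, not the SDK's actual OpenEnumMeta / validate_open_enum helpers.

# Sketch of the open-enum round trip (assumes pydantic v2; PqMode and
# lenient() are illustrative stand-ins, not the SDK's actual helpers).
from enum import Enum
from typing import Optional, Union

import pydantic
from pydantic import field_serializer
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class PqMode(str, Enum):
    ERROR = "error"
    ALWAYS = "always"
    BACKPRESSURE = "backpressure"


def lenient(value):
    # Validation half: coerce known strings to enum members,
    # let unknown strings through instead of raising.
    try:
        return PqMode(value)
    except ValueError:
        return value


class Output(pydantic.BaseModel):
    pq_mode: Annotated[
        Optional[Union[PqMode, str]],
        PlainValidator(lenient),
        pydantic.Field(alias="pqMode"),
    ] = PqMode.ERROR

    @field_serializer("pq_mode")
    def serialize_pq_mode(self, value):
        # Serialization half: mirror the validator, so a known value
        # serializes canonically and an unknown one survives unchanged.
        if isinstance(value, str):
            try:
                return PqMode(value)
            except ValueError:
                return value
        return value


print(Output(pqMode="backpressure").model_dump_json(by_alias=True))
# {"pqMode":"backpressure"}  -- known value, emitted as the enum's string
print(Output(pqMode="future-mode").model_dump_json(by_alias=True))
# {"pqMode":"future-mode"}  -- unknown value passes through instead of erroring

The apparent intent is forward compatibility: configurations produced by a newer server, with enum values this SDK release does not know about, still validate and re-serialize unchanged.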
cribl_control_plane/models/outputdataset.py

@@ -1,11 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -139,6 +140,17 @@ class OutputDatasetAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta)
     SECRET = "secret"
 
 
+class OutputDatasetMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    # Error
+    ERROR = "error"
+    # Always On
+    ALWAYS = "always"
+    # Backpressure
+    BACKPRESSURE = "backpressure"
+
+
 class OutputDatasetCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
@@ -157,17 +169,6 @@ class OutputDatasetQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta)
     DROP = "drop"
 
 
-class OutputDatasetMode(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    # Error
-    ERROR = "error"
-    # Backpressure
-    BACKPRESSURE = "backpressure"
-    # Always On
-    ALWAYS = "always"
-
-
 class OutputDatasetPqControlsTypedDict(TypedDict):
     pass
 
@@ -240,6 +241,16 @@ class OutputDatasetTypedDict(TypedDict):
     r"""Maximum total size of the batches waiting to be sent. If left blank, defaults to 5 times the max body size (if set). If 0, no limit is enforced."""
     description: NotRequired[str]
     custom_url: NotRequired[str]
+    pq_strict_ordering: NotRequired[bool]
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+    pq_rate_per_sec: NotRequired[float]
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+    pq_mode: NotRequired[OutputDatasetMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+    pq_max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    pq_max_backpressure_sec: NotRequired[float]
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -250,8 +261,6 @@ class OutputDatasetTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputDatasetQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
-    pq_mode: NotRequired[OutputDatasetMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputDatasetPqControlsTypedDict]
     api_key: NotRequired[str]
     r"""A 'Log Write Access' API key for the DataSet account"""
@@ -410,6 +419,34 @@ class OutputDataset(BaseModel):
 
     custom_url: Annotated[Optional[str], pydantic.Field(alias="customUrl")] = None
 
+    pq_strict_ordering: Annotated[
+        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
+    ] = True
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+
+    pq_rate_per_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqRatePerSec")
+    ] = 0
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+
+    pq_mode: Annotated[
+        Annotated[
+            Optional[OutputDatasetMode], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputDatasetMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    pq_max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
+    ] = 42
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    pq_max_backpressure_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
+    ] = 30
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
+
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -441,14 +478,6 @@ class OutputDataset(BaseModel):
     ] = OutputDatasetQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    pq_mode: Annotated[
-        Annotated[
-            Optional[OutputDatasetMode], PlainValidator(validate_open_enum(False))
-        ],
-        pydantic.Field(alias="pqMode"),
-    ] = OutputDatasetMode.ERROR
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
     pq_controls: Annotated[
         Optional[OutputDatasetPqControls], pydantic.Field(alias="pqControls")
     ] = None
@@ -458,3 +487,75 @@ class OutputDataset(BaseModel):
 
     text_secret: Annotated[Optional[str], pydantic.Field(alias="textSecret")] = None
     r"""Select or create a stored text secret"""
+
+    @field_serializer("default_severity")
+    def serialize_default_severity(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetSeverity(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("site")
+    def serialize_site(self, value):
+        if isinstance(value, str):
+            try:
+                return models.DataSetSite(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("failed_request_logging_mode")
+    def serialize_failed_request_logging_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetFailedRequestLoggingMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("on_backpressure")
+    def serialize_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetBackpressureBehavior(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("auth_type")
+    def serialize_auth_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetAuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_mode")
+    def serialize_pq_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_compress")
+    def serialize_pq_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetCompression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_on_backpressure")
+    def serialize_pq_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDatasetQueueFullBehavior(value)
+            except ValueError:
+                return value
+        return value
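Both files add the same persistent-queue knobs with the same defaults (pqStrictOrdering=True, pqRatePerSec=0, pqMaxBufferSize=42, pqMaxBackpressureSec=30). The snake_case attributes map to camelCase wire keys via pydantic aliases; a small sketch follows, using an illustrative standalone model rather than the SDK's OutputDataset class.

# Sketch of the new PQ fields and their camelCase aliases, mirroring the
# defaults visible in the hunks above (illustrative model, not the SDK's).
from typing import Optional

import pydantic
from typing_extensions import Annotated


class PqSettings(pydantic.BaseModel):
    pq_strict_ordering: Annotated[
        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
    ] = True  # FIFO by default; disable to let new events bypass the flush
    pq_rate_per_sec: Annotated[
        Optional[float], pydantic.Field(alias="pqRatePerSec")
    ] = 0  # 0 disables throttling
    pq_max_buffer_size: Annotated[
        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
    ] = 42  # events held in memory before spilling to disk
    pq_max_backpressure_sec: Annotated[
        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
    ] = 30  # seconds to wait for backpressure to clear before queueing


# Dumping by alias produces the camelCase keys used on the wire:
print(PqSettings().model_dump(by_alias=True))
# {'pqStrictOrdering': True, 'pqRatePerSec': 0, 'pqMaxBufferSize': 42,
#  'pqMaxBackpressureSec': 30}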
cribl_control_plane/models/outputdiskspool.py

@@ -1,11 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -87,3 +88,12 @@ class OutputDiskSpool(BaseModel):
     r"""JavaScript expression defining how files are partitioned and organized within the time-buckets. If blank, the event's __partition property is used and otherwise, events go directly into the time-bucket directory."""
 
     description: Optional[str] = None
+
+    @field_serializer("compress")
+    def serialize_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDiskSpoolCompression(value)
+            except ValueError:
+                return value
+        return value
cribl_control_plane/models/outputdls3.py

@@ -1,11 +1,12 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
-from cribl_control_plane import utils
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
 from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
 from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
@@ -256,6 +257,8 @@ class OutputDlS3TypedDict(TypedDict):
     r"""Compression level to apply before moving files to final destination"""
     automatic_schema: NotRequired[bool]
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""
+    parquet_schema: NotRequired[str]
+    r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
     parquet_version: NotRequired[OutputDlS3ParquetVersion]
     r"""Determines which data types are supported and how they are represented"""
     parquet_data_page_version: NotRequired[OutputDlS3DataPageVersion]
@@ -525,6 +528,11 @@ class OutputDlS3(BaseModel):
     ] = False
     r"""Automatically calculate the schema based on the events of each Parquet file generated"""
 
+    parquet_schema: Annotated[Optional[str], pydantic.Field(alias="parquetSchema")] = (
+        None
+    )
+    r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
+
     parquet_version: Annotated[
         Annotated[
             Optional[OutputDlS3ParquetVersion],
@@ -591,3 +599,111 @@ class OutputDlS3(BaseModel):
 
     max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
     r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
+
+    @field_serializer("aws_authentication_method")
+    def serialize_aws_authentication_method(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3AuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("signature_version")
+    def serialize_signature_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3SignatureVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("object_acl")
+    def serialize_object_acl(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3ObjectACL(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("storage_class")
+    def serialize_storage_class(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3StorageClass(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("server_side_encryption")
+    def serialize_server_side_encryption(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3ServerSideEncryptionForUploadedObjects(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("format_")
+    def serialize_format_(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3DataFormat(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("on_backpressure")
+    def serialize_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3BackpressureBehavior(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("on_disk_full_backpressure")
+    def serialize_on_disk_full_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3DiskSpaceProtection(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("compress")
+    def serialize_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3Compression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("compression_level")
+    def serialize_compression_level(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3CompressionLevel(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("parquet_version")
+    def serialize_parquet_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3ParquetVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("parquet_data_page_version")
+    def serialize_parquet_data_page_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputDlS3DataPageVersion(value)
+            except ValueError:
+                return value
+        return value
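Across all four files the serializer bodies are identical except for the target enum class. Generated code favors this expanded per-field form; a hand-written equivalent could factor the body into one shared helper, sketched below under the same pydantic v2 assumption (Compression and DiskSpool here are illustrative, not the SDK's classes).

# Sketch: factoring the repeated try/except body into a shared helper.
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel, field_serializer


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


def coerce(enum_cls, value):
    # Shared body: known strings become enum members, unknown pass through.
    if isinstance(value, str):
        try:
            return enum_cls(value)
        except ValueError:
            return value
    return value


class DiskSpool(BaseModel):
    compress: Optional[Union[Compression, str]] = Compression.GZIP

    @field_serializer("compress")
    def serialize_compress(self, value):
        return coerce(Compression, value)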