cribl-control-plane 0.3.0b3__py3-none-any.whl → 0.3.0b12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic; see the registry advisory for more details.

Files changed (158)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/groups_sdk.py +2 -2
  3. cribl_control_plane/lakedatasets.py +28 -0
  4. cribl_control_plane/models/__init__.py +124 -5
  5. cribl_control_plane/models/cacheconnection.py +20 -0
  6. cribl_control_plane/models/configgroup.py +20 -1
  7. cribl_control_plane/models/configgroupcloud.py +11 -1
  8. cribl_control_plane/models/createconfiggroupbyproductop.py +13 -2
  9. cribl_control_plane/models/cribllakedataset.py +15 -1
  10. cribl_control_plane/models/cribllakedatasetupdate.py +15 -1
  11. cribl_control_plane/models/datasetmetadata.py +11 -1
  12. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +11 -0
  13. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +20 -0
  14. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +20 -0
  15. cribl_control_plane/models/getconfiggroupbyproductandidop.py +11 -0
  16. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +11 -0
  17. cribl_control_plane/models/getsummaryop.py +11 -0
  18. cribl_control_plane/models/groupcreaterequest.py +20 -1
  19. cribl_control_plane/models/hbcriblinfo.py +11 -1
  20. cribl_control_plane/models/healthserverstatus.py +20 -1
  21. cribl_control_plane/models/input.py +15 -15
  22. cribl_control_plane/models/inputappscope.py +76 -17
  23. cribl_control_plane/models/inputazureblob.py +29 -1
  24. cribl_control_plane/models/inputcollection.py +20 -1
  25. cribl_control_plane/models/inputconfluentcloud.py +188 -1
  26. cribl_control_plane/models/inputcribl.py +20 -1
  27. cribl_control_plane/models/inputcriblhttp.py +58 -17
  28. cribl_control_plane/models/inputcribllakehttp.py +58 -17
  29. cribl_control_plane/models/inputcriblmetrics.py +20 -1
  30. cribl_control_plane/models/inputcribltcp.py +58 -17
  31. cribl_control_plane/models/inputcrowdstrike.py +47 -1
  32. cribl_control_plane/models/inputdatadogagent.py +58 -17
  33. cribl_control_plane/models/inputdatagen.py +20 -1
  34. cribl_control_plane/models/inputedgeprometheus.py +138 -37
  35. cribl_control_plane/models/inputelastic.py +108 -27
  36. cribl_control_plane/models/inputeventhub.py +176 -1
  37. cribl_control_plane/models/inputexec.py +29 -1
  38. cribl_control_plane/models/inputfile.py +40 -7
  39. cribl_control_plane/models/inputfirehose.py +58 -17
  40. cribl_control_plane/models/inputgooglepubsub.py +29 -1
  41. cribl_control_plane/models/inputgrafana.py +149 -32
  42. cribl_control_plane/models/inputhttp.py +58 -17
  43. cribl_control_plane/models/inputhttpraw.py +58 -17
  44. cribl_control_plane/models/inputjournalfiles.py +20 -1
  45. cribl_control_plane/models/inputkafka.py +182 -1
  46. cribl_control_plane/models/inputkinesis.py +65 -1
  47. cribl_control_plane/models/inputkubeevents.py +20 -1
  48. cribl_control_plane/models/inputkubelogs.py +29 -1
  49. cribl_control_plane/models/inputkubemetrics.py +29 -1
  50. cribl_control_plane/models/inputloki.py +67 -17
  51. cribl_control_plane/models/inputmetrics.py +58 -17
  52. cribl_control_plane/models/inputmodeldriventelemetry.py +58 -17
  53. cribl_control_plane/models/inputmsk.py +74 -1
  54. cribl_control_plane/models/inputnetflow.py +20 -1
  55. cribl_control_plane/models/inputoffice365mgmt.py +56 -1
  56. cribl_control_plane/models/inputoffice365msgtrace.py +56 -1
  57. cribl_control_plane/models/inputoffice365service.py +56 -1
  58. cribl_control_plane/models/inputopentelemetry.py +84 -16
  59. cribl_control_plane/models/inputprometheus.py +131 -37
  60. cribl_control_plane/models/inputprometheusrw.py +67 -17
  61. cribl_control_plane/models/inputrawudp.py +20 -1
  62. cribl_control_plane/models/inputs3.py +38 -1
  63. cribl_control_plane/models/inputs3inventory.py +47 -1
  64. cribl_control_plane/models/inputsecuritylake.py +47 -1
  65. cribl_control_plane/models/inputsnmp.py +29 -1
  66. cribl_control_plane/models/inputsplunk.py +76 -17
  67. cribl_control_plane/models/inputsplunkhec.py +66 -16
  68. cribl_control_plane/models/inputsplunksearch.py +56 -1
  69. cribl_control_plane/models/inputsqs.py +47 -1
  70. cribl_control_plane/models/inputsyslog.py +113 -32
  71. cribl_control_plane/models/inputsystemmetrics.py +110 -9
  72. cribl_control_plane/models/inputsystemstate.py +29 -1
  73. cribl_control_plane/models/inputtcp.py +77 -17
  74. cribl_control_plane/models/inputtcpjson.py +67 -17
  75. cribl_control_plane/models/inputwef.py +65 -1
  76. cribl_control_plane/models/inputwindowsmetrics.py +101 -9
  77. cribl_control_plane/models/inputwineventlogs.py +52 -1
  78. cribl_control_plane/models/inputwiz.py +38 -1
  79. cribl_control_plane/models/inputwizwebhook.py +58 -17
  80. cribl_control_plane/models/inputzscalerhec.py +66 -16
  81. cribl_control_plane/models/jobinfo.py +10 -4
  82. cribl_control_plane/models/jobstatus.py +34 -3
  83. cribl_control_plane/models/lakedatasetmetrics.py +17 -0
  84. cribl_control_plane/models/listconfiggroupbyproductop.py +11 -0
  85. cribl_control_plane/models/masterworkerentry.py +11 -1
  86. cribl_control_plane/models/nodeupgradestatus.py +38 -0
  87. cribl_control_plane/models/output.py +21 -21
  88. cribl_control_plane/models/outputazureblob.py +90 -1
  89. cribl_control_plane/models/outputazuredataexplorer.py +430 -93
  90. cribl_control_plane/models/outputazureeventhub.py +267 -22
  91. cribl_control_plane/models/outputazurelogs.py +105 -22
  92. cribl_control_plane/models/outputchronicle.py +105 -22
  93. cribl_control_plane/models/outputclickhouse.py +141 -22
  94. cribl_control_plane/models/outputcloudwatch.py +96 -22
  95. cribl_control_plane/models/outputconfluentcloud.py +292 -23
  96. cribl_control_plane/models/outputcriblhttp.py +123 -22
  97. cribl_control_plane/models/outputcribllake.py +76 -1
  98. cribl_control_plane/models/outputcribltcp.py +123 -22
  99. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +117 -23
  100. cribl_control_plane/models/outputdatabricks.py +76 -5
  101. cribl_control_plane/models/outputdatadog.py +132 -22
  102. cribl_control_plane/models/outputdataset.py +123 -22
  103. cribl_control_plane/models/outputdiskspool.py +11 -1
  104. cribl_control_plane/models/outputdls3.py +117 -1
  105. cribl_control_plane/models/outputdynatracehttp.py +141 -22
  106. cribl_control_plane/models/outputdynatraceotlp.py +141 -22
  107. cribl_control_plane/models/outputelastic.py +148 -22
  108. cribl_control_plane/models/outputelasticcloud.py +130 -22
  109. cribl_control_plane/models/outputexabeam.py +47 -1
  110. cribl_control_plane/models/outputfilesystem.py +72 -1
  111. cribl_control_plane/models/outputgooglechronicle.py +148 -23
  112. cribl_control_plane/models/outputgooglecloudlogging.py +115 -23
  113. cribl_control_plane/models/outputgooglecloudstorage.py +108 -1
  114. cribl_control_plane/models/outputgooglepubsub.py +96 -22
  115. cribl_control_plane/models/outputgrafanacloud.py +244 -43
  116. cribl_control_plane/models/outputgraphite.py +96 -22
  117. cribl_control_plane/models/outputhoneycomb.py +105 -22
  118. cribl_control_plane/models/outputhumiohec.py +114 -22
  119. cribl_control_plane/models/outputinfluxdb.py +114 -22
  120. cribl_control_plane/models/outputkafka.py +283 -20
  121. cribl_control_plane/models/outputkinesis.py +121 -22
  122. cribl_control_plane/models/outputloki.py +112 -20
  123. cribl_control_plane/models/outputminio.py +117 -1
  124. cribl_control_plane/models/outputmsk.py +175 -20
  125. cribl_control_plane/models/outputnewrelic.py +123 -22
  126. cribl_control_plane/models/outputnewrelicevents.py +115 -23
  127. cribl_control_plane/models/outputopentelemetry.py +159 -22
  128. cribl_control_plane/models/outputprometheus.py +105 -22
  129. cribl_control_plane/models/outputring.py +29 -1
  130. cribl_control_plane/models/outputs3.py +117 -1
  131. cribl_control_plane/models/outputsecuritylake.py +85 -1
  132. cribl_control_plane/models/outputsentinel.py +123 -22
  133. cribl_control_plane/models/outputsentineloneaisiem.py +124 -23
  134. cribl_control_plane/models/outputservicenow.py +150 -22
  135. cribl_control_plane/models/outputsignalfx.py +105 -22
  136. cribl_control_plane/models/outputsns.py +103 -20
  137. cribl_control_plane/models/outputsplunk.py +141 -22
  138. cribl_control_plane/models/outputsplunkhec.py +198 -22
  139. cribl_control_plane/models/outputsplunklb.py +170 -22
  140. cribl_control_plane/models/outputsqs.py +112 -20
  141. cribl_control_plane/models/outputstatsd.py +96 -22
  142. cribl_control_plane/models/outputstatsdext.py +96 -22
  143. cribl_control_plane/models/outputsumologic.py +105 -22
  144. cribl_control_plane/models/outputsyslog.py +238 -99
  145. cribl_control_plane/models/outputtcpjson.py +132 -22
  146. cribl_control_plane/models/outputwavefront.py +105 -22
  147. cribl_control_plane/models/outputwebhook.py +141 -22
  148. cribl_control_plane/models/outputxsiam.py +103 -20
  149. cribl_control_plane/models/resourcepolicy.py +11 -0
  150. cribl_control_plane/models/runnablejobcollection.py +68 -9
  151. cribl_control_plane/models/runnablejobexecutor.py +32 -9
  152. cribl_control_plane/models/runnablejobscheduledsearch.py +23 -9
  153. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +11 -0
  154. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +11 -0
  155. cribl_control_plane/sdk.py +2 -2
  156. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/METADATA +25 -7
  157. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/RECORD +158 -157
  158. {cribl_control_plane-0.3.0b3.dist-info → cribl_control_plane-0.3.0b12.dist-info}/WHEEL +0 -0
@@ -1,11 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
4
+ from cribl_control_plane import models, utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
+ from pydantic import field_serializer
9
10
  from pydantic.functional_validators import PlainValidator
10
11
  from typing import List, Optional
11
12
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -113,6 +114,17 @@ class OutputPrometheusAuthenticationType(str, Enum, metaclass=utils.OpenEnumMeta
113
114
  OAUTH = "oauth"
114
115
 
115
116
 
117
+ class OutputPrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
118
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
119
+
120
+ # Error
121
+ ERROR = "error"
122
+ # Always On
123
+ ALWAYS = "always"
124
+ # Backpressure
125
+ BACKPRESSURE = "backpressure"
126
+
127
+
116
128
  class OutputPrometheusCompression(str, Enum, metaclass=utils.OpenEnumMeta):
117
129
  r"""Codec to use to compress the persisted data"""
118
130
 
@@ -131,17 +143,6 @@ class OutputPrometheusQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta)
131
143
  DROP = "drop"
132
144
 
133
145
 
134
- class OutputPrometheusMode(str, Enum, metaclass=utils.OpenEnumMeta):
135
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
136
-
137
- # Error
138
- ERROR = "error"
139
- # Backpressure
140
- BACKPRESSURE = "backpressure"
141
- # Always On
142
- ALWAYS = "always"
143
-
144
-
145
146
  class OutputPrometheusPqControlsTypedDict(TypedDict):
146
147
  pass
147
148
 
@@ -235,6 +236,16 @@ class OutputPrometheusTypedDict(TypedDict):
235
236
  description: NotRequired[str]
236
237
  metrics_flush_period_sec: NotRequired[float]
237
238
  r"""How frequently metrics metadata is sent out. Value cannot be smaller than the base Flush period set above."""
239
+ pq_strict_ordering: NotRequired[bool]
240
+ r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
241
+ pq_rate_per_sec: NotRequired[float]
242
+ r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
243
+ pq_mode: NotRequired[OutputPrometheusMode]
244
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
245
+ pq_max_buffer_size: NotRequired[float]
246
+ r"""The maximum number of events to hold in memory before writing the events to disk"""
247
+ pq_max_backpressure_sec: NotRequired[float]
248
+ r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
238
249
  pq_max_file_size: NotRequired[str]
239
250
  r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
240
251
  pq_max_size: NotRequired[str]
@@ -245,8 +256,6 @@ class OutputPrometheusTypedDict(TypedDict):
245
256
  r"""Codec to use to compress the persisted data"""
246
257
  pq_on_backpressure: NotRequired[OutputPrometheusQueueFullBehavior]
247
258
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
248
- pq_mode: NotRequired[OutputPrometheusMode]
249
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
250
259
  pq_controls: NotRequired[OutputPrometheusPqControlsTypedDict]
251
260
  username: NotRequired[str]
252
261
  password: NotRequired[str]
@@ -402,6 +411,34 @@ class OutputPrometheus(BaseModel):
402
411
  ] = 60
403
412
  r"""How frequently metrics metadata is sent out. Value cannot be smaller than the base Flush period set above."""
404
413
 
414
+ pq_strict_ordering: Annotated[
415
+ Optional[bool], pydantic.Field(alias="pqStrictOrdering")
416
+ ] = True
417
+ r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
418
+
419
+ pq_rate_per_sec: Annotated[
420
+ Optional[float], pydantic.Field(alias="pqRatePerSec")
421
+ ] = 0
422
+ r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
423
+
424
+ pq_mode: Annotated[
425
+ Annotated[
426
+ Optional[OutputPrometheusMode], PlainValidator(validate_open_enum(False))
427
+ ],
428
+ pydantic.Field(alias="pqMode"),
429
+ ] = OutputPrometheusMode.ERROR
430
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
431
+
432
+ pq_max_buffer_size: Annotated[
433
+ Optional[float], pydantic.Field(alias="pqMaxBufferSize")
434
+ ] = 42
435
+ r"""The maximum number of events to hold in memory before writing the events to disk"""
436
+
437
+ pq_max_backpressure_sec: Annotated[
438
+ Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
439
+ ] = 30
440
+ r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
441
+
405
442
  pq_max_file_size: Annotated[
406
443
  Optional[str], pydantic.Field(alias="pqMaxFileSize")
407
444
  ] = "1 MB"
@@ -433,14 +470,6 @@ class OutputPrometheus(BaseModel):
433
470
  ] = OutputPrometheusQueueFullBehavior.BLOCK
434
471
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
435
472
 
436
- pq_mode: Annotated[
437
- Annotated[
438
- Optional[OutputPrometheusMode], PlainValidator(validate_open_enum(False))
439
- ],
440
- pydantic.Field(alias="pqMode"),
441
- ] = OutputPrometheusMode.ERROR
442
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
443
-
444
473
  pq_controls: Annotated[
445
474
  Optional[OutputPrometheusPqControls], pydantic.Field(alias="pqControls")
446
475
  ] = None
@@ -496,3 +525,57 @@ class OutputPrometheus(BaseModel):
496
525
  pydantic.Field(alias="oauthHeaders"),
497
526
  ] = None
498
527
  r"""Additional headers to send in the OAuth login request. @{product} will automatically add the content-type header 'application/x-www-form-urlencoded' when sending this request."""
528
+
529
+ @field_serializer("failed_request_logging_mode")
530
+ def serialize_failed_request_logging_mode(self, value):
531
+ if isinstance(value, str):
532
+ try:
533
+ return models.OutputPrometheusFailedRequestLoggingMode(value)
534
+ except ValueError:
535
+ return value
536
+ return value
537
+
538
+ @field_serializer("on_backpressure")
539
+ def serialize_on_backpressure(self, value):
540
+ if isinstance(value, str):
541
+ try:
542
+ return models.OutputPrometheusBackpressureBehavior(value)
543
+ except ValueError:
544
+ return value
545
+ return value
546
+
547
+ @field_serializer("auth_type")
548
+ def serialize_auth_type(self, value):
549
+ if isinstance(value, str):
550
+ try:
551
+ return models.OutputPrometheusAuthenticationType(value)
552
+ except ValueError:
553
+ return value
554
+ return value
555
+
556
+ @field_serializer("pq_mode")
557
+ def serialize_pq_mode(self, value):
558
+ if isinstance(value, str):
559
+ try:
560
+ return models.OutputPrometheusMode(value)
561
+ except ValueError:
562
+ return value
563
+ return value
564
+
565
+ @field_serializer("pq_compress")
566
+ def serialize_pq_compress(self, value):
567
+ if isinstance(value, str):
568
+ try:
569
+ return models.OutputPrometheusCompression(value)
570
+ except ValueError:
571
+ return value
572
+ return value
573
+
574
+ @field_serializer("pq_on_backpressure")
575
+ def serialize_pq_on_backpressure(self, value):
576
+ if isinstance(value, str):
577
+ try:
578
+ return models.OutputPrometheusQueueFullBehavior(value)
579
+ except ValueError:
580
+ return value
581
+ return value
@@ -1,11 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
4
+ from cribl_control_plane import models, utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
+ from pydantic import field_serializer
9
10
  from pydantic.functional_validators import PlainValidator
10
11
  from typing import List, Optional
11
12
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -121,3 +122,30 @@ class OutputRing(BaseModel):
121
122
  r"""How to handle events when all receivers are exerting backpressure"""
122
123
 
123
124
  description: Optional[str] = None
125
+
126
+ @field_serializer("format_")
127
+ def serialize_format_(self, value):
128
+ if isinstance(value, str):
129
+ try:
130
+ return models.OutputRingDataFormat(value)
131
+ except ValueError:
132
+ return value
133
+ return value
134
+
135
+ @field_serializer("compress")
136
+ def serialize_compress(self, value):
137
+ if isinstance(value, str):
138
+ try:
139
+ return models.OutputRingDataCompressionFormat(value)
140
+ except ValueError:
141
+ return value
142
+ return value
143
+
144
+ @field_serializer("on_backpressure")
145
+ def serialize_on_backpressure(self, value):
146
+ if isinstance(value, str):
147
+ try:
148
+ return models.OutputRingBackpressureBehavior(value)
149
+ except ValueError:
150
+ return value
151
+ return value
@@ -1,11 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
4
+ from cribl_control_plane import models, utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
+ from pydantic import field_serializer
9
10
  from pydantic.functional_validators import PlainValidator
10
11
  from typing import List, Optional
11
12
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -254,6 +255,8 @@ class OutputS3TypedDict(TypedDict):
254
255
  r"""Compression level to apply before moving files to final destination"""
255
256
  automatic_schema: NotRequired[bool]
256
257
  r"""Automatically calculate the schema based on the events of each Parquet file generated"""
258
+ parquet_schema: NotRequired[str]
259
+ r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
257
260
  parquet_version: NotRequired[OutputS3ParquetVersion]
258
261
  r"""Determines which data types are supported and how they are represented"""
259
262
  parquet_data_page_version: NotRequired[OutputS3DataPageVersion]
@@ -523,6 +526,11 @@ class OutputS3(BaseModel):
523
526
  ] = False
524
527
  r"""Automatically calculate the schema based on the events of each Parquet file generated"""
525
528
 
529
+ parquet_schema: Annotated[Optional[str], pydantic.Field(alias="parquetSchema")] = (
530
+ None
531
+ )
532
+ r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
533
+
526
534
  parquet_version: Annotated[
527
535
  Annotated[
528
536
  Optional[OutputS3ParquetVersion], PlainValidator(validate_open_enum(False))
@@ -587,3 +595,111 @@ class OutputS3(BaseModel):
587
595
 
588
596
  max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
589
597
  r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
598
+
599
+ @field_serializer("aws_authentication_method")
600
+ def serialize_aws_authentication_method(self, value):
601
+ if isinstance(value, str):
602
+ try:
603
+ return models.OutputS3AuthenticationMethod(value)
604
+ except ValueError:
605
+ return value
606
+ return value
607
+
608
+ @field_serializer("signature_version")
609
+ def serialize_signature_version(self, value):
610
+ if isinstance(value, str):
611
+ try:
612
+ return models.OutputS3SignatureVersion(value)
613
+ except ValueError:
614
+ return value
615
+ return value
616
+
617
+ @field_serializer("object_acl")
618
+ def serialize_object_acl(self, value):
619
+ if isinstance(value, str):
620
+ try:
621
+ return models.OutputS3ObjectACL(value)
622
+ except ValueError:
623
+ return value
624
+ return value
625
+
626
+ @field_serializer("storage_class")
627
+ def serialize_storage_class(self, value):
628
+ if isinstance(value, str):
629
+ try:
630
+ return models.OutputS3StorageClass(value)
631
+ except ValueError:
632
+ return value
633
+ return value
634
+
635
+ @field_serializer("server_side_encryption")
636
+ def serialize_server_side_encryption(self, value):
637
+ if isinstance(value, str):
638
+ try:
639
+ return models.OutputS3ServerSideEncryptionForUploadedObjects(value)
640
+ except ValueError:
641
+ return value
642
+ return value
643
+
644
+ @field_serializer("format_")
645
+ def serialize_format_(self, value):
646
+ if isinstance(value, str):
647
+ try:
648
+ return models.OutputS3DataFormat(value)
649
+ except ValueError:
650
+ return value
651
+ return value
652
+
653
+ @field_serializer("on_backpressure")
654
+ def serialize_on_backpressure(self, value):
655
+ if isinstance(value, str):
656
+ try:
657
+ return models.OutputS3BackpressureBehavior(value)
658
+ except ValueError:
659
+ return value
660
+ return value
661
+
662
+ @field_serializer("on_disk_full_backpressure")
663
+ def serialize_on_disk_full_backpressure(self, value):
664
+ if isinstance(value, str):
665
+ try:
666
+ return models.OutputS3DiskSpaceProtection(value)
667
+ except ValueError:
668
+ return value
669
+ return value
670
+
671
+ @field_serializer("compress")
672
+ def serialize_compress(self, value):
673
+ if isinstance(value, str):
674
+ try:
675
+ return models.OutputS3Compression(value)
676
+ except ValueError:
677
+ return value
678
+ return value
679
+
680
+ @field_serializer("compression_level")
681
+ def serialize_compression_level(self, value):
682
+ if isinstance(value, str):
683
+ try:
684
+ return models.OutputS3CompressionLevel(value)
685
+ except ValueError:
686
+ return value
687
+ return value
688
+
689
+ @field_serializer("parquet_version")
690
+ def serialize_parquet_version(self, value):
691
+ if isinstance(value, str):
692
+ try:
693
+ return models.OutputS3ParquetVersion(value)
694
+ except ValueError:
695
+ return value
696
+ return value
697
+
698
+ @field_serializer("parquet_data_page_version")
699
+ def serialize_parquet_data_page_version(self, value):
700
+ if isinstance(value, str):
701
+ try:
702
+ return models.OutputS3DataPageVersion(value)
703
+ except ValueError:
704
+ return value
705
+ return value
@@ -1,11 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
4
+ from cribl_control_plane import models, utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
+ from pydantic import field_serializer
9
10
  from pydantic.functional_validators import PlainValidator
10
11
  from typing import List, Optional
11
12
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -530,3 +531,86 @@ class OutputSecurityLake(BaseModel):
530
531
 
531
532
  max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
532
533
  r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
534
+
535
+ @field_serializer("aws_authentication_method")
536
+ def serialize_aws_authentication_method(self, value):
537
+ if isinstance(value, str):
538
+ try:
539
+ return models.OutputSecurityLakeAuthenticationMethod(value)
540
+ except ValueError:
541
+ return value
542
+ return value
543
+
544
+ @field_serializer("signature_version")
545
+ def serialize_signature_version(self, value):
546
+ if isinstance(value, str):
547
+ try:
548
+ return models.OutputSecurityLakeSignatureVersion(value)
549
+ except ValueError:
550
+ return value
551
+ return value
552
+
553
+ @field_serializer("object_acl")
554
+ def serialize_object_acl(self, value):
555
+ if isinstance(value, str):
556
+ try:
557
+ return models.OutputSecurityLakeObjectACL(value)
558
+ except ValueError:
559
+ return value
560
+ return value
561
+
562
+ @field_serializer("storage_class")
563
+ def serialize_storage_class(self, value):
564
+ if isinstance(value, str):
565
+ try:
566
+ return models.OutputSecurityLakeStorageClass(value)
567
+ except ValueError:
568
+ return value
569
+ return value
570
+
571
+ @field_serializer("server_side_encryption")
572
+ def serialize_server_side_encryption(self, value):
573
+ if isinstance(value, str):
574
+ try:
575
+ return models.OutputSecurityLakeServerSideEncryptionForUploadedObjects(
576
+ value
577
+ )
578
+ except ValueError:
579
+ return value
580
+ return value
581
+
582
+ @field_serializer("on_backpressure")
583
+ def serialize_on_backpressure(self, value):
584
+ if isinstance(value, str):
585
+ try:
586
+ return models.OutputSecurityLakeBackpressureBehavior(value)
587
+ except ValueError:
588
+ return value
589
+ return value
590
+
591
+ @field_serializer("on_disk_full_backpressure")
592
+ def serialize_on_disk_full_backpressure(self, value):
593
+ if isinstance(value, str):
594
+ try:
595
+ return models.OutputSecurityLakeDiskSpaceProtection(value)
596
+ except ValueError:
597
+ return value
598
+ return value
599
+
600
+ @field_serializer("parquet_version")
601
+ def serialize_parquet_version(self, value):
602
+ if isinstance(value, str):
603
+ try:
604
+ return models.OutputSecurityLakeParquetVersion(value)
605
+ except ValueError:
606
+ return value
607
+ return value
608
+
609
+ @field_serializer("parquet_data_page_version")
610
+ def serialize_parquet_data_page_version(self, value):
611
+ if isinstance(value, str):
612
+ try:
613
+ return models.OutputSecurityLakeDataPageVersion(value)
614
+ except ValueError:
615
+ return value
616
+ return value
@@ -1,11 +1,12 @@
1
1
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
2
2
 
3
3
  from __future__ import annotations
4
- from cribl_control_plane import utils
4
+ from cribl_control_plane import models, utils
5
5
  from cribl_control_plane.types import BaseModel
6
6
  from cribl_control_plane.utils import validate_open_enum
7
7
  from enum import Enum
8
8
  import pydantic
9
+ from pydantic import field_serializer
9
10
  from pydantic.functional_validators import PlainValidator
10
11
  from typing import List, Optional
11
12
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -122,6 +123,17 @@ class OutputSentinelFormat(str, Enum, metaclass=utils.OpenEnumMeta):
122
123
  ADVANCED = "advanced"
123
124
 
124
125
 
126
+ class OutputSentinelMode(str, Enum, metaclass=utils.OpenEnumMeta):
127
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
128
+
129
+ # Error
130
+ ERROR = "error"
131
+ # Always On
132
+ ALWAYS = "always"
133
+ # Backpressure
134
+ BACKPRESSURE = "backpressure"
135
+
136
+
125
137
  class OutputSentinelCompression(str, Enum, metaclass=utils.OpenEnumMeta):
126
138
  r"""Codec to use to compress the persisted data"""
127
139
 
@@ -140,17 +152,6 @@ class OutputSentinelQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
140
152
  DROP = "drop"
141
153
 
142
154
 
143
- class OutputSentinelMode(str, Enum, metaclass=utils.OpenEnumMeta):
144
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
145
-
146
- # Error
147
- ERROR = "error"
148
- # Backpressure
149
- BACKPRESSURE = "backpressure"
150
- # Always On
151
- ALWAYS = "always"
152
-
153
-
154
155
  class OutputSentinelPqControlsTypedDict(TypedDict):
155
156
  pass
156
157
 
@@ -238,6 +239,16 @@ class OutputSentinelTypedDict(TypedDict):
238
239
  r"""Custom JavaScript code to format incoming event data accessible through the __e variable. The formatted content is added to (__e['__eventOut']) if available. Otherwise, the original event is serialized as JSON. Caution: This function is evaluated in an unprotected context, allowing you to execute almost any JavaScript code."""
239
240
  format_payload_code: NotRequired[str]
240
241
  r"""Optional JavaScript code to format the payload sent to the Destination. The payload, containing a batch of formatted events, is accessible through the __e['payload'] variable. The formatted payload is returned in the __e['__payloadOut'] variable. Caution: This function is evaluated in an unprotected context, allowing you to execute almost any JavaScript code."""
242
+ pq_strict_ordering: NotRequired[bool]
243
+ r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
244
+ pq_rate_per_sec: NotRequired[float]
245
+ r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
246
+ pq_mode: NotRequired[OutputSentinelMode]
247
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
248
+ pq_max_buffer_size: NotRequired[float]
249
+ r"""The maximum number of events to hold in memory before writing the events to disk"""
250
+ pq_max_backpressure_sec: NotRequired[float]
251
+ r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
241
252
  pq_max_file_size: NotRequired[str]
242
253
  r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
243
254
  pq_max_size: NotRequired[str]
@@ -248,8 +259,6 @@ class OutputSentinelTypedDict(TypedDict):
248
259
  r"""Codec to use to compress the persisted data"""
249
260
  pq_on_backpressure: NotRequired[OutputSentinelQueueFullBehavior]
250
261
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
251
- pq_mode: NotRequired[OutputSentinelMode]
252
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
253
262
  pq_controls: NotRequired[OutputSentinelPqControlsTypedDict]
254
263
  url: NotRequired[str]
255
264
  r"""URL to send events to. Can be overwritten by an event's __url field."""
@@ -445,6 +454,34 @@ class OutputSentinel(BaseModel):
445
454
  ] = None
446
455
  r"""Optional JavaScript code to format the payload sent to the Destination. The payload, containing a batch of formatted events, is accessible through the __e['payload'] variable. The formatted payload is returned in the __e['__payloadOut'] variable. Caution: This function is evaluated in an unprotected context, allowing you to execute almost any JavaScript code."""
447
456
 
457
+ pq_strict_ordering: Annotated[
458
+ Optional[bool], pydantic.Field(alias="pqStrictOrdering")
459
+ ] = True
460
+ r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
461
+
462
+ pq_rate_per_sec: Annotated[
463
+ Optional[float], pydantic.Field(alias="pqRatePerSec")
464
+ ] = 0
465
+ r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
466
+
467
+ pq_mode: Annotated[
468
+ Annotated[
469
+ Optional[OutputSentinelMode], PlainValidator(validate_open_enum(False))
470
+ ],
471
+ pydantic.Field(alias="pqMode"),
472
+ ] = OutputSentinelMode.ERROR
473
+ r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
474
+
475
+ pq_max_buffer_size: Annotated[
476
+ Optional[float], pydantic.Field(alias="pqMaxBufferSize")
477
+ ] = 42
478
+ r"""The maximum number of events to hold in memory before writing the events to disk"""
479
+
480
+ pq_max_backpressure_sec: Annotated[
481
+ Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
482
+ ] = 30
483
+ r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
484
+
448
485
  pq_max_file_size: Annotated[
449
486
  Optional[str], pydantic.Field(alias="pqMaxFileSize")
450
487
  ] = "1 MB"
@@ -476,14 +513,6 @@ class OutputSentinel(BaseModel):
476
513
  ] = OutputSentinelQueueFullBehavior.BLOCK
477
514
  r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
478
515
 
479
- pq_mode: Annotated[
480
- Annotated[
481
- Optional[OutputSentinelMode], PlainValidator(validate_open_enum(False))
482
- ],
483
- pydantic.Field(alias="pqMode"),
484
- ] = OutputSentinelMode.ERROR
485
- r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
486
-
487
516
  pq_controls: Annotated[
488
517
  Optional[OutputSentinelPqControls], pydantic.Field(alias="pqControls")
489
518
  ] = None
@@ -499,3 +528,75 @@ class OutputSentinel(BaseModel):
499
528
 
500
529
  stream_name: Annotated[Optional[str], pydantic.Field(alias="streamName")] = None
501
530
  r"""The name of the stream (Sentinel table) in which to store the events"""
531
+
532
+ @field_serializer("failed_request_logging_mode")
533
+ def serialize_failed_request_logging_mode(self, value):
534
+ if isinstance(value, str):
535
+ try:
536
+ return models.OutputSentinelFailedRequestLoggingMode(value)
537
+ except ValueError:
538
+ return value
539
+ return value
540
+
541
+ @field_serializer("on_backpressure")
542
+ def serialize_on_backpressure(self, value):
543
+ if isinstance(value, str):
544
+ try:
545
+ return models.OutputSentinelBackpressureBehavior(value)
546
+ except ValueError:
547
+ return value
548
+ return value
549
+
550
+ @field_serializer("auth_type")
551
+ def serialize_auth_type(self, value):
552
+ if isinstance(value, str):
553
+ try:
554
+ return models.AuthType(value)
555
+ except ValueError:
556
+ return value
557
+ return value
558
+
559
+ @field_serializer("endpoint_url_configuration")
560
+ def serialize_endpoint_url_configuration(self, value):
561
+ if isinstance(value, str):
562
+ try:
563
+ return models.EndpointConfiguration(value)
564
+ except ValueError:
565
+ return value
566
+ return value
567
+
568
+ @field_serializer("format_")
569
+ def serialize_format_(self, value):
570
+ if isinstance(value, str):
571
+ try:
572
+ return models.OutputSentinelFormat(value)
573
+ except ValueError:
574
+ return value
575
+ return value
576
+
577
+ @field_serializer("pq_mode")
578
+ def serialize_pq_mode(self, value):
579
+ if isinstance(value, str):
580
+ try:
581
+ return models.OutputSentinelMode(value)
582
+ except ValueError:
583
+ return value
584
+ return value
585
+
586
+ @field_serializer("pq_compress")
587
+ def serialize_pq_compress(self, value):
588
+ if isinstance(value, str):
589
+ try:
590
+ return models.OutputSentinelCompression(value)
591
+ except ValueError:
592
+ return value
593
+ return value
594
+
595
+ @field_serializer("pq_on_backpressure")
596
+ def serialize_pq_on_backpressure(self, value):
597
+ if isinstance(value, str):
598
+ try:
599
+ return models.OutputSentinelQueueFullBehavior(value)
600
+ except ValueError:
601
+ return value
602
+ return value