cribl-control-plane 0.2.1rc7__py3-none-any.whl → 0.3.0a1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/runnablejobcollection.py

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -176,14 +175,13 @@ class RunnableJobCollectionScheduleTypedDict(TypedDict):
 
  enabled: NotRequired[bool]
  r"""Enable to configure scheduling for this Collector"""
- skippable: NotRequired[bool]
- r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
- resume_missed: NotRequired[bool]
- r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
  cron_schedule: NotRequired[str]
  r"""A cron schedule on which to run this job"""
  max_concurrent_runs: NotRequired[float]
  r"""The maximum number of instances of this scheduled job that may be running at any time"""
+ skippable: NotRequired[bool]
+ r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+ resume_missed: NotRequired[Any]
  run: NotRequired[RunnableJobCollectionRunSettingsTypedDict]
 
 
@@ -193,14 +191,6 @@ class RunnableJobCollectionSchedule(BaseModel):
  enabled: Optional[bool] = None
  r"""Enable to configure scheduling for this Collector"""
 
- skippable: Optional[bool] = True
- r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
-
- resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
- False
- )
- r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
-
  cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
  "*/5 * * * *"
  )
@@ -211,6 +201,11 @@ class RunnableJobCollectionSchedule(BaseModel):
  ] = 1
  r"""The maximum number of instances of this scheduled job that may be running at any time"""
 
+ skippable: Optional[bool] = True
+ r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+ resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
+
  run: Optional[RunnableJobCollectionRunSettings] = None
 
 
@@ -335,15 +330,6 @@ class RunnableJobCollectionInput(BaseModel):
  output: Optional[str] = None
  r"""Destination to send results to"""
 
- @field_serializer("type")
- def serialize_type(self, value):
- if isinstance(value, str):
- try:
- return models.InputType(value)
- except ValueError:
- return value
- return value
-
 
  class RunnableJobCollectionLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
  r"""Level at which to set task logging"""
@@ -377,13 +363,9 @@ class RunnableJobCollectionTimeWarning(BaseModel):
 
 
  class WhereToCapture(int, Enum, metaclass=utils.OpenEnumMeta):
- # 1. Before pre-processing Pipeline
  ZERO = 0
- # 2. Before the Routes
  ONE = 1
- # 3. Before post-processing Pipeline
  TWO = 2
- # 4. Before the Destination
  THREE = 3
 
 
@@ -406,15 +388,6 @@ class CaptureSettings(BaseModel):
  Optional[WhereToCapture], PlainValidator(validate_open_enum(True))
  ] = WhereToCapture.ZERO
 
- @field_serializer("level")
- def serialize_level(self, value):
- if isinstance(value, str):
- try:
- return models.WhereToCapture(value)
- except ValueError:
- return value
- return value
-
 
  class RunnableJobCollectionRunTypedDict(TypedDict):
  reschedule_dropped_tasks: NotRequired[bool]
@@ -556,33 +529,6 @@ class RunnableJobCollectionRun(BaseModel):
 
  capture: Optional[CaptureSettings] = None
 
- @field_serializer("log_level")
- def serialize_log_level(self, value):
- if isinstance(value, str):
- try:
- return models.RunnableJobCollectionLogLevel(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("mode")
- def serialize_mode(self, value):
- if isinstance(value, str):
- try:
- return models.RunnableJobCollectionMode(value)
- except ValueError:
- return value
- return value
-
- @field_serializer("time_range_type")
- def serialize_time_range_type(self, value):
- if isinstance(value, str):
- try:
- return models.TimeRange(value)
- except ValueError:
- return value
- return value
-
 
  class RunnableJobCollectionTypedDict(TypedDict):
  collector: CollectorTypedDict
@@ -658,12 +604,3 @@ class RunnableJobCollection(BaseModel):
  r"""If enabled, tasks are created and run by the same Worker Node"""
 
  input: Optional[RunnableJobCollectionInput] = None
-
- @field_serializer("type")
- def serialize_type(self, value):
- if isinstance(value, str):
- try:
- return models.RunnableJobCollectionJobType(value)
- except ValueError:
- return value
- return value
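
Across runnablejobcollection.py, runnablejobexecutor.py, and runnablejobscheduledsearch.py (the next two sections repeat the same change), the Schedule models move skippable after max_concurrent_runs and relax resume_missed from Optional[bool] with a default of False to Any with a default of None. A minimal sketch of building the new schedule shape, assuming RunnableJobCollectionSchedule is re-exported from cribl_control_plane.models and that the generated BaseModel accepts snake_case field names (both typical for Speakeasy SDKs, but not shown in this diff):

    # Sketch only: the import path and field-name population are assumptions.
    from cribl_control_plane.models import RunnableJobCollectionSchedule

    schedule = RunnableJobCollectionSchedule(
        enabled=True,
        cron_schedule="*/5 * * * *",  # default is unchanged
        max_concurrent_runs=1,
        skippable=True,               # still defaults to True
        resume_missed=None,           # now typed Any, defaults to None (was bool, False)
    )
    # Dump with the camelCase aliases (cronSchedule, maxConcurrentRuns, resumeMissed)
    print(schedule.model_dump(by_alias=True, exclude_none=True))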
cribl_control_plane/models/runnablejobexecutor.py

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -174,14 +173,13 @@ class RunnableJobExecutorScheduleTypedDict(TypedDict):
 
  enabled: NotRequired[bool]
  r"""Enable to configure scheduling for this Collector"""
- skippable: NotRequired[bool]
- r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
- resume_missed: NotRequired[bool]
- r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
  cron_schedule: NotRequired[str]
  r"""A cron schedule on which to run this job"""
  max_concurrent_runs: NotRequired[float]
  r"""The maximum number of instances of this scheduled job that may be running at any time"""
+ skippable: NotRequired[bool]
+ r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+ resume_missed: NotRequired[Any]
  run: NotRequired[RunnableJobExecutorRunSettingsTypedDict]
 
 
@@ -191,14 +189,6 @@ class RunnableJobExecutorSchedule(BaseModel):
  enabled: Optional[bool] = None
  r"""Enable to configure scheduling for this Collector"""
 
- skippable: Optional[bool] = True
- r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
-
- resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
- False
- )
- r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
-
  cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
  "*/5 * * * *"
  )
@@ -209,6 +199,11 @@ class RunnableJobExecutorSchedule(BaseModel):
  ] = 1
  r"""The maximum number of instances of this scheduled job that may be running at any time"""
 
+ skippable: Optional[bool] = True
+ r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+ resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
+
  run: Optional[RunnableJobExecutorRunSettings] = None
 
 
@@ -284,15 +279,6 @@ class RunnableJobExecutorRun(BaseModel):
  job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
  r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
 
- @field_serializer("log_level")
- def serialize_log_level(self, value):
- if isinstance(value, str):
- try:
- return models.RunnableJobExecutorLogLevel(value)
- except ValueError:
- return value
- return value
-
 
  class RunnableJobExecutorTypedDict(TypedDict):
  executor: ExecutorTypedDict
@@ -357,12 +343,3 @@ class RunnableJobExecutor(BaseModel):
 
  streamtags: Optional[List[str]] = None
  r"""Tags for filtering and grouping in @{product}"""
-
- @field_serializer("type")
- def serialize_type(self, value):
- if isinstance(value, str):
- try:
- return models.RunnableJobExecutorJobType(value)
- except ValueError:
- return value
- return value
cribl_control_plane/models/runnablejobscheduledsearch.py

@@ -1,12 +1,11 @@
  """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
  from __future__ import annotations
- from cribl_control_plane import models, utils
+ from cribl_control_plane import utils
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import validate_open_enum
  from enum import Enum
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import Any, List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -175,14 +174,13 @@ class RunnableJobScheduledSearchScheduleTypedDict(TypedDict):
 
  enabled: NotRequired[bool]
  r"""Enable to configure scheduling for this Collector"""
- skippable: NotRequired[bool]
- r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
- resume_missed: NotRequired[bool]
- r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
  cron_schedule: NotRequired[str]
  r"""A cron schedule on which to run this job"""
  max_concurrent_runs: NotRequired[float]
  r"""The maximum number of instances of this scheduled job that may be running at any time"""
+ skippable: NotRequired[bool]
+ r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+ resume_missed: NotRequired[Any]
  run: NotRequired[RunnableJobScheduledSearchRunSettingsTypedDict]
 
 
@@ -192,14 +190,6 @@ class RunnableJobScheduledSearchSchedule(BaseModel):
  enabled: Optional[bool] = None
  r"""Enable to configure scheduling for this Collector"""
 
- skippable: Optional[bool] = True
- r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
-
- resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
- False
- )
- r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
-
  cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
  "*/5 * * * *"
  )
@@ -210,6 +200,11 @@ class RunnableJobScheduledSearchSchedule(BaseModel):
  ] = 1
  r"""The maximum number of instances of this scheduled job that may be running at any time"""
 
+ skippable: Optional[bool] = True
+ r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+ resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
+
  run: Optional[RunnableJobScheduledSearchRunSettings] = None
 
 
@@ -275,12 +270,3 @@ class RunnableJobScheduledSearch(BaseModel):
 
  streamtags: Optional[List[str]] = None
  r"""Tags for filtering and grouping in @{product}"""
-
- @field_serializer("type")
- def serialize_type(self, value):
- if isinstance(value, str):
- try:
- return models.RunnableJobScheduledSearchJobType(value)
- except ValueError:
- return value
- return value
cribl_control_plane/models/updateconfiggroupbyproductandidop.py

@@ -3,7 +3,6 @@
  from __future__ import annotations
  from .configgroup import ConfigGroup, ConfigGroupTypedDict
  from .productscore import ProductsCore
- from cribl_control_plane import models
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import (
  FieldMetadata,
@@ -12,7 +11,6 @@ from cribl_control_plane.utils import (
  validate_open_enum,
  )
  import pydantic
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -47,15 +45,6 @@ class UpdateConfigGroupByProductAndIDRequest(BaseModel):
  ]
  r"""ConfigGroup object"""
 
- @field_serializer("product")
- def serialize_product(self, value):
- if isinstance(value, str):
- try:
- return models.ProductsCore(value)
- except ValueError:
- return value
- return value
-
 
  class UpdateConfigGroupByProductAndIDResponseTypedDict(TypedDict):
  r"""a list of ConfigGroup objects"""
cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py

@@ -4,7 +4,6 @@ from __future__ import annotations
  from .configgroup import ConfigGroup, ConfigGroupTypedDict
  from .deployrequest import DeployRequest, DeployRequestTypedDict
  from .productscore import ProductsCore
- from cribl_control_plane import models
  from cribl_control_plane.types import BaseModel
  from cribl_control_plane.utils import (
  FieldMetadata,
@@ -12,7 +11,6 @@ from cribl_control_plane.utils import (
  RequestMetadata,
  validate_open_enum,
  )
- from pydantic import field_serializer
  from pydantic.functional_validators import PlainValidator
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
@@ -45,15 +43,6 @@ class UpdateConfigGroupDeployByProductAndIDRequest(BaseModel):
  ]
  r"""DeployRequest object"""
 
- @field_serializer("product")
- def serialize_product(self, value):
- if isinstance(value, str):
- try:
- return models.ProductsCore(value)
- except ValueError:
- return value
- return value
-
 
  class UpdateConfigGroupDeployByProductAndIDResponseTypedDict(TypedDict):
  r"""a list of ConfigGroup objects"""
cribl_control_plane/packs.py

@@ -6,8 +6,7 @@ from cribl_control_plane._hooks import HookContext
  from cribl_control_plane.types import BaseModel, OptionalNullable, UNSET
  from cribl_control_plane.utils import get_security_from_env
  from cribl_control_plane.utils.unmarshal_json_response import unmarshal_json_response
- import io
- from typing import Any, IO, Mapping, Optional, Union, cast
+ from typing import Any, Mapping, Optional, Union, cast
 
 
  class Packs(BaseSDK):
@@ -24,9 +23,9 @@ class Packs(BaseSDK):
  timeout_ms: Optional[int] = None,
  http_headers: Optional[Mapping[str, str]] = None,
  ) -> models.CreatePacksResponse:
- r"""Install a Pack
+ r"""Create or install a Pack
 
- Install a Pack.<br><br>To install an uploaded Pack, provide the <code>source</code> value from the <code>PUT /packs</code> response as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a URL, provide the direct URL location of the <code>.crbl</code> file for the Pack as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a Git repository, provide <code>git+<repo-url></code> as the <code>source</code> parameter in the request body.<br><br>If you do not include the <code>source</code> parameter in the request body, an empty Pack is created.
+ Create or install a Pack.
 
  :param request: The request object to send.
  :param retries: Override the default retry configuration for this method
@@ -116,9 +115,9 @@ class Packs(BaseSDK):
  timeout_ms: Optional[int] = None,
  http_headers: Optional[Mapping[str, str]] = None,
  ) -> models.CreatePacksResponse:
- r"""Install a Pack
+ r"""Create or install a Pack
 
- Install a Pack.<br><br>To install an uploaded Pack, provide the <code>source</code> value from the <code>PUT /packs</code> response as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a URL, provide the direct URL location of the <code>.crbl</code> file for the Pack as the <code>source</code> parameter in the request body.<br><br>To install a Pack by importing from a Git repository, provide <code>git+<repo-url></code> as the <code>source</code> parameter in the request body.<br><br>If you do not include the <code>source</code> parameter in the request body, an empty Pack is created.
+ Create or install a Pack.
 
  :param request: The request object to send.
  :param retries: Override the default retry configuration for this method
@@ -371,200 +370,6 @@ class Packs(BaseSDK):
 
  raise errors.APIError("Unexpected response received", http_res)
 
- def upload(
- self,
- *,
- filename: str,
- request_body: Union[bytes, IO[bytes], io.BufferedReader],
- retries: OptionalNullable[utils.RetryConfig] = UNSET,
- server_url: Optional[str] = None,
- timeout_ms: Optional[int] = None,
- http_headers: Optional[Mapping[str, str]] = None,
- ) -> models.UploadPackResponse:
- r"""Upload a Pack file
-
- Upload a Pack file. Returns the <code>source</code> ID needed to install the Pack with <code>POST /packs source</code>, which you must call separately.
-
- :param filename: Filename of the Pack file to upload.
- :param request_body:
- :param retries: Override the default retry configuration for this method
- :param server_url: Override the default server URL for this method
- :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
- :param http_headers: Additional headers to set or replace on requests.
- """
- base_url = None
- url_variables = None
- if timeout_ms is None:
- timeout_ms = self.sdk_configuration.timeout_ms
-
- if server_url is not None:
- base_url = server_url
- else:
- base_url = self._get_url(base_url, url_variables)
-
- request = models.UpdatePacksRequest(
- filename=filename,
- request_body=request_body,
- )
-
- req = self._build_request(
- method="PUT",
- path="/packs",
- base_url=base_url,
- url_variables=url_variables,
- request=request,
- request_body_required=True,
- request_has_path_params=False,
- request_has_query_params=True,
- user_agent_header="user-agent",
- accept_header_value="application/json",
- http_headers=http_headers,
- security=self.sdk_configuration.security,
- get_serialized_body=lambda: utils.serialize_request_body(
- request.request_body,
- False,
- False,
- "raw",
- Union[bytes, IO[bytes], io.BufferedReader],
- ),
- timeout_ms=timeout_ms,
- )
-
- if retries == UNSET:
- if self.sdk_configuration.retry_config is not UNSET:
- retries = self.sdk_configuration.retry_config
-
- retry_config = None
- if isinstance(retries, utils.RetryConfig):
- retry_config = (retries, ["429", "500", "502", "503", "504"])
-
- http_res = self.do_request(
- hook_ctx=HookContext(
- config=self.sdk_configuration,
- base_url=base_url or "",
- operation_id="updatePacks",
- oauth2_scopes=[],
- security_source=get_security_from_env(
- self.sdk_configuration.security, models.Security
- ),
- ),
- request=req,
- error_status_codes=["401", "4XX", "500", "5XX"],
- retry_config=retry_config,
- )
-
- response_data: Any = None
- if utils.match_response(http_res, "200", "application/json"):
- return unmarshal_json_response(models.UploadPackResponse, http_res)
- if utils.match_response(http_res, "500", "application/json"):
- response_data = unmarshal_json_response(errors.ErrorData, http_res)
- raise errors.Error(response_data, http_res)
- if utils.match_response(http_res, ["401", "4XX"], "*"):
- http_res_text = utils.stream_to_text(http_res)
- raise errors.APIError("API error occurred", http_res, http_res_text)
- if utils.match_response(http_res, "5XX", "*"):
- http_res_text = utils.stream_to_text(http_res)
- raise errors.APIError("API error occurred", http_res, http_res_text)
-
- raise errors.APIError("Unexpected response received", http_res)
-
- async def upload_async(
- self,
- *,
- filename: str,
- request_body: Union[bytes, IO[bytes], io.BufferedReader],
- retries: OptionalNullable[utils.RetryConfig] = UNSET,
- server_url: Optional[str] = None,
- timeout_ms: Optional[int] = None,
- http_headers: Optional[Mapping[str, str]] = None,
- ) -> models.UploadPackResponse:
- r"""Upload a Pack file
-
- Upload a Pack file. Returns the <code>source</code> ID needed to install the Pack with <code>POST /packs source</code>, which you must call separately.
-
- :param filename: Filename of the Pack file to upload.
- :param request_body:
- :param retries: Override the default retry configuration for this method
- :param server_url: Override the default server URL for this method
- :param timeout_ms: Override the default request timeout configuration for this method in milliseconds
- :param http_headers: Additional headers to set or replace on requests.
- """
- base_url = None
- url_variables = None
- if timeout_ms is None:
- timeout_ms = self.sdk_configuration.timeout_ms
-
- if server_url is not None:
- base_url = server_url
- else:
- base_url = self._get_url(base_url, url_variables)
-
- request = models.UpdatePacksRequest(
- filename=filename,
- request_body=request_body,
- )
-
- req = self._build_request_async(
- method="PUT",
- path="/packs",
- base_url=base_url,
- url_variables=url_variables,
- request=request,
- request_body_required=True,
- request_has_path_params=False,
- request_has_query_params=True,
- user_agent_header="user-agent",
- accept_header_value="application/json",
- http_headers=http_headers,
- security=self.sdk_configuration.security,
- get_serialized_body=lambda: utils.serialize_request_body(
- request.request_body,
- False,
- False,
- "raw",
- Union[bytes, IO[bytes], io.BufferedReader],
- ),
- timeout_ms=timeout_ms,
- )
-
- if retries == UNSET:
- if self.sdk_configuration.retry_config is not UNSET:
- retries = self.sdk_configuration.retry_config
-
- retry_config = None
- if isinstance(retries, utils.RetryConfig):
- retry_config = (retries, ["429", "500", "502", "503", "504"])
-
- http_res = await self.do_request_async(
- hook_ctx=HookContext(
- config=self.sdk_configuration,
- base_url=base_url or "",
- operation_id="updatePacks",
- oauth2_scopes=[],
- security_source=get_security_from_env(
- self.sdk_configuration.security, models.Security
- ),
- ),
- request=req,
- error_status_codes=["401", "4XX", "500", "5XX"],
- retry_config=retry_config,
- )
-
- response_data: Any = None
- if utils.match_response(http_res, "200", "application/json"):
- return unmarshal_json_response(models.UploadPackResponse, http_res)
- if utils.match_response(http_res, "500", "application/json"):
- response_data = unmarshal_json_response(errors.ErrorData, http_res)
- raise errors.Error(response_data, http_res)
- if utils.match_response(http_res, ["401", "4XX"], "*"):
- http_res_text = await utils.stream_to_text_async(http_res)
- raise errors.APIError("API error occurred", http_res, http_res_text)
- if utils.match_response(http_res, "5XX", "*"):
- http_res_text = await utils.stream_to_text_async(http_res)
- raise errors.APIError("API error occurred", http_res, http_res_text)
-
- raise errors.APIError("Unexpected response received", http_res)
-
  def delete(
  self,
  *,
@@ -928,7 +733,7 @@
  ) -> models.UpdatePacksByIDResponse:
  r"""Upgrade a Pack
 
- Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack.Copy the modified files back to the upgraded Pack after you install it with <code>POST /packs</code> to overwrite the default versions in the Pack.</br></br>After you upgrade the Pack, update any Routes, Pipelines, Sources, and Destinations that use the previous Pack version so that they reference the upgraded Pack.
+ Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack. Copy the modified files back to the upgraded Pack after you install it with <code>POST /packs</code> to overwrite the default versions in the Pack.</br></br>After you upgrade the Pack, update any Routes, Pipelines, Sources, and Destinations that use the previous Pack version so that they reference the upgraded Pack.
 
  :param id: The <code>id</code> of the Pack to upgrade.
  :param source:
@@ -1036,7 +841,7 @@
  ) -> models.UpdatePacksByIDResponse:
  r"""Upgrade a Pack
 
- Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack.Copy the modified files back to the upgraded Pack after you install it with <code>POST /packs</code> to overwrite the default versions in the Pack.</br></br>After you upgrade the Pack, update any Routes, Pipelines, Sources, and Destinations that use the previous Pack version so that they reference the upgraded Pack.
+ Upgrade the specified Pack.</br></br>If the Pack includes any user–modified versions of default Cribl Knowledge resources such as lookups, copy the modified files locally for safekeeping before upgrading the Pack. Copy the modified files back to the upgraded Pack after you install it with <code>POST /packs</code> to overwrite the default versions in the Pack.</br></br>After you upgrade the Pack, update any Routes, Pipelines, Sources, and Destinations that use the previous Pack version so that they reference the upgraded Pack.
 
  :param id: The <code>id</code> of the Pack to upgrade.
  :param source:
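
With upload() and upload_async() removed, 0.3.0a1 no longer exposes the PUT /packs file-upload step that 0.2.1rc7 paired with POST /packs. For reference, a sketch of the old two-step flow as it existed in 0.2.1rc7; client is assumed to be an already-configured SDK instance and the filename is illustrative:

    # 0.2.1rc7 only; packs.upload() is removed in 0.3.0a1.
    with open("example_pack.crbl", "rb") as f:  # illustrative filename
        uploaded = client.packs.upload(filename="example_pack.crbl", request_body=f)
    # Per the removed docstring, the response carries the source ID that then
    # goes into the source parameter of the POST /packs (install) request.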
cribl_control_plane/routes_sdk.py

@@ -584,11 +584,11 @@
  timeout_ms: Optional[int] = None,
  http_headers: Optional[Mapping[str, str]] = None,
  ) -> models.CreateRoutesAppendByIDResponse:
- r"""Add a Route to the end of the Routing table
+ r"""Append a Route to the end of the Routing table
 
- Add a Route to the end of the specified Routing table.
+ Append a Route to the end of the specified Routing table.</br></br>Provide a complete representation of the Routing table, including the Route that you want to append, in the request body. Cribl removes any omitted Routes and fields in the Routing table when appending the Route.</br></br>Confirm that the configuration in your request body is correct before sending the request. If the configuration is incorrect, the Routing table might not function as expected.
 
- :param id: The <code>id</code> of the Routing table to add the Route to. The supported value is <code>default</code>.
+ :param id: The <code>id</code> of the Routing table to append the Route to. The supported value is <code>default</code>.
  :param request_body: RouteDefinitions object
  :param retries: Override the default retry configuration for this method
  :param server_url: Override the default server URL for this method
@@ -679,11 +679,11 @@
  timeout_ms: Optional[int] = None,
  http_headers: Optional[Mapping[str, str]] = None,
  ) -> models.CreateRoutesAppendByIDResponse:
- r"""Add a Route to the end of the Routing table
+ r"""Append a Route to the end of the Routing table
 
- Add a Route to the end of the specified Routing table.
+ Append a Route to the end of the specified Routing table.</br></br>Provide a complete representation of the Routing table, including the Route that you want to append, in the request body. Cribl removes any omitted Routes and fields in the Routing table when appending the Route.</br></br>Confirm that the configuration in your request body is correct before sending the request. If the configuration is incorrect, the Routing table might not function as expected.
 
- :param id: The <code>id</code> of the Routing table to add the Route to. The supported value is <code>default</code>.
+ :param id: The <code>id</code> of the Routing table to append the Route to. The supported value is <code>default</code>.
  :param request_body: RouteDefinitions object
  :param retries: Override the default retry configuration for this method
  :param server_url: Override the default server URL for this method
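
The reworded docstrings make the append semantics explicit: the request body must be a complete RouteDefinitions representation of the Routing table, and any Routes or fields omitted from it are removed. A hedged sketch of the read-modify-write pattern this implies; the method and attribute names on client.routes below are assumptions, not taken from this diff:

    # Hypothetical method and attribute names; only the "send the full table
    # back" requirement comes from the docstring above.
    table = client.routes.get(id="default")        # fetch the current Routing table
    routes = list(table.routes)                    # keep every existing Route
    routes.append(new_route)                       # the Route to append, built elsewhere
    client.routes.append(id="default", request_body=routes)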