cribl-control-plane 0.0.39__py3-none-any.whl → 0.4.0b23__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (403)
  1. cribl_control_plane/_hooks/clientcredentials.py +92 -42
  2. cribl_control_plane/_version.py +4 -4
  3. cribl_control_plane/acl.py +21 -9
  4. cribl_control_plane/auth_sdk.py +6 -3
  5. cribl_control_plane/basesdk.py +17 -1
  6. cribl_control_plane/branches.py +28 -8
  7. cribl_control_plane/commits.py +119 -47
  8. cribl_control_plane/commits_files.py +44 -24
  9. cribl_control_plane/configs_versions.py +16 -10
  10. cribl_control_plane/cribl.py +495 -0
  11. cribl_control_plane/destinations.py +86 -34
  12. cribl_control_plane/destinations_pq.py +34 -14
  13. cribl_control_plane/errors/__init__.py +23 -8
  14. cribl_control_plane/errors/apierror.py +2 -0
  15. cribl_control_plane/errors/criblcontrolplaneerror.py +11 -7
  16. cribl_control_plane/errors/error.py +4 -2
  17. cribl_control_plane/errors/healthserverstatus_error.py +41 -0
  18. cribl_control_plane/errors/no_response_error.py +5 -1
  19. cribl_control_plane/errors/responsevalidationerror.py +2 -0
  20. cribl_control_plane/functions.py +367 -0
  21. cribl_control_plane/groups_configs.py +8 -3
  22. cribl_control_plane/groups_sdk.py +156 -94
  23. cribl_control_plane/health.py +34 -14
  24. cribl_control_plane/hectokens.py +44 -20
  25. cribl_control_plane/httpclient.py +0 -1
  26. cribl_control_plane/lakedatasets.py +156 -62
  27. cribl_control_plane/models/__init__.py +3298 -479
  28. cribl_control_plane/models/addhectokenrequest.py +7 -1
  29. cribl_control_plane/models/authtoken.py +5 -1
  30. cribl_control_plane/models/backupssettings_union.py +37 -0
  31. cribl_control_plane/models/{lookupversions.py → branchinfo.py} +4 -4
  32. cribl_control_plane/models/cacheconnection.py +30 -2
  33. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  34. cribl_control_plane/models/cloudprovider.py +2 -1
  35. cribl_control_plane/models/collectorazureblob.py +130 -0
  36. cribl_control_plane/models/collectorconf.py +56 -0
  37. cribl_control_plane/models/collectorcribllake.py +27 -0
  38. cribl_control_plane/models/collectordatabase.py +92 -0
  39. cribl_control_plane/models/collectorfilesystem.py +66 -0
  40. cribl_control_plane/models/collectorgooglecloudstorage.py +131 -0
  41. cribl_control_plane/models/collectorhealthcheck.py +269 -0
  42. cribl_control_plane/models/collectorrest.py +340 -0
  43. cribl_control_plane/models/collectors3.py +239 -0
  44. cribl_control_plane/models/collectorscript.py +59 -0
  45. cribl_control_plane/models/collectorsplunk.py +253 -0
  46. cribl_control_plane/models/configgroup.py +62 -8
  47. cribl_control_plane/models/configgroupcloud.py +17 -3
  48. cribl_control_plane/models/countedbranchinfo.py +20 -0
  49. cribl_control_plane/models/countedconfiggroup.py +20 -0
  50. cribl_control_plane/models/countedcribllakedataset.py +20 -0
  51. cribl_control_plane/models/counteddistributedsummary.py +20 -0
  52. cribl_control_plane/models/countedfunctionresponse.py +20 -0
  53. cribl_control_plane/models/countedgitcommitsummary.py +20 -0
  54. cribl_control_plane/models/countedgitcountresult.py +20 -0
  55. cribl_control_plane/models/{createinputop.py → countedgitdiffresult.py} +5 -9
  56. cribl_control_plane/models/countedgitfilesresponse.py +20 -0
  57. cribl_control_plane/models/{getversioninfoop.py → countedgitinfo.py} +2 -6
  58. cribl_control_plane/models/countedgitlogresult.py +20 -0
  59. cribl_control_plane/models/countedgitrevertresult.py +20 -0
  60. cribl_control_plane/models/countedgitshowresult.py +20 -0
  61. cribl_control_plane/models/countedgitstatusresult.py +20 -0
  62. cribl_control_plane/models/{listinputop.py → countedinput.py} +2 -6
  63. cribl_control_plane/models/countedinputsplunkhec.py +20 -0
  64. cribl_control_plane/models/countedjobinfo.py +20 -0
  65. cribl_control_plane/models/countedmasterworkerentry.py +20 -0
  66. cribl_control_plane/models/countednumber.py +19 -0
  67. cribl_control_plane/models/{getversionbranchop.py → countedobject.py} +2 -6
  68. cribl_control_plane/models/{listoutputop.py → countedoutput.py} +2 -6
  69. cribl_control_plane/models/{createoutputop.py → countedoutputsamplesresponse.py} +5 -9
  70. cribl_control_plane/models/countedoutputtestresponse.py +20 -0
  71. cribl_control_plane/models/countedpackinfo.py +20 -0
  72. cribl_control_plane/models/{createpacksop.py → countedpackinstallinfo.py} +2 -6
  73. cribl_control_plane/models/{listpipelineop.py → countedpipeline.py} +2 -6
  74. cribl_control_plane/models/{listroutesop.py → countedroutes.py} +2 -6
  75. cribl_control_plane/models/countedstring.py +19 -0
  76. cribl_control_plane/models/countedsystemsettingsconf.py +20 -0
  77. cribl_control_plane/models/countedteamaccesscontrollist.py +20 -0
  78. cribl_control_plane/models/counteduseraccesscontrollist.py +20 -0
  79. cribl_control_plane/models/createauthloginop.py +18 -0
  80. cribl_control_plane/models/createconfiggroupbyproductop.py +26 -27
  81. cribl_control_plane/models/createcribllakedatasetbylakeidop.py +1 -19
  82. cribl_control_plane/models/createinputhectokenbyidop.py +1 -19
  83. cribl_control_plane/models/createoutputtestbyidop.py +1 -20
  84. cribl_control_plane/models/createroutesappendbyidop.py +4 -22
  85. cribl_control_plane/models/createversioncommitop.py +1 -19
  86. cribl_control_plane/models/createversionrevertop.py +3 -21
  87. cribl_control_plane/models/createversionundoop.py +1 -18
  88. cribl_control_plane/models/criblevent.py +15 -0
  89. cribl_control_plane/models/cribllakedataset.py +23 -3
  90. cribl_control_plane/models/cribllakedatasetupdate.py +95 -0
  91. cribl_control_plane/models/datasetmetadata.py +18 -2
  92. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +18 -21
  93. cribl_control_plane/models/deletecribllakedatasetbylakeidandidop.py +1 -20
  94. cribl_control_plane/models/deleteinputbyidop.py +1 -20
  95. cribl_control_plane/models/deleteoutputbyidop.py +1 -20
  96. cribl_control_plane/models/deleteoutputpqbyidop.py +1 -19
  97. cribl_control_plane/models/deletepacksbyidop.py +1 -20
  98. cribl_control_plane/models/deletepipelinebyidop.py +3 -22
  99. cribl_control_plane/models/difffiles.py +130 -0
  100. cribl_control_plane/models/diffline.py +26 -0
  101. cribl_control_plane/models/difflinecontext.py +28 -0
  102. cribl_control_plane/models/difflinedelete.py +25 -0
  103. cribl_control_plane/models/difflineinsert.py +25 -0
  104. cribl_control_plane/models/distributedsummary.py +6 -0
  105. cribl_control_plane/models/functionaggregatemetrics.py +206 -0
  106. cribl_control_plane/models/functionaggregation.py +172 -0
  107. cribl_control_plane/models/functionautotimestamp.py +173 -0
  108. cribl_control_plane/models/functioncef.py +111 -0
  109. cribl_control_plane/models/functionchain.py +75 -0
  110. cribl_control_plane/models/functionclone.py +75 -0
  111. cribl_control_plane/models/functioncode.py +96 -0
  112. cribl_control_plane/models/functioncomment.py +75 -0
  113. cribl_control_plane/models/functiondistinct.py +99 -0
  114. cribl_control_plane/models/functiondnslookup.py +250 -0
  115. cribl_control_plane/models/functiondrop.py +73 -0
  116. cribl_control_plane/models/functiondropdimensions.py +87 -0
  117. cribl_control_plane/models/functiondynamicsampling.py +121 -0
  118. cribl_control_plane/models/functioneval.py +103 -0
  119. cribl_control_plane/models/functioneventbreaker.py +103 -0
  120. cribl_control_plane/models/functioneventstats.py +92 -0
  121. cribl_control_plane/models/functionexternaldata.py +73 -0
  122. cribl_control_plane/models/functionflatten.py +90 -0
  123. cribl_control_plane/models/functionfoldkeys.py +89 -0
  124. cribl_control_plane/models/functiongenstats.py +73 -0
  125. cribl_control_plane/models/functiongeoip.py +120 -0
  126. cribl_control_plane/models/functiongrok.py +95 -0
  127. cribl_control_plane/models/functionhandlebar.py +112 -0
  128. cribl_control_plane/models/functionjoin.py +112 -0
  129. cribl_control_plane/models/functionjsonunroll.py +80 -0
  130. cribl_control_plane/models/functionlakeexport.py +102 -0
  131. cribl_control_plane/models/functionlimit.py +75 -0
  132. cribl_control_plane/models/functionlocalsearchdatatypeparser.py +76 -0
  133. cribl_control_plane/models/functionlocalsearchrulesetrunner.py +97 -0
  134. cribl_control_plane/models/functionlookup.py +148 -0
  135. cribl_control_plane/models/functionmask.py +121 -0
  136. cribl_control_plane/models/functionmvexpand.py +128 -0
  137. cribl_control_plane/models/functionmvpull.py +99 -0
  138. cribl_control_plane/models/functionnotificationpolicies.py +186 -0
  139. cribl_control_plane/models/functionnotifications.py +85 -0
  140. cribl_control_plane/models/functionnotify.py +196 -0
  141. cribl_control_plane/models/functionnumerify.py +119 -0
  142. cribl_control_plane/models/functionotlplogs.py +82 -0
  143. cribl_control_plane/models/functionotlpmetrics.py +118 -0
  144. cribl_control_plane/models/functionotlptraces.py +111 -0
  145. cribl_control_plane/models/functionpack.py +80 -0
  146. cribl_control_plane/models/functionpivot.py +85 -0
  147. cribl_control_plane/models/functionpublishmetrics.py +153 -0
  148. cribl_control_plane/models/functionredis.py +173 -0
  149. cribl_control_plane/models/functionregexextract.py +112 -0
  150. cribl_control_plane/models/functionregexfilter.py +95 -0
  151. cribl_control_plane/models/functionrename.py +107 -0
  152. cribl_control_plane/models/functionresponse.py +242 -0
  153. cribl_control_plane/models/functionrollupmetrics.py +114 -0
  154. cribl_control_plane/models/functionsampling.py +90 -0
  155. cribl_control_plane/models/functionsend.py +141 -0
  156. cribl_control_plane/models/functionsensitivedatascanner.py +128 -0
  157. cribl_control_plane/models/functionserde.py +161 -0
  158. cribl_control_plane/models/functionserialize.py +134 -0
  159. cribl_control_plane/models/functionsidlookup.py +93 -0
  160. cribl_control_plane/models/functionsnmptrapserialize.py +144 -0
  161. cribl_control_plane/models/functionsort.py +97 -0
  162. cribl_control_plane/models/functionstore.py +132 -0
  163. cribl_control_plane/models/functionsuppress.py +115 -0
  164. cribl_control_plane/models/functiontee.py +90 -0
  165. cribl_control_plane/models/functiontrimtimestamp.py +75 -0
  166. cribl_control_plane/models/functionunion.py +80 -0
  167. cribl_control_plane/models/functionunroll.py +80 -0
  168. cribl_control_plane/models/functionwindow.py +96 -0
  169. cribl_control_plane/models/functionxmlunroll.py +92 -0
  170. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +24 -20
  171. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +24 -20
  172. cribl_control_plane/models/getconfiggroupbyproductandidop.py +14 -19
  173. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +18 -20
  174. cribl_control_plane/models/getcribllakedatasetbylakeidandidop.py +1 -20
  175. cribl_control_plane/models/getcribllakedatasetbylakeidop.py +54 -14
  176. cribl_control_plane/models/getfunctionsbyidop.py +18 -0
  177. cribl_control_plane/models/getinputbyidop.py +1 -20
  178. cribl_control_plane/models/getmasterworkerentryop.py +1 -18
  179. cribl_control_plane/models/getoutputbyidop.py +1 -20
  180. cribl_control_plane/models/getoutputpqbyidop.py +1 -19
  181. cribl_control_plane/models/getoutputsamplesbyidop.py +1 -20
  182. cribl_control_plane/models/getpacksbyidop.py +1 -20
  183. cribl_control_plane/models/getpacksop.py +1 -19
  184. cribl_control_plane/models/getpipelinebyidop.py +3 -22
  185. cribl_control_plane/models/getroutesbyidop.py +3 -22
  186. cribl_control_plane/models/getsummaryop.py +18 -20
  187. cribl_control_plane/models/getversioncountop.py +3 -21
  188. cribl_control_plane/models/getversiondiffop.py +1 -18
  189. cribl_control_plane/models/getversionfilesop.py +3 -22
  190. cribl_control_plane/models/getversionop.py +1 -19
  191. cribl_control_plane/models/getversionshowop.py +1 -18
  192. cribl_control_plane/models/getversionstatusop.py +1 -19
  193. cribl_control_plane/models/{routecloneconf.py → gitcountresult.py} +4 -4
  194. cribl_control_plane/models/gitdiffresult.py +16 -0
  195. cribl_control_plane/models/gitinfo.py +14 -3
  196. cribl_control_plane/models/gitshowresult.py +19 -0
  197. cribl_control_plane/models/groupcreaterequest.py +172 -0
  198. cribl_control_plane/models/hbcriblinfo.py +42 -7
  199. cribl_control_plane/models/healthserverstatus.py +55 -0
  200. cribl_control_plane/models/heartbeatmetadata.py +6 -11
  201. cribl_control_plane/models/input.py +83 -78
  202. cribl_control_plane/models/inputappscope.py +126 -30
  203. cribl_control_plane/models/inputazureblob.py +62 -6
  204. cribl_control_plane/models/inputcloudflarehec.py +518 -0
  205. cribl_control_plane/models/inputcollection.py +47 -4
  206. cribl_control_plane/models/inputconfluentcloud.py +258 -32
  207. cribl_control_plane/models/inputcribl.py +47 -4
  208. cribl_control_plane/models/inputcriblhttp.py +121 -30
  209. cribl_control_plane/models/inputcribllakehttp.py +136 -30
  210. cribl_control_plane/models/inputcriblmetrics.py +48 -4
  211. cribl_control_plane/models/inputcribltcp.py +122 -24
  212. cribl_control_plane/models/inputcrowdstrike.py +92 -10
  213. cribl_control_plane/models/inputdatadogagent.py +98 -24
  214. cribl_control_plane/models/inputdatagen.py +47 -4
  215. cribl_control_plane/models/inputedgeprometheus.py +210 -50
  216. cribl_control_plane/models/inputelastic.py +167 -36
  217. cribl_control_plane/models/inputeventhub.py +209 -6
  218. cribl_control_plane/models/inputexec.py +59 -6
  219. cribl_control_plane/models/inputfile.py +78 -10
  220. cribl_control_plane/models/inputfirehose.py +97 -24
  221. cribl_control_plane/models/inputgooglepubsub.py +67 -6
  222. cribl_control_plane/models/inputgrafana.py +251 -71
  223. cribl_control_plane/models/inputhttp.py +97 -24
  224. cribl_control_plane/models/inputhttpraw.py +97 -24
  225. cribl_control_plane/models/inputjournalfiles.py +48 -4
  226. cribl_control_plane/models/inputkafka.py +252 -28
  227. cribl_control_plane/models/inputkinesis.py +130 -14
  228. cribl_control_plane/models/inputkubeevents.py +47 -4
  229. cribl_control_plane/models/inputkubelogs.py +61 -8
  230. cribl_control_plane/models/inputkubemetrics.py +61 -8
  231. cribl_control_plane/models/inputloki.py +113 -34
  232. cribl_control_plane/models/inputmetrics.py +97 -24
  233. cribl_control_plane/models/inputmodeldriventelemetry.py +107 -26
  234. cribl_control_plane/models/inputmsk.py +145 -32
  235. cribl_control_plane/models/inputnetflow.py +47 -4
  236. cribl_control_plane/models/inputoffice365mgmt.py +112 -14
  237. cribl_control_plane/models/inputoffice365msgtrace.py +114 -16
  238. cribl_control_plane/models/inputoffice365service.py +114 -16
  239. cribl_control_plane/models/inputopentelemetry.py +143 -32
  240. cribl_control_plane/models/inputprometheus.py +193 -44
  241. cribl_control_plane/models/inputprometheusrw.py +114 -27
  242. cribl_control_plane/models/inputrawudp.py +47 -4
  243. cribl_control_plane/models/inputs3.py +78 -8
  244. cribl_control_plane/models/inputs3inventory.py +92 -10
  245. cribl_control_plane/models/inputsecuritylake.py +93 -10
  246. cribl_control_plane/models/inputsnmp.py +112 -21
  247. cribl_control_plane/models/inputsplunk.py +130 -28
  248. cribl_control_plane/models/inputsplunkhec.py +119 -29
  249. cribl_control_plane/models/inputsplunksearch.py +112 -15
  250. cribl_control_plane/models/inputsqs.py +99 -16
  251. cribl_control_plane/models/inputsyslog.py +189 -47
  252. cribl_control_plane/models/inputsystemmetrics.py +202 -32
  253. cribl_control_plane/models/inputsystemstate.py +61 -8
  254. cribl_control_plane/models/inputtcp.py +122 -26
  255. cribl_control_plane/models/inputtcpjson.py +112 -26
  256. cribl_control_plane/models/inputwef.py +144 -15
  257. cribl_control_plane/models/inputwindowsmetrics.py +186 -33
  258. cribl_control_plane/models/inputwineventlogs.py +93 -11
  259. cribl_control_plane/models/inputwiz.py +173 -8
  260. cribl_control_plane/models/inputwizwebhook.py +97 -24
  261. cribl_control_plane/models/inputzscalerhec.py +119 -29
  262. cribl_control_plane/models/jobinfo.py +34 -0
  263. cribl_control_plane/models/jobstatus.py +48 -0
  264. cribl_control_plane/models/lakedatasetmetrics.py +17 -0
  265. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  266. cribl_control_plane/models/listconfiggroupbyproductop.py +14 -19
  267. cribl_control_plane/models/listmasterworkerentryop.py +1 -19
  268. cribl_control_plane/models/logininfo.py +3 -3
  269. cribl_control_plane/models/masterworkerentry.py +20 -13
  270. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  271. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  272. cribl_control_plane/models/nodeprovidedinfo.py +13 -11
  273. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  274. cribl_control_plane/models/nodeupgradestate.py +2 -1
  275. cribl_control_plane/models/nodeupgradestatus.py +51 -5
  276. cribl_control_plane/models/outpostnodeinfo.py +16 -0
  277. cribl_control_plane/models/output.py +104 -90
  278. cribl_control_plane/models/outputazureblob.py +171 -18
  279. cribl_control_plane/models/outputazuredataexplorer.py +514 -90
  280. cribl_control_plane/models/outputazureeventhub.py +315 -31
  281. cribl_control_plane/models/outputazurelogs.py +145 -26
  282. cribl_control_plane/models/outputchronicle.py +532 -0
  283. cribl_control_plane/models/outputclickhouse.py +205 -34
  284. cribl_control_plane/models/outputcloudflarer2.py +632 -0
  285. cribl_control_plane/models/outputcloudwatch.py +129 -23
  286. cribl_control_plane/models/outputconfluentcloud.py +384 -57
  287. cribl_control_plane/models/outputcriblhttp.py +198 -31
  288. cribl_control_plane/models/outputcribllake.py +156 -16
  289. cribl_control_plane/models/outputcribltcp.py +194 -29
  290. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +172 -28
  291. cribl_control_plane/models/outputdatabricks.py +501 -0
  292. cribl_control_plane/models/outputdatadog.py +199 -31
  293. cribl_control_plane/models/outputdataset.py +181 -29
  294. cribl_control_plane/models/outputdiskspool.py +17 -2
  295. cribl_control_plane/models/outputdls3.py +233 -24
  296. cribl_control_plane/models/outputdynatracehttp.py +208 -34
  297. cribl_control_plane/models/outputdynatraceotlp.py +210 -36
  298. cribl_control_plane/models/outputelastic.py +199 -30
  299. cribl_control_plane/models/outputelasticcloud.py +171 -26
  300. cribl_control_plane/models/outputexabeam.py +96 -10
  301. cribl_control_plane/models/outputfilesystem.py +139 -14
  302. cribl_control_plane/models/outputgooglechronicle.py +216 -35
  303. cribl_control_plane/models/outputgooglecloudlogging.py +174 -31
  304. cribl_control_plane/models/outputgooglecloudstorage.py +215 -24
  305. cribl_control_plane/models/outputgooglepubsub.py +131 -23
  306. cribl_control_plane/models/outputgrafanacloud.py +376 -74
  307. cribl_control_plane/models/outputgraphite.py +128 -25
  308. cribl_control_plane/models/outputhoneycomb.py +145 -26
  309. cribl_control_plane/models/outputhumiohec.py +162 -28
  310. cribl_control_plane/models/outputinfluxdb.py +165 -28
  311. cribl_control_plane/models/outputkafka.py +375 -52
  312. cribl_control_plane/models/outputkinesis.py +165 -27
  313. cribl_control_plane/models/outputloki.py +164 -34
  314. cribl_control_plane/models/outputmicrosoftfabric.py +540 -0
  315. cribl_control_plane/models/outputminio.py +225 -25
  316. cribl_control_plane/models/outputmsk.py +267 -54
  317. cribl_control_plane/models/outputnewrelic.py +171 -29
  318. cribl_control_plane/models/outputnewrelicevents.py +163 -28
  319. cribl_control_plane/models/outputopentelemetry.py +240 -40
  320. cribl_control_plane/models/outputprometheus.py +145 -26
  321. cribl_control_plane/models/outputring.py +49 -8
  322. cribl_control_plane/models/outputs3.py +233 -26
  323. cribl_control_plane/models/outputsecuritylake.py +179 -18
  324. cribl_control_plane/models/outputsentinel.py +172 -29
  325. cribl_control_plane/models/outputsentineloneaisiem.py +178 -32
  326. cribl_control_plane/models/outputservicenow.py +223 -38
  327. cribl_control_plane/models/outputsignalfx.py +145 -26
  328. cribl_control_plane/models/outputsns.py +143 -25
  329. cribl_control_plane/models/outputsplunk.py +206 -36
  330. cribl_control_plane/models/outputsplunkhec.py +238 -26
  331. cribl_control_plane/models/outputsplunklb.py +262 -42
  332. cribl_control_plane/models/outputsqs.py +163 -33
  333. cribl_control_plane/models/outputstatsd.py +127 -25
  334. cribl_control_plane/models/outputstatsdext.py +128 -25
  335. cribl_control_plane/models/outputsumologic.py +146 -25
  336. cribl_control_plane/models/outputsyslog.py +318 -46
  337. cribl_control_plane/models/outputtcpjson.py +186 -32
  338. cribl_control_plane/models/outputwavefront.py +145 -26
  339. cribl_control_plane/models/outputwebhook.py +211 -33
  340. cribl_control_plane/models/outputxsiam.py +143 -26
  341. cribl_control_plane/models/packinfo.py +8 -5
  342. cribl_control_plane/models/packinstallinfo.py +11 -8
  343. cribl_control_plane/models/piisettings_union.py +31 -0
  344. cribl_control_plane/models/productscore.py +2 -1
  345. cribl_control_plane/models/rbacresource.py +2 -1
  346. cribl_control_plane/models/resourcepolicy.py +15 -2
  347. cribl_control_plane/models/rollbacksettings_union.py +44 -0
  348. cribl_control_plane/models/routeconf.py +3 -4
  349. cribl_control_plane/models/runnablejob.py +27 -0
  350. cribl_control_plane/models/runnablejobcollection.py +628 -0
  351. cribl_control_plane/models/runnablejobexecutor.py +360 -0
  352. cribl_control_plane/models/runnablejobscheduledsearch.py +279 -0
  353. cribl_control_plane/models/snisettings_union.py +31 -0
  354. cribl_control_plane/models/systemsettingsconf.py +291 -0
  355. cribl_control_plane/models/tlssettings_union.py +43 -0
  356. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +19 -20
  357. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +19 -21
  358. cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +10 -25
  359. cribl_control_plane/models/updatehectokenrequest.py +7 -1
  360. cribl_control_plane/models/updateinputbyidop.py +1 -19
  361. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +1 -19
  362. cribl_control_plane/models/updateoutputbyidop.py +1 -19
  363. cribl_control_plane/models/updatepacksbyidop.py +1 -20
  364. cribl_control_plane/models/updatepacksop.py +25 -0
  365. cribl_control_plane/models/updatepipelinebyidop.py +5 -23
  366. cribl_control_plane/models/updateroutesbyidop.py +3 -21
  367. cribl_control_plane/models/upgradegroupsettings_union.py +43 -0
  368. cribl_control_plane/models/upgradepackageurls.py +20 -0
  369. cribl_control_plane/models/upgradesettings.py +38 -0
  370. cribl_control_plane/models/uploadpackresponse.py +13 -0
  371. cribl_control_plane/models/workertypes.py +2 -1
  372. cribl_control_plane/nodes.py +37 -23
  373. cribl_control_plane/packs.py +292 -37
  374. cribl_control_plane/pipelines.py +98 -48
  375. cribl_control_plane/routes_sdk.py +86 -50
  376. cribl_control_plane/samples.py +32 -16
  377. cribl_control_plane/sdk.py +26 -6
  378. cribl_control_plane/settings.py +20 -0
  379. cribl_control_plane/sources.py +85 -33
  380. cribl_control_plane/statuses.py +16 -6
  381. cribl_control_plane/summaries.py +16 -6
  382. cribl_control_plane/system_sdk.py +20 -0
  383. cribl_control_plane/teams.py +16 -6
  384. cribl_control_plane/tokens.py +49 -21
  385. cribl_control_plane/utils/__init__.py +15 -3
  386. cribl_control_plane/utils/annotations.py +32 -8
  387. cribl_control_plane/utils/eventstreaming.py +10 -0
  388. cribl_control_plane/utils/forms.py +21 -10
  389. cribl_control_plane/utils/queryparams.py +14 -2
  390. cribl_control_plane/utils/retries.py +69 -5
  391. cribl_control_plane/utils/unmarshal_json_response.py +15 -1
  392. cribl_control_plane/versions.py +11 -6
  393. cribl_control_plane/versions_configs.py +16 -6
  394. {cribl_control_plane-0.0.39.dist-info → cribl_control_plane-0.4.0b23.dist-info}/METADATA +99 -43
  395. cribl_control_plane-0.4.0b23.dist-info/RECORD +450 -0
  396. {cribl_control_plane-0.0.39.dist-info → cribl_control_plane-0.4.0b23.dist-info}/WHEEL +1 -1
  397. cribl_control_plane-0.4.0b23.dist-info/licenses/LICENSE +201 -0
  398. cribl_control_plane/errors/healthstatus_error.py +0 -32
  399. cribl_control_plane/models/appmode.py +0 -13
  400. cribl_control_plane/models/createpipelineop.py +0 -24
  401. cribl_control_plane/models/createversionpushop.py +0 -23
  402. cribl_control_plane/models/healthstatus.py +0 -33
  403. cribl_control_plane-0.0.39.dist-info/RECORD +0 -315
cribl_control_plane/models/inputkafka.py

@@ -1,9 +1,13 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -23,20 +27,32 @@ class InputKafkaConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKafkaMode(str, Enum):
+class InputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
+    # Smart
     SMART = "smart"
+    # Always On
     ALWAYS = "always"
 
 
-class InputKafkaCompression(str, Enum):
+class InputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"
 
 
+class InputKafkaPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputKafkaPqControls(BaseModel):
+    pass
+
+
 class InputKafkaPqTypedDict(TypedDict):
     mode: NotRequired[InputKafkaMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,10 +68,13 @@ class InputKafkaPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputKafkaCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputKafkaPqControlsTypedDict]
 
 
 class InputKafkaPq(BaseModel):
-    mode: Optional[InputKafkaMode] = InputKafkaMode.ALWAYS
+    mode: Annotated[
+        Optional[InputKafkaMode], PlainValidator(validate_open_enum(False))
+    ] = InputKafkaMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -79,15 +98,32 @@ class InputKafkaPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Optional[InputKafkaCompression] = InputKafkaCompression.NONE
+    compress: Annotated[
+        Optional[InputKafkaCompression], PlainValidator(validate_open_enum(False))
+    ] = InputKafkaCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputKafkaPqControls], pydantic.Field(alias="pqControls")
+    ] = None
 
-class InputKafkaSchemaType(str, Enum):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
+    @field_serializer("mode")
+    def serialize_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("compress")
+    def serialize_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaCompression(value)
+            except ValueError:
+                return value
+        return value
 
 
 class InputKafkaAuthTypedDict(TypedDict):
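Each open-enum field also gains a paired `field_serializer`, as in the hunk above: on dump, a string matching a known member is normalized back through the real enum, and anything else is emitted unchanged instead of raising `ValueError`. A short sketch of that round trip, under the same assumptions as the previous example (pydantic v2 semantics, class names taken from this diff, "zstd" purely hypothetical):

    from cribl_control_plane import models

    pq = models.InputKafkaPq(mode="smart", compress="zstd")
    dumped = pq.model_dump()

    # "smart" round-trips through InputKafkaMode; the unknown codec string is
    # passed through as-is by the serialize_compress() hook shown above.
    print(dumped["mode"], dumped["compress"])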
@@ -109,14 +145,18 @@ class InputKafkaAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class InputKafkaKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
+class InputKafkaKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputKafkaKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
+class InputKafkaKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -176,22 +216,44 @@ class InputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Optional[InputKafkaKafkaSchemaRegistryMinimumTLSVersion],
+        Annotated[
+            Optional[InputKafkaKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Optional[InputKafkaKafkaSchemaRegistryMaximumTLSVersion],
+        Annotated[
+            Optional[InputKafkaKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
+    @field_serializer("min_version")
+    def serialize_min_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaKafkaSchemaRegistryMinimumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("max_version")
+    def serialize_max_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaKafkaSchemaRegistryMaximumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
 
 class InputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[InputKafkaSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
@@ -211,11 +273,6 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Optional[InputKafkaSchemaType], pydantic.Field(alias="schemaType")
-    ] = InputKafkaSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -235,18 +292,76 @@ class InputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     tls: Optional[InputKafkaKafkaSchemaRegistryTLSSettingsClientSide] = None
 
 
-class InputKafkaSASLMechanism(str, Enum):
+class InputKafkaAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Enter credentials directly, or select a stored secret"""
+
+    MANUAL = "manual"
+    SECRET = "secret"
+
+
+class InputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+    # PLAIN
     PLAIN = "plain"
+    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
+    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
+    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
+class InputKafkaOauthParamTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class InputKafkaOauthParam(BaseModel):
+    name: str
+
+    value: str
+
+
+class InputKafkaSaslExtensionTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class InputKafkaSaslExtension(BaseModel):
+    name: str
+
+    value: str
+
+
 class InputKafkaAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
+    username: NotRequired[str]
+    password: NotRequired[str]
+    auth_type: NotRequired[InputKafkaAuthenticationMethod]
+    r"""Enter credentials directly, or select a stored secret"""
+    credentials_secret: NotRequired[str]
+    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[InputKafkaSASLMechanism]
+    keytab_location: NotRequired[str]
+    r"""Location of keytab file for authentication principal"""
+    principal: NotRequired[str]
+    r"""Authentication principal, such as `kafka_user@example.com`"""
+    broker_service_class: NotRequired[str]
+    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
+    oauth_enabled: NotRequired[bool]
+    r"""Enable OAuth authentication"""
+    token_url: NotRequired[str]
+    r"""URL of the token endpoint to use for OAuth authentication"""
+    client_id: NotRequired[str]
+    r"""Client ID to use for OAuth authentication"""
+    oauth_secret_type: NotRequired[str]
+    client_text_secret: NotRequired[str]
+    r"""Select or create a stored text secret"""
+    oauth_params: NotRequired[List[InputKafkaOauthParamTypedDict]]
+    r"""Additional fields to send to the token endpoint, such as scope or audience"""
+    sasl_extensions: NotRequired[List[InputKafkaSaslExtensionTypedDict]]
+    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class InputKafkaAuthentication(BaseModel):
@@ -254,17 +369,98 @@ class InputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Optional[InputKafkaSASLMechanism] = InputKafkaSASLMechanism.PLAIN
+    username: Optional[str] = None
+
+    password: Optional[str] = None
+
+    auth_type: Annotated[
+        Annotated[
+            Optional[InputKafkaAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
+    ] = InputKafkaAuthenticationMethod.MANUAL
+    r"""Enter credentials directly, or select a stored secret"""
+
+    credentials_secret: Annotated[
+        Optional[str], pydantic.Field(alias="credentialsSecret")
+    ] = None
+    r"""Select or create a secret that references your credentials"""
+
+    mechanism: Annotated[
+        Optional[InputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
+    ] = InputKafkaSASLMechanism.PLAIN
+
+    keytab_location: Annotated[
+        Optional[str], pydantic.Field(alias="keytabLocation")
+    ] = None
+    r"""Location of keytab file for authentication principal"""
+
+    principal: Optional[str] = None
+    r"""Authentication principal, such as `kafka_user@example.com`"""
 
+    broker_service_class: Annotated[
+        Optional[str], pydantic.Field(alias="brokerServiceClass")
+    ] = None
+    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
+
+    oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
+        False
+    )
+    r"""Enable OAuth authentication"""
+
+    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
+    r"""URL of the token endpoint to use for OAuth authentication"""
+
+    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
+    r"""Client ID to use for OAuth authentication"""
+
+    oauth_secret_type: Annotated[
+        Optional[str], pydantic.Field(alias="oauthSecretType")
+    ] = "secret"
+
+    client_text_secret: Annotated[
+        Optional[str], pydantic.Field(alias="clientTextSecret")
+    ] = None
+    r"""Select or create a stored text secret"""
+
+    oauth_params: Annotated[
+        Optional[List[InputKafkaOauthParam]], pydantic.Field(alias="oauthParams")
+    ] = None
+    r"""Additional fields to send to the token endpoint, such as scope or audience"""
 
-class InputKafkaMinimumTLSVersion(str, Enum):
+    sasl_extensions: Annotated[
+        Optional[List[InputKafkaSaslExtension]], pydantic.Field(alias="saslExtensions")
+    ] = None
+    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
+
+    @field_serializer("auth_type")
+    def serialize_auth_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaAuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("mechanism")
+    def serialize_mechanism(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaSASLMechanism(value)
+            except ValueError:
+                return value
+        return value
+
+
+class InputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class InputKafkaMaximumTLSVersion(str, Enum):
+class InputKafkaMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
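Beyond the open enums, `InputKafkaAuthentication` itself grows from a two-field model (`disabled`, `mechanism`) to the full SASL/Kerberos/OAuth surface shown above. A hypothetical construction sketch, assuming the generated base model accepts snake_case field names (its TypedDict twins suggest population by field name is enabled); all values below are placeholders, not recommendations:

    from cribl_control_plane import models

    auth = models.InputKafkaAuthentication(
        disabled=False,
        mechanism="scram-sha-256",
        username="svc-cribl",           # placeholder credential
        password="example-only",        # placeholder credential
        sasl_extensions=[               # e.g. Confluent's logicalCluster, per the docstring above
            models.InputKafkaSaslExtension(name="logicalCluster", value="lkc-example"),
        ],
    )
    # by_alias=True should emit the wire-format keys (authType, saslExtensions, ...).
    print(auth.model_dump(by_alias=True, exclude_none=True))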
@@ -324,13 +520,39 @@ class InputKafkaTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Optional[InputKafkaMinimumTLSVersion], pydantic.Field(alias="minVersion")
+        Annotated[
+            Optional[InputKafkaMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Optional[InputKafkaMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+        Annotated[
+            Optional[InputKafkaMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None
 
+    @field_serializer("min_version")
+    def serialize_min_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaMinimumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("max_version")
+    def serialize_max_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKafkaMaximumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
 
 class InputKafkaMetadatumTypedDict(TypedDict):
     name: str
@@ -394,7 +616,8 @@ class InputKafkaTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
     tls: NotRequired[InputKafkaTLSSettingsClientSideTypedDict]
     session_timeout: NotRequired[float]
-    r"""Timeout used to detect client failures when using Kafka's group-management facilities.
+    r"""
+    Timeout used to detect client failures when using Kafka's group-management facilities.
     If the client sends no heartbeats to the broker before the timeout expires,
     the broker will remove the client from the group and initiate a rebalance.
     Value must be between the broker's configured group.min.session.timeout.ms and group.max.session.timeout.ms.
@@ -516,7 +739,8 @@ class InputKafka(BaseModel):
     session_timeout: Annotated[
         Optional[float], pydantic.Field(alias="sessionTimeout")
     ] = 30000
-    r"""Timeout used to detect client failures when using Kafka's group-management facilities.
+    r"""
+    Timeout used to detect client failures when using Kafka's group-management facilities.
     If the client sends no heartbeats to the broker before the timeout expires,
     the broker will remove the client from the group and initiate a rebalance.
     Value must be between the broker's configured group.min.session.timeout.ms and group.max.session.timeout.ms.
cribl_control_plane/models/inputkinesis.py

@@ -1,9 +1,13 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -23,20 +27,32 @@ class InputKinesisConnection(BaseModel):
     pipeline: Optional[str] = None
 
 
-class InputKinesisMode(str, Enum):
+class InputKinesisMode(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
+    # Smart
     SMART = "smart"
+    # Always On
     ALWAYS = "always"
 
 
-class InputKinesisCompression(str, Enum):
+class InputKinesisCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"
 
 
+class InputKinesisPqControlsTypedDict(TypedDict):
+    pass
+
+
+class InputKinesisPqControls(BaseModel):
+    pass
+
+
 class InputKinesisPqTypedDict(TypedDict):
     mode: NotRequired[InputKinesisMode]
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,10 +68,13 @@ class InputKinesisPqTypedDict(TypedDict):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
     compress: NotRequired[InputKinesisCompression]
     r"""Codec to use to compress the persisted data"""
+    pq_controls: NotRequired[InputKinesisPqControlsTypedDict]
 
 
 class InputKinesisPq(BaseModel):
-    mode: Optional[InputKinesisMode] = InputKinesisMode.ALWAYS
+    mode: Annotated[
+        Optional[InputKinesisMode], PlainValidator(validate_open_enum(False))
+    ] = InputKinesisMode.ALWAYS
     r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
 
     max_buffer_size: Annotated[
@@ -79,42 +98,77 @@ class InputKinesisPq(BaseModel):
     path: Optional[str] = "$CRIBL_HOME/state/queues"
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
 
-    compress: Optional[InputKinesisCompression] = InputKinesisCompression.NONE
+    compress: Annotated[
+        Optional[InputKinesisCompression], PlainValidator(validate_open_enum(False))
+    ] = InputKinesisCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
+    pq_controls: Annotated[
+        Optional[InputKinesisPqControls], pydantic.Field(alias="pqControls")
+    ] = None
 
-class ShardIteratorStart(str, Enum):
+    @field_serializer("mode")
+    def serialize_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKinesisMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("compress")
+    def serialize_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKinesisCompression(value)
+            except ValueError:
+                return value
+        return value
+
+
+class ShardIteratorStart(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Location at which to start reading a shard for the first time"""
 
+    # Earliest record
     TRIM_HORIZON = "TRIM_HORIZON"
+    # Latest record
     LATEST = "LATEST"
 
 
-class InputKinesisRecordDataFormat(str, Enum):
+class InputKinesisRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
+    # Cribl
     CRIBL = "cribl"
+    # Newline JSON
     NDJSON = "ndjson"
+    # Cloudwatch Logs
     CLOUDWATCH = "cloudwatch"
+    # Event per line
     LINE = "line"
 
 
-class ShardLoadBalancing(str, Enum):
+class ShardLoadBalancing(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
+    # Consistent Hashing
     CONSISTENT_HASHING = "ConsistentHashing"
+    # Round Robin
     ROUND_ROBIN = "RoundRobin"
 
 
-class InputKinesisAuthenticationMethod(str, Enum):
+class InputKinesisAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""AWS authentication method. Choose Auto to use IAM roles."""
 
+    # Auto
     AUTO = "auto"
+    # Manual
     MANUAL = "manual"
+    # Secret Key pair
     SECRET = "secret"
 
 
-class InputKinesisSignatureVersion(str, Enum):
+class InputKinesisSignatureVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Signature version to use for signing Kinesis stream requests"""
 
     V2 = "v2"
@@ -246,12 +300,19 @@ class InputKinesis(BaseModel):
     r"""A JavaScript expression to be called with each shardId for the stream. If the expression evaluates to a truthy value, the shard will be processed."""
 
     shard_iterator_type: Annotated[
-        Optional[ShardIteratorStart], pydantic.Field(alias="shardIteratorType")
+        Annotated[
+            Optional[ShardIteratorStart], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="shardIteratorType"),
     ] = ShardIteratorStart.TRIM_HORIZON
     r"""Location at which to start reading a shard for the first time"""
 
     payload_format: Annotated[
-        Optional[InputKinesisRecordDataFormat], pydantic.Field(alias="payloadFormat")
+        Annotated[
+            Optional[InputKinesisRecordDataFormat],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="payloadFormat"),
     ] = InputKinesisRecordDataFormat.CRIBL
     r"""Format of data inside the Kinesis Stream records. Gzip compression is automatically detected."""
 
@@ -266,12 +327,18 @@ class InputKinesis(BaseModel):
     r"""Maximum number of records, across all shards, to pull down at once per Worker Process"""
 
     load_balancing_algorithm: Annotated[
-        Optional[ShardLoadBalancing], pydantic.Field(alias="loadBalancingAlgorithm")
+        Annotated[
+            Optional[ShardLoadBalancing], PlainValidator(validate_open_enum(False))
+        ],
+        pydantic.Field(alias="loadBalancingAlgorithm"),
     ] = ShardLoadBalancing.CONSISTENT_HASHING
     r"""The load-balancing algorithm to use for spreading out shards across Workers and Worker Processes"""
 
     aws_authentication_method: Annotated[
-        Optional[InputKinesisAuthenticationMethod],
+        Annotated[
+            Optional[InputKinesisAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="awsAuthenticationMethod"),
     ] = InputKinesisAuthenticationMethod.AUTO
     r"""AWS authentication method. Choose Auto to use IAM roles."""
@@ -284,7 +351,11 @@ class InputKinesis(BaseModel):
     r"""Kinesis stream service endpoint. If empty, defaults to the AWS Region-specific endpoint. Otherwise, it must point to Kinesis stream-compatible endpoint."""
 
     signature_version: Annotated[
-        Optional[InputKinesisSignatureVersion], pydantic.Field(alias="signatureVersion")
+        Annotated[
+            Optional[InputKinesisSignatureVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="signatureVersion"),
     ] = InputKinesisSignatureVersion.V4
     r"""Signature version to use for signing Kinesis stream requests"""
 
@@ -337,3 +408,48 @@ class InputKinesis(BaseModel):
 
     aws_secret: Annotated[Optional[str], pydantic.Field(alias="awsSecret")] = None
     r"""Select or create a stored secret that references your access key and secret key"""
+
+    @field_serializer("shard_iterator_type")
+    def serialize_shard_iterator_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.ShardIteratorStart(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("payload_format")
+    def serialize_payload_format(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKinesisRecordDataFormat(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("load_balancing_algorithm")
+    def serialize_load_balancing_algorithm(self, value):
+        if isinstance(value, str):
+            try:
+                return models.ShardLoadBalancing(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("aws_authentication_method")
+    def serialize_aws_authentication_method(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKinesisAuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("signature_version")
+    def serialize_signature_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputKinesisSignatureVersion(value)
+            except ValueError:
+                return value
+        return value
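The Kinesis model gets the same treatment end to end: open enums for shard iterator start, payload format, load balancing, authentication method, and signature version, each paired with a serializer above. Because every generated model ships as a TypedDict/BaseModel pair, a plain dict in the TypedDict shape can be validated into the model. A final hedged sketch, again assuming the 0.4.0b23 package exposes these names and that the generated base model allows population by field name, as its snake_case TypedDicts suggest:

    from cribl_control_plane import models

    pq_dict: models.InputKinesisPqTypedDict = {
        "mode": "smart",
        "compress": "gzip",
        "pq_controls": {},  # the new, currently empty pqControls object
    }
    pq = models.InputKinesisPq.model_validate(pq_dict)

    # Dumping with aliases should show the camelCase keys the API expects.
    print(pq.model_dump(by_alias=True, exclude_none=True))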