cribl-control-plane 0.0.39__py3-none-any.whl → 0.4.0b23__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- cribl_control_plane/_hooks/clientcredentials.py +92 -42
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/acl.py +21 -9
- cribl_control_plane/auth_sdk.py +6 -3
- cribl_control_plane/basesdk.py +17 -1
- cribl_control_plane/branches.py +28 -8
- cribl_control_plane/commits.py +119 -47
- cribl_control_plane/commits_files.py +44 -24
- cribl_control_plane/configs_versions.py +16 -10
- cribl_control_plane/cribl.py +495 -0
- cribl_control_plane/destinations.py +86 -34
- cribl_control_plane/destinations_pq.py +34 -14
- cribl_control_plane/errors/__init__.py +23 -8
- cribl_control_plane/errors/apierror.py +2 -0
- cribl_control_plane/errors/criblcontrolplaneerror.py +11 -7
- cribl_control_plane/errors/error.py +4 -2
- cribl_control_plane/errors/healthserverstatus_error.py +41 -0
- cribl_control_plane/errors/no_response_error.py +5 -1
- cribl_control_plane/errors/responsevalidationerror.py +2 -0
- cribl_control_plane/functions.py +367 -0
- cribl_control_plane/groups_configs.py +8 -3
- cribl_control_plane/groups_sdk.py +156 -94
- cribl_control_plane/health.py +34 -14
- cribl_control_plane/hectokens.py +44 -20
- cribl_control_plane/httpclient.py +0 -1
- cribl_control_plane/lakedatasets.py +156 -62
- cribl_control_plane/models/__init__.py +3298 -479
- cribl_control_plane/models/addhectokenrequest.py +7 -1
- cribl_control_plane/models/authtoken.py +5 -1
- cribl_control_plane/models/backupssettings_union.py +37 -0
- cribl_control_plane/models/{lookupversions.py → branchinfo.py} +4 -4
- cribl_control_plane/models/cacheconnection.py +30 -2
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
- cribl_control_plane/models/cloudprovider.py +2 -1
- cribl_control_plane/models/collectorazureblob.py +130 -0
- cribl_control_plane/models/collectorconf.py +56 -0
- cribl_control_plane/models/collectorcribllake.py +27 -0
- cribl_control_plane/models/collectordatabase.py +92 -0
- cribl_control_plane/models/collectorfilesystem.py +66 -0
- cribl_control_plane/models/collectorgooglecloudstorage.py +131 -0
- cribl_control_plane/models/collectorhealthcheck.py +269 -0
- cribl_control_plane/models/collectorrest.py +340 -0
- cribl_control_plane/models/collectors3.py +239 -0
- cribl_control_plane/models/collectorscript.py +59 -0
- cribl_control_plane/models/collectorsplunk.py +253 -0
- cribl_control_plane/models/configgroup.py +62 -8
- cribl_control_plane/models/configgroupcloud.py +17 -3
- cribl_control_plane/models/countedbranchinfo.py +20 -0
- cribl_control_plane/models/countedconfiggroup.py +20 -0
- cribl_control_plane/models/countedcribllakedataset.py +20 -0
- cribl_control_plane/models/counteddistributedsummary.py +20 -0
- cribl_control_plane/models/countedfunctionresponse.py +20 -0
- cribl_control_plane/models/countedgitcommitsummary.py +20 -0
- cribl_control_plane/models/countedgitcountresult.py +20 -0
- cribl_control_plane/models/{createinputop.py → countedgitdiffresult.py} +5 -9
- cribl_control_plane/models/countedgitfilesresponse.py +20 -0
- cribl_control_plane/models/{getversioninfoop.py → countedgitinfo.py} +2 -6
- cribl_control_plane/models/countedgitlogresult.py +20 -0
- cribl_control_plane/models/countedgitrevertresult.py +20 -0
- cribl_control_plane/models/countedgitshowresult.py +20 -0
- cribl_control_plane/models/countedgitstatusresult.py +20 -0
- cribl_control_plane/models/{listinputop.py → countedinput.py} +2 -6
- cribl_control_plane/models/countedinputsplunkhec.py +20 -0
- cribl_control_plane/models/countedjobinfo.py +20 -0
- cribl_control_plane/models/countedmasterworkerentry.py +20 -0
- cribl_control_plane/models/countednumber.py +19 -0
- cribl_control_plane/models/{getversionbranchop.py → countedobject.py} +2 -6
- cribl_control_plane/models/{listoutputop.py → countedoutput.py} +2 -6
- cribl_control_plane/models/{createoutputop.py → countedoutputsamplesresponse.py} +5 -9
- cribl_control_plane/models/countedoutputtestresponse.py +20 -0
- cribl_control_plane/models/countedpackinfo.py +20 -0
- cribl_control_plane/models/{createpacksop.py → countedpackinstallinfo.py} +2 -6
- cribl_control_plane/models/{listpipelineop.py → countedpipeline.py} +2 -6
- cribl_control_plane/models/{listroutesop.py → countedroutes.py} +2 -6
- cribl_control_plane/models/countedstring.py +19 -0
- cribl_control_plane/models/countedsystemsettingsconf.py +20 -0
- cribl_control_plane/models/countedteamaccesscontrollist.py +20 -0
- cribl_control_plane/models/counteduseraccesscontrollist.py +20 -0
- cribl_control_plane/models/createauthloginop.py +18 -0
- cribl_control_plane/models/createconfiggroupbyproductop.py +26 -27
- cribl_control_plane/models/createcribllakedatasetbylakeidop.py +1 -19
- cribl_control_plane/models/createinputhectokenbyidop.py +1 -19
- cribl_control_plane/models/createoutputtestbyidop.py +1 -20
- cribl_control_plane/models/createroutesappendbyidop.py +4 -22
- cribl_control_plane/models/createversioncommitop.py +1 -19
- cribl_control_plane/models/createversionrevertop.py +3 -21
- cribl_control_plane/models/createversionundoop.py +1 -18
- cribl_control_plane/models/criblevent.py +15 -0
- cribl_control_plane/models/cribllakedataset.py +23 -3
- cribl_control_plane/models/cribllakedatasetupdate.py +95 -0
- cribl_control_plane/models/datasetmetadata.py +18 -2
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +18 -21
- cribl_control_plane/models/deletecribllakedatasetbylakeidandidop.py +1 -20
- cribl_control_plane/models/deleteinputbyidop.py +1 -20
- cribl_control_plane/models/deleteoutputbyidop.py +1 -20
- cribl_control_plane/models/deleteoutputpqbyidop.py +1 -19
- cribl_control_plane/models/deletepacksbyidop.py +1 -20
- cribl_control_plane/models/deletepipelinebyidop.py +3 -22
- cribl_control_plane/models/difffiles.py +130 -0
- cribl_control_plane/models/diffline.py +26 -0
- cribl_control_plane/models/difflinecontext.py +28 -0
- cribl_control_plane/models/difflinedelete.py +25 -0
- cribl_control_plane/models/difflineinsert.py +25 -0
- cribl_control_plane/models/distributedsummary.py +6 -0
- cribl_control_plane/models/functionaggregatemetrics.py +206 -0
- cribl_control_plane/models/functionaggregation.py +172 -0
- cribl_control_plane/models/functionautotimestamp.py +173 -0
- cribl_control_plane/models/functioncef.py +111 -0
- cribl_control_plane/models/functionchain.py +75 -0
- cribl_control_plane/models/functionclone.py +75 -0
- cribl_control_plane/models/functioncode.py +96 -0
- cribl_control_plane/models/functioncomment.py +75 -0
- cribl_control_plane/models/functiondistinct.py +99 -0
- cribl_control_plane/models/functiondnslookup.py +250 -0
- cribl_control_plane/models/functiondrop.py +73 -0
- cribl_control_plane/models/functiondropdimensions.py +87 -0
- cribl_control_plane/models/functiondynamicsampling.py +121 -0
- cribl_control_plane/models/functioneval.py +103 -0
- cribl_control_plane/models/functioneventbreaker.py +103 -0
- cribl_control_plane/models/functioneventstats.py +92 -0
- cribl_control_plane/models/functionexternaldata.py +73 -0
- cribl_control_plane/models/functionflatten.py +90 -0
- cribl_control_plane/models/functionfoldkeys.py +89 -0
- cribl_control_plane/models/functiongenstats.py +73 -0
- cribl_control_plane/models/functiongeoip.py +120 -0
- cribl_control_plane/models/functiongrok.py +95 -0
- cribl_control_plane/models/functionhandlebar.py +112 -0
- cribl_control_plane/models/functionjoin.py +112 -0
- cribl_control_plane/models/functionjsonunroll.py +80 -0
- cribl_control_plane/models/functionlakeexport.py +102 -0
- cribl_control_plane/models/functionlimit.py +75 -0
- cribl_control_plane/models/functionlocalsearchdatatypeparser.py +76 -0
- cribl_control_plane/models/functionlocalsearchrulesetrunner.py +97 -0
- cribl_control_plane/models/functionlookup.py +148 -0
- cribl_control_plane/models/functionmask.py +121 -0
- cribl_control_plane/models/functionmvexpand.py +128 -0
- cribl_control_plane/models/functionmvpull.py +99 -0
- cribl_control_plane/models/functionnotificationpolicies.py +186 -0
- cribl_control_plane/models/functionnotifications.py +85 -0
- cribl_control_plane/models/functionnotify.py +196 -0
- cribl_control_plane/models/functionnumerify.py +119 -0
- cribl_control_plane/models/functionotlplogs.py +82 -0
- cribl_control_plane/models/functionotlpmetrics.py +118 -0
- cribl_control_plane/models/functionotlptraces.py +111 -0
- cribl_control_plane/models/functionpack.py +80 -0
- cribl_control_plane/models/functionpivot.py +85 -0
- cribl_control_plane/models/functionpublishmetrics.py +153 -0
- cribl_control_plane/models/functionredis.py +173 -0
- cribl_control_plane/models/functionregexextract.py +112 -0
- cribl_control_plane/models/functionregexfilter.py +95 -0
- cribl_control_plane/models/functionrename.py +107 -0
- cribl_control_plane/models/functionresponse.py +242 -0
- cribl_control_plane/models/functionrollupmetrics.py +114 -0
- cribl_control_plane/models/functionsampling.py +90 -0
- cribl_control_plane/models/functionsend.py +141 -0
- cribl_control_plane/models/functionsensitivedatascanner.py +128 -0
- cribl_control_plane/models/functionserde.py +161 -0
- cribl_control_plane/models/functionserialize.py +134 -0
- cribl_control_plane/models/functionsidlookup.py +93 -0
- cribl_control_plane/models/functionsnmptrapserialize.py +144 -0
- cribl_control_plane/models/functionsort.py +97 -0
- cribl_control_plane/models/functionstore.py +132 -0
- cribl_control_plane/models/functionsuppress.py +115 -0
- cribl_control_plane/models/functiontee.py +90 -0
- cribl_control_plane/models/functiontrimtimestamp.py +75 -0
- cribl_control_plane/models/functionunion.py +80 -0
- cribl_control_plane/models/functionunroll.py +80 -0
- cribl_control_plane/models/functionwindow.py +96 -0
- cribl_control_plane/models/functionxmlunroll.py +92 -0
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +24 -20
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +24 -20
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +14 -19
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +18 -20
- cribl_control_plane/models/getcribllakedatasetbylakeidandidop.py +1 -20
- cribl_control_plane/models/getcribllakedatasetbylakeidop.py +54 -14
- cribl_control_plane/models/getfunctionsbyidop.py +18 -0
- cribl_control_plane/models/getinputbyidop.py +1 -20
- cribl_control_plane/models/getmasterworkerentryop.py +1 -18
- cribl_control_plane/models/getoutputbyidop.py +1 -20
- cribl_control_plane/models/getoutputpqbyidop.py +1 -19
- cribl_control_plane/models/getoutputsamplesbyidop.py +1 -20
- cribl_control_plane/models/getpacksbyidop.py +1 -20
- cribl_control_plane/models/getpacksop.py +1 -19
- cribl_control_plane/models/getpipelinebyidop.py +3 -22
- cribl_control_plane/models/getroutesbyidop.py +3 -22
- cribl_control_plane/models/getsummaryop.py +18 -20
- cribl_control_plane/models/getversioncountop.py +3 -21
- cribl_control_plane/models/getversiondiffop.py +1 -18
- cribl_control_plane/models/getversionfilesop.py +3 -22
- cribl_control_plane/models/getversionop.py +1 -19
- cribl_control_plane/models/getversionshowop.py +1 -18
- cribl_control_plane/models/getversionstatusop.py +1 -19
- cribl_control_plane/models/{routecloneconf.py → gitcountresult.py} +4 -4
- cribl_control_plane/models/gitdiffresult.py +16 -0
- cribl_control_plane/models/gitinfo.py +14 -3
- cribl_control_plane/models/gitshowresult.py +19 -0
- cribl_control_plane/models/groupcreaterequest.py +172 -0
- cribl_control_plane/models/hbcriblinfo.py +42 -7
- cribl_control_plane/models/healthserverstatus.py +55 -0
- cribl_control_plane/models/heartbeatmetadata.py +6 -11
- cribl_control_plane/models/input.py +83 -78
- cribl_control_plane/models/inputappscope.py +126 -30
- cribl_control_plane/models/inputazureblob.py +62 -6
- cribl_control_plane/models/inputcloudflarehec.py +518 -0
- cribl_control_plane/models/inputcollection.py +47 -4
- cribl_control_plane/models/inputconfluentcloud.py +258 -32
- cribl_control_plane/models/inputcribl.py +47 -4
- cribl_control_plane/models/inputcriblhttp.py +121 -30
- cribl_control_plane/models/inputcribllakehttp.py +136 -30
- cribl_control_plane/models/inputcriblmetrics.py +48 -4
- cribl_control_plane/models/inputcribltcp.py +122 -24
- cribl_control_plane/models/inputcrowdstrike.py +92 -10
- cribl_control_plane/models/inputdatadogagent.py +98 -24
- cribl_control_plane/models/inputdatagen.py +47 -4
- cribl_control_plane/models/inputedgeprometheus.py +210 -50
- cribl_control_plane/models/inputelastic.py +167 -36
- cribl_control_plane/models/inputeventhub.py +209 -6
- cribl_control_plane/models/inputexec.py +59 -6
- cribl_control_plane/models/inputfile.py +78 -10
- cribl_control_plane/models/inputfirehose.py +97 -24
- cribl_control_plane/models/inputgooglepubsub.py +67 -6
- cribl_control_plane/models/inputgrafana.py +251 -71
- cribl_control_plane/models/inputhttp.py +97 -24
- cribl_control_plane/models/inputhttpraw.py +97 -24
- cribl_control_plane/models/inputjournalfiles.py +48 -4
- cribl_control_plane/models/inputkafka.py +252 -28
- cribl_control_plane/models/inputkinesis.py +130 -14
- cribl_control_plane/models/inputkubeevents.py +47 -4
- cribl_control_plane/models/inputkubelogs.py +61 -8
- cribl_control_plane/models/inputkubemetrics.py +61 -8
- cribl_control_plane/models/inputloki.py +113 -34
- cribl_control_plane/models/inputmetrics.py +97 -24
- cribl_control_plane/models/inputmodeldriventelemetry.py +107 -26
- cribl_control_plane/models/inputmsk.py +145 -32
- cribl_control_plane/models/inputnetflow.py +47 -4
- cribl_control_plane/models/inputoffice365mgmt.py +112 -14
- cribl_control_plane/models/inputoffice365msgtrace.py +114 -16
- cribl_control_plane/models/inputoffice365service.py +114 -16
- cribl_control_plane/models/inputopentelemetry.py +143 -32
- cribl_control_plane/models/inputprometheus.py +193 -44
- cribl_control_plane/models/inputprometheusrw.py +114 -27
- cribl_control_plane/models/inputrawudp.py +47 -4
- cribl_control_plane/models/inputs3.py +78 -8
- cribl_control_plane/models/inputs3inventory.py +92 -10
- cribl_control_plane/models/inputsecuritylake.py +93 -10
- cribl_control_plane/models/inputsnmp.py +112 -21
- cribl_control_plane/models/inputsplunk.py +130 -28
- cribl_control_plane/models/inputsplunkhec.py +119 -29
- cribl_control_plane/models/inputsplunksearch.py +112 -15
- cribl_control_plane/models/inputsqs.py +99 -16
- cribl_control_plane/models/inputsyslog.py +189 -47
- cribl_control_plane/models/inputsystemmetrics.py +202 -32
- cribl_control_plane/models/inputsystemstate.py +61 -8
- cribl_control_plane/models/inputtcp.py +122 -26
- cribl_control_plane/models/inputtcpjson.py +112 -26
- cribl_control_plane/models/inputwef.py +144 -15
- cribl_control_plane/models/inputwindowsmetrics.py +186 -33
- cribl_control_plane/models/inputwineventlogs.py +93 -11
- cribl_control_plane/models/inputwiz.py +173 -8
- cribl_control_plane/models/inputwizwebhook.py +97 -24
- cribl_control_plane/models/inputzscalerhec.py +119 -29
- cribl_control_plane/models/jobinfo.py +34 -0
- cribl_control_plane/models/jobstatus.py +48 -0
- cribl_control_plane/models/lakedatasetmetrics.py +17 -0
- cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
- cribl_control_plane/models/listconfiggroupbyproductop.py +14 -19
- cribl_control_plane/models/listmasterworkerentryop.py +1 -19
- cribl_control_plane/models/logininfo.py +3 -3
- cribl_control_plane/models/masterworkerentry.py +20 -13
- cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
- cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeprovidedinfo.py +13 -11
- cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeupgradestate.py +2 -1
- cribl_control_plane/models/nodeupgradestatus.py +51 -5
- cribl_control_plane/models/outpostnodeinfo.py +16 -0
- cribl_control_plane/models/output.py +104 -90
- cribl_control_plane/models/outputazureblob.py +171 -18
- cribl_control_plane/models/outputazuredataexplorer.py +514 -90
- cribl_control_plane/models/outputazureeventhub.py +315 -31
- cribl_control_plane/models/outputazurelogs.py +145 -26
- cribl_control_plane/models/outputchronicle.py +532 -0
- cribl_control_plane/models/outputclickhouse.py +205 -34
- cribl_control_plane/models/outputcloudflarer2.py +632 -0
- cribl_control_plane/models/outputcloudwatch.py +129 -23
- cribl_control_plane/models/outputconfluentcloud.py +384 -57
- cribl_control_plane/models/outputcriblhttp.py +198 -31
- cribl_control_plane/models/outputcribllake.py +156 -16
- cribl_control_plane/models/outputcribltcp.py +194 -29
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +172 -28
- cribl_control_plane/models/outputdatabricks.py +501 -0
- cribl_control_plane/models/outputdatadog.py +199 -31
- cribl_control_plane/models/outputdataset.py +181 -29
- cribl_control_plane/models/outputdiskspool.py +17 -2
- cribl_control_plane/models/outputdls3.py +233 -24
- cribl_control_plane/models/outputdynatracehttp.py +208 -34
- cribl_control_plane/models/outputdynatraceotlp.py +210 -36
- cribl_control_plane/models/outputelastic.py +199 -30
- cribl_control_plane/models/outputelasticcloud.py +171 -26
- cribl_control_plane/models/outputexabeam.py +96 -10
- cribl_control_plane/models/outputfilesystem.py +139 -14
- cribl_control_plane/models/outputgooglechronicle.py +216 -35
- cribl_control_plane/models/outputgooglecloudlogging.py +174 -31
- cribl_control_plane/models/outputgooglecloudstorage.py +215 -24
- cribl_control_plane/models/outputgooglepubsub.py +131 -23
- cribl_control_plane/models/outputgrafanacloud.py +376 -74
- cribl_control_plane/models/outputgraphite.py +128 -25
- cribl_control_plane/models/outputhoneycomb.py +145 -26
- cribl_control_plane/models/outputhumiohec.py +162 -28
- cribl_control_plane/models/outputinfluxdb.py +165 -28
- cribl_control_plane/models/outputkafka.py +375 -52
- cribl_control_plane/models/outputkinesis.py +165 -27
- cribl_control_plane/models/outputloki.py +164 -34
- cribl_control_plane/models/outputmicrosoftfabric.py +540 -0
- cribl_control_plane/models/outputminio.py +225 -25
- cribl_control_plane/models/outputmsk.py +267 -54
- cribl_control_plane/models/outputnewrelic.py +171 -29
- cribl_control_plane/models/outputnewrelicevents.py +163 -28
- cribl_control_plane/models/outputopentelemetry.py +240 -40
- cribl_control_plane/models/outputprometheus.py +145 -26
- cribl_control_plane/models/outputring.py +49 -8
- cribl_control_plane/models/outputs3.py +233 -26
- cribl_control_plane/models/outputsecuritylake.py +179 -18
- cribl_control_plane/models/outputsentinel.py +172 -29
- cribl_control_plane/models/outputsentineloneaisiem.py +178 -32
- cribl_control_plane/models/outputservicenow.py +223 -38
- cribl_control_plane/models/outputsignalfx.py +145 -26
- cribl_control_plane/models/outputsns.py +143 -25
- cribl_control_plane/models/outputsplunk.py +206 -36
- cribl_control_plane/models/outputsplunkhec.py +238 -26
- cribl_control_plane/models/outputsplunklb.py +262 -42
- cribl_control_plane/models/outputsqs.py +163 -33
- cribl_control_plane/models/outputstatsd.py +127 -25
- cribl_control_plane/models/outputstatsdext.py +128 -25
- cribl_control_plane/models/outputsumologic.py +146 -25
- cribl_control_plane/models/outputsyslog.py +318 -46
- cribl_control_plane/models/outputtcpjson.py +186 -32
- cribl_control_plane/models/outputwavefront.py +145 -26
- cribl_control_plane/models/outputwebhook.py +211 -33
- cribl_control_plane/models/outputxsiam.py +143 -26
- cribl_control_plane/models/packinfo.py +8 -5
- cribl_control_plane/models/packinstallinfo.py +11 -8
- cribl_control_plane/models/piisettings_union.py +31 -0
- cribl_control_plane/models/productscore.py +2 -1
- cribl_control_plane/models/rbacresource.py +2 -1
- cribl_control_plane/models/resourcepolicy.py +15 -2
- cribl_control_plane/models/rollbacksettings_union.py +44 -0
- cribl_control_plane/models/routeconf.py +3 -4
- cribl_control_plane/models/runnablejob.py +27 -0
- cribl_control_plane/models/runnablejobcollection.py +628 -0
- cribl_control_plane/models/runnablejobexecutor.py +360 -0
- cribl_control_plane/models/runnablejobscheduledsearch.py +279 -0
- cribl_control_plane/models/snisettings_union.py +31 -0
- cribl_control_plane/models/systemsettingsconf.py +291 -0
- cribl_control_plane/models/tlssettings_union.py +43 -0
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +19 -20
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +19 -21
- cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +10 -25
- cribl_control_plane/models/updatehectokenrequest.py +7 -1
- cribl_control_plane/models/updateinputbyidop.py +1 -19
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +1 -19
- cribl_control_plane/models/updateoutputbyidop.py +1 -19
- cribl_control_plane/models/updatepacksbyidop.py +1 -20
- cribl_control_plane/models/updatepacksop.py +25 -0
- cribl_control_plane/models/updatepipelinebyidop.py +5 -23
- cribl_control_plane/models/updateroutesbyidop.py +3 -21
- cribl_control_plane/models/upgradegroupsettings_union.py +43 -0
- cribl_control_plane/models/upgradepackageurls.py +20 -0
- cribl_control_plane/models/upgradesettings.py +38 -0
- cribl_control_plane/models/uploadpackresponse.py +13 -0
- cribl_control_plane/models/workertypes.py +2 -1
- cribl_control_plane/nodes.py +37 -23
- cribl_control_plane/packs.py +292 -37
- cribl_control_plane/pipelines.py +98 -48
- cribl_control_plane/routes_sdk.py +86 -50
- cribl_control_plane/samples.py +32 -16
- cribl_control_plane/sdk.py +26 -6
- cribl_control_plane/settings.py +20 -0
- cribl_control_plane/sources.py +85 -33
- cribl_control_plane/statuses.py +16 -6
- cribl_control_plane/summaries.py +16 -6
- cribl_control_plane/system_sdk.py +20 -0
- cribl_control_plane/teams.py +16 -6
- cribl_control_plane/tokens.py +49 -21
- cribl_control_plane/utils/__init__.py +15 -3
- cribl_control_plane/utils/annotations.py +32 -8
- cribl_control_plane/utils/eventstreaming.py +10 -0
- cribl_control_plane/utils/forms.py +21 -10
- cribl_control_plane/utils/queryparams.py +14 -2
- cribl_control_plane/utils/retries.py +69 -5
- cribl_control_plane/utils/unmarshal_json_response.py +15 -1
- cribl_control_plane/versions.py +11 -6
- cribl_control_plane/versions_configs.py +16 -6
- {cribl_control_plane-0.0.39.dist-info → cribl_control_plane-0.4.0b23.dist-info}/METADATA +99 -43
- cribl_control_plane-0.4.0b23.dist-info/RECORD +450 -0
- {cribl_control_plane-0.0.39.dist-info → cribl_control_plane-0.4.0b23.dist-info}/WHEEL +1 -1
- cribl_control_plane-0.4.0b23.dist-info/licenses/LICENSE +201 -0
- cribl_control_plane/errors/healthstatus_error.py +0 -32
- cribl_control_plane/models/appmode.py +0 -13
- cribl_control_plane/models/createpipelineop.py +0 -24
- cribl_control_plane/models/createversionpushop.py +0 -23
- cribl_control_plane/models/healthstatus.py +0 -33
- cribl_control_plane-0.0.39.dist-info/RECORD +0 -315
The hunks below are from `cribl_control_plane/models/outputkafka.py` (listed above as +375 -52). The recurring change across this release is the switch to "open" enums: each generated enum gains `metaclass=utils.OpenEnumMeta`, enum-typed fields are wrapped in `PlainValidator(validate_open_enum(...))`, and matching `field_serializer` hooks are added. Alongside that, the Kafka destination gains SASL/OAuth authentication fields and expanded persistent-queue (PQ) options.

```diff
@@ -1,9 +1,13 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -12,36 +16,41 @@ class OutputKafkaType(str, Enum):
     KAFKA = "kafka"
 
 
-class OutputKafkaAcknowledgments(int, Enum):
+class OutputKafkaAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
     r"""Control the number of required acknowledgments."""
 
+    # Leader
     ONE = 1
+    # None
     ZERO = 0
+    # All
     MINUS_1 = -1
 
 
-class OutputKafkaRecordDataFormat(str, Enum):
+class OutputKafkaRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format to use to serialize events before writing to Kafka."""
 
+    # JSON
     JSON = "json"
+    # Field _raw
     RAW = "raw"
+    # Protobuf
     PROTOBUF = "protobuf"
 
 
-class OutputKafkaCompression(str, Enum):
+class OutputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the data before sending to Kafka"""
 
+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"
+    # Snappy
     SNAPPY = "snappy"
+    # LZ4
     LZ4 = "lz4"
-
-
-class OutputKafkaSchemaType(str, Enum):
-    r"""The schema format used to encode and decode event data"""
-
-    AVRO = "avro"
-    JSON = "json"
+    # ZSTD
+    ZSTD = "zstd"
 
 
 class OutputKafkaAuthTypedDict(TypedDict):
```
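Open enums change failure behavior, not the wire format: a value this SDK build does not know (for example, a codec Cribl adds server-side later) now validates as a plain string instead of raising. Below is a minimal self-contained sketch of the pattern, with a stand-in helper rather than the SDK's actual `OpenEnumMeta`/`validate_open_enum` (whose boolean argument appears to mark int-valued enums; compare `ack` further down, which passes `True`):

```python
from enum import Enum
from typing import Optional

from pydantic import BaseModel
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"


def open_enum(enum_cls):
    # Coerce to a known member when possible; pass unknown raw values
    # through unchanged instead of raising a ValidationError.
    def validate(value):
        try:
            return enum_cls(value)
        except ValueError:
            return value

    return PlainValidator(validate)


class Output(BaseModel):
    compression: Annotated[Optional[Compression], open_enum(Compression)] = None


print(Output(compression="gzip").compression)  # Compression.GZIP
print(Output(compression="zstd").compression)  # 'zstd': unknown, but accepted
```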
```diff
@@ -63,14 +72,18 @@ class OutputKafkaAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
+class OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
+class OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -130,22 +143,44 @@ class OutputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Optional[OutputKafkaKafkaSchemaRegistryMinimumTLSVersion],
+        Annotated[
+            Optional[OutputKafkaKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Optional[OutputKafkaKafkaSchemaRegistryMaximumTLSVersion],
+        Annotated[
+            Optional[OutputKafkaKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
+    @field_serializer("min_version")
+    def serialize_min_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("max_version")
+    def serialize_max_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
 
 class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
     schema_registry_url: NotRequired[str]
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
-    schema_type: NotRequired[OutputKafkaSchemaType]
-    r"""The schema format used to encode and decode event data"""
     connection_timeout: NotRequired[float]
     r"""Maximum time to wait for a Schema Registry connection to complete successfully"""
     request_timeout: NotRequired[float]
```
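The paired `field_serializer` hooks are the other half of the round trip: at `model_dump` time a raw string is retried against the enum, so a value that has since become a known member dumps as that member, while a still-unknown string is emitted unchanged. Continuing the hypothetical sketch above (`Output` and `Compression` are not SDK classes):

```python
from pydantic import field_serializer


class OutputWithSerializer(Output):
    @field_serializer("compression")
    def serialize_compression(self, value):
        # Mirrors the generated serializers: retry coercion, fall back
        # to the raw string for values the enum still does not know.
        if isinstance(value, str):
            try:
                return Compression(value)
            except ValueError:
                return value
        return value


print(OutputWithSerializer(compression="zstd").model_dump())  # {'compression': 'zstd'}
```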
```diff
@@ -169,11 +204,6 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     ] = "http://localhost:8081"
     r"""URL for accessing the Confluent Schema Registry. Example: http://localhost:8081. To connect over TLS, use https instead of http."""
 
-    schema_type: Annotated[
-        Optional[OutputKafkaSchemaType], pydantic.Field(alias="schemaType")
-    ] = OutputKafkaSchemaType.AVRO
-    r"""The schema format used to encode and decode event data"""
-
     connection_timeout: Annotated[
         Optional[float], pydantic.Field(alias="connectionTimeout")
     ] = 30000
@@ -203,18 +233,76 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""
 
 
-class OutputKafkaSASLMechanism(str, Enum):
+class OutputKafkaAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Enter credentials directly, or select a stored secret"""
+
+    MANUAL = "manual"
+    SECRET = "secret"
+
+
+class OutputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+    # PLAIN
     PLAIN = "plain"
+    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
+    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
+    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
+class OutputKafkaOauthParamTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class OutputKafkaOauthParam(BaseModel):
+    name: str
+
+    value: str
+
+
+class OutputKafkaSaslExtensionTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class OutputKafkaSaslExtension(BaseModel):
+    name: str
+
+    value: str
+
+
 class OutputKafkaAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
+    username: NotRequired[str]
+    password: NotRequired[str]
+    auth_type: NotRequired[OutputKafkaAuthenticationMethod]
+    r"""Enter credentials directly, or select a stored secret"""
+    credentials_secret: NotRequired[str]
+    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[OutputKafkaSASLMechanism]
+    keytab_location: NotRequired[str]
+    r"""Location of keytab file for authentication principal"""
+    principal: NotRequired[str]
+    r"""Authentication principal, such as `kafka_user@example.com`"""
+    broker_service_class: NotRequired[str]
+    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
+    oauth_enabled: NotRequired[bool]
+    r"""Enable OAuth authentication"""
+    token_url: NotRequired[str]
+    r"""URL of the token endpoint to use for OAuth authentication"""
+    client_id: NotRequired[str]
+    r"""Client ID to use for OAuth authentication"""
+    oauth_secret_type: NotRequired[str]
+    client_text_secret: NotRequired[str]
+    r"""Select or create a stored text secret"""
+    oauth_params: NotRequired[List[OutputKafkaOauthParamTypedDict]]
+    r"""Additional fields to send to the token endpoint, such as scope or audience"""
+    sasl_extensions: NotRequired[List[OutputKafkaSaslExtensionTypedDict]]
+    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class OutputKafkaAuthentication(BaseModel):
```
```diff
@@ -222,17 +310,98 @@ class OutputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Optional[OutputKafkaSASLMechanism] = OutputKafkaSASLMechanism.PLAIN
+    username: Optional[str] = None
+
+    password: Optional[str] = None
+
+    auth_type: Annotated[
+        Annotated[
+            Optional[OutputKafkaAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
+    ] = OutputKafkaAuthenticationMethod.MANUAL
+    r"""Enter credentials directly, or select a stored secret"""
+
+    credentials_secret: Annotated[
+        Optional[str], pydantic.Field(alias="credentialsSecret")
+    ] = None
+    r"""Select or create a secret that references your credentials"""
+
+    mechanism: Annotated[
+        Optional[OutputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
+    ] = OutputKafkaSASLMechanism.PLAIN
 
+    keytab_location: Annotated[
+        Optional[str], pydantic.Field(alias="keytabLocation")
+    ] = None
+    r"""Location of keytab file for authentication principal"""
 
-class OutputKafkaMinimumTLSVersion(str, Enum):
+    principal: Optional[str] = None
+    r"""Authentication principal, such as `kafka_user@example.com`"""
+
+    broker_service_class: Annotated[
+        Optional[str], pydantic.Field(alias="brokerServiceClass")
+    ] = None
+    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
+
+    oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
+        False
+    )
+    r"""Enable OAuth authentication"""
+
+    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
+    r"""URL of the token endpoint to use for OAuth authentication"""
+
+    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
+    r"""Client ID to use for OAuth authentication"""
+
+    oauth_secret_type: Annotated[
+        Optional[str], pydantic.Field(alias="oauthSecretType")
+    ] = "secret"
+
+    client_text_secret: Annotated[
+        Optional[str], pydantic.Field(alias="clientTextSecret")
+    ] = None
+    r"""Select or create a stored text secret"""
+
+    oauth_params: Annotated[
+        Optional[List[OutputKafkaOauthParam]], pydantic.Field(alias="oauthParams")
+    ] = None
+    r"""Additional fields to send to the token endpoint, such as scope or audience"""
+
+    sasl_extensions: Annotated[
+        Optional[List[OutputKafkaSaslExtension]], pydantic.Field(alias="saslExtensions")
+    ] = None
+    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
+
+    @field_serializer("auth_type")
+    def serialize_auth_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaAuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("mechanism")
+    def serialize_mechanism(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaSASLMechanism(value)
+            except ValueError:
+                return value
+        return value
+
+
+class OutputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class OutputKafkaMaximumTLSVersion(str, Enum):
+class OutputKafkaMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
```
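This turns `OutputKafkaAuthentication` from a mechanism-only model into the full credential surface: direct username/password or a stored secret, Kerberos principal settings, and OAuth client credentials with extra token parameters and SASL extensions. A construction sketch that uses only field names from the hunk above; the values are illustrative, and the `models` re-exports are assumed from the generated `models/__init__.py`:

```python
from cribl_control_plane import models

auth = models.OutputKafkaAuthentication(
    disabled=False,
    auth_type=models.OutputKafkaAuthenticationMethod.MANUAL,
    mechanism=models.OutputKafkaSASLMechanism.SCRAM_SHA_256,
    username="svc-cribl",  # illustrative credentials
    password="example-password",
    sasl_extensions=[
        # e.g. Confluent's logicalCluster, per the field's docstring
        models.OutputKafkaSaslExtension(name="logicalCluster", value="lkc-abc123"),
    ],
)
```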
```diff
@@ -292,44 +461,80 @@ class OutputKafkaTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Optional[OutputKafkaMinimumTLSVersion], pydantic.Field(alias="minVersion")
+        Annotated[
+            Optional[OutputKafkaMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Optional[OutputKafkaMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+        Annotated[
+            Optional[OutputKafkaMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
     ] = None
 
-
-class OutputKafkaBackpressureBehavior(str, Enum):
+    @field_serializer("min_version")
+    def serialize_min_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaMinimumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("max_version")
+    def serialize_max_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaMaximumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+
+class OutputKafkaBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""
 
+    # Block
     BLOCK = "block"
+    # Drop
     DROP = "drop"
+    # Persistent Queue
     QUEUE = "queue"
 
 
-class OutputKafkaPqCompressCompression(str, Enum):
+class OutputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    # Error
+    ERROR = "error"
+    # Backpressure
+    ALWAYS = "always"
+    # Always On
+    BACKPRESSURE = "backpressure"
+
+
+class OutputKafkaPqCompressCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"
 
 
-class OutputKafkaQueueFullBehavior(str, Enum):
+class OutputKafkaQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
+    # Block
     BLOCK = "block"
+    # Drop new data
     DROP = "drop"
 
 
-class OutputKafkaMode(str, Enum):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    ERROR = "error"
-    BACKPRESSURE = "backpressure"
-    ALWAYS = "always"
-
-
 class OutputKafkaPqControlsTypedDict(TypedDict):
     pass
 
```
```diff
@@ -393,6 +598,18 @@ class OutputKafkaTypedDict(TypedDict):
     description: NotRequired[str]
     protobuf_library_id: NotRequired[str]
     r"""Select a set of Protobuf definitions for the events you want to send"""
+    protobuf_encoding_id: NotRequired[str]
+    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
+    pq_strict_ordering: NotRequired[bool]
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+    pq_rate_per_sec: NotRequired[float]
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+    pq_mode: NotRequired[OutputKafkaMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+    pq_max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    pq_max_backpressure_sec: NotRequired[float]
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -403,8 +620,6 @@ class OutputKafkaTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputKafkaQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
-    pq_mode: NotRequired[OutputKafkaMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputKafkaPqControlsTypedDict]
 
 
```
```diff
@@ -434,15 +649,23 @@ class OutputKafka(BaseModel):
     streamtags: Optional[List[str]] = None
     r"""Tags for filtering and grouping in @{product}"""
 
-    ack: Optional[OutputKafkaAcknowledgments] = OutputKafkaAcknowledgments.ONE
+    ack: Annotated[
+        Optional[OutputKafkaAcknowledgments], PlainValidator(validate_open_enum(True))
+    ] = OutputKafkaAcknowledgments.ONE
     r"""Control the number of required acknowledgments."""
 
     format_: Annotated[
-        Optional[OutputKafkaRecordDataFormat], pydantic.Field(alias="format")
+        Annotated[
+            Optional[OutputKafkaRecordDataFormat],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="format"),
     ] = OutputKafkaRecordDataFormat.JSON
     r"""Format to use to serialize events before writing to Kafka."""
 
-    compression: Optional[OutputKafkaCompression] = OutputKafkaCompression.GZIP
+    compression: Annotated[
+        Optional[OutputKafkaCompression], PlainValidator(validate_open_enum(False))
+    ] = OutputKafkaCompression.GZIP
     r"""Codec to use to compress the data before sending to Kafka"""
 
     max_record_size_kb: Annotated[
@@ -505,7 +728,10 @@ class OutputKafka(BaseModel):
     tls: Optional[OutputKafkaTLSSettingsClientSide] = None
 
     on_backpressure: Annotated[
-        Optional[OutputKafkaBackpressureBehavior],
+        Annotated[
+            Optional[OutputKafkaBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputKafkaBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -517,6 +743,37 @@ class OutputKafka(BaseModel):
     ] = None
     r"""Select a set of Protobuf definitions for the events you want to send"""
 
+    protobuf_encoding_id: Annotated[
+        Optional[str], pydantic.Field(alias="protobufEncodingId")
+    ] = None
+    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
+
+    pq_strict_ordering: Annotated[
+        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
+    ] = True
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+
+    pq_rate_per_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqRatePerSec")
+    ] = 0
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+
+    pq_mode: Annotated[
+        Annotated[Optional[OutputKafkaMode], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputKafkaMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    pq_max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
+    ] = 42
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    pq_max_backpressure_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
+    ] = 30
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
+
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
```
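The PQ tuning knobs are now first-class fields with their defaults visible in the hunk: `pqMode` defaults to `error`, `pqStrictOrdering` to `True`, `pqRatePerSec` to 0 (unthrottled), `pqMaxBufferSize` to 42 events, and `pqMaxBackpressureSec` to 30 seconds. Because each field declares its camelCase alias, dict-shaped configs validate directly; a sketch, where the identifying fields that do not appear in these hunks (`id`, `brokers`, `topic`) are assumptions about the wider model:

```python
from cribl_control_plane import models

out = models.OutputKafka.model_validate({
    "id": "kafka-out",             # assumed: not shown in this diff
    "type": "kafka",
    "brokers": ["broker-1:9092"],  # assumed: not shown in this diff
    "topic": "cribl-events",       # assumed: not shown in this diff
    "pqMode": "always",
    "pqStrictOrdering": False,
    "pqRatePerSec": 5000,
})
print(out.pq_mode)  # OutputKafkaMode.ALWAYS
```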
```diff
@@ -531,20 +788,86 @@ class OutputKafka(BaseModel):
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
 
     pq_compress: Annotated[
-        Optional[OutputKafkaPqCompressCompression], pydantic.Field(alias="pqCompress")
+        Annotated[
+            Optional[OutputKafkaPqCompressCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqCompress"),
     ] = OutputKafkaPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_on_backpressure: Annotated[
-        Optional[OutputKafkaQueueFullBehavior], pydantic.Field(alias="pqOnBackpressure")
+        Annotated[
+            Optional[OutputKafkaQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputKafkaQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    pq_mode: Annotated[Optional[OutputKafkaMode], pydantic.Field(alias="pqMode")] = (
-        OutputKafkaMode.ERROR
-    )
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
     pq_controls: Annotated[
         Optional[OutputKafkaPqControls], pydantic.Field(alias="pqControls")
     ] = None
+
+    @field_serializer("ack")
+    def serialize_ack(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaAcknowledgments(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("format_")
+    def serialize_format_(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaRecordDataFormat(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("compression")
+    def serialize_compression(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaCompression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("on_backpressure")
+    def serialize_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaBackpressureBehavior(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_mode")
+    def serialize_pq_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_compress")
+    def serialize_pq_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaPqCompressCompression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_on_backpressure")
+    def serialize_pq_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaQueueFullBehavior(value)
+            except ValueError:
+                return value
+        return value
```