cribl-control-plane 0.0.21-py3-none-any.whl → 0.4.0b23-py3-none-any.whl
- cribl_control_plane/_hooks/clientcredentials.py +113 -48
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/acl.py +225 -0
- cribl_control_plane/auth_sdk.py +12 -176
- cribl_control_plane/basesdk.py +17 -1
- cribl_control_plane/branches.py +351 -0
- cribl_control_plane/commits.py +1403 -0
- cribl_control_plane/commits_files.py +391 -0
- cribl_control_plane/configs_versions.py +201 -0
- cribl_control_plane/cribl.py +495 -0
- cribl_control_plane/destinations.py +146 -805
- cribl_control_plane/destinations_pq.py +379 -0
- cribl_control_plane/errors/__init__.py +26 -10
- cribl_control_plane/errors/apierror.py +2 -0
- cribl_control_plane/errors/criblcontrolplaneerror.py +11 -7
- cribl_control_plane/errors/error.py +4 -2
- cribl_control_plane/errors/healthserverstatus_error.py +41 -0
- cribl_control_plane/errors/no_response_error.py +5 -1
- cribl_control_plane/errors/responsevalidationerror.py +2 -0
- cribl_control_plane/functions.py +367 -0
- cribl_control_plane/groups_configs.py +22 -0
- cribl_control_plane/groups_sdk.py +333 -578
- cribl_control_plane/health.py +38 -18
- cribl_control_plane/hectokens.py +503 -0
- cribl_control_plane/httpclient.py +0 -1
- cribl_control_plane/{lake.py → lakedatasets.py} +207 -115
- cribl_control_plane/models/__init__.py +3644 -5986
- cribl_control_plane/models/addhectokenrequest.py +7 -1
- cribl_control_plane/models/authtoken.py +5 -1
- cribl_control_plane/models/backupssettings_union.py +37 -0
- cribl_control_plane/models/{lookupversions.py → branchinfo.py} +4 -4
- cribl_control_plane/models/cacheconnection.py +30 -2
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
- cribl_control_plane/models/cloudprovider.py +2 -1
- cribl_control_plane/models/collectorazureblob.py +130 -0
- cribl_control_plane/models/collectorconf.py +56 -0
- cribl_control_plane/models/collectorcribllake.py +27 -0
- cribl_control_plane/models/collectordatabase.py +92 -0
- cribl_control_plane/models/collectorfilesystem.py +66 -0
- cribl_control_plane/models/collectorgooglecloudstorage.py +131 -0
- cribl_control_plane/models/collectorhealthcheck.py +269 -0
- cribl_control_plane/models/collectorrest.py +340 -0
- cribl_control_plane/models/collectors3.py +239 -0
- cribl_control_plane/models/collectorscript.py +59 -0
- cribl_control_plane/models/collectorsplunk.py +253 -0
- cribl_control_plane/models/configgroup.py +67 -11
- cribl_control_plane/models/configgroupcloud.py +17 -3
- cribl_control_plane/models/countedbranchinfo.py +20 -0
- cribl_control_plane/models/countedconfiggroup.py +20 -0
- cribl_control_plane/models/countedcribllakedataset.py +20 -0
- cribl_control_plane/models/counteddistributedsummary.py +20 -0
- cribl_control_plane/models/countedfunctionresponse.py +20 -0
- cribl_control_plane/models/countedgitcommitsummary.py +20 -0
- cribl_control_plane/models/countedgitcountresult.py +20 -0
- cribl_control_plane/models/countedgitdiffresult.py +20 -0
- cribl_control_plane/models/countedgitfilesresponse.py +20 -0
- cribl_control_plane/models/{getversioninfoop.py → countedgitinfo.py} +2 -6
- cribl_control_plane/models/countedgitlogresult.py +20 -0
- cribl_control_plane/models/countedgitrevertresult.py +20 -0
- cribl_control_plane/models/countedgitshowresult.py +20 -0
- cribl_control_plane/models/countedgitstatusresult.py +20 -0
- cribl_control_plane/models/{listinputop.py → countedinput.py} +2 -6
- cribl_control_plane/models/countedinputsplunkhec.py +20 -0
- cribl_control_plane/models/countedjobinfo.py +20 -0
- cribl_control_plane/models/countedmasterworkerentry.py +20 -0
- cribl_control_plane/models/countednumber.py +19 -0
- cribl_control_plane/models/{getversionbranchop.py → countedobject.py} +2 -6
- cribl_control_plane/models/{listoutputop.py → countedoutput.py} +2 -6
- cribl_control_plane/models/countedoutputsamplesresponse.py +20 -0
- cribl_control_plane/models/countedoutputtestresponse.py +20 -0
- cribl_control_plane/models/countedpackinfo.py +20 -0
- cribl_control_plane/models/{createpacksop.py → countedpackinstallinfo.py} +2 -6
- cribl_control_plane/models/{listpipelineop.py → countedpipeline.py} +2 -6
- cribl_control_plane/models/{listroutesop.py → countedroutes.py} +2 -6
- cribl_control_plane/models/countedstring.py +19 -0
- cribl_control_plane/models/countedsystemsettingsconf.py +20 -0
- cribl_control_plane/models/countedteamaccesscontrollist.py +20 -0
- cribl_control_plane/models/counteduseraccesscontrollist.py +20 -0
- cribl_control_plane/models/createauthloginop.py +18 -0
- cribl_control_plane/models/createconfiggroupbyproductop.py +46 -0
- cribl_control_plane/models/createcribllakedatasetbylakeidop.py +3 -21
- cribl_control_plane/models/createinputhectokenbyidop.py +3 -21
- cribl_control_plane/models/createoutputtestbyidop.py +3 -22
- cribl_control_plane/models/createroutesappendbyidop.py +4 -21
- cribl_control_plane/models/createversioncommitop.py +27 -20
- cribl_control_plane/models/createversionrevertop.py +7 -23
- cribl_control_plane/models/createversionundoop.py +7 -22
- cribl_control_plane/models/criblevent.py +15 -0
- cribl_control_plane/models/cribllakedataset.py +23 -3
- cribl_control_plane/models/cribllakedatasetupdate.py +95 -0
- cribl_control_plane/models/currentbranchresult.py +13 -0
- cribl_control_plane/models/datasetmetadata.py +18 -2
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +43 -0
- cribl_control_plane/models/deletecribllakedatasetbylakeidandidop.py +5 -24
- cribl_control_plane/models/deleteinputbyidop.py +3 -22
- cribl_control_plane/models/deleteoutputbyidop.py +3 -22
- cribl_control_plane/models/deleteoutputpqbyidop.py +3 -21
- cribl_control_plane/models/deletepacksbyidop.py +3 -22
- cribl_control_plane/models/deletepipelinebyidop.py +3 -22
- cribl_control_plane/models/difffiles.py +130 -0
- cribl_control_plane/models/diffline.py +26 -0
- cribl_control_plane/models/difflinecontext.py +28 -0
- cribl_control_plane/models/difflinedelete.py +25 -0
- cribl_control_plane/models/difflineinsert.py +25 -0
- cribl_control_plane/models/distributedsummary.py +6 -0
- cribl_control_plane/models/functionaggregatemetrics.py +206 -0
- cribl_control_plane/models/functionaggregation.py +172 -0
- cribl_control_plane/models/functionautotimestamp.py +173 -0
- cribl_control_plane/models/functioncef.py +111 -0
- cribl_control_plane/models/functionchain.py +75 -0
- cribl_control_plane/models/functionclone.py +75 -0
- cribl_control_plane/models/functioncode.py +96 -0
- cribl_control_plane/models/functioncomment.py +75 -0
- cribl_control_plane/models/functiondistinct.py +99 -0
- cribl_control_plane/models/functiondnslookup.py +250 -0
- cribl_control_plane/models/functiondrop.py +73 -0
- cribl_control_plane/models/functiondropdimensions.py +87 -0
- cribl_control_plane/models/functiondynamicsampling.py +121 -0
- cribl_control_plane/models/functioneval.py +103 -0
- cribl_control_plane/models/functioneventbreaker.py +103 -0
- cribl_control_plane/models/functioneventstats.py +92 -0
- cribl_control_plane/models/functionexternaldata.py +73 -0
- cribl_control_plane/models/functionflatten.py +90 -0
- cribl_control_plane/models/functionfoldkeys.py +89 -0
- cribl_control_plane/models/functiongenstats.py +73 -0
- cribl_control_plane/models/functiongeoip.py +120 -0
- cribl_control_plane/models/functiongrok.py +95 -0
- cribl_control_plane/models/functionhandlebar.py +112 -0
- cribl_control_plane/models/functionjoin.py +112 -0
- cribl_control_plane/models/functionjsonunroll.py +80 -0
- cribl_control_plane/models/functionlakeexport.py +102 -0
- cribl_control_plane/models/functionlimit.py +75 -0
- cribl_control_plane/models/functionlocalsearchdatatypeparser.py +76 -0
- cribl_control_plane/models/functionlocalsearchrulesetrunner.py +97 -0
- cribl_control_plane/models/functionlookup.py +148 -0
- cribl_control_plane/models/functionmask.py +121 -0
- cribl_control_plane/models/functionmvexpand.py +128 -0
- cribl_control_plane/models/functionmvpull.py +99 -0
- cribl_control_plane/models/functionnotificationpolicies.py +186 -0
- cribl_control_plane/models/functionnotifications.py +85 -0
- cribl_control_plane/models/functionnotify.py +196 -0
- cribl_control_plane/models/functionnumerify.py +119 -0
- cribl_control_plane/models/functionotlplogs.py +82 -0
- cribl_control_plane/models/functionotlpmetrics.py +118 -0
- cribl_control_plane/models/functionotlptraces.py +111 -0
- cribl_control_plane/models/functionpack.py +80 -0
- cribl_control_plane/models/functionpivot.py +85 -0
- cribl_control_plane/models/functionpublishmetrics.py +153 -0
- cribl_control_plane/models/functionredis.py +173 -0
- cribl_control_plane/models/functionregexextract.py +112 -0
- cribl_control_plane/models/functionregexfilter.py +95 -0
- cribl_control_plane/models/functionrename.py +107 -0
- cribl_control_plane/models/functionresponse.py +242 -0
- cribl_control_plane/models/functionrollupmetrics.py +114 -0
- cribl_control_plane/models/functionsampling.py +90 -0
- cribl_control_plane/models/functionsend.py +141 -0
- cribl_control_plane/models/functionsensitivedatascanner.py +128 -0
- cribl_control_plane/models/functionserde.py +161 -0
- cribl_control_plane/models/functionserialize.py +134 -0
- cribl_control_plane/models/functionsidlookup.py +93 -0
- cribl_control_plane/models/functionsnmptrapserialize.py +144 -0
- cribl_control_plane/models/functionsort.py +97 -0
- cribl_control_plane/models/functionstore.py +132 -0
- cribl_control_plane/models/functionsuppress.py +115 -0
- cribl_control_plane/models/functiontee.py +90 -0
- cribl_control_plane/models/functiontrimtimestamp.py +75 -0
- cribl_control_plane/models/functionunion.py +80 -0
- cribl_control_plane/models/functionunroll.py +80 -0
- cribl_control_plane/models/functionwindow.py +96 -0
- cribl_control_plane/models/functionxmlunroll.py +92 -0
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +63 -0
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +63 -0
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +53 -0
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +43 -0
- cribl_control_plane/models/getcribllakedatasetbylakeidandidop.py +5 -24
- cribl_control_plane/models/getcribllakedatasetbylakeidop.py +56 -16
- cribl_control_plane/models/getfunctionsbyidop.py +18 -0
- cribl_control_plane/models/getinputbyidop.py +3 -22
- cribl_control_plane/models/getmasterworkerentryop.py +22 -0
- cribl_control_plane/models/getoutputbyidop.py +3 -22
- cribl_control_plane/models/getoutputpqbyidop.py +3 -21
- cribl_control_plane/models/getoutputsamplesbyidop.py +3 -22
- cribl_control_plane/models/getpacksbyidop.py +18 -0
- cribl_control_plane/models/getpacksop.py +3 -21
- cribl_control_plane/models/getpipelinebyidop.py +3 -22
- cribl_control_plane/models/getroutesbyidop.py +3 -22
- cribl_control_plane/models/getsummaryop.py +23 -32
- cribl_control_plane/models/getversioncountop.py +10 -27
- cribl_control_plane/models/getversiondiffop.py +12 -28
- cribl_control_plane/models/getversionfilesop.py +10 -28
- cribl_control_plane/models/getversionop.py +30 -0
- cribl_control_plane/models/getversionshowop.py +12 -28
- cribl_control_plane/models/getversionstatusop.py +7 -23
- cribl_control_plane/models/gitcommitsummary.py +3 -3
- cribl_control_plane/models/{routecloneconf.py → gitcountresult.py} +4 -4
- cribl_control_plane/models/gitdiffresult.py +16 -0
- cribl_control_plane/models/gitfilesresponse.py +7 -5
- cribl_control_plane/models/gitinfo.py +14 -3
- cribl_control_plane/models/gitlogresult.py +33 -0
- cribl_control_plane/models/gitrevertparams.py +3 -3
- cribl_control_plane/models/gitrevertresult.py +5 -5
- cribl_control_plane/models/gitshowresult.py +19 -0
- cribl_control_plane/models/gitstatusresult.py +0 -3
- cribl_control_plane/models/groupcreaterequest.py +172 -0
- cribl_control_plane/models/hbcriblinfo.py +42 -7
- cribl_control_plane/models/healthserverstatus.py +55 -0
- cribl_control_plane/models/heartbeatmetadata.py +6 -11
- cribl_control_plane/models/input.py +89 -81
- cribl_control_plane/models/inputappscope.py +131 -35
- cribl_control_plane/models/inputazureblob.py +62 -6
- cribl_control_plane/models/inputcloudflarehec.py +518 -0
- cribl_control_plane/models/inputcollection.py +49 -6
- cribl_control_plane/models/inputconfluentcloud.py +262 -22
- cribl_control_plane/models/inputcribl.py +52 -9
- cribl_control_plane/models/inputcriblhttp.py +124 -33
- cribl_control_plane/models/inputcribllakehttp.py +199 -29
- cribl_control_plane/models/inputcriblmetrics.py +53 -9
- cribl_control_plane/models/inputcribltcp.py +125 -27
- cribl_control_plane/models/inputcrowdstrike.py +99 -10
- cribl_control_plane/models/inputdatadogagent.py +101 -27
- cribl_control_plane/models/inputdatagen.py +47 -4
- cribl_control_plane/models/inputedgeprometheus.py +215 -58
- cribl_control_plane/models/inputelastic.py +170 -39
- cribl_control_plane/models/inputeventhub.py +212 -9
- cribl_control_plane/models/inputexec.py +59 -6
- cribl_control_plane/models/inputfile.py +83 -15
- cribl_control_plane/models/inputfirehose.py +100 -27
- cribl_control_plane/models/inputgooglepubsub.py +83 -15
- cribl_control_plane/models/{inputgrafana_union.py → inputgrafana.py} +261 -67
- cribl_control_plane/models/inputhttp.py +100 -27
- cribl_control_plane/models/inputhttpraw.py +100 -27
- cribl_control_plane/models/inputjournalfiles.py +51 -7
- cribl_control_plane/models/inputkafka.py +257 -19
- cribl_control_plane/models/inputkinesis.py +133 -17
- cribl_control_plane/models/inputkubeevents.py +52 -9
- cribl_control_plane/models/inputkubelogs.py +66 -13
- cribl_control_plane/models/inputkubemetrics.py +66 -13
- cribl_control_plane/models/inputloki.py +116 -30
- cribl_control_plane/models/inputmetrics.py +97 -24
- cribl_control_plane/models/inputmodeldriventelemetry.py +110 -29
- cribl_control_plane/models/inputmsk.py +148 -21
- cribl_control_plane/models/inputnetflow.py +50 -7
- cribl_control_plane/models/inputoffice365mgmt.py +115 -17
- cribl_control_plane/models/inputoffice365msgtrace.py +117 -19
- cribl_control_plane/models/inputoffice365service.py +117 -19
- cribl_control_plane/models/inputopentelemetry.py +146 -35
- cribl_control_plane/models/inputprometheus.py +196 -47
- cribl_control_plane/models/inputprometheusrw.py +117 -30
- cribl_control_plane/models/inputrawudp.py +50 -7
- cribl_control_plane/models/inputs3.py +85 -8
- cribl_control_plane/models/inputs3inventory.py +99 -10
- cribl_control_plane/models/inputsecuritylake.py +100 -10
- cribl_control_plane/models/inputsnmp.py +115 -24
- cribl_control_plane/models/inputsplunk.py +133 -31
- cribl_control_plane/models/inputsplunkhec.py +122 -32
- cribl_control_plane/models/inputsplunksearch.py +115 -18
- cribl_control_plane/models/inputsqs.py +102 -19
- cribl_control_plane/models/{inputsyslog_union.py → inputsyslog.py} +193 -51
- cribl_control_plane/models/inputsystemmetrics.py +207 -37
- cribl_control_plane/models/inputsystemstate.py +66 -13
- cribl_control_plane/models/inputtcp.py +125 -29
- cribl_control_plane/models/inputtcpjson.py +115 -29
- cribl_control_plane/models/inputwef.py +151 -22
- cribl_control_plane/models/inputwindowsmetrics.py +191 -38
- cribl_control_plane/models/inputwineventlogs.py +93 -11
- cribl_control_plane/models/inputwiz.py +176 -11
- cribl_control_plane/models/inputwizwebhook.py +466 -0
- cribl_control_plane/models/inputzscalerhec.py +122 -32
- cribl_control_plane/models/jobinfo.py +34 -0
- cribl_control_plane/models/jobstatus.py +48 -0
- cribl_control_plane/models/lakedatasetmetrics.py +17 -0
- cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
- cribl_control_plane/models/listconfiggroupbyproductop.py +46 -0
- cribl_control_plane/models/listmasterworkerentryop.py +64 -0
- cribl_control_plane/models/logininfo.py +3 -3
- cribl_control_plane/models/masterworkerentry.py +20 -13
- cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
- cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeprovidedinfo.py +13 -11
- cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeupgradestate.py +2 -1
- cribl_control_plane/models/nodeupgradestatus.py +51 -5
- cribl_control_plane/models/outpostnodeinfo.py +16 -0
- cribl_control_plane/models/output.py +103 -89
- cribl_control_plane/models/outputazureblob.py +174 -21
- cribl_control_plane/models/outputazuredataexplorer.py +517 -93
- cribl_control_plane/models/outputazureeventhub.py +318 -34
- cribl_control_plane/models/outputazurelogs.py +145 -26
- cribl_control_plane/models/outputchronicle.py +532 -0
- cribl_control_plane/models/outputclickhouse.py +208 -37
- cribl_control_plane/models/outputcloudflarer2.py +632 -0
- cribl_control_plane/models/outputcloudwatch.py +132 -26
- cribl_control_plane/models/outputconfluentcloud.py +387 -46
- cribl_control_plane/models/outputcriblhttp.py +203 -36
- cribl_control_plane/models/outputcribllake.py +161 -21
- cribl_control_plane/models/outputcribltcp.py +199 -34
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +176 -32
- cribl_control_plane/models/outputdatabricks.py +501 -0
- cribl_control_plane/models/outputdatadog.py +204 -36
- cribl_control_plane/models/outputdataset.py +186 -34
- cribl_control_plane/models/outputdevnull.py +5 -5
- cribl_control_plane/models/outputdiskspool.py +22 -7
- cribl_control_plane/models/outputdls3.py +238 -29
- cribl_control_plane/models/outputdynatracehttp.py +211 -37
- cribl_control_plane/models/outputdynatraceotlp.py +213 -39
- cribl_control_plane/models/outputelastic.py +199 -30
- cribl_control_plane/models/outputelasticcloud.py +174 -29
- cribl_control_plane/models/outputexabeam.py +99 -13
- cribl_control_plane/models/outputfilesystem.py +139 -14
- cribl_control_plane/models/outputgooglechronicle.py +216 -35
- cribl_control_plane/models/outputgooglecloudlogging.py +177 -34
- cribl_control_plane/models/outputgooglecloudstorage.py +220 -29
- cribl_control_plane/models/outputgooglepubsub.py +138 -51
- cribl_control_plane/models/outputgrafanacloud.py +386 -70
- cribl_control_plane/models/outputgraphite.py +131 -28
- cribl_control_plane/models/outputhoneycomb.py +145 -26
- cribl_control_plane/models/outputhumiohec.py +165 -31
- cribl_control_plane/models/outputinfluxdb.py +165 -28
- cribl_control_plane/models/outputkafka.py +378 -41
- cribl_control_plane/models/outputkinesis.py +168 -30
- cribl_control_plane/models/outputloki.py +171 -27
- cribl_control_plane/models/outputmicrosoftfabric.py +540 -0
- cribl_control_plane/models/outputminio.py +228 -28
- cribl_control_plane/models/outputmsk.py +270 -43
- cribl_control_plane/models/outputnewrelic.py +176 -34
- cribl_control_plane/models/outputnewrelicevents.py +166 -31
- cribl_control_plane/models/outputopentelemetry.py +240 -40
- cribl_control_plane/models/outputprometheus.py +145 -26
- cribl_control_plane/models/outputring.py +54 -13
- cribl_control_plane/models/outputs3.py +238 -31
- cribl_control_plane/models/outputsecuritylake.py +182 -21
- cribl_control_plane/models/outputsentinel.py +175 -32
- cribl_control_plane/models/outputsentineloneaisiem.py +184 -38
- cribl_control_plane/models/outputservicenow.py +226 -41
- cribl_control_plane/models/outputsignalfx.py +145 -26
- cribl_control_plane/models/outputsns.py +146 -28
- cribl_control_plane/models/outputsplunk.py +209 -39
- cribl_control_plane/models/outputsplunkhec.py +243 -31
- cribl_control_plane/models/outputsplunklb.py +266 -46
- cribl_control_plane/models/outputsqs.py +166 -36
- cribl_control_plane/models/outputstatsd.py +130 -28
- cribl_control_plane/models/outputstatsdext.py +131 -28
- cribl_control_plane/models/outputsumologic.py +146 -25
- cribl_control_plane/models/outputsyslog.py +323 -51
- cribl_control_plane/models/outputtcpjson.py +191 -37
- cribl_control_plane/models/outputwavefront.py +145 -26
- cribl_control_plane/models/outputwebhook.py +216 -38
- cribl_control_plane/models/outputxsiam.py +148 -31
- cribl_control_plane/models/packinfo.py +11 -8
- cribl_control_plane/models/packinstallinfo.py +14 -11
- cribl_control_plane/models/packrequestbody_union.py +140 -0
- cribl_control_plane/models/packupgraderequest.py +26 -0
- cribl_control_plane/models/piisettings_union.py +31 -0
- cribl_control_plane/models/productscore.py +10 -0
- cribl_control_plane/models/rbacresource.py +2 -1
- cribl_control_plane/models/resourcepolicy.py +15 -2
- cribl_control_plane/models/rollbacksettings_union.py +44 -0
- cribl_control_plane/models/routeconf.py +3 -4
- cribl_control_plane/models/routes.py +0 -24
- cribl_control_plane/models/runnablejob.py +27 -0
- cribl_control_plane/models/runnablejobcollection.py +628 -0
- cribl_control_plane/models/runnablejobexecutor.py +360 -0
- cribl_control_plane/models/runnablejobscheduledsearch.py +279 -0
- cribl_control_plane/models/schemeclientoauth.py +5 -0
- cribl_control_plane/models/snisettings_union.py +31 -0
- cribl_control_plane/models/systemsettingsconf.py +291 -0
- cribl_control_plane/models/tlssettings_union.py +43 -0
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +56 -0
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +53 -0
- cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +14 -29
- cribl_control_plane/models/updatehectokenrequest.py +7 -1
- cribl_control_plane/models/updateinputbyidop.py +5 -23
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +5 -23
- cribl_control_plane/models/updateoutputbyidop.py +5 -23
- cribl_control_plane/models/updatepacksbyidop.py +12 -50
- cribl_control_plane/models/updatepacksop.py +12 -24
- cribl_control_plane/models/updatepipelinebyidop.py +5 -23
- cribl_control_plane/models/updateroutesbyidop.py +8 -27
- cribl_control_plane/models/upgradegroupsettings_union.py +43 -0
- cribl_control_plane/models/upgradepackageurls.py +20 -0
- cribl_control_plane/models/upgradesettings.py +38 -0
- cribl_control_plane/models/uploadpackresponse.py +13 -0
- cribl_control_plane/models/{appmode.py → workertypes.py} +2 -5
- cribl_control_plane/{workers_sdk.py → nodes.py} +102 -234
- cribl_control_plane/packs.py +385 -184
- cribl_control_plane/pipelines.py +116 -66
- cribl_control_plane/routes_sdk.py +102 -70
- cribl_control_plane/samples.py +407 -0
- cribl_control_plane/sdk.py +35 -25
- cribl_control_plane/settings.py +20 -0
- cribl_control_plane/sources.py +143 -545
- cribl_control_plane/statuses.py +195 -0
- cribl_control_plane/summaries.py +195 -0
- cribl_control_plane/system_sdk.py +20 -0
- cribl_control_plane/teams.py +36 -28
- cribl_control_plane/tokens.py +210 -0
- cribl_control_plane/utils/__init__.py +18 -5
- cribl_control_plane/utils/annotations.py +32 -8
- cribl_control_plane/utils/eventstreaming.py +10 -0
- cribl_control_plane/utils/forms.py +21 -10
- cribl_control_plane/utils/queryparams.py +14 -2
- cribl_control_plane/utils/retries.py +69 -5
- cribl_control_plane/utils/security.py +5 -0
- cribl_control_plane/utils/unmarshal_json_response.py +15 -1
- cribl_control_plane/versions.py +31 -0
- cribl_control_plane/{distributed.py → versions_configs.py} +29 -35
- cribl_control_plane-0.4.0b23.dist-info/METADATA +855 -0
- cribl_control_plane-0.4.0b23.dist-info/RECORD +450 -0
- {cribl_control_plane-0.0.21.dist-info → cribl_control_plane-0.4.0b23.dist-info}/WHEEL +1 -1
- cribl_control_plane-0.4.0b23.dist-info/licenses/LICENSE +201 -0
- cribl_control_plane/errors/healthstatus_error.py +0 -32
- cribl_control_plane/models/createinputop.py +0 -18238
- cribl_control_plane/models/createoutputop.py +0 -18437
- cribl_control_plane/models/createpipelineop.py +0 -24
- cribl_control_plane/models/createproductsgroupsbyproductop.py +0 -54
- cribl_control_plane/models/createversionpushop.py +0 -23
- cribl_control_plane/models/createversionsyncop.py +0 -23
- cribl_control_plane/models/deletegroupsbyidop.py +0 -37
- cribl_control_plane/models/getgroupsaclbyidop.py +0 -63
- cribl_control_plane/models/getgroupsbyidop.py +0 -49
- cribl_control_plane/models/getgroupsconfigversionbyidop.py +0 -36
- cribl_control_plane/models/getproductsgroupsaclteamsbyproductandidop.py +0 -78
- cribl_control_plane/models/getproductsgroupsbyproductop.py +0 -58
- cribl_control_plane/models/getsummaryworkersop.py +0 -39
- cribl_control_plane/models/getversioncurrentbranchop.py +0 -23
- cribl_control_plane/models/getworkersop.py +0 -82
- cribl_control_plane/models/healthstatus.py +0 -33
- cribl_control_plane/models/packrequestbody.py +0 -75
- cribl_control_plane/models/restartresponse.py +0 -26
- cribl_control_plane/models/routesroute_input.py +0 -67
- cribl_control_plane/models/updategroupsbyidop.py +0 -48
- cribl_control_plane/models/updategroupsdeploybyidop.py +0 -46
- cribl_control_plane/models/updateworkersrestartop.py +0 -24
- cribl_control_plane/versioning.py +0 -2309
- cribl_control_plane-0.0.21.dist-info/METADATA +0 -561
- cribl_control_plane-0.0.21.dist-info/RECORD +0 -301
```diff
--- cribl_control_plane/models/outputkafka.py (0.0.21)
+++ cribl_control_plane/models/outputkafka.py (0.4.0b23)
@@ -1,9 +1,13 @@
 """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
 
 from __future__ import annotations
+from cribl_control_plane import models, utils
 from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
 from enum import Enum
 import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
 from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -12,29 +16,41 @@ class OutputKafkaType(str, Enum):
     KAFKA = "kafka"
 
 
-class OutputKafkaAcknowledgments(int, Enum):
+class OutputKafkaAcknowledgments(int, Enum, metaclass=utils.OpenEnumMeta):
     r"""Control the number of required acknowledgments."""
 
+    # Leader
     ONE = 1
+    # None
     ZERO = 0
+    # All
     MINUS_1 = -1
 
 
-class OutputKafkaRecordDataFormat(str, Enum):
+class OutputKafkaRecordDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Format to use to serialize events before writing to Kafka."""
 
+    # JSON
     JSON = "json"
+    # Field _raw
     RAW = "raw"
+    # Protobuf
     PROTOBUF = "protobuf"
 
 
-class OutputKafkaCompression(str, Enum):
+class OutputKafkaCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the data before sending to Kafka"""
 
+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"
+    # Snappy
     SNAPPY = "snappy"
+    # LZ4
     LZ4 = "lz4"
+    # ZSTD
+    ZSTD = "zstd"
 
 
 class OutputKafkaAuthTypedDict(TypedDict):
@@ -56,14 +72,18 @@ class OutputKafkaAuth(BaseModel):
     r"""Select or create a secret that references your credentials"""
 
 
-class OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(str, Enum):
+class OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(str, Enum):
+class OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(
+    str, Enum, metaclass=utils.OpenEnumMeta
+):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -123,15 +143,39 @@ class OutputKafkaKafkaSchemaRegistryTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Optional[OutputKafkaKafkaSchemaRegistryMinimumTLSVersion],
+        Annotated[
+            Optional[OutputKafkaKafkaSchemaRegistryMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Optional[OutputKafkaKafkaSchemaRegistryMaximumTLSVersion],
+        Annotated[
+            Optional[OutputKafkaKafkaSchemaRegistryMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="maxVersion"),
     ] = None
 
+    @field_serializer("min_version")
+    def serialize_min_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaKafkaSchemaRegistryMinimumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("max_version")
+    def serialize_max_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaKafkaSchemaRegistryMaximumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
 
 class OutputKafkaKafkaSchemaRegistryAuthenticationTypedDict(TypedDict):
     disabled: NotRequired[bool]
@@ -189,18 +233,76 @@ class OutputKafkaKafkaSchemaRegistryAuthentication(BaseModel):
     r"""Used when __valueSchemaIdOut is not present, to transform _raw, leave blank if value transformation is not required by default."""
 
 
-class OutputKafkaSASLMechanism(str, Enum):
+class OutputKafkaAuthenticationMethod(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Enter credentials directly, or select a stored secret"""
+
+    MANUAL = "manual"
+    SECRET = "secret"
+
+
+class OutputKafkaSASLMechanism(str, Enum, metaclass=utils.OpenEnumMeta):
+    # PLAIN
     PLAIN = "plain"
+    # SCRAM-SHA-256
     SCRAM_SHA_256 = "scram-sha-256"
+    # SCRAM-SHA-512
     SCRAM_SHA_512 = "scram-sha-512"
+    # GSSAPI/Kerberos
     KERBEROS = "kerberos"
 
 
+class OutputKafkaOauthParamTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class OutputKafkaOauthParam(BaseModel):
+    name: str
+
+    value: str
+
+
+class OutputKafkaSaslExtensionTypedDict(TypedDict):
+    name: str
+    value: str
+
+
+class OutputKafkaSaslExtension(BaseModel):
+    name: str
+
+    value: str
+
+
 class OutputKafkaAuthenticationTypedDict(TypedDict):
     r"""Authentication parameters to use when connecting to brokers. Using TLS is highly recommended."""
 
     disabled: NotRequired[bool]
+    username: NotRequired[str]
+    password: NotRequired[str]
+    auth_type: NotRequired[OutputKafkaAuthenticationMethod]
+    r"""Enter credentials directly, or select a stored secret"""
+    credentials_secret: NotRequired[str]
+    r"""Select or create a secret that references your credentials"""
     mechanism: NotRequired[OutputKafkaSASLMechanism]
+    keytab_location: NotRequired[str]
+    r"""Location of keytab file for authentication principal"""
+    principal: NotRequired[str]
+    r"""Authentication principal, such as `kafka_user@example.com`"""
+    broker_service_class: NotRequired[str]
+    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
+    oauth_enabled: NotRequired[bool]
+    r"""Enable OAuth authentication"""
+    token_url: NotRequired[str]
+    r"""URL of the token endpoint to use for OAuth authentication"""
+    client_id: NotRequired[str]
+    r"""Client ID to use for OAuth authentication"""
+    oauth_secret_type: NotRequired[str]
+    client_text_secret: NotRequired[str]
+    r"""Select or create a stored text secret"""
+    oauth_params: NotRequired[List[OutputKafkaOauthParamTypedDict]]
+    r"""Additional fields to send to the token endpoint, such as scope or audience"""
+    sasl_extensions: NotRequired[List[OutputKafkaSaslExtensionTypedDict]]
+    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
 
 
 class OutputKafkaAuthentication(BaseModel):
@@ -208,17 +310,98 @@ class OutputKafkaAuthentication(BaseModel):
 
     disabled: Optional[bool] = True
 
-    mechanism: Optional[OutputKafkaSASLMechanism] = OutputKafkaSASLMechanism.PLAIN
+    username: Optional[str] = None
+
+    password: Optional[str] = None
 
+    auth_type: Annotated[
+        Annotated[
+            Optional[OutputKafkaAuthenticationMethod],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="authType"),
+    ] = OutputKafkaAuthenticationMethod.MANUAL
+    r"""Enter credentials directly, or select a stored secret"""
+
+    credentials_secret: Annotated[
+        Optional[str], pydantic.Field(alias="credentialsSecret")
+    ] = None
+    r"""Select or create a secret that references your credentials"""
+
+    mechanism: Annotated[
+        Optional[OutputKafkaSASLMechanism], PlainValidator(validate_open_enum(False))
+    ] = OutputKafkaSASLMechanism.PLAIN
+
+    keytab_location: Annotated[
+        Optional[str], pydantic.Field(alias="keytabLocation")
+    ] = None
+    r"""Location of keytab file for authentication principal"""
+
+    principal: Optional[str] = None
+    r"""Authentication principal, such as `kafka_user@example.com`"""
+
+    broker_service_class: Annotated[
+        Optional[str], pydantic.Field(alias="brokerServiceClass")
+    ] = None
+    r"""Kerberos service class for Kafka brokers, such as `kafka`"""
+
+    oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
+        False
+    )
+    r"""Enable OAuth authentication"""
+
+    token_url: Annotated[Optional[str], pydantic.Field(alias="tokenUrl")] = None
+    r"""URL of the token endpoint to use for OAuth authentication"""
+
+    client_id: Annotated[Optional[str], pydantic.Field(alias="clientId")] = None
+    r"""Client ID to use for OAuth authentication"""
+
+    oauth_secret_type: Annotated[
+        Optional[str], pydantic.Field(alias="oauthSecretType")
+    ] = "secret"
+
+    client_text_secret: Annotated[
+        Optional[str], pydantic.Field(alias="clientTextSecret")
+    ] = None
+    r"""Select or create a stored text secret"""
+
+    oauth_params: Annotated[
+        Optional[List[OutputKafkaOauthParam]], pydantic.Field(alias="oauthParams")
+    ] = None
+    r"""Additional fields to send to the token endpoint, such as scope or audience"""
 
-class OutputKafkaMinimumTLSVersion(str, Enum):
+    sasl_extensions: Annotated[
+        Optional[List[OutputKafkaSaslExtension]], pydantic.Field(alias="saslExtensions")
+    ] = None
+    r"""Additional SASL extension fields, such as Confluent's logicalCluster or identityPoolId"""
+
+    @field_serializer("auth_type")
+    def serialize_auth_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaAuthenticationMethod(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("mechanism")
+    def serialize_mechanism(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaSASLMechanism(value)
+            except ValueError:
+                return value
+        return value
+
+
+class OutputKafkaMinimumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
     TL_SV1_3 = "TLSv1.3"
 
 
-class OutputKafkaMaximumTLSVersion(str, Enum):
+class OutputKafkaMaximumTLSVersion(str, Enum, metaclass=utils.OpenEnumMeta):
     TL_SV1 = "TLSv1"
     TL_SV1_1 = "TLSv1.1"
     TL_SV1_2 = "TLSv1.2"
@@ -278,44 +461,80 @@ class OutputKafkaTLSSettingsClientSide(BaseModel):
     r"""Passphrase to use to decrypt private key"""
 
     min_version: Annotated[
-        Optional[OutputKafkaMinimumTLSVersion], pydantic.Field(alias="minVersion")
+        Annotated[
+            Optional[OutputKafkaMinimumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="minVersion"),
     ] = None
 
     max_version: Annotated[
-        Optional[OutputKafkaMaximumTLSVersion], pydantic.Field(alias="maxVersion")
+        Annotated[
+            Optional[OutputKafkaMaximumTLSVersion],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="maxVersion"),
    ] = None
 
-
-class OutputKafkaBackpressureBehavior(str, Enum):
+    @field_serializer("min_version")
+    def serialize_min_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaMinimumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("max_version")
+    def serialize_max_version(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaMaximumTLSVersion(value)
+            except ValueError:
+                return value
+        return value
+
+
+class OutputKafkaBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when all receivers are exerting backpressure"""
 
+    # Block
     BLOCK = "block"
+    # Drop
     DROP = "drop"
+    # Persistent Queue
     QUEUE = "queue"
 
 
-class OutputKafkaPqCompressCompression(str, Enum):
+class OutputKafkaMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    # Error
+    ERROR = "error"
+    # Backpressure
+    ALWAYS = "always"
+    # Always On
+    BACKPRESSURE = "backpressure"
+
+
+class OutputKafkaPqCompressCompression(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""Codec to use to compress the persisted data"""
 
+    # None
     NONE = "none"
+    # Gzip
     GZIP = "gzip"
 
 
-class OutputKafkaQueueFullBehavior(str, Enum):
+class OutputKafkaQueueFullBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
+    # Block
     BLOCK = "block"
+    # Drop new data
     DROP = "drop"
 
 
-class OutputKafkaMode(str, Enum):
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
-    ERROR = "error"
-    BACKPRESSURE = "backpressure"
-    ALWAYS = "always"
-
-
 class OutputKafkaPqControlsTypedDict(TypedDict):
     pass
 
@@ -325,13 +544,13 @@ class OutputKafkaPqControls(BaseModel):
 
 
 class OutputKafkaTypedDict(TypedDict):
+    type: OutputKafkaType
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify hostname and port, e.g., mykafkabroker:9092, or just hostname, in which case @{product} will assign port 9092."""
     topic: str
     r"""The topic to publish events to. Can be overridden using the __topicOut field."""
     id: NotRequired[str]
     r"""Unique ID for this output"""
-    type: NotRequired[OutputKafkaType]
     pipeline: NotRequired[str]
     r"""Pipeline to process data before sending out to this output"""
     system_fields: NotRequired[List[str]]
@@ -379,6 +598,18 @@ class OutputKafkaTypedDict(TypedDict):
     description: NotRequired[str]
     protobuf_library_id: NotRequired[str]
     r"""Select a set of Protobuf definitions for the events you want to send"""
+    protobuf_encoding_id: NotRequired[str]
+    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
+    pq_strict_ordering: NotRequired[bool]
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+    pq_rate_per_sec: NotRequired[float]
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+    pq_mode: NotRequired[OutputKafkaMode]
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+    pq_max_buffer_size: NotRequired[float]
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+    pq_max_backpressure_sec: NotRequired[float]
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
     pq_max_file_size: NotRequired[str]
     r"""The maximum size to store in each queue file before closing and optionally compressing (KB, MB, etc.)"""
     pq_max_size: NotRequired[str]
@@ -389,12 +620,12 @@ class OutputKafkaTypedDict(TypedDict):
     r"""Codec to use to compress the persisted data"""
     pq_on_backpressure: NotRequired[OutputKafkaQueueFullBehavior]
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
-    pq_mode: NotRequired[OutputKafkaMode]
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
     pq_controls: NotRequired[OutputKafkaPqControlsTypedDict]
 
 
 class OutputKafka(BaseModel):
+    type: OutputKafkaType
+
     brokers: List[str]
     r"""Enter each Kafka bootstrap server you want to use. Specify hostname and port, e.g., mykafkabroker:9092, or just hostname, in which case @{product} will assign port 9092."""
 
@@ -404,8 +635,6 @@ class OutputKafka(BaseModel):
     id: Optional[str] = None
     r"""Unique ID for this output"""
 
-    type: Optional[OutputKafkaType] = None
-
     pipeline: Optional[str] = None
     r"""Pipeline to process data before sending out to this output"""
 
@@ -420,15 +649,23 @@ class OutputKafka(BaseModel):
     streamtags: Optional[List[str]] = None
    r"""Tags for filtering and grouping in @{product}"""
 
-    ack: Optional[OutputKafkaAcknowledgments] = OutputKafkaAcknowledgments.ONE
+    ack: Annotated[
+        Optional[OutputKafkaAcknowledgments], PlainValidator(validate_open_enum(True))
+    ] = OutputKafkaAcknowledgments.ONE
     r"""Control the number of required acknowledgments."""
 
     format_: Annotated[
-        Optional[OutputKafkaRecordDataFormat], pydantic.Field(alias="format")
+        Annotated[
+            Optional[OutputKafkaRecordDataFormat],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="format"),
     ] = OutputKafkaRecordDataFormat.JSON
     r"""Format to use to serialize events before writing to Kafka."""
 
-    compression: Optional[OutputKafkaCompression] = OutputKafkaCompression.GZIP
+    compression: Annotated[
+        Optional[OutputKafkaCompression], PlainValidator(validate_open_enum(False))
+    ] = OutputKafkaCompression.GZIP
     r"""Codec to use to compress the data before sending to Kafka"""
 
     max_record_size_kb: Annotated[
@@ -491,7 +728,10 @@ class OutputKafka(BaseModel):
     tls: Optional[OutputKafkaTLSSettingsClientSide] = None
 
     on_backpressure: Annotated[
-        Optional[OutputKafkaBackpressureBehavior],
+        Annotated[
+            Optional[OutputKafkaBackpressureBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
         pydantic.Field(alias="onBackpressure"),
     ] = OutputKafkaBackpressureBehavior.BLOCK
     r"""How to handle events when all receivers are exerting backpressure"""
@@ -503,6 +743,37 @@
     ] = None
     r"""Select a set of Protobuf definitions for the events you want to send"""
 
+    protobuf_encoding_id: Annotated[
+        Optional[str], pydantic.Field(alias="protobufEncodingId")
+    ] = None
+    r"""Select the type of object you want the Protobuf definitions to use for event encoding"""
+
+    pq_strict_ordering: Annotated[
+        Optional[bool], pydantic.Field(alias="pqStrictOrdering")
+    ] = True
+    r"""Use FIFO (first in, first out) processing. Disable to forward new events to receivers before queue is flushed."""
+
+    pq_rate_per_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqRatePerSec")
+    ] = 0
+    r"""Throttling rate (in events per second) to impose while writing to Destinations from PQ. Defaults to 0, which disables throttling."""
+
+    pq_mode: Annotated[
+        Annotated[Optional[OutputKafkaMode], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="pqMode"),
+    ] = OutputKafkaMode.ERROR
+    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
+
+    pq_max_buffer_size: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBufferSize")
+    ] = 42
+    r"""The maximum number of events to hold in memory before writing the events to disk"""
+
+    pq_max_backpressure_sec: Annotated[
+        Optional[float], pydantic.Field(alias="pqMaxBackpressureSec")
+    ] = 30
+    r"""How long (in seconds) to wait for backpressure to resolve before engaging the queue"""
+
     pq_max_file_size: Annotated[
         Optional[str], pydantic.Field(alias="pqMaxFileSize")
     ] = "1 MB"
@@ -517,20 +788,86 @@
     r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/<output-id>."""
 
     pq_compress: Annotated[
-        Optional[OutputKafkaPqCompressCompression], pydantic.Field(alias="pqCompress")
+        Annotated[
+            Optional[OutputKafkaPqCompressCompression],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqCompress"),
     ] = OutputKafkaPqCompressCompression.NONE
     r"""Codec to use to compress the persisted data"""
 
     pq_on_backpressure: Annotated[
-        Optional[OutputKafkaQueueFullBehavior], pydantic.Field(alias="pqOnBackpressure")
+        Annotated[
+            Optional[OutputKafkaQueueFullBehavior],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="pqOnBackpressure"),
     ] = OutputKafkaQueueFullBehavior.BLOCK
     r"""How to handle events when the queue is exerting backpressure (full capacity or low disk). 'Block' is the same behavior as non-PQ blocking. 'Drop new data' throws away incoming data, while leaving the contents of the PQ unchanged."""
 
-    pq_mode: Annotated[Optional[OutputKafkaMode], pydantic.Field(alias="pqMode")] = (
-        OutputKafkaMode.ERROR
-    )
-    r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
-
     pq_controls: Annotated[
         Optional[OutputKafkaPqControls], pydantic.Field(alias="pqControls")
     ] = None
+
+    @field_serializer("ack")
+    def serialize_ack(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaAcknowledgments(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("format_")
+    def serialize_format_(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaRecordDataFormat(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("compression")
+    def serialize_compression(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaCompression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("on_backpressure")
+    def serialize_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaBackpressureBehavior(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_mode")
+    def serialize_pq_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_compress")
+    def serialize_pq_compress(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaPqCompressCompression(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("pq_on_backpressure")
+    def serialize_pq_on_backpressure(self, value):
+        if isinstance(value, str):
+            try:
+                return models.OutputKafkaQueueFullBehavior(value)
+            except ValueError:
+                return value
+        return value
```
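The recurring change in this diff is the switch from closed enums to "open" enums: `utils.OpenEnumMeta` plus `PlainValidator(validate_open_enum(...))` let a field accept values outside the declared members at validation time, and the paired `field_serializer` hooks convert known strings back to enum members on serialization while passing unknown strings through unchanged. Below is a minimal, self-contained sketch of that round-trip behavior using plain pydantic v2; `coerce_open_enum` is a hypothetical stand-in for the SDK's internal `validate_open_enum` helper, not the SDK API itself.

```python
from enum import Enum
from typing import Optional, Union

from pydantic import BaseModel, field_serializer
from pydantic.functional_validators import PlainValidator
from typing_extensions import Annotated


class Compression(str, Enum):
    NONE = "none"
    GZIP = "gzip"
    SNAPPY = "snappy"
    LZ4 = "lz4"
    ZSTD = "zstd"


def coerce_open_enum(value):
    # Stand-in for the SDK's validate_open_enum: map known values to the
    # Enum member; pass unknown values through instead of raising.
    try:
        return Compression(value)
    except ValueError:
        return value


class Output(BaseModel):
    compression: Annotated[
        Optional[Union[Compression, str]], PlainValidator(coerce_open_enum)
    ] = Compression.GZIP

    @field_serializer("compression")
    def serialize_compression(self, value):
        # Mirror of the generated serializers: re-coerce strings on dump so
        # known values serialize as enum members and unknown ones survive.
        if isinstance(value, str):
            try:
                return Compression(value)
            except ValueError:
                return value
        return value


print(Output(compression="zstd").compression)    # Compression.ZSTD
print(Output(compression="brotli").compression)  # "brotli" is kept, not rejected
print(Output(compression="brotli").model_dump()["compression"])  # "brotli"
```

The practical effect, visible across the regenerated models above, is forward compatibility: the API can introduce a new enum value (for example a new compression codec) without older SDK releases failing validation on responses that contain it.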