cribl-control-plane 0.0.21__py3-none-any.whl → 0.4.0b23__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- cribl_control_plane/_hooks/clientcredentials.py +113 -48
- cribl_control_plane/_version.py +4 -4
- cribl_control_plane/acl.py +225 -0
- cribl_control_plane/auth_sdk.py +12 -176
- cribl_control_plane/basesdk.py +17 -1
- cribl_control_plane/branches.py +351 -0
- cribl_control_plane/commits.py +1403 -0
- cribl_control_plane/commits_files.py +391 -0
- cribl_control_plane/configs_versions.py +201 -0
- cribl_control_plane/cribl.py +495 -0
- cribl_control_plane/destinations.py +146 -805
- cribl_control_plane/destinations_pq.py +379 -0
- cribl_control_plane/errors/__init__.py +26 -10
- cribl_control_plane/errors/apierror.py +2 -0
- cribl_control_plane/errors/criblcontrolplaneerror.py +11 -7
- cribl_control_plane/errors/error.py +4 -2
- cribl_control_plane/errors/healthserverstatus_error.py +41 -0
- cribl_control_plane/errors/no_response_error.py +5 -1
- cribl_control_plane/errors/responsevalidationerror.py +2 -0
- cribl_control_plane/functions.py +367 -0
- cribl_control_plane/groups_configs.py +22 -0
- cribl_control_plane/groups_sdk.py +333 -578
- cribl_control_plane/health.py +38 -18
- cribl_control_plane/hectokens.py +503 -0
- cribl_control_plane/httpclient.py +0 -1
- cribl_control_plane/{lake.py → lakedatasets.py} +207 -115
- cribl_control_plane/models/__init__.py +3644 -5986
- cribl_control_plane/models/addhectokenrequest.py +7 -1
- cribl_control_plane/models/authtoken.py +5 -1
- cribl_control_plane/models/backupssettings_union.py +37 -0
- cribl_control_plane/models/{lookupversions.py → branchinfo.py} +4 -4
- cribl_control_plane/models/cacheconnection.py +30 -2
- cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
- cribl_control_plane/models/cloudprovider.py +2 -1
- cribl_control_plane/models/collectorazureblob.py +130 -0
- cribl_control_plane/models/collectorconf.py +56 -0
- cribl_control_plane/models/collectorcribllake.py +27 -0
- cribl_control_plane/models/collectordatabase.py +92 -0
- cribl_control_plane/models/collectorfilesystem.py +66 -0
- cribl_control_plane/models/collectorgooglecloudstorage.py +131 -0
- cribl_control_plane/models/collectorhealthcheck.py +269 -0
- cribl_control_plane/models/collectorrest.py +340 -0
- cribl_control_plane/models/collectors3.py +239 -0
- cribl_control_plane/models/collectorscript.py +59 -0
- cribl_control_plane/models/collectorsplunk.py +253 -0
- cribl_control_plane/models/configgroup.py +67 -11
- cribl_control_plane/models/configgroupcloud.py +17 -3
- cribl_control_plane/models/countedbranchinfo.py +20 -0
- cribl_control_plane/models/countedconfiggroup.py +20 -0
- cribl_control_plane/models/countedcribllakedataset.py +20 -0
- cribl_control_plane/models/counteddistributedsummary.py +20 -0
- cribl_control_plane/models/countedfunctionresponse.py +20 -0
- cribl_control_plane/models/countedgitcommitsummary.py +20 -0
- cribl_control_plane/models/countedgitcountresult.py +20 -0
- cribl_control_plane/models/countedgitdiffresult.py +20 -0
- cribl_control_plane/models/countedgitfilesresponse.py +20 -0
- cribl_control_plane/models/{getversioninfoop.py → countedgitinfo.py} +2 -6
- cribl_control_plane/models/countedgitlogresult.py +20 -0
- cribl_control_plane/models/countedgitrevertresult.py +20 -0
- cribl_control_plane/models/countedgitshowresult.py +20 -0
- cribl_control_plane/models/countedgitstatusresult.py +20 -0
- cribl_control_plane/models/{listinputop.py → countedinput.py} +2 -6
- cribl_control_plane/models/countedinputsplunkhec.py +20 -0
- cribl_control_plane/models/countedjobinfo.py +20 -0
- cribl_control_plane/models/countedmasterworkerentry.py +20 -0
- cribl_control_plane/models/countednumber.py +19 -0
- cribl_control_plane/models/{getversionbranchop.py → countedobject.py} +2 -6
- cribl_control_plane/models/{listoutputop.py → countedoutput.py} +2 -6
- cribl_control_plane/models/countedoutputsamplesresponse.py +20 -0
- cribl_control_plane/models/countedoutputtestresponse.py +20 -0
- cribl_control_plane/models/countedpackinfo.py +20 -0
- cribl_control_plane/models/{createpacksop.py → countedpackinstallinfo.py} +2 -6
- cribl_control_plane/models/{listpipelineop.py → countedpipeline.py} +2 -6
- cribl_control_plane/models/{listroutesop.py → countedroutes.py} +2 -6
- cribl_control_plane/models/countedstring.py +19 -0
- cribl_control_plane/models/countedsystemsettingsconf.py +20 -0
- cribl_control_plane/models/countedteamaccesscontrollist.py +20 -0
- cribl_control_plane/models/counteduseraccesscontrollist.py +20 -0
- cribl_control_plane/models/createauthloginop.py +18 -0
- cribl_control_plane/models/createconfiggroupbyproductop.py +46 -0
- cribl_control_plane/models/createcribllakedatasetbylakeidop.py +3 -21
- cribl_control_plane/models/createinputhectokenbyidop.py +3 -21
- cribl_control_plane/models/createoutputtestbyidop.py +3 -22
- cribl_control_plane/models/createroutesappendbyidop.py +4 -21
- cribl_control_plane/models/createversioncommitop.py +27 -20
- cribl_control_plane/models/createversionrevertop.py +7 -23
- cribl_control_plane/models/createversionundoop.py +7 -22
- cribl_control_plane/models/criblevent.py +15 -0
- cribl_control_plane/models/cribllakedataset.py +23 -3
- cribl_control_plane/models/cribllakedatasetupdate.py +95 -0
- cribl_control_plane/models/currentbranchresult.py +13 -0
- cribl_control_plane/models/datasetmetadata.py +18 -2
- cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +43 -0
- cribl_control_plane/models/deletecribllakedatasetbylakeidandidop.py +5 -24
- cribl_control_plane/models/deleteinputbyidop.py +3 -22
- cribl_control_plane/models/deleteoutputbyidop.py +3 -22
- cribl_control_plane/models/deleteoutputpqbyidop.py +3 -21
- cribl_control_plane/models/deletepacksbyidop.py +3 -22
- cribl_control_plane/models/deletepipelinebyidop.py +3 -22
- cribl_control_plane/models/difffiles.py +130 -0
- cribl_control_plane/models/diffline.py +26 -0
- cribl_control_plane/models/difflinecontext.py +28 -0
- cribl_control_plane/models/difflinedelete.py +25 -0
- cribl_control_plane/models/difflineinsert.py +25 -0
- cribl_control_plane/models/distributedsummary.py +6 -0
- cribl_control_plane/models/functionaggregatemetrics.py +206 -0
- cribl_control_plane/models/functionaggregation.py +172 -0
- cribl_control_plane/models/functionautotimestamp.py +173 -0
- cribl_control_plane/models/functioncef.py +111 -0
- cribl_control_plane/models/functionchain.py +75 -0
- cribl_control_plane/models/functionclone.py +75 -0
- cribl_control_plane/models/functioncode.py +96 -0
- cribl_control_plane/models/functioncomment.py +75 -0
- cribl_control_plane/models/functiondistinct.py +99 -0
- cribl_control_plane/models/functiondnslookup.py +250 -0
- cribl_control_plane/models/functiondrop.py +73 -0
- cribl_control_plane/models/functiondropdimensions.py +87 -0
- cribl_control_plane/models/functiondynamicsampling.py +121 -0
- cribl_control_plane/models/functioneval.py +103 -0
- cribl_control_plane/models/functioneventbreaker.py +103 -0
- cribl_control_plane/models/functioneventstats.py +92 -0
- cribl_control_plane/models/functionexternaldata.py +73 -0
- cribl_control_plane/models/functionflatten.py +90 -0
- cribl_control_plane/models/functionfoldkeys.py +89 -0
- cribl_control_plane/models/functiongenstats.py +73 -0
- cribl_control_plane/models/functiongeoip.py +120 -0
- cribl_control_plane/models/functiongrok.py +95 -0
- cribl_control_plane/models/functionhandlebar.py +112 -0
- cribl_control_plane/models/functionjoin.py +112 -0
- cribl_control_plane/models/functionjsonunroll.py +80 -0
- cribl_control_plane/models/functionlakeexport.py +102 -0
- cribl_control_plane/models/functionlimit.py +75 -0
- cribl_control_plane/models/functionlocalsearchdatatypeparser.py +76 -0
- cribl_control_plane/models/functionlocalsearchrulesetrunner.py +97 -0
- cribl_control_plane/models/functionlookup.py +148 -0
- cribl_control_plane/models/functionmask.py +121 -0
- cribl_control_plane/models/functionmvexpand.py +128 -0
- cribl_control_plane/models/functionmvpull.py +99 -0
- cribl_control_plane/models/functionnotificationpolicies.py +186 -0
- cribl_control_plane/models/functionnotifications.py +85 -0
- cribl_control_plane/models/functionnotify.py +196 -0
- cribl_control_plane/models/functionnumerify.py +119 -0
- cribl_control_plane/models/functionotlplogs.py +82 -0
- cribl_control_plane/models/functionotlpmetrics.py +118 -0
- cribl_control_plane/models/functionotlptraces.py +111 -0
- cribl_control_plane/models/functionpack.py +80 -0
- cribl_control_plane/models/functionpivot.py +85 -0
- cribl_control_plane/models/functionpublishmetrics.py +153 -0
- cribl_control_plane/models/functionredis.py +173 -0
- cribl_control_plane/models/functionregexextract.py +112 -0
- cribl_control_plane/models/functionregexfilter.py +95 -0
- cribl_control_plane/models/functionrename.py +107 -0
- cribl_control_plane/models/functionresponse.py +242 -0
- cribl_control_plane/models/functionrollupmetrics.py +114 -0
- cribl_control_plane/models/functionsampling.py +90 -0
- cribl_control_plane/models/functionsend.py +141 -0
- cribl_control_plane/models/functionsensitivedatascanner.py +128 -0
- cribl_control_plane/models/functionserde.py +161 -0
- cribl_control_plane/models/functionserialize.py +134 -0
- cribl_control_plane/models/functionsidlookup.py +93 -0
- cribl_control_plane/models/functionsnmptrapserialize.py +144 -0
- cribl_control_plane/models/functionsort.py +97 -0
- cribl_control_plane/models/functionstore.py +132 -0
- cribl_control_plane/models/functionsuppress.py +115 -0
- cribl_control_plane/models/functiontee.py +90 -0
- cribl_control_plane/models/functiontrimtimestamp.py +75 -0
- cribl_control_plane/models/functionunion.py +80 -0
- cribl_control_plane/models/functionunroll.py +80 -0
- cribl_control_plane/models/functionwindow.py +96 -0
- cribl_control_plane/models/functionxmlunroll.py +92 -0
- cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +63 -0
- cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +63 -0
- cribl_control_plane/models/getconfiggroupbyproductandidop.py +53 -0
- cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +43 -0
- cribl_control_plane/models/getcribllakedatasetbylakeidandidop.py +5 -24
- cribl_control_plane/models/getcribllakedatasetbylakeidop.py +56 -16
- cribl_control_plane/models/getfunctionsbyidop.py +18 -0
- cribl_control_plane/models/getinputbyidop.py +3 -22
- cribl_control_plane/models/getmasterworkerentryop.py +22 -0
- cribl_control_plane/models/getoutputbyidop.py +3 -22
- cribl_control_plane/models/getoutputpqbyidop.py +3 -21
- cribl_control_plane/models/getoutputsamplesbyidop.py +3 -22
- cribl_control_plane/models/getpacksbyidop.py +18 -0
- cribl_control_plane/models/getpacksop.py +3 -21
- cribl_control_plane/models/getpipelinebyidop.py +3 -22
- cribl_control_plane/models/getroutesbyidop.py +3 -22
- cribl_control_plane/models/getsummaryop.py +23 -32
- cribl_control_plane/models/getversioncountop.py +10 -27
- cribl_control_plane/models/getversiondiffop.py +12 -28
- cribl_control_plane/models/getversionfilesop.py +10 -28
- cribl_control_plane/models/getversionop.py +30 -0
- cribl_control_plane/models/getversionshowop.py +12 -28
- cribl_control_plane/models/getversionstatusop.py +7 -23
- cribl_control_plane/models/gitcommitsummary.py +3 -3
- cribl_control_plane/models/{routecloneconf.py → gitcountresult.py} +4 -4
- cribl_control_plane/models/gitdiffresult.py +16 -0
- cribl_control_plane/models/gitfilesresponse.py +7 -5
- cribl_control_plane/models/gitinfo.py +14 -3
- cribl_control_plane/models/gitlogresult.py +33 -0
- cribl_control_plane/models/gitrevertparams.py +3 -3
- cribl_control_plane/models/gitrevertresult.py +5 -5
- cribl_control_plane/models/gitshowresult.py +19 -0
- cribl_control_plane/models/gitstatusresult.py +0 -3
- cribl_control_plane/models/groupcreaterequest.py +172 -0
- cribl_control_plane/models/hbcriblinfo.py +42 -7
- cribl_control_plane/models/healthserverstatus.py +55 -0
- cribl_control_plane/models/heartbeatmetadata.py +6 -11
- cribl_control_plane/models/input.py +89 -81
- cribl_control_plane/models/inputappscope.py +131 -35
- cribl_control_plane/models/inputazureblob.py +62 -6
- cribl_control_plane/models/inputcloudflarehec.py +518 -0
- cribl_control_plane/models/inputcollection.py +49 -6
- cribl_control_plane/models/inputconfluentcloud.py +262 -22
- cribl_control_plane/models/inputcribl.py +52 -9
- cribl_control_plane/models/inputcriblhttp.py +124 -33
- cribl_control_plane/models/inputcribllakehttp.py +199 -29
- cribl_control_plane/models/inputcriblmetrics.py +53 -9
- cribl_control_plane/models/inputcribltcp.py +125 -27
- cribl_control_plane/models/inputcrowdstrike.py +99 -10
- cribl_control_plane/models/inputdatadogagent.py +101 -27
- cribl_control_plane/models/inputdatagen.py +47 -4
- cribl_control_plane/models/inputedgeprometheus.py +215 -58
- cribl_control_plane/models/inputelastic.py +170 -39
- cribl_control_plane/models/inputeventhub.py +212 -9
- cribl_control_plane/models/inputexec.py +59 -6
- cribl_control_plane/models/inputfile.py +83 -15
- cribl_control_plane/models/inputfirehose.py +100 -27
- cribl_control_plane/models/inputgooglepubsub.py +83 -15
- cribl_control_plane/models/{inputgrafana_union.py → inputgrafana.py} +261 -67
- cribl_control_plane/models/inputhttp.py +100 -27
- cribl_control_plane/models/inputhttpraw.py +100 -27
- cribl_control_plane/models/inputjournalfiles.py +51 -7
- cribl_control_plane/models/inputkafka.py +257 -19
- cribl_control_plane/models/inputkinesis.py +133 -17
- cribl_control_plane/models/inputkubeevents.py +52 -9
- cribl_control_plane/models/inputkubelogs.py +66 -13
- cribl_control_plane/models/inputkubemetrics.py +66 -13
- cribl_control_plane/models/inputloki.py +116 -30
- cribl_control_plane/models/inputmetrics.py +97 -24
- cribl_control_plane/models/inputmodeldriventelemetry.py +110 -29
- cribl_control_plane/models/inputmsk.py +148 -21
- cribl_control_plane/models/inputnetflow.py +50 -7
- cribl_control_plane/models/inputoffice365mgmt.py +115 -17
- cribl_control_plane/models/inputoffice365msgtrace.py +117 -19
- cribl_control_plane/models/inputoffice365service.py +117 -19
- cribl_control_plane/models/inputopentelemetry.py +146 -35
- cribl_control_plane/models/inputprometheus.py +196 -47
- cribl_control_plane/models/inputprometheusrw.py +117 -30
- cribl_control_plane/models/inputrawudp.py +50 -7
- cribl_control_plane/models/inputs3.py +85 -8
- cribl_control_plane/models/inputs3inventory.py +99 -10
- cribl_control_plane/models/inputsecuritylake.py +100 -10
- cribl_control_plane/models/inputsnmp.py +115 -24
- cribl_control_plane/models/inputsplunk.py +133 -31
- cribl_control_plane/models/inputsplunkhec.py +122 -32
- cribl_control_plane/models/inputsplunksearch.py +115 -18
- cribl_control_plane/models/inputsqs.py +102 -19
- cribl_control_plane/models/{inputsyslog_union.py → inputsyslog.py} +193 -51
- cribl_control_plane/models/inputsystemmetrics.py +207 -37
- cribl_control_plane/models/inputsystemstate.py +66 -13
- cribl_control_plane/models/inputtcp.py +125 -29
- cribl_control_plane/models/inputtcpjson.py +115 -29
- cribl_control_plane/models/inputwef.py +151 -22
- cribl_control_plane/models/inputwindowsmetrics.py +191 -38
- cribl_control_plane/models/inputwineventlogs.py +93 -11
- cribl_control_plane/models/inputwiz.py +176 -11
- cribl_control_plane/models/inputwizwebhook.py +466 -0
- cribl_control_plane/models/inputzscalerhec.py +122 -32
- cribl_control_plane/models/jobinfo.py +34 -0
- cribl_control_plane/models/jobstatus.py +48 -0
- cribl_control_plane/models/lakedatasetmetrics.py +17 -0
- cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
- cribl_control_plane/models/listconfiggroupbyproductop.py +46 -0
- cribl_control_plane/models/listmasterworkerentryop.py +64 -0
- cribl_control_plane/models/logininfo.py +3 -3
- cribl_control_plane/models/masterworkerentry.py +20 -13
- cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
- cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeprovidedinfo.py +13 -11
- cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
- cribl_control_plane/models/nodeupgradestate.py +2 -1
- cribl_control_plane/models/nodeupgradestatus.py +51 -5
- cribl_control_plane/models/outpostnodeinfo.py +16 -0
- cribl_control_plane/models/output.py +103 -89
- cribl_control_plane/models/outputazureblob.py +174 -21
- cribl_control_plane/models/outputazuredataexplorer.py +517 -93
- cribl_control_plane/models/outputazureeventhub.py +318 -34
- cribl_control_plane/models/outputazurelogs.py +145 -26
- cribl_control_plane/models/outputchronicle.py +532 -0
- cribl_control_plane/models/outputclickhouse.py +208 -37
- cribl_control_plane/models/outputcloudflarer2.py +632 -0
- cribl_control_plane/models/outputcloudwatch.py +132 -26
- cribl_control_plane/models/outputconfluentcloud.py +387 -46
- cribl_control_plane/models/outputcriblhttp.py +203 -36
- cribl_control_plane/models/outputcribllake.py +161 -21
- cribl_control_plane/models/outputcribltcp.py +199 -34
- cribl_control_plane/models/outputcrowdstrikenextgensiem.py +176 -32
- cribl_control_plane/models/outputdatabricks.py +501 -0
- cribl_control_plane/models/outputdatadog.py +204 -36
- cribl_control_plane/models/outputdataset.py +186 -34
- cribl_control_plane/models/outputdevnull.py +5 -5
- cribl_control_plane/models/outputdiskspool.py +22 -7
- cribl_control_plane/models/outputdls3.py +238 -29
- cribl_control_plane/models/outputdynatracehttp.py +211 -37
- cribl_control_plane/models/outputdynatraceotlp.py +213 -39
- cribl_control_plane/models/outputelastic.py +199 -30
- cribl_control_plane/models/outputelasticcloud.py +174 -29
- cribl_control_plane/models/outputexabeam.py +99 -13
- cribl_control_plane/models/outputfilesystem.py +139 -14
- cribl_control_plane/models/outputgooglechronicle.py +216 -35
- cribl_control_plane/models/outputgooglecloudlogging.py +177 -34
- cribl_control_plane/models/outputgooglecloudstorage.py +220 -29
- cribl_control_plane/models/outputgooglepubsub.py +138 -51
- cribl_control_plane/models/outputgrafanacloud.py +386 -70
- cribl_control_plane/models/outputgraphite.py +131 -28
- cribl_control_plane/models/outputhoneycomb.py +145 -26
- cribl_control_plane/models/outputhumiohec.py +165 -31
- cribl_control_plane/models/outputinfluxdb.py +165 -28
- cribl_control_plane/models/outputkafka.py +378 -41
- cribl_control_plane/models/outputkinesis.py +168 -30
- cribl_control_plane/models/outputloki.py +171 -27
- cribl_control_plane/models/outputmicrosoftfabric.py +540 -0
- cribl_control_plane/models/outputminio.py +228 -28
- cribl_control_plane/models/outputmsk.py +270 -43
- cribl_control_plane/models/outputnewrelic.py +176 -34
- cribl_control_plane/models/outputnewrelicevents.py +166 -31
- cribl_control_plane/models/outputopentelemetry.py +240 -40
- cribl_control_plane/models/outputprometheus.py +145 -26
- cribl_control_plane/models/outputring.py +54 -13
- cribl_control_plane/models/outputs3.py +238 -31
- cribl_control_plane/models/outputsecuritylake.py +182 -21
- cribl_control_plane/models/outputsentinel.py +175 -32
- cribl_control_plane/models/outputsentineloneaisiem.py +184 -38
- cribl_control_plane/models/outputservicenow.py +226 -41
- cribl_control_plane/models/outputsignalfx.py +145 -26
- cribl_control_plane/models/outputsns.py +146 -28
- cribl_control_plane/models/outputsplunk.py +209 -39
- cribl_control_plane/models/outputsplunkhec.py +243 -31
- cribl_control_plane/models/outputsplunklb.py +266 -46
- cribl_control_plane/models/outputsqs.py +166 -36
- cribl_control_plane/models/outputstatsd.py +130 -28
- cribl_control_plane/models/outputstatsdext.py +131 -28
- cribl_control_plane/models/outputsumologic.py +146 -25
- cribl_control_plane/models/outputsyslog.py +323 -51
- cribl_control_plane/models/outputtcpjson.py +191 -37
- cribl_control_plane/models/outputwavefront.py +145 -26
- cribl_control_plane/models/outputwebhook.py +216 -38
- cribl_control_plane/models/outputxsiam.py +148 -31
- cribl_control_plane/models/packinfo.py +11 -8
- cribl_control_plane/models/packinstallinfo.py +14 -11
- cribl_control_plane/models/packrequestbody_union.py +140 -0
- cribl_control_plane/models/packupgraderequest.py +26 -0
- cribl_control_plane/models/piisettings_union.py +31 -0
- cribl_control_plane/models/productscore.py +10 -0
- cribl_control_plane/models/rbacresource.py +2 -1
- cribl_control_plane/models/resourcepolicy.py +15 -2
- cribl_control_plane/models/rollbacksettings_union.py +44 -0
- cribl_control_plane/models/routeconf.py +3 -4
- cribl_control_plane/models/routes.py +0 -24
- cribl_control_plane/models/runnablejob.py +27 -0
- cribl_control_plane/models/runnablejobcollection.py +628 -0
- cribl_control_plane/models/runnablejobexecutor.py +360 -0
- cribl_control_plane/models/runnablejobscheduledsearch.py +279 -0
- cribl_control_plane/models/schemeclientoauth.py +5 -0
- cribl_control_plane/models/snisettings_union.py +31 -0
- cribl_control_plane/models/systemsettingsconf.py +291 -0
- cribl_control_plane/models/tlssettings_union.py +43 -0
- cribl_control_plane/models/updateconfiggroupbyproductandidop.py +56 -0
- cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +53 -0
- cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +14 -29
- cribl_control_plane/models/updatehectokenrequest.py +7 -1
- cribl_control_plane/models/updateinputbyidop.py +5 -23
- cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +5 -23
- cribl_control_plane/models/updateoutputbyidop.py +5 -23
- cribl_control_plane/models/updatepacksbyidop.py +12 -50
- cribl_control_plane/models/updatepacksop.py +12 -24
- cribl_control_plane/models/updatepipelinebyidop.py +5 -23
- cribl_control_plane/models/updateroutesbyidop.py +8 -27
- cribl_control_plane/models/upgradegroupsettings_union.py +43 -0
- cribl_control_plane/models/upgradepackageurls.py +20 -0
- cribl_control_plane/models/upgradesettings.py +38 -0
- cribl_control_plane/models/uploadpackresponse.py +13 -0
- cribl_control_plane/models/{appmode.py → workertypes.py} +2 -5
- cribl_control_plane/{workers_sdk.py → nodes.py} +102 -234
- cribl_control_plane/packs.py +385 -184
- cribl_control_plane/pipelines.py +116 -66
- cribl_control_plane/routes_sdk.py +102 -70
- cribl_control_plane/samples.py +407 -0
- cribl_control_plane/sdk.py +35 -25
- cribl_control_plane/settings.py +20 -0
- cribl_control_plane/sources.py +143 -545
- cribl_control_plane/statuses.py +195 -0
- cribl_control_plane/summaries.py +195 -0
- cribl_control_plane/system_sdk.py +20 -0
- cribl_control_plane/teams.py +36 -28
- cribl_control_plane/tokens.py +210 -0
- cribl_control_plane/utils/__init__.py +18 -5
- cribl_control_plane/utils/annotations.py +32 -8
- cribl_control_plane/utils/eventstreaming.py +10 -0
- cribl_control_plane/utils/forms.py +21 -10
- cribl_control_plane/utils/queryparams.py +14 -2
- cribl_control_plane/utils/retries.py +69 -5
- cribl_control_plane/utils/security.py +5 -0
- cribl_control_plane/utils/unmarshal_json_response.py +15 -1
- cribl_control_plane/versions.py +31 -0
- cribl_control_plane/{distributed.py → versions_configs.py} +29 -35
- cribl_control_plane-0.4.0b23.dist-info/METADATA +855 -0
- cribl_control_plane-0.4.0b23.dist-info/RECORD +450 -0
- {cribl_control_plane-0.0.21.dist-info → cribl_control_plane-0.4.0b23.dist-info}/WHEEL +1 -1
- cribl_control_plane-0.4.0b23.dist-info/licenses/LICENSE +201 -0
- cribl_control_plane/errors/healthstatus_error.py +0 -32
- cribl_control_plane/models/createinputop.py +0 -18238
- cribl_control_plane/models/createoutputop.py +0 -18437
- cribl_control_plane/models/createpipelineop.py +0 -24
- cribl_control_plane/models/createproductsgroupsbyproductop.py +0 -54
- cribl_control_plane/models/createversionpushop.py +0 -23
- cribl_control_plane/models/createversionsyncop.py +0 -23
- cribl_control_plane/models/deletegroupsbyidop.py +0 -37
- cribl_control_plane/models/getgroupsaclbyidop.py +0 -63
- cribl_control_plane/models/getgroupsbyidop.py +0 -49
- cribl_control_plane/models/getgroupsconfigversionbyidop.py +0 -36
- cribl_control_plane/models/getproductsgroupsaclteamsbyproductandidop.py +0 -78
- cribl_control_plane/models/getproductsgroupsbyproductop.py +0 -58
- cribl_control_plane/models/getsummaryworkersop.py +0 -39
- cribl_control_plane/models/getversioncurrentbranchop.py +0 -23
- cribl_control_plane/models/getworkersop.py +0 -82
- cribl_control_plane/models/healthstatus.py +0 -33
- cribl_control_plane/models/packrequestbody.py +0 -75
- cribl_control_plane/models/restartresponse.py +0 -26
- cribl_control_plane/models/routesroute_input.py +0 -67
- cribl_control_plane/models/updategroupsbyidop.py +0 -48
- cribl_control_plane/models/updategroupsdeploybyidop.py +0 -46
- cribl_control_plane/models/updateworkersrestartop.py +0 -24
- cribl_control_plane/versioning.py +0 -2309
- cribl_control_plane-0.0.21.dist-info/METADATA +0 -561
- cribl_control_plane-0.0.21.dist-info/RECORD +0 -301
cribl_control_plane/models/runnablejobexecutor.py
@@ -0,0 +1,360 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import models, utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
+from typing import Any, List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class RunnableJobExecutorJobType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+    EXECUTOR = "executor"
+    SCHEDULED_SEARCH = "scheduledSearch"
+
+
+class RunnableJobExecutorType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+
+
+class RunnableJobExecutorScheduleLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Level at which to set task logging"""
+
+    ERROR = "error"
+    WARN = "warn"
+    INFO = "info"
+    DEBUG = "debug"
+    SILLY = "silly"
+
+
+class RunnableJobExecutorTimeWarningTypedDict(TypedDict):
+    pass
+
+
+class RunnableJobExecutorTimeWarning(BaseModel):
+    pass
+
+
+class RunnableJobExecutorRunSettingsTypedDict(TypedDict):
+    type: NotRequired[RunnableJobExecutorType]
+    reschedule_dropped_tasks: NotRequired[bool]
+    r"""Reschedule tasks that failed with non-fatal errors"""
+    max_task_reschedule: NotRequired[float]
+    r"""Maximum number of times a task can be rescheduled"""
+    log_level: NotRequired[RunnableJobExecutorScheduleLogLevel]
+    r"""Level at which to set task logging"""
+    job_timeout: NotRequired[str]
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+    mode: NotRequired[str]
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+    time_range_type: NotRequired[str]
+    earliest: NotRequired[float]
+    r"""Earliest time to collect data for the selected timezone"""
+    latest: NotRequired[float]
+    r"""Latest time to collect data for the selected timezone"""
+    timestamp_timezone: NotRequired[Any]
+    time_warning: NotRequired[RunnableJobExecutorTimeWarningTypedDict]
+    expression: NotRequired[str]
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+    min_task_size: NotRequired[str]
+    r"""Limits the bundle size for small tasks. For example,
+
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+    max_task_size: NotRequired[str]
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+
+class RunnableJobExecutorRunSettings(BaseModel):
+    type: Annotated[
+        Optional[RunnableJobExecutorType], PlainValidator(validate_open_enum(False))
+    ] = None
+
+    reschedule_dropped_tasks: Annotated[
+        Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+    ] = True
+    r"""Reschedule tasks that failed with non-fatal errors"""
+
+    max_task_reschedule: Annotated[
+        Optional[float], pydantic.Field(alias="maxTaskReschedule")
+    ] = 1
+    r"""Maximum number of times a task can be rescheduled"""
+
+    log_level: Annotated[
+        Annotated[
+            Optional[RunnableJobExecutorScheduleLogLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="logLevel"),
+    ] = RunnableJobExecutorScheduleLogLevel.INFO
+    r"""Level at which to set task logging"""
+
+    job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+    mode: Optional[str] = "list"
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+    time_range_type: Annotated[Optional[str], pydantic.Field(alias="timeRangeType")] = (
+        "relative"
+    )
+
+    earliest: Optional[float] = None
+    r"""Earliest time to collect data for the selected timezone"""
+
+    latest: Optional[float] = None
+    r"""Latest time to collect data for the selected timezone"""
+
+    timestamp_timezone: Annotated[
+        Optional[Any], pydantic.Field(alias="timestampTimezone")
+    ] = None
+
+    time_warning: Annotated[
+        Optional[RunnableJobExecutorTimeWarning], pydantic.Field(alias="timeWarning")
+    ] = None
+
+    expression: Optional[str] = "true"
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+
+    min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
+    r"""Limits the bundle size for small tasks. For example,
+
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+
+    max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
+        "10MB"
+    )
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+    @field_serializer("type")
+    def serialize_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobExecutorType(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("log_level")
+    def serialize_log_level(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobExecutorScheduleLogLevel(value)
+            except ValueError:
+                return value
+        return value
+
+
+class RunnableJobExecutorScheduleTypedDict(TypedDict):
+    r"""Configuration for a scheduled job"""
+
+    enabled: NotRequired[bool]
+    r"""Enable to configure scheduling for this Collector"""
+    skippable: NotRequired[bool]
+    r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+    resume_missed: NotRequired[bool]
+    r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+    cron_schedule: NotRequired[str]
+    r"""A cron schedule on which to run this job"""
+    max_concurrent_runs: NotRequired[float]
+    r"""The maximum number of instances of this scheduled job that may be running at any time"""
+    run: NotRequired[RunnableJobExecutorRunSettingsTypedDict]
+
+
+class RunnableJobExecutorSchedule(BaseModel):
+    r"""Configuration for a scheduled job"""
+
+    enabled: Optional[bool] = None
+    r"""Enable to configure scheduling for this Collector"""
+
+    skippable: Optional[bool] = True
+    r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+    resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
+        False
+    )
+    r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+
+    cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
+        "*/5 * * * *"
+    )
+    r"""A cron schedule on which to run this job"""
+
+    max_concurrent_runs: Annotated[
+        Optional[float], pydantic.Field(alias="maxConcurrentRuns")
+    ] = 1
+    r"""The maximum number of instances of this scheduled job that may be running at any time"""
+
+    run: Optional[RunnableJobExecutorRunSettings] = None
+
+
+class ExecutorSpecificSettingsTypedDict(TypedDict):
+    pass
+
+
+class ExecutorSpecificSettings(BaseModel):
+    pass
+
+
+class ExecutorTypedDict(TypedDict):
+    type: str
+    r"""The type of executor to run"""
+    store_task_results: NotRequired[bool]
+    r"""Determines whether or not to write task results to disk"""
+    conf: NotRequired[ExecutorSpecificSettingsTypedDict]
+
+
+class Executor(BaseModel):
+    type: str
+    r"""The type of executor to run"""
+
+    store_task_results: Annotated[
+        Optional[bool], pydantic.Field(alias="storeTaskResults")
+    ] = True
+    r"""Determines whether or not to write task results to disk"""
+
+    conf: Optional[ExecutorSpecificSettings] = None
+
+
+class RunnableJobExecutorLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Level at which to set task logging"""
+
+    ERROR = "error"
+    WARN = "warn"
+    INFO = "info"
+    DEBUG = "debug"
+    SILLY = "silly"
+
+
+class RunnableJobExecutorRunTypedDict(TypedDict):
+    reschedule_dropped_tasks: NotRequired[bool]
+    r"""Reschedule tasks that failed with non-fatal errors"""
+    max_task_reschedule: NotRequired[float]
+    r"""Maximum number of times a task can be rescheduled"""
+    log_level: NotRequired[RunnableJobExecutorLogLevel]
+    r"""Level at which to set task logging"""
+    job_timeout: NotRequired[str]
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+
+class RunnableJobExecutorRun(BaseModel):
+    reschedule_dropped_tasks: Annotated[
+        Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+    ] = True
+    r"""Reschedule tasks that failed with non-fatal errors"""
+
+    max_task_reschedule: Annotated[
+        Optional[float], pydantic.Field(alias="maxTaskReschedule")
+    ] = 1
+    r"""Maximum number of times a task can be rescheduled"""
+
+    log_level: Annotated[
+        Annotated[
+            Optional[RunnableJobExecutorLogLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="logLevel"),
+    ] = RunnableJobExecutorLogLevel.INFO
+    r"""Level at which to set task logging"""
+
+    job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+    @field_serializer("log_level")
+    def serialize_log_level(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobExecutorLogLevel(value)
+            except ValueError:
+                return value
+        return value
+
+
+class RunnableJobExecutorTypedDict(TypedDict):
+    executor: ExecutorTypedDict
+    run: RunnableJobExecutorRunTypedDict
+    id: NotRequired[str]
+    r"""Unique ID for this Job"""
+    description: NotRequired[str]
+    type: NotRequired[RunnableJobExecutorJobType]
+    ttl: NotRequired[str]
+    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+    ignore_group_jobs_limit: NotRequired[bool]
+    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+    remove_fields: NotRequired[List[str]]
+    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+    resume_on_boot: NotRequired[bool]
+    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    schedule: NotRequired[RunnableJobExecutorScheduleTypedDict]
+    r"""Configuration for a scheduled job"""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+
+
+class RunnableJobExecutor(BaseModel):
+    executor: Executor
+
+    run: RunnableJobExecutorRun
+
+    id: Optional[str] = None
+    r"""Unique ID for this Job"""
+
+    description: Optional[str] = None
+
+    type: Annotated[
+        Optional[RunnableJobExecutorJobType], PlainValidator(validate_open_enum(False))
+    ] = None
+
+    ttl: Optional[str] = "4h"
+    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+
+    ignore_group_jobs_limit: Annotated[
+        Optional[bool], pydantic.Field(alias="ignoreGroupJobsLimit")
+    ] = False
+    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+
+    remove_fields: Annotated[
+        Optional[List[str]], pydantic.Field(alias="removeFields")
+    ] = None
+    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+
+    resume_on_boot: Annotated[Optional[bool], pydantic.Field(alias="resumeOnBoot")] = (
+        False
+    )
+    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    schedule: Optional[RunnableJobExecutorSchedule] = None
+    r"""Configuration for a scheduled job"""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    @field_serializer("type")
+    def serialize_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobExecutorJobType(value)
+            except ValueError:
+                return value
+        return value
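A minimal usage sketch for the generated model above (not part of the diff). The import path mirrors the re-export the module itself references via `cribl_control_plane.models`; constructing fields by their snake_case names assumes the SDK's shared `BaseModel` enables population by field name, as Speakeasy-generated models typically do. The job ID and executor `type` value are purely illustrative.

```python
from cribl_control_plane.models import (
    Executor,
    RunnableJobExecutor,
    RunnableJobExecutorRun,
)

job = RunnableJobExecutor(
    id="nightly-report",               # hypothetical job ID (optional field)
    executor=Executor(type="script"),  # "type" is the only required Executor field; value is illustrative
    run=RunnableJobExecutorRun(log_level="debug"),  # open enum: plain strings are accepted
)

# by_alias=True emits the aliased wire names (logLevel, jobTimeout, maxTaskReschedule, ...)
print(job.model_dump(by_alias=True, exclude_none=True))
```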
cribl_control_plane/models/runnablejobscheduledsearch.py
@@ -0,0 +1,279 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import models, utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
+from typing import Any, List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class RunnableJobScheduledSearchJobType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+    EXECUTOR = "executor"
+    SCHEDULED_SEARCH = "scheduledSearch"
+
+
+class RunnableJobScheduledSearchType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+
+
+class RunnableJobScheduledSearchLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Level at which to set task logging"""
+
+    ERROR = "error"
+    WARN = "warn"
+    INFO = "info"
+    DEBUG = "debug"
+    SILLY = "silly"
+
+
+class RunnableJobScheduledSearchTimeWarningTypedDict(TypedDict):
+    pass
+
+
+class RunnableJobScheduledSearchTimeWarning(BaseModel):
+    pass
+
+
+class RunnableJobScheduledSearchRunSettingsTypedDict(TypedDict):
+    type: NotRequired[RunnableJobScheduledSearchType]
+    reschedule_dropped_tasks: NotRequired[bool]
+    r"""Reschedule tasks that failed with non-fatal errors"""
+    max_task_reschedule: NotRequired[float]
+    r"""Maximum number of times a task can be rescheduled"""
+    log_level: NotRequired[RunnableJobScheduledSearchLogLevel]
+    r"""Level at which to set task logging"""
+    job_timeout: NotRequired[str]
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+    mode: NotRequired[str]
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+    time_range_type: NotRequired[str]
+    earliest: NotRequired[float]
+    r"""Earliest time to collect data for the selected timezone"""
+    latest: NotRequired[float]
+    r"""Latest time to collect data for the selected timezone"""
+    timestamp_timezone: NotRequired[Any]
+    time_warning: NotRequired[RunnableJobScheduledSearchTimeWarningTypedDict]
+    expression: NotRequired[str]
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+    min_task_size: NotRequired[str]
+    r"""Limits the bundle size for small tasks. For example,
+
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+    max_task_size: NotRequired[str]
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+
+class RunnableJobScheduledSearchRunSettings(BaseModel):
+    type: Annotated[
+        Optional[RunnableJobScheduledSearchType],
+        PlainValidator(validate_open_enum(False)),
+    ] = None
+
+    reschedule_dropped_tasks: Annotated[
+        Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+    ] = True
+    r"""Reschedule tasks that failed with non-fatal errors"""
+
+    max_task_reschedule: Annotated[
+        Optional[float], pydantic.Field(alias="maxTaskReschedule")
+    ] = 1
+    r"""Maximum number of times a task can be rescheduled"""
+
+    log_level: Annotated[
+        Annotated[
+            Optional[RunnableJobScheduledSearchLogLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="logLevel"),
+    ] = RunnableJobScheduledSearchLogLevel.INFO
+    r"""Level at which to set task logging"""
+
+    job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+    mode: Optional[str] = "list"
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+    time_range_type: Annotated[Optional[str], pydantic.Field(alias="timeRangeType")] = (
+        "relative"
+    )
+
+    earliest: Optional[float] = None
+    r"""Earliest time to collect data for the selected timezone"""
+
+    latest: Optional[float] = None
+    r"""Latest time to collect data for the selected timezone"""
+
+    timestamp_timezone: Annotated[
+        Optional[Any], pydantic.Field(alias="timestampTimezone")
+    ] = None
+
+    time_warning: Annotated[
+        Optional[RunnableJobScheduledSearchTimeWarning],
+        pydantic.Field(alias="timeWarning"),
+    ] = None
+
+    expression: Optional[str] = "true"
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+
+    min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
+    r"""Limits the bundle size for small tasks. For example,
+
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+
+    max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
+        "10MB"
+    )
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+    @field_serializer("type")
+    def serialize_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobScheduledSearchType(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("log_level")
+    def serialize_log_level(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobScheduledSearchLogLevel(value)
+            except ValueError:
+                return value
+        return value
+
+
+class RunnableJobScheduledSearchScheduleTypedDict(TypedDict):
+    r"""Configuration for a scheduled job"""
+
+    enabled: NotRequired[bool]
+    r"""Enable to configure scheduling for this Collector"""
+    skippable: NotRequired[bool]
+    r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+    resume_missed: NotRequired[bool]
+    r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+    cron_schedule: NotRequired[str]
+    r"""A cron schedule on which to run this job"""
+    max_concurrent_runs: NotRequired[float]
+    r"""The maximum number of instances of this scheduled job that may be running at any time"""
+    run: NotRequired[RunnableJobScheduledSearchRunSettingsTypedDict]
+
+
+class RunnableJobScheduledSearchSchedule(BaseModel):
+    r"""Configuration for a scheduled job"""
+
+    enabled: Optional[bool] = None
+    r"""Enable to configure scheduling for this Collector"""
+
+    skippable: Optional[bool] = True
+    r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+    resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
+        False
+    )
+    r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+
+    cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
+        "*/5 * * * *"
+    )
+    r"""A cron schedule on which to run this job"""
+
+    max_concurrent_runs: Annotated[
+        Optional[float], pydantic.Field(alias="maxConcurrentRuns")
+    ] = 1
+    r"""The maximum number of instances of this scheduled job that may be running at any time"""
+
+    run: Optional[RunnableJobScheduledSearchRunSettings] = None
+
+
+class RunnableJobScheduledSearchTypedDict(TypedDict):
+    type: RunnableJobScheduledSearchJobType
+    saved_query_id: str
+    r"""Identifies which search query to run"""
+    id: NotRequired[str]
+    r"""Unique ID for this Job"""
+    description: NotRequired[str]
+    ttl: NotRequired[str]
+    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+    ignore_group_jobs_limit: NotRequired[bool]
+    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+    remove_fields: NotRequired[List[str]]
+    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+    resume_on_boot: NotRequired[bool]
+    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    schedule: NotRequired[RunnableJobScheduledSearchScheduleTypedDict]
+    r"""Configuration for a scheduled job"""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+
+
+class RunnableJobScheduledSearch(BaseModel):
+    type: Annotated[
+        RunnableJobScheduledSearchJobType, PlainValidator(validate_open_enum(False))
+    ]
+
+    saved_query_id: Annotated[str, pydantic.Field(alias="savedQueryId")]
+    r"""Identifies which search query to run"""
+
+    id: Optional[str] = None
+    r"""Unique ID for this Job"""
+
+    description: Optional[str] = None
+
+    ttl: Optional[str] = "4h"
+    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+
+    ignore_group_jobs_limit: Annotated[
+        Optional[bool], pydantic.Field(alias="ignoreGroupJobsLimit")
+    ] = False
+    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+
+    remove_fields: Annotated[
+        Optional[List[str]], pydantic.Field(alias="removeFields")
+    ] = None
+    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+
+    resume_on_boot: Annotated[Optional[bool], pydantic.Field(alias="resumeOnBoot")] = (
+        False
+    )
+    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    schedule: Optional[RunnableJobScheduledSearchSchedule] = None
+    r"""Configuration for a scheduled job"""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    @field_serializer("type")
+    def serialize_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobScheduledSearchJobType(value)
+            except ValueError:
+                return value
+        return value
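The `OpenEnumMeta` enums and the `field_serializer` fallbacks above mean that unrecognized string values pass validation and are emitted unchanged, while known values serialize as their enum members. A small illustrative sketch, not part of the diff; it assumes the same population-by-field-name behavior noted earlier, and the saved query ID is made up.

```python
from cribl_control_plane.models import RunnableJobScheduledSearch

search_job = RunnableJobScheduledSearch(
    type="scheduledSearch",                # known job type, validated against the open enum
    saved_query_id="sample-saved-search",  # hypothetical saved query ID (alias: savedQueryId)
)

# serialize_type() maps recognized strings back to the enum member and leaves
# unrecognized strings untouched instead of raising.
print(search_job.model_dump(by_alias=True, exclude_none=True))
```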
cribl_control_plane/models/schemeclientoauth.py
@@ -9,6 +9,7 @@ from typing_extensions import Annotated, TypedDict
 class SchemeClientOauthTypedDict(TypedDict):
     client_id: str
     client_secret: str
+    audience: str
     token_url: str
 
 
@@ -21,4 +22,8 @@ class SchemeClientOauth(BaseModel):
         str, FieldMetadata(security=SecurityMetadata(field_name="clientSecret"))
     ]
 
+    audience: Annotated[
+        str, FieldMetadata(security=SecurityMetadata(field_name="audience"))
+    ]
+
     token_url: str = "https://login.cribl.cloud/oauth/token"
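The new required `audience` field changes how the OAuth client-credentials scheme is constructed. A hedged sketch, not from the diff: the credential and audience values are placeholders, and how the object is wired into the SDK's security handling (the updated `_hooks/clientcredentials.py`) is not shown here.

```python
from cribl_control_plane.models import SchemeClientOauth

oauth = SchemeClientOauth(
    client_id="my-client-id",            # placeholder
    client_secret="my-client-secret",    # placeholder
    audience="https://api.cribl.cloud",  # hypothetical audience value, now required
    # token_url keeps its default: https://login.cribl.cloud/oauth/token
)
```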
cribl_control_plane/models/snisettings_union.py
@@ -0,0 +1,31 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+import pydantic
+from typing import Union
+from typing_extensions import Annotated, TypeAliasType, TypedDict
+
+
+class SniSettings2TypedDict(TypedDict):
+    pass
+
+
+class SniSettings2(BaseModel):
+    pass
+
+
+class SniSettings1TypedDict(TypedDict):
+    disable_sni_routing: bool
+
+
+class SniSettings1(BaseModel):
+    disable_sni_routing: Annotated[bool, pydantic.Field(alias="disableSNIRouting")]
+
+
+SniSettingsUnionTypedDict = TypeAliasType(
+    "SniSettingsUnionTypedDict", Union[SniSettings2TypedDict, SniSettings1TypedDict]
+)
+
+
+SniSettingsUnion = TypeAliasType("SniSettingsUnion", Union[SniSettings2, SniSettings1])
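A short usage sketch for the new union, not part of the diff; it assumes the shared `BaseModel` allows population by field name so the `disableSNIRouting` alias can be set via `disable_sni_routing`.

```python
from cribl_control_plane.models import SniSettings1, SniSettings2, SniSettingsUnion

# Either variant satisfies the union: an explicit opt-out of SNI routing, or an
# empty settings object that leaves SNI behavior at its defaults.
explicit: SniSettingsUnion = SniSettings1(disable_sni_routing=True)
defaults: SniSettingsUnion = SniSettings2()

print(explicit.model_dump(by_alias=True))  # expected: {'disableSNIRouting': True}
```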
|