cribl-control-plane 0.4.0b23__py3-none-any.whl → 0.5.0b3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cribl_control_plane/_version.py +3 -3
- cribl_control_plane/collectors_sdk.py +993 -0
- cribl_control_plane/models/__init__.py +2161 -734
- cribl_control_plane/models/configgroup.py +3 -0
- cribl_control_plane/models/countedsavedjob.py +20 -0
- cribl_control_plane/models/createsavedjobop.py +30 -0
- cribl_control_plane/models/deletesavedjobbyidop.py +42 -0
- cribl_control_plane/models/functionaggregatemetrics.py +7 -144
- cribl_control_plane/models/functionaggregation.py +7 -110
- cribl_control_plane/models/functionautotimestamp.py +7 -111
- cribl_control_plane/models/functioncef.py +6 -51
- cribl_control_plane/models/functionchain.py +6 -12
- cribl_control_plane/models/functionclone.py +7 -13
- cribl_control_plane/models/functioncode.py +8 -35
- cribl_control_plane/models/functioncomment.py +6 -12
- cribl_control_plane/models/functionconfschemaaggregatemetrics.py +153 -0
- cribl_control_plane/models/functionconfschemaaggregation.py +114 -0
- cribl_control_plane/models/functionconfschemaautotimestamp.py +116 -0
- cribl_control_plane/models/functionconfschemacef.py +83 -0
- cribl_control_plane/models/functionconfschemachain.py +16 -0
- cribl_control_plane/models/functionconfschemaclone.py +16 -0
- cribl_control_plane/models/functionconfschemacode.py +38 -0
- cribl_control_plane/models/functionconfschemacomment.py +16 -0
- cribl_control_plane/models/functionconfschemadistinct.py +41 -0
- cribl_control_plane/models/functionconfschemadnslookup.py +193 -0
- cribl_control_plane/models/functionconfschemadrop.py +13 -0
- cribl_control_plane/models/functionconfschemadropdimensions.py +31 -0
- cribl_control_plane/models/functionconfschemadynamicsampling.py +67 -0
- cribl_control_plane/models/functionconfschemaeval.py +44 -0
- cribl_control_plane/models/functionconfschemaeventbreaker.py +51 -0
- cribl_control_plane/models/functionconfschemaeventstats.py +34 -0
- cribl_control_plane/models/functionconfschemaexternaldata.py +13 -0
- cribl_control_plane/models/functionconfschemaflatten.py +31 -0
- cribl_control_plane/models/functionconfschemafoldkeys.py +31 -0
- cribl_control_plane/models/functionconfschemagenstats.py +14 -0
- cribl_control_plane/models/functionconfschemageoip.py +66 -0
- cribl_control_plane/models/functionconfschemagrok.py +38 -0
- cribl_control_plane/models/functionconfschemahandlebar.py +56 -0
- cribl_control_plane/models/functionconfschemajoin.py +55 -0
- cribl_control_plane/models/functionconfschemajsonunroll.py +21 -0
- cribl_control_plane/models/functionconfschemalakeexport.py +44 -0
- cribl_control_plane/models/functionconfschemalimit.py +16 -0
- cribl_control_plane/models/functionconfschemalocalsearchdatatypeparser.py +17 -0
- cribl_control_plane/models/functionconfschemalocalsearchrulesetrunner.py +40 -0
- cribl_control_plane/models/functionconfschemalookup.py +92 -0
- cribl_control_plane/models/functionconfschemamask.py +63 -0
- cribl_control_plane/models/functionconfschemamvexpand.py +76 -0
- cribl_control_plane/models/functionconfschemamvpull.py +45 -0
- cribl_control_plane/models/functionconfschemanotificationpolicies.py +129 -0
- cribl_control_plane/models/functionconfschemanotifications.py +26 -0
- cribl_control_plane/models/functionconfschemanotify.py +149 -0
- cribl_control_plane/models/functionconfschemanumerify.py +63 -0
- cribl_control_plane/models/functionconfschemaotlplogs.py +24 -0
- cribl_control_plane/models/functionconfschemaotlpmetrics.py +61 -0
- cribl_control_plane/models/functionconfschemaotlptraces.py +54 -0
- cribl_control_plane/models/functionconfschemapack.py +24 -0
- cribl_control_plane/models/functionconfschemapivot.py +31 -0
- cribl_control_plane/models/functionconfschemapublishmetrics.py +98 -0
- cribl_control_plane/models/functionconfschemaredis.py +121 -0
- cribl_control_plane/models/functionconfschemaregexextract.py +55 -0
- cribl_control_plane/models/functionconfschemaregexfilter.py +38 -0
- cribl_control_plane/models/functionconfschemarename.py +49 -0
- cribl_control_plane/models/functionconfschemarollupmetrics.py +57 -0
- cribl_control_plane/models/functionconfschemasampling.py +32 -0
- cribl_control_plane/models/functionconfschemasend.py +84 -0
- cribl_control_plane/models/functionconfschemasensitivedatascanner.py +75 -0
- cribl_control_plane/models/functionconfschemaserde.py +105 -0
- cribl_control_plane/models/functionconfschemaserialize.py +78 -0
- cribl_control_plane/models/functionconfschemasidlookup.py +34 -0
- cribl_control_plane/models/functionconfschemasnmptrapserialize.py +92 -0
- cribl_control_plane/models/functionconfschemasort.py +41 -0
- cribl_control_plane/models/functionconfschemastore.py +75 -0
- cribl_control_plane/models/functionconfschemasuppress.py +57 -0
- cribl_control_plane/models/functionconfschematee.py +32 -0
- cribl_control_plane/models/functionconfschematrimtimestamp.py +16 -0
- cribl_control_plane/models/functionconfschemaunion.py +22 -0
- cribl_control_plane/models/functionconfschemaunroll.py +22 -0
- cribl_control_plane/models/functionconfschemawindow.py +40 -0
- cribl_control_plane/models/functionconfschemaxmlunroll.py +34 -0
- cribl_control_plane/models/functiondistinct.py +7 -37
- cribl_control_plane/models/functiondnslookup.py +7 -188
- cribl_control_plane/models/functiondrop.py +8 -12
- cribl_control_plane/models/functiondropdimensions.py +7 -25
- cribl_control_plane/models/functiondynamicsampling.py +6 -58
- cribl_control_plane/models/functioneval.py +9 -43
- cribl_control_plane/models/functioneventbreaker.py +6 -40
- cribl_control_plane/models/functioneventstats.py +7 -30
- cribl_control_plane/models/functionexternaldata.py +6 -10
- cribl_control_plane/models/functionflatten.py +7 -28
- cribl_control_plane/models/functionfoldkeys.py +6 -26
- cribl_control_plane/models/functiongenstats.py +7 -11
- cribl_control_plane/models/functiongeoip.py +7 -58
- cribl_control_plane/models/functiongrok.py +9 -35
- cribl_control_plane/models/functionhandlebar.py +6 -49
- cribl_control_plane/models/functionjoin.py +9 -52
- cribl_control_plane/models/functionjsonunroll.py +6 -17
- cribl_control_plane/models/functionlakeexport.py +6 -39
- cribl_control_plane/models/functionlimit.py +6 -12
- cribl_control_plane/models/functionlocalsearchdatatypeparser.py +6 -12
- cribl_control_plane/models/functionlocalsearchrulesetrunner.py +7 -34
- cribl_control_plane/models/functionlookup.py +7 -86
- cribl_control_plane/models/functionmask.py +9 -61
- cribl_control_plane/models/functionmvexpand.py +7 -66
- cribl_control_plane/models/functionmvpull.py +6 -36
- cribl_control_plane/models/functionnotificationpolicies.py +8 -125
- cribl_control_plane/models/functionnotifications.py +6 -22
- cribl_control_plane/models/functionnotify.py +6 -133
- cribl_control_plane/models/functionnumerify.py +7 -57
- cribl_control_plane/models/functionotlplogs.py +6 -19
- cribl_control_plane/models/functionotlpmetrics.py +7 -56
- cribl_control_plane/models/functionotlptraces.py +6 -48
- cribl_control_plane/models/functionpack.py +9 -20
- cribl_control_plane/models/functionpivot.py +7 -23
- cribl_control_plane/models/functionpublishmetrics.py +7 -91
- cribl_control_plane/models/functionredis.py +7 -111
- cribl_control_plane/models/functionregexextract.py +7 -50
- cribl_control_plane/models/functionregexfilter.py +7 -33
- cribl_control_plane/models/functionrename.py +7 -45
- cribl_control_plane/models/functionrollupmetrics.py +7 -52
- cribl_control_plane/models/functionsampling.py +7 -28
- cribl_control_plane/models/functionsend.py +8 -80
- cribl_control_plane/models/functionsensitivedatascanner.py +7 -66
- cribl_control_plane/models/functionserde.py +6 -98
- cribl_control_plane/models/functionserialize.py +7 -72
- cribl_control_plane/models/functionsidlookup.py +7 -31
- cribl_control_plane/models/functionsnmptrapserialize.py +6 -81
- cribl_control_plane/models/functionsort.py +8 -36
- cribl_control_plane/models/functionstore.py +6 -69
- cribl_control_plane/models/functionsuppress.py +6 -52
- cribl_control_plane/models/functiontee.py +6 -30
- cribl_control_plane/models/functiontrimtimestamp.py +6 -12
- cribl_control_plane/models/functionunion.py +9 -20
- cribl_control_plane/models/functionunroll.py +6 -17
- cribl_control_plane/models/functionwindow.py +7 -34
- cribl_control_plane/models/functionxmlunroll.py +6 -29
- cribl_control_plane/models/getsavedjobbyidop.py +33 -0
- cribl_control_plane/models/getsavedjobop.py +40 -0
- cribl_control_plane/models/groupcreaterequest.py +3 -0
- cribl_control_plane/models/heartbeatmetadata.py +42 -0
- cribl_control_plane/models/input.py +1 -1
- cribl_control_plane/models/inputedgeprometheus.py +11 -11
- cribl_control_plane/models/inputprometheus.py +23 -18
- cribl_control_plane/models/nodeprovidedinfo.py +42 -0
- cribl_control_plane/models/output.py +15 -9
- cribl_control_plane/models/outputclickhouse.py +31 -0
- cribl_control_plane/models/outputcriblhttp.py +7 -0
- cribl_control_plane/models/outputcriblsearchengine.py +655 -0
- cribl_control_plane/models/outputnetflow.py +16 -2
- cribl_control_plane/models/pipeline.py +52 -4
- cribl_control_plane/models/pipelinefunctionaggregatemetrics.py +195 -0
- cribl_control_plane/models/pipelinefunctionaggregation.py +159 -0
- cribl_control_plane/models/pipelinefunctionautotimestamp.py +56 -0
- cribl_control_plane/models/pipelinefunctioncef.py +96 -0
- cribl_control_plane/models/pipelinefunctionchain.py +62 -0
- cribl_control_plane/models/pipelinefunctionclone.py +56 -0
- cribl_control_plane/models/pipelinefunctioncode.py +56 -0
- cribl_control_plane/models/pipelinefunctioncomment.py +56 -0
- cribl_control_plane/models/pipelinefunctionconf.py +343 -46
- cribl_control_plane/models/pipelinefunctionconf_input.py +353 -0
- cribl_control_plane/models/pipelinefunctiondistinct.py +86 -0
- cribl_control_plane/models/pipelinefunctiondnslookup.py +56 -0
- cribl_control_plane/models/pipelinefunctiondrop.py +56 -0
- cribl_control_plane/models/pipelinefunctiondropdimensions.py +74 -0
- cribl_control_plane/models/pipelinefunctiondynamicsampling.py +111 -0
- cribl_control_plane/models/pipelinefunctioneval.py +56 -0
- cribl_control_plane/models/pipelinefunctioneventbreaker.py +95 -0
- cribl_control_plane/models/pipelinefunctioneventstats.py +79 -0
- cribl_control_plane/models/pipelinefunctionexternaldata.py +56 -0
- cribl_control_plane/models/pipelinefunctionflatten.py +56 -0
- cribl_control_plane/models/pipelinefunctionfoldkeys.py +56 -0
- cribl_control_plane/models/pipelinefunctiongenstats.py +56 -0
- cribl_control_plane/models/pipelinefunctiongeoip.py +109 -0
- cribl_control_plane/models/pipelinefunctiongrok.py +83 -0
- cribl_control_plane/models/pipelinefunctionhandlebar.py +99 -0
- cribl_control_plane/models/pipelinefunctionjoin.py +100 -0
- cribl_control_plane/models/pipelinefunctionjsonunroll.py +67 -0
- cribl_control_plane/models/pipelinefunctionlakeexport.py +89 -0
- cribl_control_plane/models/pipelinefunctionlimit.py +56 -0
- cribl_control_plane/models/pipelinefunctionlocalsearchdatatypeparser.py +62 -0
- cribl_control_plane/models/pipelinefunctionlocalsearchrulesetrunner.py +56 -0
- cribl_control_plane/models/pipelinefunctionlookup.py +136 -0
- cribl_control_plane/models/pipelinefunctionmask.py +108 -0
- cribl_control_plane/models/pipelinefunctionmvexpand.py +116 -0
- cribl_control_plane/models/pipelinefunctionmvpull.py +86 -0
- cribl_control_plane/models/pipelinefunctionnotificationpolicies.py +56 -0
- cribl_control_plane/models/pipelinefunctionnotifications.py +72 -0
- cribl_control_plane/models/pipelinefunctionnotify.py +189 -0
- cribl_control_plane/models/pipelinefunctionnumerify.py +56 -0
- cribl_control_plane/models/pipelinefunctionotlplogs.py +56 -0
- cribl_control_plane/models/pipelinefunctionotlpmetrics.py +56 -0
- cribl_control_plane/models/pipelinefunctionotlptraces.py +56 -0
- cribl_control_plane/models/pipelinefunctionpack.py +67 -0
- cribl_control_plane/models/pipelinefunctionpivot.py +72 -0
- cribl_control_plane/models/pipelinefunctionpublishmetrics.py +56 -0
- cribl_control_plane/models/pipelinefunctionredis.py +165 -0
- cribl_control_plane/models/pipelinefunctionregexextract.py +100 -0
- cribl_control_plane/models/pipelinefunctionregexfilter.py +56 -0
- cribl_control_plane/models/pipelinefunctionrename.py +56 -0
- cribl_control_plane/models/pipelinefunctionrollupmetrics.py +56 -0
- cribl_control_plane/models/pipelinefunctionsampling.py +56 -0
- cribl_control_plane/models/pipelinefunctionsend.py +128 -0
- cribl_control_plane/models/pipelinefunctionsensitivedatascanner.py +120 -0
- cribl_control_plane/models/pipelinefunctionserde.py +149 -0
- cribl_control_plane/models/pipelinefunctionserialize.py +122 -0
- cribl_control_plane/models/pipelinefunctionsidlookup.py +56 -0
- cribl_control_plane/models/pipelinefunctionsnmptrapserialize.py +56 -0
- cribl_control_plane/models/pipelinefunctionsort.py +84 -0
- cribl_control_plane/models/pipelinefunctionstore.py +120 -0
- cribl_control_plane/models/pipelinefunctionsuppress.py +102 -0
- cribl_control_plane/models/pipelinefunctiontee.py +77 -0
- cribl_control_plane/models/pipelinefunctiontrimtimestamp.py +56 -0
- cribl_control_plane/models/pipelinefunctionunion.py +67 -0
- cribl_control_plane/models/pipelinefunctionunroll.py +67 -0
- cribl_control_plane/models/pipelinefunctionwindow.py +83 -0
- cribl_control_plane/models/pipelinefunctionxmlunroll.py +79 -0
- cribl_control_plane/models/runnablejobcollection.py +11 -10
- cribl_control_plane/models/runnablejobexecutor.py +8 -8
- cribl_control_plane/models/savedjob.py +26 -0
- cribl_control_plane/models/savedjobcollection.py +411 -0
- cribl_control_plane/models/savedjobexecutor.py +301 -0
- cribl_control_plane/models/savedjobscheduledsearch.py +278 -0
- cribl_control_plane/models/updatepipelinebyidop.py +4 -3
- cribl_control_plane/models/updatesavedjobbyidop.py +42 -0
- cribl_control_plane/pipelines.py +16 -16
- cribl_control_plane/sdk.py +4 -0
- {cribl_control_plane-0.4.0b23.dist-info → cribl_control_plane-0.5.0b3.dist-info}/METADATA +10 -2
- {cribl_control_plane-0.4.0b23.dist-info → cribl_control_plane-0.5.0b3.dist-info}/RECORD +229 -88
- {cribl_control_plane-0.4.0b23.dist-info → cribl_control_plane-0.5.0b3.dist-info}/WHEEL +0 -0
- {cribl_control_plane-0.4.0b23.dist-info → cribl_control_plane-0.5.0b3.dist-info}/licenses/LICENSE +0 -0
|
@@ -207,7 +207,7 @@ class RunnableJobCollectionSchedule(BaseModel):
|
|
|
207
207
|
run: Optional[RunnableJobCollectionRunSettings] = None
|
|
208
208
|
|
|
209
209
|
|
|
210
|
-
class
|
|
210
|
+
class RunnableJobCollectionCollectorTypedDict(TypedDict):
|
|
211
211
|
type: str
|
|
212
212
|
r"""The type of collector to run"""
|
|
213
213
|
conf: CollectorConfTypedDict
|
|
@@ -218,7 +218,7 @@ class CollectorTypedDict(TypedDict):
|
|
|
218
218
|
r"""Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters."""
|
|
219
219
|
|
|
220
220
|
|
|
221
|
-
class
|
|
221
|
+
class RunnableJobCollectionCollector(BaseModel):
|
|
222
222
|
type: str
|
|
223
223
|
r"""The type of collector to run"""
|
|
224
224
|
|
|
@@ -232,7 +232,7 @@ class Collector(BaseModel):
|
|
|
232
232
|
r"""Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters."""
|
|
233
233
|
|
|
234
234
|
|
|
235
|
-
class
|
|
235
|
+
class RunnableJobCollectionInputType(str, Enum, metaclass=utils.OpenEnumMeta):
|
|
236
236
|
COLLECTION = "collection"
|
|
237
237
|
|
|
238
238
|
|
|
@@ -268,7 +268,7 @@ class RunnableJobCollectionMetadatum(BaseModel):
|
|
|
268
268
|
|
|
269
269
|
|
|
270
270
|
class RunnableJobCollectionInputTypedDict(TypedDict):
|
|
271
|
-
type: NotRequired[
|
|
271
|
+
type: NotRequired[RunnableJobCollectionInputType]
|
|
272
272
|
breaker_rulesets: NotRequired[List[str]]
|
|
273
273
|
r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
|
|
274
274
|
stale_channel_flush_ms: NotRequired[float]
|
|
@@ -287,9 +287,10 @@ class RunnableJobCollectionInputTypedDict(TypedDict):
|
|
|
287
287
|
|
|
288
288
|
|
|
289
289
|
class RunnableJobCollectionInput(BaseModel):
|
|
290
|
-
type: Annotated[
|
|
291
|
-
|
|
292
|
-
|
|
290
|
+
type: Annotated[
|
|
291
|
+
Optional[RunnableJobCollectionInputType],
|
|
292
|
+
PlainValidator(validate_open_enum(False)),
|
|
293
|
+
] = RunnableJobCollectionInputType.COLLECTION
|
|
293
294
|
|
|
294
295
|
breaker_rulesets: Annotated[
|
|
295
296
|
Optional[List[str]], pydantic.Field(alias="breakerRulesets")
|
|
@@ -326,7 +327,7 @@ class RunnableJobCollectionInput(BaseModel):
|
|
|
326
327
|
def serialize_type(self, value):
|
|
327
328
|
if isinstance(value, str):
|
|
328
329
|
try:
|
|
329
|
-
return models.
|
|
330
|
+
return models.RunnableJobCollectionInputType(value)
|
|
330
331
|
except ValueError:
|
|
331
332
|
return value
|
|
332
333
|
return value
|
|
@@ -544,7 +545,7 @@ class RunnableJobCollectionRun(BaseModel):
|
|
|
544
545
|
|
|
545
546
|
|
|
546
547
|
class RunnableJobCollectionTypedDict(TypedDict):
|
|
547
|
-
collector:
|
|
548
|
+
collector: RunnableJobCollectionCollectorTypedDict
|
|
548
549
|
run: RunnableJobCollectionRunTypedDict
|
|
549
550
|
id: NotRequired[str]
|
|
550
551
|
r"""Unique ID for this Job"""
|
|
@@ -570,7 +571,7 @@ class RunnableJobCollectionTypedDict(TypedDict):
|
|
|
570
571
|
|
|
571
572
|
|
|
572
573
|
class RunnableJobCollection(BaseModel):
|
|
573
|
-
collector:
|
|
574
|
+
collector: RunnableJobCollectionCollector
|
|
574
575
|
|
|
575
576
|
run: RunnableJobCollectionRun
|
|
576
577
|
|
|
@@ -204,23 +204,23 @@ class RunnableJobExecutorSchedule(BaseModel):
|
|
|
204
204
|
run: Optional[RunnableJobExecutorRunSettings] = None
|
|
205
205
|
|
|
206
206
|
|
|
207
|
-
class
|
|
207
|
+
class RunnableJobExecutorExecutorSpecificSettingsTypedDict(TypedDict):
|
|
208
208
|
pass
|
|
209
209
|
|
|
210
210
|
|
|
211
|
-
class
|
|
211
|
+
class RunnableJobExecutorExecutorSpecificSettings(BaseModel):
|
|
212
212
|
pass
|
|
213
213
|
|
|
214
214
|
|
|
215
|
-
class
|
|
215
|
+
class RunnableJobExecutorExecutorTypedDict(TypedDict):
|
|
216
216
|
type: str
|
|
217
217
|
r"""The type of executor to run"""
|
|
218
218
|
store_task_results: NotRequired[bool]
|
|
219
219
|
r"""Determines whether or not to write task results to disk"""
|
|
220
|
-
conf: NotRequired[
|
|
220
|
+
conf: NotRequired[RunnableJobExecutorExecutorSpecificSettingsTypedDict]
|
|
221
221
|
|
|
222
222
|
|
|
223
|
-
class
|
|
223
|
+
class RunnableJobExecutorExecutor(BaseModel):
|
|
224
224
|
type: str
|
|
225
225
|
r"""The type of executor to run"""
|
|
226
226
|
|
|
@@ -229,7 +229,7 @@ class Executor(BaseModel):
|
|
|
229
229
|
] = True
|
|
230
230
|
r"""Determines whether or not to write task results to disk"""
|
|
231
231
|
|
|
232
|
-
conf: Optional[
|
|
232
|
+
conf: Optional[RunnableJobExecutorExecutorSpecificSettings] = None
|
|
233
233
|
|
|
234
234
|
|
|
235
235
|
class RunnableJobExecutorLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
|
|
@@ -287,7 +287,7 @@ class RunnableJobExecutorRun(BaseModel):
|
|
|
287
287
|
|
|
288
288
|
|
|
289
289
|
class RunnableJobExecutorTypedDict(TypedDict):
|
|
290
|
-
executor:
|
|
290
|
+
executor: RunnableJobExecutorExecutorTypedDict
|
|
291
291
|
run: RunnableJobExecutorRunTypedDict
|
|
292
292
|
id: NotRequired[str]
|
|
293
293
|
r"""Unique ID for this Job"""
|
|
@@ -310,7 +310,7 @@ class RunnableJobExecutorTypedDict(TypedDict):
|
|
|
310
310
|
|
|
311
311
|
|
|
312
312
|
class RunnableJobExecutor(BaseModel):
|
|
313
|
-
executor:
|
|
313
|
+
executor: RunnableJobExecutorExecutor
|
|
314
314
|
|
|
315
315
|
run: RunnableJobExecutorRun
|
|
316
316
|
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
from .savedjobcollection import SavedJobCollection, SavedJobCollectionTypedDict
|
|
5
|
+
from .savedjobexecutor import SavedJobExecutor, SavedJobExecutorTypedDict
|
|
6
|
+
from .savedjobscheduledsearch import (
|
|
7
|
+
SavedJobScheduledSearch,
|
|
8
|
+
SavedJobScheduledSearchTypedDict,
|
|
9
|
+
)
|
|
10
|
+
from typing import Union
|
|
11
|
+
from typing_extensions import TypeAliasType
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
SavedJobTypedDict = TypeAliasType(
|
|
15
|
+
"SavedJobTypedDict",
|
|
16
|
+
Union[
|
|
17
|
+
SavedJobExecutorTypedDict,
|
|
18
|
+
SavedJobScheduledSearchTypedDict,
|
|
19
|
+
SavedJobCollectionTypedDict,
|
|
20
|
+
],
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
SavedJob = TypeAliasType(
|
|
25
|
+
"SavedJob", Union[SavedJobExecutor, SavedJobScheduledSearch, SavedJobCollection]
|
|
26
|
+
)
|
|
@@ -0,0 +1,411 @@
|
|
|
1
|
+
"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
from .collectorconf import CollectorConf, CollectorConfTypedDict
|
|
5
|
+
from cribl_control_plane import models, utils
|
|
6
|
+
from cribl_control_plane.types import BaseModel
|
|
7
|
+
from cribl_control_plane.utils import validate_open_enum
|
|
8
|
+
from enum import Enum
|
|
9
|
+
import pydantic
|
|
10
|
+
from pydantic import field_serializer
|
|
11
|
+
from pydantic.functional_validators import PlainValidator
|
|
12
|
+
from typing import Any, List, Optional
|
|
13
|
+
from typing_extensions import Annotated, NotRequired, TypedDict
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class SavedJobCollectionJobType(str, Enum, metaclass=utils.OpenEnumMeta):
|
|
17
|
+
COLLECTION = "collection"
|
|
18
|
+
EXECUTOR = "executor"
|
|
19
|
+
SCHEDULED_SEARCH = "scheduledSearch"
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class SavedJobCollectionRunType(str, Enum, metaclass=utils.OpenEnumMeta):
|
|
23
|
+
COLLECTION = "collection"
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class SavedJobCollectionLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
|
|
27
|
+
r"""Level at which to set task logging"""
|
|
28
|
+
|
|
29
|
+
ERROR = "error"
|
|
30
|
+
WARN = "warn"
|
|
31
|
+
INFO = "info"
|
|
32
|
+
DEBUG = "debug"
|
|
33
|
+
SILLY = "silly"
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class SavedJobCollectionTimeWarningTypedDict(TypedDict):
|
|
37
|
+
pass
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
class SavedJobCollectionTimeWarning(BaseModel):
|
|
41
|
+
pass
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class SavedJobCollectionRunSettingsTypedDict(TypedDict):
|
|
45
|
+
type: NotRequired[SavedJobCollectionRunType]
|
|
46
|
+
reschedule_dropped_tasks: NotRequired[bool]
|
|
47
|
+
r"""Reschedule tasks that failed with non-fatal errors"""
|
|
48
|
+
max_task_reschedule: NotRequired[float]
|
|
49
|
+
r"""Maximum number of times a task can be rescheduled"""
|
|
50
|
+
log_level: NotRequired[SavedJobCollectionLogLevel]
|
|
51
|
+
r"""Level at which to set task logging"""
|
|
52
|
+
job_timeout: NotRequired[str]
|
|
53
|
+
r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
|
|
54
|
+
mode: NotRequired[str]
|
|
55
|
+
r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
|
|
56
|
+
time_range_type: NotRequired[str]
|
|
57
|
+
earliest: NotRequired[float]
|
|
58
|
+
r"""Earliest time to collect data for the selected timezone"""
|
|
59
|
+
latest: NotRequired[float]
|
|
60
|
+
r"""Latest time to collect data for the selected timezone"""
|
|
61
|
+
timestamp_timezone: NotRequired[Any]
|
|
62
|
+
time_warning: NotRequired[SavedJobCollectionTimeWarningTypedDict]
|
|
63
|
+
expression: NotRequired[str]
|
|
64
|
+
r"""A filter for tokens in the provided collect path and/or the events being collected"""
|
|
65
|
+
min_task_size: NotRequired[str]
|
|
66
|
+
r"""Limits the bundle size for small tasks. For example,
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
|
|
70
|
+
"""
|
|
71
|
+
max_task_size: NotRequired[str]
|
|
72
|
+
r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
|
|
76
|
+
"""
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class SavedJobCollectionRunSettings(BaseModel):
|
|
80
|
+
type: Annotated[
|
|
81
|
+
Optional[SavedJobCollectionRunType], PlainValidator(validate_open_enum(False))
|
|
82
|
+
] = None
|
|
83
|
+
|
|
84
|
+
reschedule_dropped_tasks: Annotated[
|
|
85
|
+
Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
|
|
86
|
+
] = True
|
|
87
|
+
r"""Reschedule tasks that failed with non-fatal errors"""
|
|
88
|
+
|
|
89
|
+
max_task_reschedule: Annotated[
|
|
90
|
+
Optional[float], pydantic.Field(alias="maxTaskReschedule")
|
|
91
|
+
] = 1
|
|
92
|
+
r"""Maximum number of times a task can be rescheduled"""
|
|
93
|
+
|
|
94
|
+
log_level: Annotated[
|
|
95
|
+
Annotated[
|
|
96
|
+
Optional[SavedJobCollectionLogLevel],
|
|
97
|
+
PlainValidator(validate_open_enum(False)),
|
|
98
|
+
],
|
|
99
|
+
pydantic.Field(alias="logLevel"),
|
|
100
|
+
] = SavedJobCollectionLogLevel.INFO
|
|
101
|
+
r"""Level at which to set task logging"""
|
|
102
|
+
|
|
103
|
+
job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
|
|
104
|
+
r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
|
|
105
|
+
|
|
106
|
+
mode: Optional[str] = "list"
|
|
107
|
+
r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
|
|
108
|
+
|
|
109
|
+
time_range_type: Annotated[Optional[str], pydantic.Field(alias="timeRangeType")] = (
|
|
110
|
+
"relative"
|
|
111
|
+
)
|
|
112
|
+
|
|
113
|
+
earliest: Optional[float] = None
|
|
114
|
+
r"""Earliest time to collect data for the selected timezone"""
|
|
115
|
+
|
|
116
|
+
latest: Optional[float] = None
|
|
117
|
+
r"""Latest time to collect data for the selected timezone"""
|
|
118
|
+
|
|
119
|
+
timestamp_timezone: Annotated[
|
|
120
|
+
Optional[Any], pydantic.Field(alias="timestampTimezone")
|
|
121
|
+
] = None
|
|
122
|
+
|
|
123
|
+
time_warning: Annotated[
|
|
124
|
+
Optional[SavedJobCollectionTimeWarning], pydantic.Field(alias="timeWarning")
|
|
125
|
+
] = None
|
|
126
|
+
|
|
127
|
+
expression: Optional[str] = "true"
|
|
128
|
+
r"""A filter for tokens in the provided collect path and/or the events being collected"""
|
|
129
|
+
|
|
130
|
+
min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
|
|
131
|
+
r"""Limits the bundle size for small tasks. For example,
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
|
|
135
|
+
"""
|
|
136
|
+
|
|
137
|
+
max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
|
|
138
|
+
"10MB"
|
|
139
|
+
)
|
|
140
|
+
r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
|
|
144
|
+
"""
|
|
145
|
+
|
|
146
|
+
@field_serializer("type")
|
|
147
|
+
def serialize_type(self, value):
|
|
148
|
+
if isinstance(value, str):
|
|
149
|
+
try:
|
|
150
|
+
return models.SavedJobCollectionRunType(value)
|
|
151
|
+
except ValueError:
|
|
152
|
+
return value
|
|
153
|
+
return value
|
|
154
|
+
|
|
155
|
+
@field_serializer("log_level")
|
|
156
|
+
def serialize_log_level(self, value):
|
|
157
|
+
if isinstance(value, str):
|
|
158
|
+
try:
|
|
159
|
+
return models.SavedJobCollectionLogLevel(value)
|
|
160
|
+
except ValueError:
|
|
161
|
+
return value
|
|
162
|
+
return value
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
class SavedJobCollectionScheduleTypedDict(TypedDict):
|
|
166
|
+
r"""Configuration for a scheduled job"""
|
|
167
|
+
|
|
168
|
+
enabled: NotRequired[bool]
|
|
169
|
+
r"""Enable to configure scheduling for this Collector"""
|
|
170
|
+
skippable: NotRequired[bool]
|
|
171
|
+
r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
|
|
172
|
+
resume_missed: NotRequired[bool]
|
|
173
|
+
r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
|
|
174
|
+
cron_schedule: NotRequired[str]
|
|
175
|
+
r"""A cron schedule on which to run this job"""
|
|
176
|
+
max_concurrent_runs: NotRequired[float]
|
|
177
|
+
r"""The maximum number of instances of this scheduled job that may be running at any time"""
|
|
178
|
+
run: NotRequired[SavedJobCollectionRunSettingsTypedDict]
|
|
179
|
+
|
|
180
|
+
|
|
181
|
+
class SavedJobCollectionSchedule(BaseModel):
|
|
182
|
+
r"""Configuration for a scheduled job"""
|
|
183
|
+
|
|
184
|
+
enabled: Optional[bool] = None
|
|
185
|
+
r"""Enable to configure scheduling for this Collector"""
|
|
186
|
+
|
|
187
|
+
skippable: Optional[bool] = True
|
|
188
|
+
r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
|
|
189
|
+
|
|
190
|
+
resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
|
|
191
|
+
False
|
|
192
|
+
)
|
|
193
|
+
r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
|
|
194
|
+
|
|
195
|
+
cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
|
|
196
|
+
"*/5 * * * *"
|
|
197
|
+
)
|
|
198
|
+
r"""A cron schedule on which to run this job"""
|
|
199
|
+
|
|
200
|
+
max_concurrent_runs: Annotated[
|
|
201
|
+
Optional[float], pydantic.Field(alias="maxConcurrentRuns")
|
|
202
|
+
] = 1
|
|
203
|
+
r"""The maximum number of instances of this scheduled job that may be running at any time"""
|
|
204
|
+
|
|
205
|
+
run: Optional[SavedJobCollectionRunSettings] = None
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
class SavedJobCollectionCollectorTypedDict(TypedDict):
|
|
209
|
+
type: str
|
|
210
|
+
r"""The type of collector to run"""
|
|
211
|
+
conf: CollectorConfTypedDict
|
|
212
|
+
r"""Collector configuration"""
|
|
213
|
+
destructive: NotRequired[bool]
|
|
214
|
+
r"""Delete any files collected (where applicable)"""
|
|
215
|
+
encoding: NotRequired[str]
|
|
216
|
+
r"""Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters."""
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
class SavedJobCollectionCollector(BaseModel):
|
|
220
|
+
type: str
|
|
221
|
+
r"""The type of collector to run"""
|
|
222
|
+
|
|
223
|
+
conf: CollectorConf
|
|
224
|
+
r"""Collector configuration"""
|
|
225
|
+
|
|
226
|
+
destructive: Optional[bool] = False
|
|
227
|
+
r"""Delete any files collected (where applicable)"""
|
|
228
|
+
|
|
229
|
+
encoding: Optional[str] = None
|
|
230
|
+
r"""Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters."""
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
class SavedJobCollectionInputType(str, Enum, metaclass=utils.OpenEnumMeta):
    r"""Input type discriminator for a saved collection job.

    NOTE(review): ``utils.OpenEnumMeta`` presumably makes this an "open" enum
    (unknown API values tolerated rather than rejected) — confirm against the
    utils module.
    """

    COLLECTION = "collection"
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
class SavedJobCollectionPreprocessTypedDict(TypedDict):
    r"""Plain-dict shape of the optional preprocessing command for collected data.

    Mirrors ``SavedJobCollectionPreprocess``.
    """

    disabled: NotRequired[bool]
    command: NotRequired[str]
    r"""Command to feed the data through (via stdin) and process its output (stdout)"""
    args: NotRequired[List[str]]
    r"""Arguments to be added to the custom command"""
|
|
243
|
+
|
|
244
|
+
|
|
245
|
+
class SavedJobCollectionPreprocess(BaseModel):
    r"""Optional external command that collected data is piped through before ingestion.

    Disabled by default; when enabled, events are fed to ``command`` via stdin
    and its stdout is consumed in their place.
    """

    # Preprocessing is off unless explicitly enabled.
    disabled: Optional[bool] = True

    command: Optional[str] = None
    r"""Command to feed the data through (via stdin) and process its output (stdout)"""

    args: Optional[List[str]] = None
    r"""Arguments to be added to the custom command"""
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
class SavedJobCollectionMetadatumTypedDict(TypedDict):
    r"""Plain-dict shape of a single name/value metadata field added to collected events.

    Mirrors ``SavedJobCollectionMetadatum``.
    """

    name: str
    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
class SavedJobCollectionMetadatum(BaseModel):
    r"""A single metadata field added to events produced by a collection job's input."""

    name: str

    value: str
    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
class SavedJobCollectionInputTypedDict(TypedDict):
    r"""Plain-dict shape of the input-stage settings for a saved collection job.

    Mirrors ``SavedJobCollectionInput``: event breaking, optional preprocessing,
    throttling, added metadata, and routing (Routes vs. explicit
    pipeline/output pair).
    """

    type: NotRequired[SavedJobCollectionInputType]
    breaker_rulesets: NotRequired[List[str]]
    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
    stale_channel_flush_ms: NotRequired[float]
    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
    send_to_routes: NotRequired[bool]
    r"""Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination."""
    preprocess: NotRequired[SavedJobCollectionPreprocessTypedDict]
    throttle_rate_per_sec: NotRequired[str]
    r"""Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling."""
    metadata: NotRequired[List[SavedJobCollectionMetadatumTypedDict]]
    r"""Fields to add to events from this input"""
    pipeline: NotRequired[str]
    r"""Pipeline to process results"""
    output: NotRequired[str]
    r"""Destination to send results to"""
|
|
285
|
+
|
|
286
|
+
|
|
287
|
+
class SavedJobCollectionInput(BaseModel):
    r"""Input-stage settings for a saved collection job.

    Controls how collected data is broken into events, optionally preprocessed
    and throttled, what metadata fields are attached, and whether results go
    through normal Routes (``send_to_routes``) or to an explicit
    pipeline/output pair.
    """

    # Open-enum field: validate_open_enum(False) lets unrecognized string
    # values through validation instead of raising.
    type: Annotated[
        Optional[SavedJobCollectionInputType], PlainValidator(validate_open_enum(False))
    ] = SavedJobCollectionInputType.COLLECTION

    breaker_rulesets: Annotated[
        Optional[List[str]], pydantic.Field(alias="breakerRulesets")
    ] = None
    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""

    stale_channel_flush_ms: Annotated[
        Optional[float], pydantic.Field(alias="staleChannelFlushMs")
    ] = 10000
    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""

    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
        True
    )
    r"""Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination."""

    preprocess: Optional[SavedJobCollectionPreprocess] = None

    throttle_rate_per_sec: Annotated[
        Optional[str], pydantic.Field(alias="throttleRatePerSec")
    ] = "0"
    r"""Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling."""

    metadata: Optional[List[SavedJobCollectionMetadatum]] = None
    r"""Fields to add to events from this input"""

    pipeline: Optional[str] = None
    r"""Pipeline to process results"""

    output: Optional[str] = None
    r"""Destination to send results to"""

    @field_serializer("type")
    def serialize_type(self, value):
        # Round-trip for the open enum: a plain string that matches a known
        # member is promoted back to the enum; anything unrecognized is
        # emitted as-is rather than failing serialization.
        if isinstance(value, str):
            try:
                return models.SavedJobCollectionInputType(value)
            except ValueError:
                return value
        return value
|
|
331
|
+
|
|
332
|
+
|
|
333
|
+
class SavedJobCollectionTypedDict(TypedDict):
    r"""Plain-dict shape of a saved collection job.

    Mirrors ``SavedJobCollection``; only ``type`` and ``collector`` are
    required.
    """

    type: SavedJobCollectionJobType
    collector: SavedJobCollectionCollectorTypedDict
    id: NotRequired[str]
    r"""Unique ID for this Job"""
    description: NotRequired[str]
    ttl: NotRequired[str]
    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
    ignore_group_jobs_limit: NotRequired[bool]
    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
    remove_fields: NotRequired[List[str]]
    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
    resume_on_boot: NotRequired[bool]
    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
    environment: NotRequired[str]
    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
    schedule: NotRequired[SavedJobCollectionScheduleTypedDict]
    r"""Configuration for a scheduled job"""
    streamtags: NotRequired[List[str]]
    r"""Tags for filtering and grouping in @{product}"""
    worker_affinity: NotRequired[bool]
    r"""If enabled, tasks are created and run by the same Worker Node"""
    input: NotRequired[SavedJobCollectionInputTypedDict]
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
class SavedJobCollection(BaseModel):
    r"""A saved collection job: a collector definition plus optional scheduling,
    artifact-retention, and input/routing settings.

    ``type`` and ``collector`` are required; everything else has a server-side
    default or is optional.
    """

    # Open-enum field: validate_open_enum(False) lets unrecognized string
    # values through validation instead of raising.
    type: Annotated[
        SavedJobCollectionJobType, PlainValidator(validate_open_enum(False))
    ]

    collector: SavedJobCollectionCollector

    id: Optional[str] = None
    r"""Unique ID for this Job"""

    description: Optional[str] = None

    ttl: Optional[str] = "4h"
    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""

    ignore_group_jobs_limit: Annotated[
        Optional[bool], pydantic.Field(alias="ignoreGroupJobsLimit")
    ] = False
    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""

    remove_fields: Annotated[
        Optional[List[str]], pydantic.Field(alias="removeFields")
    ] = None
    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""

    resume_on_boot: Annotated[Optional[bool], pydantic.Field(alias="resumeOnBoot")] = (
        False
    )
    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""

    environment: Optional[str] = None
    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""

    schedule: Optional[SavedJobCollectionSchedule] = None
    r"""Configuration for a scheduled job"""

    streamtags: Optional[List[str]] = None
    r"""Tags for filtering and grouping in @{product}"""

    worker_affinity: Annotated[
        Optional[bool], pydantic.Field(alias="workerAffinity")
    ] = False
    r"""If enabled, tasks are created and run by the same Worker Node"""

    input: Optional[SavedJobCollectionInput] = None

    @field_serializer("type")
    def serialize_type(self, value):
        # Round-trip for the open enum: a plain string that matches a known
        # member is promoted back to the enum; anything unrecognized is
        # emitted as-is rather than failing serialization.
        if isinstance(value, str):
            try:
                return models.SavedJobCollectionJobType(value)
            except ValueError:
                return value
        return value
|