cribl-control-plane: cribl_control_plane-0.0.38-py3-none-any.whl → cribl_control_plane-0.4.0a6-py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (241)
  1. cribl_control_plane/_hooks/clientcredentials.py +92 -42
  2. cribl_control_plane/_version.py +4 -4
  3. cribl_control_plane/acl.py +5 -3
  4. cribl_control_plane/auth_sdk.py +6 -3
  5. cribl_control_plane/basesdk.py +11 -1
  6. cribl_control_plane/commits.py +7 -5
  7. cribl_control_plane/destinations.py +6 -4
  8. cribl_control_plane/destinations_pq.py +2 -2
  9. cribl_control_plane/errors/__init__.py +23 -8
  10. cribl_control_plane/errors/apierror.py +2 -0
  11. cribl_control_plane/errors/criblcontrolplaneerror.py +11 -7
  12. cribl_control_plane/errors/error.py +4 -2
  13. cribl_control_plane/errors/healthserverstatus_error.py +41 -0
  14. cribl_control_plane/errors/no_response_error.py +5 -1
  15. cribl_control_plane/errors/responsevalidationerror.py +2 -0
  16. cribl_control_plane/groups_configs.py +8 -3
  17. cribl_control_plane/groups_sdk.py +64 -38
  18. cribl_control_plane/health.py +22 -12
  19. cribl_control_plane/httpclient.py +0 -1
  20. cribl_control_plane/lakedatasets.py +40 -12
  21. cribl_control_plane/models/__init__.py +1180 -54
  22. cribl_control_plane/models/authtoken.py +5 -1
  23. cribl_control_plane/models/{routecloneconf.py → branchinfo.py} +4 -4
  24. cribl_control_plane/models/cacheconnection.py +30 -2
  25. cribl_control_plane/models/cacheconnectionbackfillstatus.py +2 -1
  26. cribl_control_plane/models/cloudprovider.py +2 -1
  27. cribl_control_plane/models/configgroup.py +66 -11
  28. cribl_control_plane/models/configgroupcloud.py +17 -3
  29. cribl_control_plane/models/createconfiggroupbyproductop.py +27 -9
  30. cribl_control_plane/models/createinputhectokenbyidop.py +6 -5
  31. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  32. cribl_control_plane/models/createversionpushop.py +5 -5
  33. cribl_control_plane/models/createversionrevertop.py +2 -2
  34. cribl_control_plane/models/createversionundoop.py +3 -3
  35. cribl_control_plane/models/cribllakedataset.py +22 -2
  36. cribl_control_plane/models/cribllakedatasetupdate.py +95 -0
  37. cribl_control_plane/models/datasetmetadata.py +18 -2
  38. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +18 -2
  39. cribl_control_plane/models/deleteoutputpqbyidop.py +5 -5
  40. cribl_control_plane/models/deletepipelinebyidop.py +2 -2
  41. cribl_control_plane/models/difffiles.py +171 -0
  42. cribl_control_plane/models/distributedsummary.py +6 -0
  43. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +24 -2
  44. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +24 -2
  45. cribl_control_plane/models/getconfiggroupbyproductandidop.py +14 -1
  46. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +18 -2
  47. cribl_control_plane/models/getoutputpqbyidop.py +6 -5
  48. cribl_control_plane/models/getpipelinebyidop.py +2 -2
  49. cribl_control_plane/models/getroutesbyidop.py +2 -2
  50. cribl_control_plane/models/getsummaryop.py +18 -2
  51. cribl_control_plane/models/getversionbranchop.py +6 -5
  52. cribl_control_plane/models/getversioncountop.py +6 -5
  53. cribl_control_plane/models/getversiondiffop.py +6 -5
  54. cribl_control_plane/models/getversionshowop.py +6 -5
  55. cribl_control_plane/models/gitcountresult.py +13 -0
  56. cribl_control_plane/models/gitdiffresult.py +16 -0
  57. cribl_control_plane/models/gitinfo.py +14 -3
  58. cribl_control_plane/models/gitshowresult.py +19 -0
  59. cribl_control_plane/models/groupcreaterequest.py +171 -0
  60. cribl_control_plane/models/hbcriblinfo.py +39 -3
  61. cribl_control_plane/models/healthserverstatus.py +55 -0
  62. cribl_control_plane/models/heartbeatmetadata.py +3 -0
  63. cribl_control_plane/models/input.py +83 -78
  64. cribl_control_plane/models/inputappscope.py +126 -30
  65. cribl_control_plane/models/inputazureblob.py +62 -6
  66. cribl_control_plane/models/inputcloudflarehec.py +513 -0
  67. cribl_control_plane/models/inputcollection.py +47 -4
  68. cribl_control_plane/models/inputconfluentcloud.py +254 -30
  69. cribl_control_plane/models/inputcribl.py +47 -4
  70. cribl_control_plane/models/inputcriblhttp.py +121 -30
  71. cribl_control_plane/models/inputcribllakehttp.py +122 -30
  72. cribl_control_plane/models/inputcriblmetrics.py +48 -4
  73. cribl_control_plane/models/inputcribltcp.py +122 -24
  74. cribl_control_plane/models/inputcrowdstrike.py +92 -10
  75. cribl_control_plane/models/inputdatadogagent.py +98 -24
  76. cribl_control_plane/models/inputdatagen.py +47 -4
  77. cribl_control_plane/models/inputedgeprometheus.py +210 -50
  78. cribl_control_plane/models/inputelastic.py +167 -36
  79. cribl_control_plane/models/inputeventhub.py +209 -6
  80. cribl_control_plane/models/inputexec.py +59 -6
  81. cribl_control_plane/models/inputfile.py +78 -10
  82. cribl_control_plane/models/inputfirehose.py +97 -24
  83. cribl_control_plane/models/inputgooglepubsub.py +67 -6
  84. cribl_control_plane/models/inputgrafana.py +251 -71
  85. cribl_control_plane/models/inputhttp.py +97 -24
  86. cribl_control_plane/models/inputhttpraw.py +97 -24
  87. cribl_control_plane/models/inputjournalfiles.py +48 -4
  88. cribl_control_plane/models/inputkafka.py +248 -26
  89. cribl_control_plane/models/inputkinesis.py +130 -14
  90. cribl_control_plane/models/inputkubeevents.py +47 -4
  91. cribl_control_plane/models/inputkubelogs.py +61 -8
  92. cribl_control_plane/models/inputkubemetrics.py +61 -8
  93. cribl_control_plane/models/inputloki.py +113 -34
  94. cribl_control_plane/models/inputmetrics.py +97 -24
  95. cribl_control_plane/models/inputmodeldriventelemetry.py +107 -26
  96. cribl_control_plane/models/inputmsk.py +141 -30
  97. cribl_control_plane/models/inputnetflow.py +47 -4
  98. cribl_control_plane/models/inputoffice365mgmt.py +112 -14
  99. cribl_control_plane/models/inputoffice365msgtrace.py +114 -16
  100. cribl_control_plane/models/inputoffice365service.py +114 -16
  101. cribl_control_plane/models/inputopentelemetry.py +143 -32
  102. cribl_control_plane/models/inputprometheus.py +193 -44
  103. cribl_control_plane/models/inputprometheusrw.py +114 -27
  104. cribl_control_plane/models/inputrawudp.py +47 -4
  105. cribl_control_plane/models/inputs3.py +78 -8
  106. cribl_control_plane/models/inputs3inventory.py +92 -10
  107. cribl_control_plane/models/inputsecuritylake.py +93 -10
  108. cribl_control_plane/models/inputsnmp.py +68 -6
  109. cribl_control_plane/models/inputsplunk.py +130 -28
  110. cribl_control_plane/models/inputsplunkhec.py +111 -25
  111. cribl_control_plane/models/inputsplunksearch.py +108 -14
  112. cribl_control_plane/models/inputsqs.py +99 -16
  113. cribl_control_plane/models/inputsyslog.py +189 -47
  114. cribl_control_plane/models/inputsystemmetrics.py +202 -32
  115. cribl_control_plane/models/inputsystemstate.py +61 -8
  116. cribl_control_plane/models/inputtcp.py +122 -26
  117. cribl_control_plane/models/inputtcpjson.py +112 -26
  118. cribl_control_plane/models/inputwef.py +121 -15
  119. cribl_control_plane/models/inputwindowsmetrics.py +186 -33
  120. cribl_control_plane/models/inputwineventlogs.py +93 -11
  121. cribl_control_plane/models/inputwiz.py +78 -8
  122. cribl_control_plane/models/inputwizwebhook.py +97 -24
  123. cribl_control_plane/models/inputzscalerhec.py +111 -25
  124. cribl_control_plane/models/jobinfo.py +34 -0
  125. cribl_control_plane/models/jobstatus.py +48 -0
  126. cribl_control_plane/models/lakedatasetmetrics.py +17 -0
  127. cribl_control_plane/models/lakehouseconnectiontype.py +2 -1
  128. cribl_control_plane/models/listconfiggroupbyproductop.py +14 -1
  129. cribl_control_plane/models/logininfo.py +3 -3
  130. cribl_control_plane/models/masterworkerentry.py +17 -2
  131. cribl_control_plane/models/nodeactiveupgradestatus.py +2 -1
  132. cribl_control_plane/models/nodefailedupgradestatus.py +2 -1
  133. cribl_control_plane/models/nodeprovidedinfo.py +11 -1
  134. cribl_control_plane/models/nodeskippedupgradestatus.py +2 -1
  135. cribl_control_plane/models/nodeupgradestate.py +2 -1
  136. cribl_control_plane/models/nodeupgradestatus.py +51 -5
  137. cribl_control_plane/models/outpostnodeinfo.py +16 -0
  138. cribl_control_plane/models/output.py +104 -90
  139. cribl_control_plane/models/outputazureblob.py +171 -18
  140. cribl_control_plane/models/outputazuredataexplorer.py +514 -90
  141. cribl_control_plane/models/outputazureeventhub.py +315 -31
  142. cribl_control_plane/models/outputazurelogs.py +145 -26
  143. cribl_control_plane/models/outputchronicle.py +532 -0
  144. cribl_control_plane/models/outputclickhouse.py +205 -34
  145. cribl_control_plane/models/outputcloudflarer2.py +632 -0
  146. cribl_control_plane/models/outputcloudwatch.py +129 -23
  147. cribl_control_plane/models/outputconfluentcloud.py +384 -57
  148. cribl_control_plane/models/outputcriblhttp.py +199 -32
  149. cribl_control_plane/models/outputcribllake.py +156 -16
  150. cribl_control_plane/models/outputcribltcp.py +194 -29
  151. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +172 -28
  152. cribl_control_plane/models/outputdatabricks.py +501 -0
  153. cribl_control_plane/models/outputdatadog.py +199 -31
  154. cribl_control_plane/models/outputdataset.py +181 -29
  155. cribl_control_plane/models/outputdiskspool.py +17 -2
  156. cribl_control_plane/models/outputdls3.py +233 -24
  157. cribl_control_plane/models/outputdynatracehttp.py +208 -34
  158. cribl_control_plane/models/outputdynatraceotlp.py +210 -36
  159. cribl_control_plane/models/outputelastic.py +199 -30
  160. cribl_control_plane/models/outputelasticcloud.py +171 -26
  161. cribl_control_plane/models/outputexabeam.py +96 -10
  162. cribl_control_plane/models/outputfilesystem.py +139 -14
  163. cribl_control_plane/models/outputgooglechronicle.py +216 -35
  164. cribl_control_plane/models/outputgooglecloudlogging.py +174 -31
  165. cribl_control_plane/models/outputgooglecloudstorage.py +215 -24
  166. cribl_control_plane/models/outputgooglepubsub.py +131 -23
  167. cribl_control_plane/models/outputgrafanacloud.py +376 -74
  168. cribl_control_plane/models/outputgraphite.py +128 -25
  169. cribl_control_plane/models/outputhoneycomb.py +145 -26
  170. cribl_control_plane/models/outputhumiohec.py +162 -28
  171. cribl_control_plane/models/outputinfluxdb.py +165 -28
  172. cribl_control_plane/models/outputkafka.py +375 -52
  173. cribl_control_plane/models/outputkinesis.py +165 -27
  174. cribl_control_plane/models/outputloki.py +164 -34
  175. cribl_control_plane/models/outputmicrosoftfabric.py +540 -0
  176. cribl_control_plane/models/outputminio.py +225 -25
  177. cribl_control_plane/models/outputmsk.py +267 -54
  178. cribl_control_plane/models/outputnewrelic.py +171 -29
  179. cribl_control_plane/models/outputnewrelicevents.py +163 -28
  180. cribl_control_plane/models/outputopentelemetry.py +240 -40
  181. cribl_control_plane/models/outputprometheus.py +145 -26
  182. cribl_control_plane/models/outputring.py +49 -8
  183. cribl_control_plane/models/outputs3.py +233 -26
  184. cribl_control_plane/models/outputsecuritylake.py +179 -18
  185. cribl_control_plane/models/outputsentinel.py +172 -29
  186. cribl_control_plane/models/outputsentineloneaisiem.py +181 -35
  187. cribl_control_plane/models/outputservicenow.py +223 -38
  188. cribl_control_plane/models/outputsignalfx.py +145 -26
  189. cribl_control_plane/models/outputsns.py +143 -25
  190. cribl_control_plane/models/outputsplunk.py +206 -36
  191. cribl_control_plane/models/outputsplunkhec.py +238 -26
  192. cribl_control_plane/models/outputsplunklb.py +253 -43
  193. cribl_control_plane/models/outputsqs.py +163 -33
  194. cribl_control_plane/models/outputstatsd.py +127 -25
  195. cribl_control_plane/models/outputstatsdext.py +128 -25
  196. cribl_control_plane/models/outputsumologic.py +146 -25
  197. cribl_control_plane/models/outputsyslog.py +318 -46
  198. cribl_control_plane/models/outputtcpjson.py +186 -32
  199. cribl_control_plane/models/outputwavefront.py +145 -26
  200. cribl_control_plane/models/outputwebhook.py +211 -33
  201. cribl_control_plane/models/outputxsiam.py +143 -26
  202. cribl_control_plane/models/packinfo.py +8 -5
  203. cribl_control_plane/models/packinstallinfo.py +11 -8
  204. cribl_control_plane/models/productscore.py +2 -1
  205. cribl_control_plane/models/rbacresource.py +2 -1
  206. cribl_control_plane/models/resourcepolicy.py +15 -2
  207. cribl_control_plane/models/routeconf.py +3 -4
  208. cribl_control_plane/models/runnablejob.py +27 -0
  209. cribl_control_plane/models/runnablejobcollection.py +669 -0
  210. cribl_control_plane/models/runnablejobexecutor.py +368 -0
  211. cribl_control_plane/models/runnablejobscheduledsearch.py +286 -0
  212. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +19 -2
  213. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +19 -2
  214. cribl_control_plane/models/updatecribllakedatasetbylakeidandidop.py +9 -5
  215. cribl_control_plane/models/updateinputhectokenbyidandtokenop.py +6 -5
  216. cribl_control_plane/models/updatepacksop.py +25 -0
  217. cribl_control_plane/models/updatepipelinebyidop.py +6 -6
  218. cribl_control_plane/models/updateroutesbyidop.py +2 -2
  219. cribl_control_plane/models/uploadpackresponse.py +13 -0
  220. cribl_control_plane/models/workertypes.py +2 -1
  221. cribl_control_plane/nodes.py +5 -3
  222. cribl_control_plane/packs.py +202 -7
  223. cribl_control_plane/pipelines.py +18 -18
  224. cribl_control_plane/routes_sdk.py +22 -22
  225. cribl_control_plane/sdk.py +19 -6
  226. cribl_control_plane/sources.py +5 -3
  227. cribl_control_plane/tokens.py +23 -15
  228. cribl_control_plane/utils/__init__.py +15 -3
  229. cribl_control_plane/utils/annotations.py +32 -8
  230. cribl_control_plane/utils/eventstreaming.py +10 -0
  231. cribl_control_plane/utils/retries.py +69 -5
  232. cribl_control_plane/utils/unmarshal_json_response.py +15 -1
  233. cribl_control_plane/versions.py +11 -6
  234. {cribl_control_plane-0.0.38.dist-info → cribl_control_plane-0.4.0a6.dist-info}/METADATA +69 -23
  235. cribl_control_plane-0.4.0a6.dist-info/RECORD +336 -0
  236. {cribl_control_plane-0.0.38.dist-info → cribl_control_plane-0.4.0a6.dist-info}/WHEEL +1 -1
  237. cribl_control_plane-0.4.0a6.dist-info/licenses/LICENSE +201 -0
  238. cribl_control_plane/errors/healthstatus_error.py +0 -32
  239. cribl_control_plane/models/appmode.py +0 -13
  240. cribl_control_plane/models/healthstatus.py +0 -33
  241. cribl_control_plane-0.0.38.dist-info/RECORD +0 -315
cribl_control_plane/models/runnablejobcollection.py (new file)
@@ -0,0 +1,669 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane import models, utils
+from cribl_control_plane.types import BaseModel
+from cribl_control_plane.utils import validate_open_enum
+from enum import Enum
+import pydantic
+from pydantic import field_serializer
+from pydantic.functional_validators import PlainValidator
+from typing import Any, List, Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class RunnableJobCollectionJobType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+    EXECUTOR = "executor"
+    SCHEDULED_SEARCH = "scheduledSearch"
+
+
+class RunnableJobCollectionRunType(str, Enum):
+    COLLECTION = "collection"
+
+
+class RunnableJobCollectionScheduleLogLevel(str, Enum):
+    r"""Level at which to set task logging"""
+
+    ERROR = "error"
+    WARN = "warn"
+    INFO = "info"
+    DEBUG = "debug"
+    SILLY = "silly"
+
+
+class RunnableJobCollectionScheduleTimeWarningTypedDict(TypedDict):
+    pass
+
+
+class RunnableJobCollectionScheduleTimeWarning(BaseModel):
+    pass
+
+
+class RunnableJobCollectionRunSettingsTypedDict(TypedDict):
+    type: NotRequired[RunnableJobCollectionRunType]
+    reschedule_dropped_tasks: NotRequired[bool]
+    r"""Reschedule tasks that failed with non-fatal errors"""
+    max_task_reschedule: NotRequired[float]
+    r"""Maximum number of times a task can be rescheduled"""
+    log_level: NotRequired[RunnableJobCollectionScheduleLogLevel]
+    r"""Level at which to set task logging"""
+    job_timeout: NotRequired[str]
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+    mode: NotRequired[str]
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+    time_range_type: NotRequired[str]
+    earliest: NotRequired[float]
+    r"""Earliest time to collect data for the selected timezone"""
+    latest: NotRequired[float]
+    r"""Latest time to collect data for the selected timezone"""
+    timestamp_timezone: NotRequired[Any]
+    time_warning: NotRequired[RunnableJobCollectionScheduleTimeWarningTypedDict]
+    expression: NotRequired[str]
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+    min_task_size: NotRequired[str]
+    r"""Limits the bundle size for small tasks. For example,
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+    max_task_size: NotRequired[str]
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+
+class RunnableJobCollectionRunSettings(BaseModel):
+    type: Optional[RunnableJobCollectionRunType] = None
+
+    reschedule_dropped_tasks: Annotated[
+        Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+    ] = True
+    r"""Reschedule tasks that failed with non-fatal errors"""
+
+    max_task_reschedule: Annotated[
+        Optional[float], pydantic.Field(alias="maxTaskReschedule")
+    ] = 1
+    r"""Maximum number of times a task can be rescheduled"""
+
+    log_level: Annotated[
+        Optional[RunnableJobCollectionScheduleLogLevel],
+        pydantic.Field(alias="logLevel"),
+    ] = RunnableJobCollectionScheduleLogLevel.INFO
+    r"""Level at which to set task logging"""
+
+    job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+    mode: Optional[str] = "list"
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+    time_range_type: Annotated[Optional[str], pydantic.Field(alias="timeRangeType")] = (
+        "relative"
+    )
+
+    earliest: Optional[float] = None
+    r"""Earliest time to collect data for the selected timezone"""
+
+    latest: Optional[float] = None
+    r"""Latest time to collect data for the selected timezone"""
+
+    timestamp_timezone: Annotated[
+        Optional[Any], pydantic.Field(alias="timestampTimezone")
+    ] = None
+
+    time_warning: Annotated[
+        Optional[RunnableJobCollectionScheduleTimeWarning],
+        pydantic.Field(alias="timeWarning"),
+    ] = None
+
+    expression: Optional[str] = "true"
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+
+    min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
+    r"""Limits the bundle size for small tasks. For example,
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+
+    max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
+        "10MB"
+    )
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+
+class RunnableJobCollectionScheduleTypedDict(TypedDict):
+    r"""Configuration for a scheduled job"""
+
+    enabled: NotRequired[bool]
+    r"""Enable to configure scheduling for this Collector"""
+    skippable: NotRequired[bool]
+    r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+    resume_missed: NotRequired[bool]
+    r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+    cron_schedule: NotRequired[str]
+    r"""A cron schedule on which to run this job"""
+    max_concurrent_runs: NotRequired[float]
+    r"""The maximum number of instances of this scheduled job that may be running at any time"""
+    run: NotRequired[RunnableJobCollectionRunSettingsTypedDict]
+
+
+class RunnableJobCollectionSchedule(BaseModel):
+    r"""Configuration for a scheduled job"""
+
+    enabled: Optional[bool] = None
+    r"""Enable to configure scheduling for this Collector"""
+
+    skippable: Optional[bool] = True
+    r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+    resume_missed: Annotated[Optional[bool], pydantic.Field(alias="resumeMissed")] = (
+        False
+    )
+    r"""If Stream Leader (or single instance) restarts, run all missed jobs according to their original schedules"""
+
+    cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
+        "*/5 * * * *"
+    )
+    r"""A cron schedule on which to run this job"""
+
+    max_concurrent_runs: Annotated[
+        Optional[float], pydantic.Field(alias="maxConcurrentRuns")
+    ] = 1
+    r"""The maximum number of instances of this scheduled job that may be running at any time"""
+
+    run: Optional[RunnableJobCollectionRunSettings] = None
+
+
+class CollectorSpecificSettingsTypedDict(TypedDict):
+    pass
+
+
+class CollectorSpecificSettings(BaseModel):
+    pass
+
+
+class CollectorTypedDict(TypedDict):
+    type: str
+    r"""The type of collector to run"""
+    conf: CollectorSpecificSettingsTypedDict
+    destructive: NotRequired[bool]
+    r"""Delete any files collected (where applicable)"""
+    encoding: NotRequired[str]
+    r"""Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters."""
+
+
+class Collector(BaseModel):
+    type: str
+    r"""The type of collector to run"""
+
+    conf: CollectorSpecificSettings
+
+    destructive: Optional[bool] = False
+    r"""Delete any files collected (where applicable)"""
+
+    encoding: Optional[str] = None
+    r"""Character encoding to use when parsing ingested data. When not set, @{product} will default to UTF-8 but may incorrectly interpret multi-byte characters."""
+
+
+class InputType(str, Enum, metaclass=utils.OpenEnumMeta):
+    COLLECTION = "collection"
+
+
+class RunnableJobCollectionPreprocessTypedDict(TypedDict):
+    disabled: NotRequired[bool]
+    command: NotRequired[str]
+    r"""Command to feed the data through (via stdin) and process its output (stdout)"""
+    args: NotRequired[List[str]]
+    r"""Arguments to be added to the custom command"""
+
+
+class RunnableJobCollectionPreprocess(BaseModel):
+    disabled: Optional[bool] = True
+
+    command: Optional[str] = None
+    r"""Command to feed the data through (via stdin) and process its output (stdout)"""
+
+    args: Optional[List[str]] = None
+    r"""Arguments to be added to the custom command"""
+
+
+class RunnableJobCollectionMetadatumTypedDict(TypedDict):
+    name: str
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class RunnableJobCollectionMetadatum(BaseModel):
+    name: str
+
+    value: str
+    r"""JavaScript expression to compute field's value, enclosed in quotes or backticks. (Can evaluate to a constant.)"""
+
+
+class RunnableJobCollectionInputTypedDict(TypedDict):
+    type: NotRequired[InputType]
+    breaker_rulesets: NotRequired[List[str]]
+    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+    stale_channel_flush_ms: NotRequired[float]
+    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+    send_to_routes: NotRequired[bool]
+    r"""Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination."""
+    preprocess: NotRequired[RunnableJobCollectionPreprocessTypedDict]
+    throttle_rate_per_sec: NotRequired[str]
+    r"""Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling."""
+    metadata: NotRequired[List[RunnableJobCollectionMetadatumTypedDict]]
+    r"""Fields to add to events from this input"""
+    pipeline: NotRequired[str]
+    r"""Pipeline to process results"""
+    output: NotRequired[str]
+    r"""Destination to send results to"""
+
+
+class RunnableJobCollectionInput(BaseModel):
+    type: Annotated[Optional[InputType], PlainValidator(validate_open_enum(False))] = (
+        InputType.COLLECTION
+    )
+
+    breaker_rulesets: Annotated[
+        Optional[List[str]], pydantic.Field(alias="breakerRulesets")
+    ] = None
+    r"""A list of event-breaking rulesets that will be applied, in order, to the input data stream"""
+
+    stale_channel_flush_ms: Annotated[
+        Optional[float], pydantic.Field(alias="staleChannelFlushMs")
+    ] = 10000
+    r"""How long (in milliseconds) the Event Breaker will wait for new data to be sent to a specific channel before flushing the data stream out, as is, to the Pipelines"""
+
+    send_to_routes: Annotated[Optional[bool], pydantic.Field(alias="sendToRoutes")] = (
+        True
+    )
+    r"""Send events to normal routing and event processing. Disable to select a specific Pipeline/Destination combination."""
+
+    preprocess: Optional[RunnableJobCollectionPreprocess] = None
+
+    throttle_rate_per_sec: Annotated[
+        Optional[str], pydantic.Field(alias="throttleRatePerSec")
+    ] = "0"
+    r"""Rate (in bytes per second) to throttle while writing to an output. Accepts values with multiple-byte units, such as KB, MB, and GB. (Example: 42 MB) Default value of 0 specifies no throttling."""
+
+    metadata: Optional[List[RunnableJobCollectionMetadatum]] = None
+    r"""Fields to add to events from this input"""
+
+    pipeline: Optional[str] = None
+    r"""Pipeline to process results"""
+
+    output: Optional[str] = None
+    r"""Destination to send results to"""
+
+    @field_serializer("type")
+    def serialize_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.InputType(value)
+            except ValueError:
+                return value
+        return value
+
+
+class RunnableJobCollectionLogLevel(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Level at which to set task logging"""
+
+    ERROR = "error"
+    WARN = "warn"
+    INFO = "info"
+    DEBUG = "debug"
+    SILLY = "silly"
+
+
+class RunnableJobCollectionMode(str, Enum, metaclass=utils.OpenEnumMeta):
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+    LIST = "list"
+    PREVIEW = "preview"
+    RUN = "run"
+
+
+class TimeRange(str, Enum, metaclass=utils.OpenEnumMeta):
+    ABSOLUTE = "absolute"
+    RELATIVE = "relative"
+
+
+class RunnableJobCollectionTimeWarningTypedDict(TypedDict):
+    pass
+
+
+class RunnableJobCollectionTimeWarning(BaseModel):
+    pass
+
+
+class WhereToCapture(int, Enum, metaclass=utils.OpenEnumMeta):
+    # 1. Before pre-processing Pipeline
+    ZERO = 0
+    # 2. Before the Routes
+    ONE = 1
+    # 3. Before post-processing Pipeline
+    TWO = 2
+    # 4. Before the Destination
+    THREE = 3
+
+
+class CaptureSettingsTypedDict(TypedDict):
+    duration: NotRequired[float]
+    r"""Amount of time to keep capture open, in seconds"""
+    max_events: NotRequired[float]
+    r"""Maximum number of events to capture"""
+    level: NotRequired[WhereToCapture]
+
+
+class CaptureSettings(BaseModel):
+    duration: Optional[float] = 60
+    r"""Amount of time to keep capture open, in seconds"""
+
+    max_events: Annotated[Optional[float], pydantic.Field(alias="maxEvents")] = 100
+    r"""Maximum number of events to capture"""
+
+    level: Annotated[
+        Optional[WhereToCapture], PlainValidator(validate_open_enum(True))
+    ] = WhereToCapture.ZERO
+
+    @field_serializer("level")
+    def serialize_level(self, value):
+        if isinstance(value, str):
+            try:
+                return models.WhereToCapture(value)
+            except ValueError:
+                return value
+        return value
+
+
+class RunnableJobCollectionRunTypedDict(TypedDict):
+    reschedule_dropped_tasks: NotRequired[bool]
+    r"""Reschedule tasks that failed with non-fatal errors"""
+    max_task_reschedule: NotRequired[float]
+    r"""Maximum number of times a task can be rescheduled"""
+    log_level: NotRequired[RunnableJobCollectionLogLevel]
+    r"""Level at which to set task logging"""
+    job_timeout: NotRequired[str]
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+    mode: NotRequired[RunnableJobCollectionMode]
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+    time_range_type: NotRequired[TimeRange]
+    earliest: NotRequired[float]
+    r"""Earliest time to collect data for the selected timezone"""
+    latest: NotRequired[float]
+    r"""Latest time to collect data for the selected timezone"""
+    timestamp_timezone: NotRequired[str]
+    r"""Timezone to use for Earliest and Latest times"""
+    time_warning: NotRequired[RunnableJobCollectionTimeWarningTypedDict]
+    expression: NotRequired[str]
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+    min_task_size: NotRequired[str]
+    r"""Limits the bundle size for small tasks. For example,
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+    max_task_size: NotRequired[str]
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+    discover_to_routes: NotRequired[bool]
+    r"""Send discover results to Routes"""
+    capture: NotRequired[CaptureSettingsTypedDict]
+
+
+class RunnableJobCollectionRun(BaseModel):
+    reschedule_dropped_tasks: Annotated[
+        Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+    ] = True
+    r"""Reschedule tasks that failed with non-fatal errors"""
+
+    max_task_reschedule: Annotated[
+        Optional[float], pydantic.Field(alias="maxTaskReschedule")
+    ] = 1
+    r"""Maximum number of times a task can be rescheduled"""
+
+    log_level: Annotated[
+        Annotated[
+            Optional[RunnableJobCollectionLogLevel],
+            PlainValidator(validate_open_enum(False)),
+        ],
+        pydantic.Field(alias="logLevel"),
+    ] = RunnableJobCollectionLogLevel.INFO
+    r"""Level at which to set task logging"""
+
+    job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+    r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+    mode: Annotated[
+        Optional[RunnableJobCollectionMode], PlainValidator(validate_open_enum(False))
+    ] = RunnableJobCollectionMode.LIST
+    r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+    time_range_type: Annotated[
+        Annotated[Optional[TimeRange], PlainValidator(validate_open_enum(False))],
+        pydantic.Field(alias="timeRangeType"),
+    ] = TimeRange.RELATIVE
+
+    earliest: Optional[float] = None
+    r"""Earliest time to collect data for the selected timezone"""
+
+    latest: Optional[float] = None
+    r"""Latest time to collect data for the selected timezone"""
+
+    timestamp_timezone: Annotated[
+        Optional[str], pydantic.Field(alias="timestampTimezone")
+    ] = "UTC"
+    r"""Timezone to use for Earliest and Latest times"""
+
+    time_warning: Annotated[
+        Optional[RunnableJobCollectionTimeWarning], pydantic.Field(alias="timeWarning")
+    ] = None
+
+    expression: Optional[str] = "true"
+    r"""A filter for tokens in the provided collect path and/or the events being collected"""
+
+    min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
+    r"""Limits the bundle size for small tasks. For example,
+
+    if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+    """
+
+    max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
+        "10MB"
+    )
+    r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+    you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+    """
+
+    discover_to_routes: Annotated[
+        Optional[bool], pydantic.Field(alias="discoverToRoutes")
+    ] = False
+    r"""Send discover results to Routes"""
+
+    capture: Optional[CaptureSettings] = None
+
+    @field_serializer("log_level")
+    def serialize_log_level(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobCollectionLogLevel(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("mode")
+    def serialize_mode(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobCollectionMode(value)
+            except ValueError:
+                return value
+        return value
+
+    @field_serializer("time_range_type")
+    def serialize_time_range_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.TimeRange(value)
+            except ValueError:
+                return value
+        return value
+
+
+class RunnableJobCollectionTypedDict(TypedDict):
+    collector: CollectorTypedDict
+    run: RunnableJobCollectionRunTypedDict
+    id: NotRequired[str]
+    r"""Unique ID for this Job"""
+    description: NotRequired[str]
+    type: NotRequired[RunnableJobCollectionJobType]
+    ttl: NotRequired[str]
+    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+    ignore_group_jobs_limit: NotRequired[bool]
+    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+    remove_fields: NotRequired[List[str]]
+    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+    resume_on_boot: NotRequired[bool]
+    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+    environment: NotRequired[str]
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+    schedule: NotRequired[RunnableJobCollectionScheduleTypedDict]
+    r"""Configuration for a scheduled job"""
+    streamtags: NotRequired[List[str]]
+    r"""Tags for filtering and grouping in @{product}"""
+    worker_affinity: NotRequired[bool]
+    r"""If enabled, tasks are created and run by the same Worker Node"""
+    input: NotRequired[RunnableJobCollectionInputTypedDict]
+
+
+class RunnableJobCollection(BaseModel):
+    collector: Collector
+
+    run: RunnableJobCollectionRun
+
+    id: Optional[str] = None
+    r"""Unique ID for this Job"""
+
+    description: Optional[str] = None
+
+    type: Annotated[
+        Optional[RunnableJobCollectionJobType],
+        PlainValidator(validate_open_enum(False)),
+    ] = None
+
+    ttl: Optional[str] = "4h"
+    r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+
+    ignore_group_jobs_limit: Annotated[
+        Optional[bool], pydantic.Field(alias="ignoreGroupJobsLimit")
+    ] = False
+    r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+
+    remove_fields: Annotated[
+        Optional[List[str]], pydantic.Field(alias="removeFields")
+    ] = None
+    r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+
+    resume_on_boot: Annotated[Optional[bool], pydantic.Field(alias="resumeOnBoot")] = (
+        False
+    )
+    r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+
+    environment: Optional[str] = None
+    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+    schedule: Optional[RunnableJobCollectionSchedule] = None
+    r"""Configuration for a scheduled job"""
+
+    streamtags: Optional[List[str]] = None
+    r"""Tags for filtering and grouping in @{product}"""
+
+    worker_affinity: Annotated[
+        Optional[bool], pydantic.Field(alias="workerAffinity")
+    ] = False
+    r"""If enabled, tasks are created and run by the same Worker Node"""
+
+    input: Optional[RunnableJobCollectionInput] = None
+
+    @field_serializer("type")
+    def serialize_type(self, value):
+        if isinstance(value, str):
+            try:
+                return models.RunnableJobCollectionJobType(value)
+            except ValueError:
+                return value
+        return value
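
For orientation, here is a minimal sketch of how the new `RunnableJobCollection` model added in this release might be constructed and serialized. It assumes the 0.4.0a6 wheel is installed, that these classes are re-exported from `cribl_control_plane.models` (the updated `models/__init__.py` suggests so), and that the generated `BaseModel` allows population by field name, as Speakeasy SDKs typically do. The collector type, job ID, and cron expression are illustrative values, not package defaults.

```python
# Sketch only: exercises the new RunnableJobCollection model from this diff.
# Assumes cribl-control-plane==0.4.0a6 is installed; "s3", the job ID, and the
# cron expression below are hypothetical example values.
from cribl_control_plane.models import (
    Collector,
    CollectorSpecificSettings,
    RunnableJobCollection,
    RunnableJobCollectionRun,
    RunnableJobCollectionSchedule,
)

job = RunnableJobCollection(
    id="nightly-s3-collect",  # hypothetical job ID
    collector=Collector(type="s3", conf=CollectorSpecificSettings()),
    run=RunnableJobCollectionRun(),  # defaults apply: mode=LIST, jobTimeout="0"
    schedule=RunnableJobCollectionSchedule(
        enabled=True,
        cron_schedule="0 2 * * *",  # overrides the "*/5 * * * *" default
    ),
)

# Dump using the camelCase wire aliases declared via pydantic.Field(alias=...),
# dropping unset optionals. The open-enum fields (log_level, mode,
# time_range_type) serialize known values back to their enum members and pass
# unrecognized strings through unchanged, per the field_serializer hooks above.
print(job.model_dump(by_alias=True, exclude_none=True))
```

The open-enum pattern (`utils.OpenEnumMeta` plus `PlainValidator(validate_open_enum(...))`) appears throughout the new model files in this release: it lets the SDK accept enum values added server-side without failing validation, which explains the paired `@field_serializer` methods that round-trip unknown strings.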