cribl-control-plane 0.0.38a1__py3-none-any.whl → 0.0.40a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (79)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/commits.py +52 -42
  3. cribl_control_plane/commits_files.py +12 -12
  4. cribl_control_plane/destinations.py +3 -3
  5. cribl_control_plane/destinations_pq.py +2 -2
  6. cribl_control_plane/groups_sdk.py +8 -8
  7. cribl_control_plane/models/__init__.py +315 -368
  8. cribl_control_plane/models/branchinfo.py +13 -0
  9. cribl_control_plane/models/configgroup.py +5 -3
  10. cribl_control_plane/models/createversioncommitop.py +26 -1
  11. cribl_control_plane/models/createversionrevertop.py +4 -2
  12. cribl_control_plane/models/createversionundoop.py +4 -2
  13. cribl_control_plane/models/deleteoutputpqbyidop.py +5 -5
  14. cribl_control_plane/models/difffiles.py +171 -0
  15. cribl_control_plane/models/getoutputpqbyidop.py +6 -5
  16. cribl_control_plane/models/getversionbranchop.py +6 -5
  17. cribl_control_plane/models/getversioncountop.py +9 -7
  18. cribl_control_plane/models/getversiondiffop.py +9 -7
  19. cribl_control_plane/models/getversionfilesop.py +3 -2
  20. cribl_control_plane/models/getversionop.py +4 -2
  21. cribl_control_plane/models/getversionshowop.py +3 -2
  22. cribl_control_plane/models/getversionstatusop.py +4 -2
  23. cribl_control_plane/models/gitcountresult.py +13 -0
  24. cribl_control_plane/models/gitdiffresult.py +16 -0
  25. cribl_control_plane/models/inputcribllakehttp.py +5 -4
  26. cribl_control_plane/models/inputcrowdstrike.py +2 -2
  27. cribl_control_plane/models/inputs3.py +2 -2
  28. cribl_control_plane/models/inputs3inventory.py +2 -2
  29. cribl_control_plane/models/inputsecuritylake.py +2 -2
  30. cribl_control_plane/models/jobinfo.py +25 -0
  31. cribl_control_plane/models/jobstatus.py +17 -0
  32. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +1 -1
  33. cribl_control_plane/models/outputgooglepubsub.py +7 -28
  34. cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
  35. cribl_control_plane/models/packinfo.py +5 -5
  36. cribl_control_plane/models/packinstallinfo.py +5 -5
  37. cribl_control_plane/models/packrequestbody_union.py +140 -0
  38. cribl_control_plane/models/packupgraderequest.py +26 -0
  39. cribl_control_plane/models/runnablejob.py +27 -0
  40. cribl_control_plane/models/runnablejobcollection.py +589 -0
  41. cribl_control_plane/models/runnablejobexecutor.py +336 -0
  42. cribl_control_plane/models/runnablejobscheduledsearch.py +267 -0
  43. cribl_control_plane/models/updatepacksbyidop.py +9 -28
  44. cribl_control_plane/packs.py +62 -116
  45. cribl_control_plane/{destinations_samples.py → samples.py} +1 -1
  46. cribl_control_plane/sdk.py +0 -3
  47. cribl_control_plane/statuses.py +6 -6
  48. {cribl_control_plane-0.0.38a1.dist-info → cribl_control_plane-0.0.40a1.dist-info}/METADATA +6 -22
  49. {cribl_control_plane-0.0.38a1.dist-info → cribl_control_plane-0.0.40a1.dist-info}/RECORD +50 -67
  50. cribl_control_plane/cribl.py +0 -513
  51. cribl_control_plane/models/authconfig.py +0 -43
  52. cribl_control_plane/models/commonservicelimitconfigs.py +0 -14
  53. cribl_control_plane/models/edgeheartbeatmetricsmode.py +0 -11
  54. cribl_control_plane/models/getsystemsettingsauthop.py +0 -24
  55. cribl_control_plane/models/getsystemsettingsconfop.py +0 -24
  56. cribl_control_plane/models/getsystemsettingsgitsettingsop.py +0 -24
  57. cribl_control_plane/models/gitopstype.py +0 -10
  58. cribl_control_plane/models/gitsettings.py +0 -70
  59. cribl_control_plane/models/jobsettings.py +0 -83
  60. cribl_control_plane/models/limits.py +0 -127
  61. cribl_control_plane/models/packrequestbody.py +0 -75
  62. cribl_control_plane/models/rediscachelimits.py +0 -38
  63. cribl_control_plane/models/redisconnectionlimits.py +0 -20
  64. cribl_control_plane/models/redislimits.py +0 -14
  65. cribl_control_plane/models/searchsettings.py +0 -71
  66. cribl_control_plane/models/serviceslimits.py +0 -23
  67. cribl_control_plane/models/systemsettings.py +0 -358
  68. cribl_control_plane/models/systemsettingsconf.py +0 -311
  69. cribl_control_plane/models/updatesystemsettingsauthop.py +0 -24
  70. cribl_control_plane/models/updatesystemsettingsconfop.py +0 -24
  71. cribl_control_plane/models/updatesystemsettingsgitsettingsop.py +0 -24
  72. cribl_control_plane/models/upgradegroupsettings.py +0 -24
  73. cribl_control_plane/models/upgradepackageurls.py +0 -20
  74. cribl_control_plane/models/upgradesettings.py +0 -36
  75. cribl_control_plane/settings.py +0 -23
  76. cribl_control_plane/settings_auth.py +0 -339
  77. cribl_control_plane/settings_git.py +0 -339
  78. cribl_control_plane/system_sdk.py +0 -17
  79. {cribl_control_plane-0.0.38a1.dist-info → cribl_control_plane-0.0.40a1.dist-info}/WHEEL +0 -0
cribl_control_plane/models/runnablejobexecutor.py
@@ -0,0 +1,336 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane.types import BaseModel
+ from enum import Enum
+ import pydantic
+ from typing import Any, List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class RunnableJobExecutorJobType(str, Enum):
+     COLLECTION = "collection"
+     EXECUTOR = "executor"
+     SCHEDULED_SEARCH = "scheduledSearch"
+
+
+ class RunnableJobExecutorType(str, Enum):
+     COLLECTION = "collection"
+
+
+ class RunnableJobExecutorScheduleLogLevel(str, Enum):
+     r"""Level at which to set task logging"""
+
+     ERROR = "error"
+     WARN = "warn"
+     INFO = "info"
+     DEBUG = "debug"
+     SILLY = "silly"
+
+
+ class RunnableJobExecutorTimeWarningTypedDict(TypedDict):
+     pass
+
+
+ class RunnableJobExecutorTimeWarning(BaseModel):
+     pass
+
+
+ class RunnableJobExecutorRunSettingsTypedDict(TypedDict):
+     type: NotRequired[RunnableJobExecutorType]
+     reschedule_dropped_tasks: NotRequired[bool]
+     r"""Reschedule tasks that failed with non-fatal errors"""
+     max_task_reschedule: NotRequired[float]
+     r"""Maximum number of times a task can be rescheduled"""
+     log_level: NotRequired[RunnableJobExecutorScheduleLogLevel]
+     r"""Level at which to set task logging"""
+     job_timeout: NotRequired[str]
+     r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+     mode: NotRequired[str]
+     r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+     time_range_type: NotRequired[str]
+     earliest: NotRequired[float]
+     r"""Earliest time to collect data for the selected timezone"""
+     latest: NotRequired[float]
+     r"""Latest time to collect data for the selected timezone"""
+     timestamp_timezone: NotRequired[Any]
+     time_warning: NotRequired[RunnableJobExecutorTimeWarningTypedDict]
+     expression: NotRequired[str]
+     r"""A filter for tokens in the provided collect path and/or the events being collected"""
+     min_task_size: NotRequired[str]
+     r"""Limits the bundle size for small tasks. For example,
+
+
+
+
+
+
+
+
+
+
+     if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+     """
+     max_task_size: NotRequired[str]
+     r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+
+
+
+
+
+
+
+
+     you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+     """
+
+
+ class RunnableJobExecutorRunSettings(BaseModel):
+     type: Optional[RunnableJobExecutorType] = None
+
+     reschedule_dropped_tasks: Annotated[
+         Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+     ] = True
+     r"""Reschedule tasks that failed with non-fatal errors"""
+
+     max_task_reschedule: Annotated[
+         Optional[float], pydantic.Field(alias="maxTaskReschedule")
+     ] = 1
+     r"""Maximum number of times a task can be rescheduled"""
+
+     log_level: Annotated[
+         Optional[RunnableJobExecutorScheduleLogLevel], pydantic.Field(alias="logLevel")
+     ] = RunnableJobExecutorScheduleLogLevel.INFO
+     r"""Level at which to set task logging"""
+
+     job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+     r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+     mode: Optional[str] = "list"
+     r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+     time_range_type: Annotated[Optional[str], pydantic.Field(alias="timeRangeType")] = (
+         "relative"
+     )
+
+     earliest: Optional[float] = None
+     r"""Earliest time to collect data for the selected timezone"""
+
+     latest: Optional[float] = None
+     r"""Latest time to collect data for the selected timezone"""
+
+     timestamp_timezone: Annotated[
+         Optional[Any], pydantic.Field(alias="timestampTimezone")
+     ] = None
+
+     time_warning: Annotated[
+         Optional[RunnableJobExecutorTimeWarning], pydantic.Field(alias="timeWarning")
+     ] = None
+
+     expression: Optional[str] = "true"
+     r"""A filter for tokens in the provided collect path and/or the events being collected"""
+
+     min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
+     r"""Limits the bundle size for small tasks. For example,
+
+
+
+
+
+
+
+
+
+
+     if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+     """
+
+     max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
+         "10MB"
+     )
+     r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+
+
+
+
+
+
+
+
+     you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+     """
+
+
+ class RunnableJobExecutorScheduleTypedDict(TypedDict):
+     r"""Configuration for a scheduled job"""
+
+     enabled: NotRequired[bool]
+     r"""Enable to configure scheduling for this Collector"""
+     cron_schedule: NotRequired[str]
+     r"""A cron schedule on which to run this job"""
+     max_concurrent_runs: NotRequired[float]
+     r"""The maximum number of instances of this scheduled job that may be running at any time"""
+     skippable: NotRequired[bool]
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+     resume_missed: NotRequired[Any]
+     run: NotRequired[RunnableJobExecutorRunSettingsTypedDict]
+
+
+ class RunnableJobExecutorSchedule(BaseModel):
+     r"""Configuration for a scheduled job"""
+
+     enabled: Optional[bool] = None
+     r"""Enable to configure scheduling for this Collector"""
+
+     cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
+         "*/5 * * * *"
+     )
+     r"""A cron schedule on which to run this job"""
+
+     max_concurrent_runs: Annotated[
+         Optional[float], pydantic.Field(alias="maxConcurrentRuns")
+     ] = 1
+     r"""The maximum number of instances of this scheduled job that may be running at any time"""
+
+     skippable: Optional[bool] = True
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+     resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
+
+     run: Optional[RunnableJobExecutorRunSettings] = None
+
+
+ class ExecutorSpecificSettingsTypedDict(TypedDict):
+     pass
+
+
+ class ExecutorSpecificSettings(BaseModel):
+     pass
+
+
+ class ExecutorTypedDict(TypedDict):
+     type: str
+     r"""The type of executor to run"""
+     store_task_results: NotRequired[bool]
+     r"""Determines whether or not to write task results to disk"""
+     conf: NotRequired[ExecutorSpecificSettingsTypedDict]
+
+
+ class Executor(BaseModel):
+     type: str
+     r"""The type of executor to run"""
+
+     store_task_results: Annotated[
+         Optional[bool], pydantic.Field(alias="storeTaskResults")
+     ] = True
+     r"""Determines whether or not to write task results to disk"""
+
+     conf: Optional[ExecutorSpecificSettings] = None
+
+
+ class RunnableJobExecutorLogLevel(str, Enum):
+     r"""Level at which to set task logging"""
+
+     ERROR = "error"
+     WARN = "warn"
+     INFO = "info"
+     DEBUG = "debug"
+     SILLY = "silly"
+
+
+ class RunnableJobExecutorRunTypedDict(TypedDict):
+     reschedule_dropped_tasks: NotRequired[bool]
+     r"""Reschedule tasks that failed with non-fatal errors"""
+     max_task_reschedule: NotRequired[float]
+     r"""Maximum number of times a task can be rescheduled"""
+     log_level: NotRequired[RunnableJobExecutorLogLevel]
+     r"""Level at which to set task logging"""
+     job_timeout: NotRequired[str]
+     r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+
+ class RunnableJobExecutorRun(BaseModel):
+     reschedule_dropped_tasks: Annotated[
+         Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+     ] = True
+     r"""Reschedule tasks that failed with non-fatal errors"""
+
+     max_task_reschedule: Annotated[
+         Optional[float], pydantic.Field(alias="maxTaskReschedule")
+     ] = 1
+     r"""Maximum number of times a task can be rescheduled"""
+
+     log_level: Annotated[
+         Optional[RunnableJobExecutorLogLevel], pydantic.Field(alias="logLevel")
+     ] = RunnableJobExecutorLogLevel.INFO
+     r"""Level at which to set task logging"""
+
+     job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+     r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+
+ class RunnableJobExecutorTypedDict(TypedDict):
+     executor: ExecutorTypedDict
+     run: RunnableJobExecutorRunTypedDict
+     id: NotRequired[str]
+     r"""Unique ID for this Job"""
+     description: NotRequired[str]
+     type: NotRequired[RunnableJobExecutorJobType]
+     ttl: NotRequired[str]
+     r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+     ignore_group_jobs_limit: NotRequired[bool]
+     r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+     remove_fields: NotRequired[List[str]]
+     r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+     resume_on_boot: NotRequired[bool]
+     r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     schedule: NotRequired[RunnableJobExecutorScheduleTypedDict]
+     r"""Configuration for a scheduled job"""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+
+
+ class RunnableJobExecutor(BaseModel):
+     executor: Executor
+
+     run: RunnableJobExecutorRun
+
+     id: Optional[str] = None
+     r"""Unique ID for this Job"""
+
+     description: Optional[str] = None
+
+     type: Optional[RunnableJobExecutorJobType] = None
+
+     ttl: Optional[str] = "4h"
+     r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+
+     ignore_group_jobs_limit: Annotated[
+         Optional[bool], pydantic.Field(alias="ignoreGroupJobsLimit")
+     ] = False
+     r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+
+     remove_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="removeFields")
+     ] = None
+     r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+
+     resume_on_boot: Annotated[Optional[bool], pydantic.Field(alias="resumeOnBoot")] = (
+         False
+     )
+     r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     schedule: Optional[RunnableJobExecutorSchedule] = None
+     r"""Configuration for a scheduled job"""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
cribl_control_plane/models/runnablejobscheduledsearch.py
@@ -0,0 +1,267 @@
+ """Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+ from __future__ import annotations
+ from cribl_control_plane.types import BaseModel
+ from enum import Enum
+ import pydantic
+ from typing import Any, List, Optional
+ from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+ class RunnableJobScheduledSearchJobType(str, Enum):
+     COLLECTION = "collection"
+     EXECUTOR = "executor"
+     SCHEDULED_SEARCH = "scheduledSearch"
+
+
+ class RunnableJobScheduledSearchType(str, Enum):
+     COLLECTION = "collection"
+
+
+ class RunnableJobScheduledSearchLogLevel(str, Enum):
+     r"""Level at which to set task logging"""
+
+     ERROR = "error"
+     WARN = "warn"
+     INFO = "info"
+     DEBUG = "debug"
+     SILLY = "silly"
+
+
+ class RunnableJobScheduledSearchTimeWarningTypedDict(TypedDict):
+     pass
+
+
+ class RunnableJobScheduledSearchTimeWarning(BaseModel):
+     pass
+
+
+ class RunnableJobScheduledSearchRunSettingsTypedDict(TypedDict):
+     type: NotRequired[RunnableJobScheduledSearchType]
+     reschedule_dropped_tasks: NotRequired[bool]
+     r"""Reschedule tasks that failed with non-fatal errors"""
+     max_task_reschedule: NotRequired[float]
+     r"""Maximum number of times a task can be rescheduled"""
+     log_level: NotRequired[RunnableJobScheduledSearchLogLevel]
+     r"""Level at which to set task logging"""
+     job_timeout: NotRequired[str]
+     r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+     mode: NotRequired[str]
+     r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+     time_range_type: NotRequired[str]
+     earliest: NotRequired[float]
+     r"""Earliest time to collect data for the selected timezone"""
+     latest: NotRequired[float]
+     r"""Latest time to collect data for the selected timezone"""
+     timestamp_timezone: NotRequired[Any]
+     time_warning: NotRequired[RunnableJobScheduledSearchTimeWarningTypedDict]
+     expression: NotRequired[str]
+     r"""A filter for tokens in the provided collect path and/or the events being collected"""
+     min_task_size: NotRequired[str]
+     r"""Limits the bundle size for small tasks. For example,
+
+
+
+
+
+
+
+
+
+
+     if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+     """
+     max_task_size: NotRequired[str]
+     r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+
+
+
+
+
+
+
+
+     you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+     """
+
+
+ class RunnableJobScheduledSearchRunSettings(BaseModel):
+     type: Optional[RunnableJobScheduledSearchType] = None
+
+     reschedule_dropped_tasks: Annotated[
+         Optional[bool], pydantic.Field(alias="rescheduleDroppedTasks")
+     ] = True
+     r"""Reschedule tasks that failed with non-fatal errors"""
+
+     max_task_reschedule: Annotated[
+         Optional[float], pydantic.Field(alias="maxTaskReschedule")
+     ] = 1
+     r"""Maximum number of times a task can be rescheduled"""
+
+     log_level: Annotated[
+         Optional[RunnableJobScheduledSearchLogLevel], pydantic.Field(alias="logLevel")
+     ] = RunnableJobScheduledSearchLogLevel.INFO
+     r"""Level at which to set task logging"""
+
+     job_timeout: Annotated[Optional[str], pydantic.Field(alias="jobTimeout")] = "0"
+     r"""Maximum time the job is allowed to run. Time unit defaults to seconds if not specified (examples: 30, 45s, 15m). Enter 0 for unlimited time."""
+
+     mode: Optional[str] = "list"
+     r"""Job run mode. Preview will either return up to N matching results, or will run until capture time T is reached. Discovery will gather the list of files to turn into streaming tasks, without running the data collection job. Full Run will run the collection job."""
+
+     time_range_type: Annotated[Optional[str], pydantic.Field(alias="timeRangeType")] = (
+         "relative"
+     )
+
+     earliest: Optional[float] = None
+     r"""Earliest time to collect data for the selected timezone"""
+
+     latest: Optional[float] = None
+     r"""Latest time to collect data for the selected timezone"""
+
+     timestamp_timezone: Annotated[
+         Optional[Any], pydantic.Field(alias="timestampTimezone")
+     ] = None
+
+     time_warning: Annotated[
+         Optional[RunnableJobScheduledSearchTimeWarning],
+         pydantic.Field(alias="timeWarning"),
+     ] = None
+
+     expression: Optional[str] = "true"
+     r"""A filter for tokens in the provided collect path and/or the events being collected"""
+
+     min_task_size: Annotated[Optional[str], pydantic.Field(alias="minTaskSize")] = "1MB"
+     r"""Limits the bundle size for small tasks. For example,
+
+
+
+
+
+
+
+
+
+
+     if your lower bundle size is 1MB, you can bundle up to five 200KB files into one task.
+     """
+
+     max_task_size: Annotated[Optional[str], pydantic.Field(alias="maxTaskSize")] = (
+         "10MB"
+     )
+     r"""Limits the bundle size for files above the lower task bundle size. For example, if your upper bundle size is 10MB,
+
+
+
+
+
+
+
+
+
+
+     you can bundle up to five 2MB files into one task. Files greater than this size will be assigned to individual tasks.
+     """
+
+
+ class RunnableJobScheduledSearchScheduleTypedDict(TypedDict):
+     r"""Configuration for a scheduled job"""
+
+     enabled: NotRequired[bool]
+     r"""Enable to configure scheduling for this Collector"""
+     cron_schedule: NotRequired[str]
+     r"""A cron schedule on which to run this job"""
+     max_concurrent_runs: NotRequired[float]
+     r"""The maximum number of instances of this scheduled job that may be running at any time"""
+     skippable: NotRequired[bool]
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+     resume_missed: NotRequired[Any]
+     run: NotRequired[RunnableJobScheduledSearchRunSettingsTypedDict]
+
+
+ class RunnableJobScheduledSearchSchedule(BaseModel):
+     r"""Configuration for a scheduled job"""
+
+     enabled: Optional[bool] = None
+     r"""Enable to configure scheduling for this Collector"""
+
+     cron_schedule: Annotated[Optional[str], pydantic.Field(alias="cronSchedule")] = (
+         "*/5 * * * *"
+     )
+     r"""A cron schedule on which to run this job"""
+
+     max_concurrent_runs: Annotated[
+         Optional[float], pydantic.Field(alias="maxConcurrentRuns")
+     ] = 1
+     r"""The maximum number of instances of this scheduled job that may be running at any time"""
+
+     skippable: Optional[bool] = True
+     r"""Skippable jobs can be delayed, up to their next run time, if the system is hitting concurrency limits"""
+
+     resume_missed: Annotated[Optional[Any], pydantic.Field(alias="resumeMissed")] = None
+
+     run: Optional[RunnableJobScheduledSearchRunSettings] = None
+
+
+ class RunnableJobScheduledSearchTypedDict(TypedDict):
+     type: RunnableJobScheduledSearchJobType
+     saved_query_id: str
+     r"""Identifies which search query to run"""
+     id: NotRequired[str]
+     r"""Unique ID for this Job"""
+     description: NotRequired[str]
+     ttl: NotRequired[str]
+     r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+     ignore_group_jobs_limit: NotRequired[bool]
+     r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+     remove_fields: NotRequired[List[str]]
+     r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+     resume_on_boot: NotRequired[bool]
+     r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+     environment: NotRequired[str]
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+     schedule: NotRequired[RunnableJobScheduledSearchScheduleTypedDict]
+     r"""Configuration for a scheduled job"""
+     streamtags: NotRequired[List[str]]
+     r"""Tags for filtering and grouping in @{product}"""
+
+
+ class RunnableJobScheduledSearch(BaseModel):
+     type: RunnableJobScheduledSearchJobType
+
+     saved_query_id: Annotated[str, pydantic.Field(alias="savedQueryId")]
+     r"""Identifies which search query to run"""
+
+     id: Optional[str] = None
+     r"""Unique ID for this Job"""
+
+     description: Optional[str] = None
+
+     ttl: Optional[str] = "4h"
+     r"""Time to keep the job's artifacts on disk after job completion. This also affects how long a job is listed in the Job Inspector."""
+
+     ignore_group_jobs_limit: Annotated[
+         Optional[bool], pydantic.Field(alias="ignoreGroupJobsLimit")
+     ] = False
+     r"""When enabled, this job's artifacts are not counted toward the Worker Group's finished job artifacts limit. Artifacts will be removed only after the Collector's configured time to live."""
+
+     remove_fields: Annotated[
+         Optional[List[str]], pydantic.Field(alias="removeFields")
+     ] = None
+     r"""List of fields to remove from Discover results. Wildcards (for example, aws*) are allowed. This is useful when discovery returns sensitive fields that should not be exposed in the Jobs user interface."""
+
+     resume_on_boot: Annotated[Optional[bool], pydantic.Field(alias="resumeOnBoot")] = (
+         False
+     )
+     r"""Resume the ad hoc job if a failure condition causes Stream to restart during job execution"""
+
+     environment: Optional[str] = None
+     r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
+
+     schedule: Optional[RunnableJobScheduledSearchSchedule] = None
+     r"""Configuration for a scheduled job"""
+
+     streamtags: Optional[List[str]] = None
+     r"""Tags for filtering and grouping in @{product}"""
cribl_control_plane/models/updatepacksbyidop.py
@@ -2,12 +2,9 @@
 
  from __future__ import annotations
  from .packinfo import PackInfo, PackInfoTypedDict
+ from .packupgraderequest import PackUpgradeRequest, PackUpgradeRequestTypedDict
  from cribl_control_plane.types import BaseModel
- from cribl_control_plane.utils import (
-     FieldMetadata,
-     PathParamMetadata,
-     QueryParamMetadata,
- )
+ from cribl_control_plane.utils import FieldMetadata, PathParamMetadata, RequestMetadata
  from typing import List, Optional
  from typing_extensions import Annotated, NotRequired, TypedDict
 
@@ -15,12 +12,8 @@ from typing_extensions import Annotated, NotRequired, TypedDict
  class UpdatePacksByIDRequestTypedDict(TypedDict):
      id: str
      r"""The <code>id</code> of the Pack to upgrade."""
-     source: NotRequired[str]
-     r"""body string required Pack source"""
-     minor: NotRequired[str]
-     r"""body boolean optional Only upgrade to minor/patch versions"""
-     spec: NotRequired[str]
-     r"""body string optional Specify a branch, tag or a semver spec"""
+     pack_upgrade_request: PackUpgradeRequestTypedDict
+     r"""PackUpgradeRequest object"""
 
 
  class UpdatePacksByIDRequest(BaseModel):
@@ -29,23 +22,11 @@ class UpdatePacksByIDRequest(BaseModel):
      ]
      r"""The <code>id</code> of the Pack to upgrade."""
 
-     source: Annotated[
-         Optional[str],
-         FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
-     ] = None
-     r"""body string required Pack source"""
-
-     minor: Annotated[
-         Optional[str],
-         FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
-     ] = None
-     r"""body boolean optional Only upgrade to minor/patch versions"""
-
-     spec: Annotated[
-         Optional[str],
-         FieldMetadata(query=QueryParamMetadata(style="form", explode=True)),
-     ] = None
-     r"""body string optional Specify a branch, tag or a semver spec"""
+     pack_upgrade_request: Annotated[
+         PackUpgradeRequest,
+         FieldMetadata(request=RequestMetadata(media_type="application/json")),
+     ]
+     r"""PackUpgradeRequest object"""
 
 
  class UpdatePacksByIDResponseTypedDict(TypedDict):
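
The net effect of this change: the Pack upgrade parameters move out of form-style query parameters and into a JSON request body. A hedged sketch of the new request construction follows; the PackUpgradeRequest field names are inferred from the removed source/minor/spec parameters and their docstrings, and may differ in the released package.

from cribl_control_plane.models import PackUpgradeRequest, UpdatePacksByIDRequest

# Inferred fields: `source` ("Pack source") and `spec` ("a branch, tag or
# a semver spec") mirror the removed query parameters.
body = PackUpgradeRequest(source="cribl", spec="1.2.0")

req = UpdatePacksByIDRequest(id="my-pack", pack_upgrade_request=body)
# The FieldMetadata(request=RequestMetadata(media_type="application/json"))
# annotation tells the SDK to serialize `body` as the JSON request payload.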