cribl-control-plane 0.0.38a1__py3-none-any.whl → 0.0.40__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of cribl-control-plane might be problematic.

Files changed (80)
  1. cribl_control_plane/_version.py +3 -3
  2. cribl_control_plane/commits.py +52 -42
  3. cribl_control_plane/commits_files.py +12 -12
  4. cribl_control_plane/destinations.py +3 -3
  5. cribl_control_plane/destinations_pq.py +2 -2
  6. cribl_control_plane/groups_sdk.py +8 -8
  7. cribl_control_plane/models/__init__.py +315 -368
  8. cribl_control_plane/models/branchinfo.py +13 -0
  9. cribl_control_plane/models/configgroup.py +5 -3
  10. cribl_control_plane/models/createversioncommitop.py +26 -1
  11. cribl_control_plane/models/createversionrevertop.py +4 -2
  12. cribl_control_plane/models/createversionundoop.py +4 -2
  13. cribl_control_plane/models/deleteoutputpqbyidop.py +5 -5
  14. cribl_control_plane/models/difffiles.py +171 -0
  15. cribl_control_plane/models/getoutputpqbyidop.py +6 -5
  16. cribl_control_plane/models/getversionbranchop.py +6 -5
  17. cribl_control_plane/models/getversioncountop.py +9 -7
  18. cribl_control_plane/models/getversiondiffop.py +9 -7
  19. cribl_control_plane/models/getversionfilesop.py +3 -2
  20. cribl_control_plane/models/getversionop.py +4 -2
  21. cribl_control_plane/models/getversionshowop.py +3 -2
  22. cribl_control_plane/models/getversionstatusop.py +4 -2
  23. cribl_control_plane/models/gitcountresult.py +13 -0
  24. cribl_control_plane/models/gitdiffresult.py +16 -0
  25. cribl_control_plane/models/inputcribllakehttp.py +5 -4
  26. cribl_control_plane/models/inputcrowdstrike.py +2 -2
  27. cribl_control_plane/models/inputs3.py +2 -2
  28. cribl_control_plane/models/inputs3inventory.py +2 -2
  29. cribl_control_plane/models/inputsecuritylake.py +2 -2
  30. cribl_control_plane/models/inputwineventlogs.py +1 -1
  31. cribl_control_plane/models/jobinfo.py +25 -0
  32. cribl_control_plane/models/jobstatus.py +17 -0
  33. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +1 -1
  34. cribl_control_plane/models/outputgooglepubsub.py +7 -28
  35. cribl_control_plane/models/outputsentineloneaisiem.py +3 -3
  36. cribl_control_plane/models/packinfo.py +5 -5
  37. cribl_control_plane/models/packinstallinfo.py +7 -7
  38. cribl_control_plane/models/packrequestbody_union.py +140 -0
  39. cribl_control_plane/models/packupgraderequest.py +26 -0
  40. cribl_control_plane/models/runnablejob.py +27 -0
  41. cribl_control_plane/models/runnablejobcollection.py +589 -0
  42. cribl_control_plane/models/runnablejobexecutor.py +336 -0
  43. cribl_control_plane/models/runnablejobscheduledsearch.py +267 -0
  44. cribl_control_plane/models/updatepacksbyidop.py +9 -28
  45. cribl_control_plane/packs.py +62 -116
  46. cribl_control_plane/{destinations_samples.py → samples.py} +1 -1
  47. cribl_control_plane/sdk.py +0 -3
  48. cribl_control_plane/statuses.py +6 -6
  49. {cribl_control_plane-0.0.38a1.dist-info → cribl_control_plane-0.0.40.dist-info}/METADATA +6 -22
  50. {cribl_control_plane-0.0.38a1.dist-info → cribl_control_plane-0.0.40.dist-info}/RECORD +51 -68
  51. cribl_control_plane/cribl.py +0 -513
  52. cribl_control_plane/models/authconfig.py +0 -43
  53. cribl_control_plane/models/commonservicelimitconfigs.py +0 -14
  54. cribl_control_plane/models/edgeheartbeatmetricsmode.py +0 -11
  55. cribl_control_plane/models/getsystemsettingsauthop.py +0 -24
  56. cribl_control_plane/models/getsystemsettingsconfop.py +0 -24
  57. cribl_control_plane/models/getsystemsettingsgitsettingsop.py +0 -24
  58. cribl_control_plane/models/gitopstype.py +0 -10
  59. cribl_control_plane/models/gitsettings.py +0 -70
  60. cribl_control_plane/models/jobsettings.py +0 -83
  61. cribl_control_plane/models/limits.py +0 -127
  62. cribl_control_plane/models/packrequestbody.py +0 -75
  63. cribl_control_plane/models/rediscachelimits.py +0 -38
  64. cribl_control_plane/models/redisconnectionlimits.py +0 -20
  65. cribl_control_plane/models/redislimits.py +0 -14
  66. cribl_control_plane/models/searchsettings.py +0 -71
  67. cribl_control_plane/models/serviceslimits.py +0 -23
  68. cribl_control_plane/models/systemsettings.py +0 -358
  69. cribl_control_plane/models/systemsettingsconf.py +0 -311
  70. cribl_control_plane/models/updatesystemsettingsauthop.py +0 -24
  71. cribl_control_plane/models/updatesystemsettingsconfop.py +0 -24
  72. cribl_control_plane/models/updatesystemsettingsgitsettingsop.py +0 -24
  73. cribl_control_plane/models/upgradegroupsettings.py +0 -24
  74. cribl_control_plane/models/upgradepackageurls.py +0 -20
  75. cribl_control_plane/models/upgradesettings.py +0 -36
  76. cribl_control_plane/settings.py +0 -23
  77. cribl_control_plane/settings_auth.py +0 -339
  78. cribl_control_plane/settings_git.py +0 -339
  79. cribl_control_plane/system_sdk.py +0 -17
  80. {cribl_control_plane-0.0.38a1.dist-info → cribl_control_plane-0.0.40.dist-info}/WHEEL +0 -0
cribl_control_plane/models/inputcrowdstrike.py
@@ -201,7 +201,7 @@ class InputCrowdstrikeTypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]
@@ -338,7 +338,7 @@ class InputCrowdstrike(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
     ] = False
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
 
     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
cribl_control_plane/models/inputs3.py
@@ -196,7 +196,7 @@ class InputS3TypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]
@@ -337,7 +337,7 @@ class InputS3(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
     ] = False
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
 
     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
cribl_control_plane/models/inputs3inventory.py
@@ -201,7 +201,7 @@ class InputS3InventoryTypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]
@@ -346,7 +346,7 @@ class InputS3Inventory(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
     ] = False
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
 
     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
cribl_control_plane/models/inputsecuritylake.py
@@ -201,7 +201,7 @@ class InputSecurityLakeTypedDict(TypedDict):
     skip_on_error: NotRequired[bool]
     r"""Skip files that trigger a processing error. Disabled by default, which allows retries after processing errors."""
     include_sqs_metadata: NotRequired[bool]
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
     enable_assume_role: NotRequired[bool]
     r"""Use Assume Role credentials to access Amazon S3"""
     assume_role_arn: NotRequired[str]
@@ -342,7 +342,7 @@ class InputSecurityLake(BaseModel):
     include_sqs_metadata: Annotated[
         Optional[bool], pydantic.Field(alias="includeSqsMetadata")
    ] = False
-    r"""Include metadata from SQS notifications on outgoing events"""
+    r"""Attach SQS notification metadata to a __sqsMetadata field on each event"""
 
     enable_assume_role: Annotated[
         Optional[bool], pydantic.Field(alias="enableAssumeRole")
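
The same docstring fix lands in all four SQS-driven sources (inputcrowdstrike.py, inputs3.py, inputs3inventory.py, inputsecuritylake.py): the flag now says where the metadata actually goes. A minimal sketch of what that means for a source config and its events; all IDs and payload keys here are hypothetical and trimmed to the fields relevant to this change:

```python
# Hypothetical S3 source payload, trimmed to the relevant fields.
s3_source_config = {
    "id": "s3-with-sqs-meta",        # hypothetical source ID
    "type": "s3",
    "queueName": "my-notify-queue",  # hypothetical SQS queue
    "includeSqsMetadata": True,      # defaults to False in the model
}

# Per the new docstring, events from this source then carry the SQS
# notification details on an internal field, roughly:
event = {
    "_raw": "...",
    "__sqsMetadata": {"messageId": "...", "receiptHandle": "..."},  # illustrative keys
}
```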
cribl_control_plane/models/inputwineventlogs.py
@@ -181,7 +181,7 @@ class InputWinEventLogs(BaseModel):
     pq: Optional[InputWinEventLogsPq] = None
 
     read_mode: Annotated[Optional[ReadMode], pydantic.Field(alias="readMode")] = (
-        ReadMode.OLDEST
+        ReadMode.NEWEST
     )
     r"""Read all stored and future event logs, or only future events"""
 
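
Note that this is a behavioral change, not a docstring edit: the readMode default flips from OLDEST to NEWEST, so sources created without an explicit readMode now skip stored events and read only future ones. A sketch of pinning the old behavior, assuming the enum serializes to lowercase values as its member names suggest; everything except readMode is hypothetical:

```python
# Hypothetical Windows Event Logs source payload, trimmed for brevity.
win_source_config = {
    "id": "dc01-security-log",   # hypothetical source ID
    "type": "win_event_logs",
    "logNames": ["Security"],    # hypothetical log selection
    # 0.0.40 defaults readMode to "newest" (future events only).
    # Keep reading stored events from the start by pinning it:
    "readMode": "oldest",
}
```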
cribl_control_plane/models/jobinfo.py (new file)
@@ -0,0 +1,25 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .jobstatus import JobStatus, JobStatusTypedDict
+from .runnablejob import RunnableJob, RunnableJobTypedDict
+from cribl_control_plane.types import BaseModel
+from typing import Optional
+from typing_extensions import NotRequired, TypedDict
+
+
+class JobInfoTypedDict(TypedDict):
+    args: RunnableJobTypedDict
+    id: str
+    status: JobStatusTypedDict
+    keep: NotRequired[bool]
+
+
+class JobInfo(BaseModel):
+    args: RunnableJob
+
+    id: str
+
+    status: JobStatus
+
+    keep: Optional[bool] = None
cribl_control_plane/models/jobstatus.py (new file)
@@ -0,0 +1,17 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+from typing import Any, Dict, Optional
+from typing_extensions import NotRequired, TypedDict
+
+
+class JobStatusTypedDict(TypedDict):
+    state: Dict[str, Any]
+    reason: NotRequired[Dict[str, Any]]
+
+
+class JobStatus(BaseModel):
+    state: Dict[str, Any]
+
+    reason: Optional[Dict[str, Any]] = None
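
JobInfo and JobStatus are new models backing the job endpoints: JobInfo pairs a job's RunnableJob args with its id and status, and JobStatus keeps state and reason as open dictionaries rather than fixed enums. A minimal sketch of constructing a status, assuming the usual re-export from cribl_control_plane.models; the dictionary contents are illustrative:

```python
from cribl_control_plane.models import JobStatus

# state and reason are Dict[str, Any], so arbitrary runtime detail
# round-trips through validation; these keys are illustrative.
status = JobStatus(state={"status": "finished", "tasksDone": 4})
print(status.model_dump())
# {'state': {'status': 'finished', 'tasksDone': 4}, 'reason': None}
```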
cribl_control_plane/models/outputcrowdstrikenextgensiem.py
@@ -298,7 +298,7 @@ class OutputCrowdstrikeNextGenSiem(BaseModel):
     format_: Annotated[
         Optional[OutputCrowdstrikeNextGenSiemRequestFormat],
         pydantic.Field(alias="format"),
-    ] = OutputCrowdstrikeNextGenSiemRequestFormat.RAW
+    ] = OutputCrowdstrikeNextGenSiemRequestFormat.JSON
     r"""When set to JSON, the event is automatically formatted with required fields before sending. When set to Raw, only the event's `_raw` value is sent."""
 
     auth_type: Annotated[
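
Another default flip: format moves from RAW to JSON, so events get auto-formatted with the required fields unless a destination pins RAW explicitly. A sketch of keeping the old behavior; the ID and type string are illustrative:

```python
from cribl_control_plane.models import OutputCrowdstrikeNextGenSiemRequestFormat

destination_config = {
    "id": "cs-next-gen-siem",              # hypothetical destination ID
    "type": "crowdstrike_next_gen_siem",   # illustrative type string
    # Pre-0.0.40 behavior sent only the event's _raw value by default;
    # pin RAW now that the default is JSON:
    "format": OutputCrowdstrikeNextGenSiemRequestFormat.RAW.value,
}
```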
cribl_control_plane/models/outputgooglepubsub.py
@@ -8,7 +8,7 @@ from typing import List, Optional
 from typing_extensions import Annotated, NotRequired, TypedDict
 
 
-class OutputGooglePubsubTypeGooglePubsub(str, Enum):
+class OutputGooglePubsubType(str, Enum):
     GOOGLE_PUBSUB = "google_pubsub"
 
 
@@ -20,25 +20,6 @@ class OutputGooglePubsubGoogleAuthenticationMethod(str, Enum):
     SECRET = "secret"
 
 
-class FlushPeriodSecType(str, Enum):
-    NUMBER = "number"
-
-
-class FlushPeriodSecTypedDict(TypedDict):
-    r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""
-
-    type: NotRequired[FlushPeriodSecType]
-    default: NotRequired[float]
-
-
-class FlushPeriodSec(BaseModel):
-    r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""
-
-    type: Optional[FlushPeriodSecType] = None
-
-    default: Optional[float] = None
-
-
 class OutputGooglePubsubBackpressureBehavior(str, Enum):
     r"""How to handle events when all receivers are exerting backpressure"""
 
@@ -78,7 +59,7 @@ class OutputGooglePubsubPqControls(BaseModel):
 
 
 class OutputGooglePubsubTypedDict(TypedDict):
-    type: OutputGooglePubsubTypeGooglePubsub
+    type: OutputGooglePubsubType
     topic_name: str
     r"""ID of the topic to send events to."""
     id: NotRequired[str]
@@ -111,8 +92,8 @@ class OutputGooglePubsubTypedDict(TypedDict):
     r"""Maximum number of queued batches before blocking."""
     max_record_size_kb: NotRequired[float]
     r"""Maximum size (KB) of batches to send."""
-    flush_period_sec: NotRequired[FlushPeriodSecTypedDict]
-    r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""
+    flush_period: NotRequired[float]
+    r"""Maximum time to wait before sending a batch (when batch size limit is not reached)"""
     max_in_progress: NotRequired[float]
     r"""The maximum number of in-progress API requests before backpressure is applied."""
     on_backpressure: NotRequired[OutputGooglePubsubBackpressureBehavior]
@@ -134,7 +115,7 @@ class OutputGooglePubsubTypedDict(TypedDict):
 
 
 class OutputGooglePubsub(BaseModel):
-    type: OutputGooglePubsubTypeGooglePubsub
+    type: OutputGooglePubsubType
 
     topic_name: Annotated[str, pydantic.Field(alias="topicName")]
     r"""ID of the topic to send events to."""
@@ -199,10 +180,8 @@ class OutputGooglePubsub(BaseModel):
     ] = 256
     r"""Maximum size (KB) of batches to send."""
 
-    flush_period_sec: Annotated[
-        Optional[FlushPeriodSec], pydantic.Field(alias="flushPeriodSec")
-    ] = None
-    r"""Maximum time to wait before sending a batch (when batch size limit is not reached)."""
+    flush_period: Annotated[Optional[float], pydantic.Field(alias="flushPeriod")] = 1
+    r"""Maximum time to wait before sending a batch (when batch size limit is not reached)"""
 
     max_in_progress: Annotated[
         Optional[float], pydantic.Field(alias="maxInProgress")
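
Two breaking changes here: the enum class rename (OutputGooglePubsubTypeGooglePubsub to OutputGooglePubsubType) breaks any code importing the old name, and the flushPeriodSec wrapper object ({type, default}) is replaced by a plain flushPeriod float defaulting to 1. A migration sketch for stored destination configs, assuming the old wrapper's effective value lived in its default key:

```python
def migrate_pubsub_flush(config: dict) -> dict:
    """Rewrite a pre-0.0.40 Google Pub/Sub destination config:
    flushPeriodSec ({"type": "number", "default": seconds}) becomes
    a plain flushPeriod float (SDK default: 1)."""
    old = config.pop("flushPeriodSec", None)
    if old is not None:
        config["flushPeriod"] = old.get("default", 1)
    return config


print(migrate_pubsub_flush({
    "type": "google_pubsub",
    "topicName": "events",  # hypothetical topic
    "flushPeriodSec": {"type": "number", "default": 5},
}))
# {'type': 'google_pubsub', 'topicName': 'events', 'flushPeriod': 5}
```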
cribl_control_plane/models/outputsentineloneaisiem.py
@@ -25,7 +25,7 @@ class OutputSentinelOneAiSiemRegion(str, Enum):
 
 
 class AISIEMEndpointPath(str, Enum):
-    r"""Regional endpoint used to send events to, such as /services/collector/event or /services/collector/raw"""
+    r"""Endpoint to send events to. Use /services/collector/event for structured JSON payloads with standard HEC top-level fields. Use /services/collector/raw for unstructured log lines (plain text)."""
 
     ROOT_SERVICES_COLLECTOR_EVENT = "/services/collector/event"
     ROOT_SERVICES_COLLECTOR_RAW = "/services/collector/raw"
@@ -164,7 +164,7 @@ class OutputSentinelOneAiSiemTypedDict(TypedDict):
     region: NotRequired[OutputSentinelOneAiSiemRegion]
     r"""The SentinelOne region to send events to. In most cases you can find the region by either looking at your SentinelOne URL or knowing what geographic region your SentinelOne instance is contained in."""
     endpoint: NotRequired[AISIEMEndpointPath]
-    r"""Regional endpoint used to send events to, such as /services/collector/event or /services/collector/raw"""
+    r"""Endpoint to send events to. Use /services/collector/event for structured JSON payloads with standard HEC top-level fields. Use /services/collector/raw for unstructured log lines (plain text)."""
     concurrency: NotRequired[float]
     r"""Maximum number of ongoing requests before blocking"""
     max_payload_size_kb: NotRequired[float]
@@ -281,7 +281,7 @@ class OutputSentinelOneAiSiem(BaseModel):
     endpoint: Optional[AISIEMEndpointPath] = (
         AISIEMEndpointPath.ROOT_SERVICES_COLLECTOR_EVENT
     )
-    r"""Regional endpoint used to send events to, such as /services/collector/event or /services/collector/raw"""
+    r"""Endpoint to send events to. Use /services/collector/event for structured JSON payloads with standard HEC top-level fields. Use /services/collector/raw for unstructured log lines (plain text)."""
 
     concurrency: Optional[float] = 5
     r"""Maximum number of ongoing requests before blocking"""
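
The rewritten docstring clarifies that the two paths differ by payload shape, not by region. A two-line sketch of selecting each, using the enum members shown above and assuming the usual re-export from cribl_control_plane.models:

```python
from cribl_control_plane.models import AISIEMEndpointPath

structured = AISIEMEndpointPath.ROOT_SERVICES_COLLECTOR_EVENT  # JSON + HEC top-level fields
plain_text = AISIEMEndpointPath.ROOT_SERVICES_COLLECTOR_RAW    # unstructured log lines
print(structured.value)  # /services/collector/event
```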
cribl_control_plane/models/packinfo.py
@@ -9,19 +9,19 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class PackInfoTagsTypedDict(TypedDict):
     data_type: List[str]
-    domain: List[str]
-    streamtags: List[str]
     technology: List[str]
+    domain: NotRequired[List[str]]
+    streamtags: NotRequired[List[str]]
 
 
 class PackInfoTags(BaseModel):
     data_type: Annotated[List[str], pydantic.Field(alias="dataType")]
 
-    domain: List[str]
+    technology: List[str]
 
-    streamtags: List[str]
+    domain: Optional[List[str]] = None
 
-    technology: List[str]
+    streamtags: Optional[List[str]] = None
 
 
 class PackInfoTypedDict(TypedDict):
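
Pack tags loosen up: dataType and technology stay required, while domain and streamtags become optional (the same change is applied to PackInstallInfoTags below, where warnings also tightens from Any to List[str]). A minimal construction under the new schema, assuming the generated BaseModel accepts field names alongside aliases as Speakeasy models typically do; the tag values are illustrative:

```python
from cribl_control_plane.models import PackInfoTags

# domain and streamtags may now be omitted entirely.
tags = PackInfoTags(data_type=["logs"], technology=["aws"])
print(tags.model_dump(by_alias=True, exclude_none=True))
# {'dataType': ['logs'], 'technology': ['aws']}
```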
cribl_control_plane/models/packinstallinfo.py
@@ -9,25 +9,25 @@ from typing_extensions import Annotated, NotRequired, TypedDict
 
 class PackInstallInfoTagsTypedDict(TypedDict):
     data_type: List[str]
-    domain: List[str]
-    streamtags: List[str]
     technology: List[str]
+    domain: NotRequired[List[str]]
+    streamtags: NotRequired[List[str]]
 
 
 class PackInstallInfoTags(BaseModel):
     data_type: Annotated[List[str], pydantic.Field(alias="dataType")]
 
-    domain: List[str]
+    technology: List[str]
 
-    streamtags: List[str]
+    domain: Optional[List[str]] = None
 
-    technology: List[str]
+    streamtags: Optional[List[str]] = None
 
 
 class PackInstallInfoTypedDict(TypedDict):
     id: str
     source: str
-    warnings: Any
+    warnings: List[str]
     author: NotRequired[str]
     description: NotRequired[str]
     display_name: NotRequired[str]
@@ -47,7 +47,7 @@ class PackInstallInfo(BaseModel):
 
     source: str
 
-    warnings: Any
+    warnings: List[str]
 
     author: Optional[str] = None
 
cribl_control_plane/models/packrequestbody_union.py (new file)
@@ -0,0 +1,140 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+import pydantic
+from typing import List, Optional, Union
+from typing_extensions import Annotated, NotRequired, TypeAliasType, TypedDict
+
+
+class PackRequestBodyTags2TypedDict(TypedDict):
+    data_type: NotRequired[List[str]]
+    domain: NotRequired[List[str]]
+    technology: NotRequired[List[str]]
+    streamtags: NotRequired[List[str]]
+
+
+class PackRequestBodyTags2(BaseModel):
+    data_type: Annotated[Optional[List[str]], pydantic.Field(alias="dataType")] = None
+
+    domain: Optional[List[str]] = None
+
+    technology: Optional[List[str]] = None
+
+    streamtags: Optional[List[str]] = None
+
+
+class PackRequestBody2TypedDict(TypedDict):
+    source: str
+    r"""The source of the pack. If not present, an empty pack will be created"""
+    id: NotRequired[str]
+    spec: NotRequired[str]
+    version: NotRequired[str]
+    min_log_stream_version: NotRequired[str]
+    display_name: NotRequired[str]
+    author: NotRequired[str]
+    description: NotRequired[str]
+    tags: NotRequired[PackRequestBodyTags2TypedDict]
+    allow_custom_functions: NotRequired[bool]
+    force: NotRequired[bool]
+
+
+class PackRequestBody2(BaseModel):
+    source: str
+    r"""The source of the pack. If not present, an empty pack will be created"""
+
+    id: Optional[str] = None
+
+    spec: Optional[str] = None
+
+    version: Optional[str] = None
+
+    min_log_stream_version: Annotated[
+        Optional[str], pydantic.Field(alias="minLogStreamVersion")
+    ] = None
+
+    display_name: Annotated[Optional[str], pydantic.Field(alias="displayName")] = None
+
+    author: Optional[str] = None
+
+    description: Optional[str] = None
+
+    tags: Optional[PackRequestBodyTags2] = None
+
+    allow_custom_functions: Annotated[
+        Optional[bool], pydantic.Field(alias="allowCustomFunctions")
+    ] = None
+
+    force: Optional[bool] = None
+
+
+class PackRequestBodyTags1TypedDict(TypedDict):
+    data_type: NotRequired[List[str]]
+    domain: NotRequired[List[str]]
+    technology: NotRequired[List[str]]
+    streamtags: NotRequired[List[str]]
+
+
+class PackRequestBodyTags1(BaseModel):
+    data_type: Annotated[Optional[List[str]], pydantic.Field(alias="dataType")] = None
+
+    domain: Optional[List[str]] = None
+
+    technology: Optional[List[str]] = None
+
+    streamtags: Optional[List[str]] = None
+
+
+class PackRequestBody1TypedDict(TypedDict):
+    id: str
+    spec: NotRequired[str]
+    version: NotRequired[str]
+    min_log_stream_version: NotRequired[str]
+    display_name: NotRequired[str]
+    author: NotRequired[str]
+    description: NotRequired[str]
+    source: NotRequired[str]
+    r"""The source of the pack. If not present, an empty pack will be created"""
+    tags: NotRequired[PackRequestBodyTags1TypedDict]
+    allow_custom_functions: NotRequired[bool]
+    force: NotRequired[bool]
+
+
+class PackRequestBody1(BaseModel):
+    id: str
+
+    spec: Optional[str] = None
+
+    version: Optional[str] = None
+
+    min_log_stream_version: Annotated[
+        Optional[str], pydantic.Field(alias="minLogStreamVersion")
+    ] = None
+
+    display_name: Annotated[Optional[str], pydantic.Field(alias="displayName")] = None
+
+    author: Optional[str] = None
+
+    description: Optional[str] = None
+
+    source: Optional[str] = None
+    r"""The source of the pack. If not present, an empty pack will be created"""
+
+    tags: Optional[PackRequestBodyTags1] = None
+
+    allow_custom_functions: Annotated[
+        Optional[bool], pydantic.Field(alias="allowCustomFunctions")
+    ] = None
+
+    force: Optional[bool] = None
+
+
+PackRequestBodyUnionTypedDict = TypeAliasType(
+    "PackRequestBodyUnionTypedDict",
+    Union[PackRequestBody1TypedDict, PackRequestBody2TypedDict],
+)
+
+
+PackRequestBodyUnion = TypeAliasType(
+    "PackRequestBodyUnion", Union[PackRequestBody1, PackRequestBody2]
+)
cribl_control_plane/models/packupgraderequest.py (new file)
@@ -0,0 +1,26 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from cribl_control_plane.types import BaseModel
+import pydantic
+from typing import Optional
+from typing_extensions import Annotated, NotRequired, TypedDict
+
+
+class PackUpgradeRequestTypedDict(TypedDict):
+    source: str
+    allow_custom_functions: NotRequired[bool]
+    minor: NotRequired[str]
+    spec: NotRequired[str]
+
+
+class PackUpgradeRequest(BaseModel):
+    source: str
+
+    allow_custom_functions: Annotated[
+        Optional[bool], pydantic.Field(alias="allowCustomFunctions")
+    ] = None
+
+    minor: Optional[str] = None
+
+    spec: Optional[str] = None
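
PackUpgradeRequest is a new, dedicated upgrade payload: only source is required, with minor and spec narrowing the upgrade target. A short sketch with hypothetical values:

```python
from cribl_control_plane.models import PackUpgradeRequest

upgrade = PackUpgradeRequest(
    source="cribl://HelloPacks",   # hypothetical pack source
    spec="2.1.0",                  # hypothetical version spec
    allow_custom_functions=True,
)
print(upgrade.model_dump(by_alias=True, exclude_none=True))
# {'source': 'cribl://HelloPacks', 'allowCustomFunctions': True, 'spec': '2.1.0'}
```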
cribl_control_plane/models/runnablejob.py (new file)
@@ -0,0 +1,27 @@
+"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
+
+from __future__ import annotations
+from .runnablejobcollection import RunnableJobCollection, RunnableJobCollectionTypedDict
+from .runnablejobexecutor import RunnableJobExecutor, RunnableJobExecutorTypedDict
+from .runnablejobscheduledsearch import (
+    RunnableJobScheduledSearch,
+    RunnableJobScheduledSearchTypedDict,
+)
+from typing import Union
+from typing_extensions import TypeAliasType
+
+
+RunnableJobTypedDict = TypeAliasType(
+    "RunnableJobTypedDict",
+    Union[
+        RunnableJobScheduledSearchTypedDict,
+        RunnableJobExecutorTypedDict,
+        RunnableJobCollectionTypedDict,
+    ],
+)
+
+
+RunnableJob = TypeAliasType(
+    "RunnableJob",
+    Union[RunnableJobScheduledSearch, RunnableJobExecutor, RunnableJobCollection],
+)