cribl-control-plane 0.2.1rc7-py3-none-any.whl → 0.3.0a1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic.

Files changed (179)
  1. cribl_control_plane/_version.py +4 -4
  2. cribl_control_plane/errors/__init__.py +5 -8
  3. cribl_control_plane/errors/{healthserverstatus_error.py → healthstatus_error.py} +9 -10
  4. cribl_control_plane/groups_sdk.py +28 -52
  5. cribl_control_plane/health.py +16 -22
  6. cribl_control_plane/models/__init__.py +54 -217
  7. cribl_control_plane/models/appmode.py +14 -0
  8. cribl_control_plane/models/authtoken.py +1 -5
  9. cribl_control_plane/models/cacheconnection.py +0 -20
  10. cribl_control_plane/models/configgroup.py +7 -55
  11. cribl_control_plane/models/configgroupcloud.py +1 -11
  12. cribl_control_plane/models/createconfiggroupbyproductop.py +5 -17
  13. cribl_control_plane/models/createroutesappendbyidop.py +2 -2
  14. cribl_control_plane/models/createversionundoop.py +3 -3
  15. cribl_control_plane/models/cribllakedataset.py +1 -11
  16. cribl_control_plane/models/cribllakedatasetupdate.py +1 -11
  17. cribl_control_plane/models/datasetmetadata.py +1 -11
  18. cribl_control_plane/models/deleteconfiggroupbyproductandidop.py +0 -11
  19. cribl_control_plane/models/deleteoutputpqbyidop.py +2 -2
  20. cribl_control_plane/models/distributedsummary.py +0 -6
  21. cribl_control_plane/models/error.py +16 -0
  22. cribl_control_plane/models/getconfiggroupaclbyproductandidop.py +0 -20
  23. cribl_control_plane/models/getconfiggroupaclteamsbyproductandidop.py +0 -20
  24. cribl_control_plane/models/getconfiggroupbyproductandidop.py +0 -11
  25. cribl_control_plane/models/getconfiggroupconfigversionbyproductandidop.py +0 -11
  26. cribl_control_plane/models/gethealthinfoop.py +17 -0
  27. cribl_control_plane/models/getsummaryop.py +0 -11
  28. cribl_control_plane/models/hbcriblinfo.py +3 -24
  29. cribl_control_plane/models/{healthserverstatus.py → healthstatus.py} +8 -27
  30. cribl_control_plane/models/heartbeatmetadata.py +0 -3
  31. cribl_control_plane/models/input.py +78 -80
  32. cribl_control_plane/models/inputappscope.py +17 -80
  33. cribl_control_plane/models/inputazureblob.py +1 -33
  34. cribl_control_plane/models/inputcollection.py +1 -24
  35. cribl_control_plane/models/inputconfluentcloud.py +18 -195
  36. cribl_control_plane/models/inputcribl.py +1 -24
  37. cribl_control_plane/models/inputcriblhttp.py +17 -62
  38. cribl_control_plane/models/inputcribllakehttp.py +17 -62
  39. cribl_control_plane/models/inputcriblmetrics.py +1 -24
  40. cribl_control_plane/models/inputcribltcp.py +17 -62
  41. cribl_control_plane/models/inputcrowdstrike.py +1 -54
  42. cribl_control_plane/models/inputdatadogagent.py +17 -62
  43. cribl_control_plane/models/inputdatagen.py +1 -24
  44. cribl_control_plane/models/inputedgeprometheus.py +34 -147
  45. cribl_control_plane/models/inputelastic.py +27 -119
  46. cribl_control_plane/models/inputeventhub.py +1 -182
  47. cribl_control_plane/models/inputexec.py +1 -33
  48. cribl_control_plane/models/inputfile.py +3 -42
  49. cribl_control_plane/models/inputfirehose.py +17 -62
  50. cribl_control_plane/models/inputgooglepubsub.py +1 -36
  51. cribl_control_plane/models/inputgrafana.py +32 -157
  52. cribl_control_plane/models/inputhttp.py +17 -62
  53. cribl_control_plane/models/inputhttpraw.py +17 -62
  54. cribl_control_plane/models/inputjournalfiles.py +1 -24
  55. cribl_control_plane/models/inputkafka.py +17 -189
  56. cribl_control_plane/models/inputkinesis.py +1 -80
  57. cribl_control_plane/models/inputkubeevents.py +1 -24
  58. cribl_control_plane/models/inputkubelogs.py +1 -33
  59. cribl_control_plane/models/inputkubemetrics.py +1 -33
  60. cribl_control_plane/models/inputloki.py +17 -71
  61. cribl_control_plane/models/inputmetrics.py +17 -62
  62. cribl_control_plane/models/inputmodeldriventelemetry.py +17 -62
  63. cribl_control_plane/models/inputmsk.py +18 -81
  64. cribl_control_plane/models/inputnetflow.py +1 -24
  65. cribl_control_plane/models/inputoffice365mgmt.py +1 -67
  66. cribl_control_plane/models/inputoffice365msgtrace.py +1 -67
  67. cribl_control_plane/models/inputoffice365service.py +1 -67
  68. cribl_control_plane/models/inputopentelemetry.py +16 -92
  69. cribl_control_plane/models/inputprometheus.py +34 -138
  70. cribl_control_plane/models/inputprometheusrw.py +17 -71
  71. cribl_control_plane/models/inputrawudp.py +1 -24
  72. cribl_control_plane/models/inputs3.py +1 -45
  73. cribl_control_plane/models/inputs3inventory.py +1 -54
  74. cribl_control_plane/models/inputsecuritylake.py +1 -54
  75. cribl_control_plane/models/inputsnmp.py +1 -40
  76. cribl_control_plane/models/inputsplunk.py +17 -85
  77. cribl_control_plane/models/inputsplunkhec.py +16 -70
  78. cribl_control_plane/models/inputsplunksearch.py +1 -63
  79. cribl_control_plane/models/inputsqs.py +1 -56
  80. cribl_control_plane/models/inputsyslog.py +32 -121
  81. cribl_control_plane/models/inputsystemmetrics.py +9 -142
  82. cribl_control_plane/models/inputsystemstate.py +1 -33
  83. cribl_control_plane/models/inputtcp.py +17 -81
  84. cribl_control_plane/models/inputtcpjson.py +17 -71
  85. cribl_control_plane/models/inputwef.py +1 -71
  86. cribl_control_plane/models/inputwindowsmetrics.py +9 -129
  87. cribl_control_plane/models/inputwineventlogs.py +1 -60
  88. cribl_control_plane/models/inputwiz.py +1 -45
  89. cribl_control_plane/models/inputwizwebhook.py +17 -62
  90. cribl_control_plane/models/inputzscalerhec.py +16 -70
  91. cribl_control_plane/models/jobinfo.py +1 -4
  92. cribl_control_plane/models/jobstatus.py +3 -34
  93. cribl_control_plane/models/listconfiggroupbyproductop.py +0 -11
  94. cribl_control_plane/models/logininfo.py +3 -3
  95. cribl_control_plane/models/masterworkerentry.py +1 -11
  96. cribl_control_plane/models/nodeprovidedinfo.py +1 -11
  97. cribl_control_plane/models/nodeupgradestatus.py +0 -38
  98. cribl_control_plane/models/output.py +88 -93
  99. cribl_control_plane/models/outputazureblob.py +1 -110
  100. cribl_control_plane/models/outputazuredataexplorer.py +87 -452
  101. cribl_control_plane/models/outputazureeventhub.py +19 -281
  102. cribl_control_plane/models/outputazurelogs.py +19 -115
  103. cribl_control_plane/models/outputchronicle.py +19 -115
  104. cribl_control_plane/models/outputclickhouse.py +19 -155
  105. cribl_control_plane/models/outputcloudwatch.py +19 -106
  106. cribl_control_plane/models/outputconfluentcloud.py +38 -311
  107. cribl_control_plane/models/outputcriblhttp.py +19 -135
  108. cribl_control_plane/models/outputcribllake.py +1 -97
  109. cribl_control_plane/models/outputcribltcp.py +19 -132
  110. cribl_control_plane/models/outputcrowdstrikenextgensiem.py +20 -129
  111. cribl_control_plane/models/outputdatadog.py +19 -159
  112. cribl_control_plane/models/outputdataset.py +19 -143
  113. cribl_control_plane/models/outputdiskspool.py +1 -11
  114. cribl_control_plane/models/outputdls3.py +1 -152
  115. cribl_control_plane/models/outputdynatracehttp.py +19 -160
  116. cribl_control_plane/models/outputdynatraceotlp.py +19 -160
  117. cribl_control_plane/models/outputelastic.py +19 -163
  118. cribl_control_plane/models/outputelasticcloud.py +19 -140
  119. cribl_control_plane/models/outputexabeam.py +1 -61
  120. cribl_control_plane/models/outputfilesystem.py +1 -87
  121. cribl_control_plane/models/outputgooglechronicle.py +20 -166
  122. cribl_control_plane/models/outputgooglecloudlogging.py +20 -131
  123. cribl_control_plane/models/outputgooglecloudstorage.py +1 -136
  124. cribl_control_plane/models/outputgooglepubsub.py +19 -106
  125. cribl_control_plane/models/outputgrafanacloud.py +37 -288
  126. cribl_control_plane/models/outputgraphite.py +19 -105
  127. cribl_control_plane/models/outputhoneycomb.py +19 -115
  128. cribl_control_plane/models/outputhumiohec.py +19 -126
  129. cribl_control_plane/models/outputinfluxdb.py +19 -130
  130. cribl_control_plane/models/outputkafka.py +34 -302
  131. cribl_control_plane/models/outputkinesis.py +19 -133
  132. cribl_control_plane/models/outputloki.py +17 -129
  133. cribl_control_plane/models/outputminio.py +1 -145
  134. cribl_control_plane/models/outputmsk.py +34 -193
  135. cribl_control_plane/models/outputnewrelic.py +19 -136
  136. cribl_control_plane/models/outputnewrelicevents.py +20 -128
  137. cribl_control_plane/models/outputopentelemetry.py +19 -178
  138. cribl_control_plane/models/outputprometheus.py +19 -115
  139. cribl_control_plane/models/outputring.py +1 -31
  140. cribl_control_plane/models/outputs3.py +1 -152
  141. cribl_control_plane/models/outputsecuritylake.py +1 -114
  142. cribl_control_plane/models/outputsentinel.py +19 -135
  143. cribl_control_plane/models/outputsentineloneaisiem.py +20 -134
  144. cribl_control_plane/models/outputservicenow.py +19 -168
  145. cribl_control_plane/models/outputsignalfx.py +19 -115
  146. cribl_control_plane/models/outputsns.py +17 -113
  147. cribl_control_plane/models/outputsplunk.py +19 -153
  148. cribl_control_plane/models/outputsplunkhec.py +19 -208
  149. cribl_control_plane/models/outputsplunklb.py +19 -182
  150. cribl_control_plane/models/outputsqs.py +17 -124
  151. cribl_control_plane/models/outputstatsd.py +19 -105
  152. cribl_control_plane/models/outputstatsdext.py +19 -105
  153. cribl_control_plane/models/outputsumologic.py +19 -117
  154. cribl_control_plane/models/outputsyslog.py +96 -259
  155. cribl_control_plane/models/outputtcpjson.py +19 -141
  156. cribl_control_plane/models/outputwavefront.py +19 -115
  157. cribl_control_plane/models/outputwebhook.py +19 -161
  158. cribl_control_plane/models/outputxsiam.py +17 -113
  159. cribl_control_plane/models/packinfo.py +5 -8
  160. cribl_control_plane/models/packinstallinfo.py +5 -8
  161. cribl_control_plane/models/resourcepolicy.py +0 -11
  162. cribl_control_plane/models/{uploadpackresponse.py → routecloneconf.py} +4 -4
  163. cribl_control_plane/models/routeconf.py +4 -3
  164. cribl_control_plane/models/runnablejobcollection.py +9 -72
  165. cribl_control_plane/models/runnablejobexecutor.py +9 -32
  166. cribl_control_plane/models/runnablejobscheduledsearch.py +9 -23
  167. cribl_control_plane/models/updateconfiggroupbyproductandidop.py +0 -11
  168. cribl_control_plane/models/updateconfiggroupdeploybyproductandidop.py +0 -11
  169. cribl_control_plane/packs.py +7 -202
  170. cribl_control_plane/routes_sdk.py +6 -6
  171. cribl_control_plane/tokens.py +15 -23
  172. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/METADATA +9 -50
  173. cribl_control_plane-0.3.0a1.dist-info/RECORD +330 -0
  174. cribl_control_plane/models/groupcreaterequest.py +0 -171
  175. cribl_control_plane/models/outpostnodeinfo.py +0 -16
  176. cribl_control_plane/models/outputdatabricks.py +0 -482
  177. cribl_control_plane/models/updatepacksop.py +0 -25
  178. cribl_control_plane-0.2.1rc7.dist-info/RECORD +0 -331
  179. {cribl_control_plane-0.2.1rc7.dist-info → cribl_control_plane-0.3.0a1.dist-info}/WHEEL +0 -0
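
Among the renames in the list above, items 3 and 29 move the health-status model from healthserverstatus.py to healthstatus.py. For consumers importing that model by its module path, a minimal migration sketch follows; the class names are assumptions inferred from the module names and are not confirmed by this diff.

```python
# Hypothetical migration for the healthserverstatus -> healthstatus module rename
# (items 3 and 29 above). The class names HealthServerStatus / HealthStatus are
# assumed from the module names and are not confirmed by this diff.

# 0.2.1rc7:
# from cribl_control_plane.models.healthserverstatus import HealthServerStatus

# 0.3.0a1:
from cribl_control_plane.models.healthstatus import HealthStatus  # assumed class name
```

Code that imports from the package root (for example, cribl_control_plane.models) may be unaffected if the new name is re-exported there; only direct module-path imports would need this update.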

--- a/cribl_control_plane/models/groupcreaterequest.py
+++ /dev/null
@@ -1,171 +0,0 @@
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
-from __future__ import annotations
-from .commit import Commit, CommitTypedDict
-from .configgroupcloud import ConfigGroupCloud, ConfigGroupCloudTypedDict
-from .configgrouplookups import ConfigGroupLookups, ConfigGroupLookupsTypedDict
-from cribl_control_plane import models, utils
-from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
-from enum import Enum
-import pydantic
-from pydantic import field_serializer
-from pydantic.functional_validators import PlainValidator
-from typing import List, Optional
-from typing_extensions import Annotated, NotRequired, TypedDict
-
-
-class GroupCreateRequestEstimatedIngestRate(int, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Maximum expected volume of data ingested by the @{group}. (This setting is available only on @{group}s consisting of Cribl-managed Cribl.Cloud @{node}s.)"""
-
-    # 12 MB/sec
-    RATE12_MB_PER_SEC = 1024
-    # 24 MB/sec
-    RATE24_MB_PER_SEC = 2048
-    # 36 MB/sec
-    RATE36_MB_PER_SEC = 3072
-    # 48 MB/sec
-    RATE48_MB_PER_SEC = 4096
-    # 60 MB/sec
-    RATE60_MB_PER_SEC = 5120
-    # 84 MB/sec
-    RATE84_MB_PER_SEC = 7168
-    # 120 MB/sec
-    RATE120_MB_PER_SEC = 10240
-    # 156 MB/sec
-    RATE156_MB_PER_SEC = 13312
-    # 180 MB/sec
-    RATE180_MB_PER_SEC = 15360
-
-
-class GroupCreateRequestGitTypedDict(TypedDict):
-    commit: NotRequired[str]
-    local_changes: NotRequired[float]
-    log: NotRequired[List[CommitTypedDict]]
-
-
-class GroupCreateRequestGit(BaseModel):
-    commit: Optional[str] = None
-
-    local_changes: Annotated[Optional[float], pydantic.Field(alias="localChanges")] = (
-        None
-    )
-
-    log: Optional[List[Commit]] = None
-
-
-class GroupCreateRequestType(str, Enum, metaclass=utils.OpenEnumMeta):
-    LAKE_ACCESS = "lake_access"
-
-
-class GroupCreateRequestTypedDict(TypedDict):
-    id: str
-    cloud: NotRequired[ConfigGroupCloudTypedDict]
-    deploying_worker_count: NotRequired[float]
-    description: NotRequired[str]
-    estimated_ingest_rate: NotRequired[GroupCreateRequestEstimatedIngestRate]
-    r"""Maximum expected volume of data ingested by the @{group}. (This setting is available only on @{group}s consisting of Cribl-managed Cribl.Cloud @{node}s.)"""
-    git: NotRequired[GroupCreateRequestGitTypedDict]
-    incompatible_worker_count: NotRequired[float]
-    inherits: NotRequired[str]
-    is_fleet: NotRequired[bool]
-    is_search: NotRequired[bool]
-    lookup_deployments: NotRequired[List[ConfigGroupLookupsTypedDict]]
-    max_worker_age: NotRequired[str]
-    name: NotRequired[str]
-    on_prem: NotRequired[bool]
-    provisioned: NotRequired[bool]
-    source_group_id: NotRequired[str]
-    streamtags: NotRequired[List[str]]
-    tags: NotRequired[str]
-    type: NotRequired[GroupCreateRequestType]
-    upgrade_version: NotRequired[str]
-    worker_count: NotRequired[float]
-    worker_remote_access: NotRequired[bool]
-
-
-class GroupCreateRequest(BaseModel):
-    id: str
-
-    cloud: Optional[ConfigGroupCloud] = None
-
-    deploying_worker_count: Annotated[
-        Optional[float], pydantic.Field(alias="deployingWorkerCount")
-    ] = None
-
-    description: Optional[str] = None
-
-    estimated_ingest_rate: Annotated[
-        Annotated[
-            Optional[GroupCreateRequestEstimatedIngestRate],
-            PlainValidator(validate_open_enum(True)),
-        ],
-        pydantic.Field(alias="estimatedIngestRate"),
-    ] = None
-    r"""Maximum expected volume of data ingested by the @{group}. (This setting is available only on @{group}s consisting of Cribl-managed Cribl.Cloud @{node}s.)"""
-
-    git: Optional[GroupCreateRequestGit] = None
-
-    incompatible_worker_count: Annotated[
-        Optional[float], pydantic.Field(alias="incompatibleWorkerCount")
-    ] = None
-
-    inherits: Optional[str] = None
-
-    is_fleet: Annotated[Optional[bool], pydantic.Field(alias="isFleet")] = None
-
-    is_search: Annotated[Optional[bool], pydantic.Field(alias="isSearch")] = None
-
-    lookup_deployments: Annotated[
-        Optional[List[ConfigGroupLookups]], pydantic.Field(alias="lookupDeployments")
-    ] = None
-
-    max_worker_age: Annotated[Optional[str], pydantic.Field(alias="maxWorkerAge")] = (
-        None
-    )
-
-    name: Optional[str] = None
-
-    on_prem: Annotated[Optional[bool], pydantic.Field(alias="onPrem")] = None
-
-    provisioned: Optional[bool] = None
-
-    source_group_id: Annotated[Optional[str], pydantic.Field(alias="sourceGroupId")] = (
-        None
-    )
-
-    streamtags: Optional[List[str]] = None
-
-    tags: Optional[str] = None
-
-    type: Annotated[
-        Optional[GroupCreateRequestType], PlainValidator(validate_open_enum(False))
-    ] = None
-
-    upgrade_version: Annotated[
-        Optional[str], pydantic.Field(alias="upgradeVersion")
-    ] = None
-
-    worker_count: Annotated[Optional[float], pydantic.Field(alias="workerCount")] = None
-
-    worker_remote_access: Annotated[
-        Optional[bool], pydantic.Field(alias="workerRemoteAccess")
-    ] = None
-
-    @field_serializer("estimated_ingest_rate")
-    def serialize_estimated_ingest_rate(self, value):
-        if isinstance(value, str):
-            try:
-                return models.GroupCreateRequestEstimatedIngestRate(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("type")
-    def serialize_type(self, value):
-        if isinstance(value, str):
-            try:
-                return models.GroupCreateRequestType(value)
-            except ValueError:
-                return value
-        return value

--- a/cribl_control_plane/models/outpostnodeinfo.py
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
-from __future__ import annotations
-from cribl_control_plane.types import BaseModel
-from typing_extensions import TypedDict
-
-
-class OutpostNodeInfoTypedDict(TypedDict):
-    guid: str
-    host: str
-
-
-class OutpostNodeInfo(BaseModel):
-    guid: str
-
-    host: str

--- a/cribl_control_plane/models/outputdatabricks.py
+++ /dev/null
@@ -1,482 +0,0 @@
-"""Code generated by Speakeasy (https://speakeasy.com). DO NOT EDIT."""
-
-from __future__ import annotations
-from cribl_control_plane import models, utils
-from cribl_control_plane.types import BaseModel
-from cribl_control_plane.utils import validate_open_enum
-from enum import Enum
-import pydantic
-from pydantic import field_serializer
-from pydantic.functional_validators import PlainValidator
-from typing import List, Optional
-from typing_extensions import Annotated, NotRequired, TypedDict
-
-
-class OutputDatabricksType(str, Enum):
-    DATABRICKS = "databricks"
-
-
-class OutputDatabricksDataFormat(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Format of the output data"""
-
-    # JSON
-    JSON = "json"
-    # Raw
-    RAW = "raw"
-    # Parquet
-    PARQUET = "parquet"
-
-
-class OutputDatabricksBackpressureBehavior(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""How to handle events when all receivers are exerting backpressure"""
-
-    # Block
-    BLOCK = "block"
-    # Drop
-    DROP = "drop"
-
-
-class OutputDatabricksDiskSpaceProtection(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-
-    # Block
-    BLOCK = "block"
-    # Drop
-    DROP = "drop"
-
-
-class OutputDatabricksCompression(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Data compression format to apply to HTTP content before it is delivered"""
-
-    NONE = "none"
-    GZIP = "gzip"
-
-
-class OutputDatabricksCompressionLevel(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Compression level to apply before moving files to final destination"""
-
-    # Best Speed
-    BEST_SPEED = "best_speed"
-    # Normal
-    NORMAL = "normal"
-    # Best Compression
-    BEST_COMPRESSION = "best_compression"
-
-
-class OutputDatabricksParquetVersion(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Determines which data types are supported and how they are represented"""
-
-    # 1.0
-    PARQUET_1_0 = "PARQUET_1_0"
-    # 2.4
-    PARQUET_2_4 = "PARQUET_2_4"
-    # 2.6
-    PARQUET_2_6 = "PARQUET_2_6"
-
-
-class OutputDatabricksDataPageVersion(str, Enum, metaclass=utils.OpenEnumMeta):
-    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
-
-    # V1
-    DATA_PAGE_V1 = "DATA_PAGE_V1"
-    # V2
-    DATA_PAGE_V2 = "DATA_PAGE_V2"
-
-
-class OutputDatabricksKeyValueMetadatumTypedDict(TypedDict):
-    value: str
-    key: NotRequired[str]
-
-
-class OutputDatabricksKeyValueMetadatum(BaseModel):
-    value: str
-
-    key: Optional[str] = ""
-
-
-class OutputDatabricksTypedDict(TypedDict):
-    type: OutputDatabricksType
-    workspace_id: str
-    r"""Databricks workspace ID"""
-    client_id: str
-    r"""OAuth client ID for Unity Catalog authentication"""
-    client_secret: str
-    r"""OAuth client secret for Unity Catalog authentication"""
-    id: NotRequired[str]
-    r"""Unique ID for this output"""
-    pipeline: NotRequired[str]
-    r"""Pipeline to process data before sending out to this output"""
-    system_fields: NotRequired[List[str]]
-    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
-    environment: NotRequired[str]
-    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
-    streamtags: NotRequired[List[str]]
-    r"""Tags for filtering and grouping in @{product}"""
-    dest_path: NotRequired[str]
-    r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
-    stage_path: NotRequired[str]
-    r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
-    add_id_to_stage_path: NotRequired[bool]
-    r"""Add the Output ID value to staging location"""
-    remove_empty_dirs: NotRequired[bool]
-    r"""Remove empty staging directories after moving files"""
-    partition_expr: NotRequired[str]
-    r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
-    format_: NotRequired[OutputDatabricksDataFormat]
-    r"""Format of the output data"""
-    base_file_name: NotRequired[str]
-    r"""JavaScript expression to define the output filename prefix (can be constant)"""
-    file_name_suffix: NotRequired[str]
-    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
-    max_file_size_mb: NotRequired[float]
-    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
-    max_file_open_time_sec: NotRequired[float]
-    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
-    max_file_idle_time_sec: NotRequired[float]
-    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
-    max_open_files: NotRequired[float]
-    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
-    header_line: NotRequired[str]
-    r"""If set, this line will be written to the beginning of each output file"""
-    write_high_water_mark: NotRequired[float]
-    r"""Buffer size used to write to a file"""
-    on_backpressure: NotRequired[OutputDatabricksBackpressureBehavior]
-    r"""How to handle events when all receivers are exerting backpressure"""
-    deadletter_enabled: NotRequired[bool]
-    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
-    on_disk_full_backpressure: NotRequired[OutputDatabricksDiskSpaceProtection]
-    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-    scope: NotRequired[str]
-    r"""OAuth scope for Unity Catalog authentication"""
-    catalog: NotRequired[str]
-    r"""Name of the catalog to use for the output"""
-    schema_: NotRequired[str]
-    r"""Name of the catalog schema to use for the output"""
-    events_volume_name: NotRequired[str]
-    r"""Name of the events volume in Databricks"""
-    description: NotRequired[str]
-    compress: NotRequired[OutputDatabricksCompression]
-    r"""Data compression format to apply to HTTP content before it is delivered"""
-    compression_level: NotRequired[OutputDatabricksCompressionLevel]
-    r"""Compression level to apply before moving files to final destination"""
-    automatic_schema: NotRequired[bool]
-    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
-    parquet_schema: NotRequired[str]
-    r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
-    parquet_version: NotRequired[OutputDatabricksParquetVersion]
-    r"""Determines which data types are supported and how they are represented"""
-    parquet_data_page_version: NotRequired[OutputDatabricksDataPageVersion]
-    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
-    parquet_row_group_length: NotRequired[float]
-    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
-    parquet_page_size: NotRequired[str]
-    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
-    should_log_invalid_rows: NotRequired[bool]
-    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
-    key_value_metadata: NotRequired[List[OutputDatabricksKeyValueMetadatumTypedDict]]
-    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
-    enable_statistics: NotRequired[bool]
-    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
-    enable_write_page_index: NotRequired[bool]
-    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
-    enable_page_checksum: NotRequired[bool]
-    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
-    empty_dir_cleanup_sec: NotRequired[float]
-    r"""How frequently, in seconds, to clean up empty directories"""
-    deadletter_path: NotRequired[str]
-    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
-    max_retry_num: NotRequired[float]
-    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
-
-
-class OutputDatabricks(BaseModel):
-    type: OutputDatabricksType
-
-    workspace_id: Annotated[str, pydantic.Field(alias="workspaceId")]
-    r"""Databricks workspace ID"""
-
-    client_id: Annotated[str, pydantic.Field(alias="clientId")]
-    r"""OAuth client ID for Unity Catalog authentication"""
-
-    client_secret: Annotated[str, pydantic.Field(alias="clientSecret")]
-    r"""OAuth client secret for Unity Catalog authentication"""
-
-    id: Optional[str] = None
-    r"""Unique ID for this output"""
-
-    pipeline: Optional[str] = None
-    r"""Pipeline to process data before sending out to this output"""
-
-    system_fields: Annotated[
-        Optional[List[str]], pydantic.Field(alias="systemFields")
-    ] = None
-    r"""Fields to automatically add to events, such as cribl_pipe. Supports wildcards."""
-
-    environment: Optional[str] = None
-    r"""Optionally, enable this config only on a specified Git branch. If empty, will be enabled everywhere."""
-
-    streamtags: Optional[List[str]] = None
-    r"""Tags for filtering and grouping in @{product}"""
-
-    dest_path: Annotated[Optional[str], pydantic.Field(alias="destPath")] = ""
-    r"""Optional path to prepend to files before uploading. Must be a JavaScript expression (which can evaluate to a constant value), enclosed in quotes or backticks. Can be evaluated only at init time. Example referencing a Global Variable: `myEventsVolumePath-${C.vars.myVar}`"""
-
-    stage_path: Annotated[Optional[str], pydantic.Field(alias="stagePath")] = (
-        "$CRIBL_HOME/state/outputs/staging"
-    )
-    r"""Filesystem location in which to buffer files before compressing and moving to final destination. Use performant, stable storage."""
-
-    add_id_to_stage_path: Annotated[
-        Optional[bool], pydantic.Field(alias="addIdToStagePath")
-    ] = True
-    r"""Add the Output ID value to staging location"""
-
-    remove_empty_dirs: Annotated[
-        Optional[bool], pydantic.Field(alias="removeEmptyDirs")
-    ] = True
-    r"""Remove empty staging directories after moving files"""
-
-    partition_expr: Annotated[Optional[str], pydantic.Field(alias="partitionExpr")] = (
-        "C.Time.strftime(_time ? _time : Date.now()/1000, '%Y/%m/%d')"
-    )
-    r"""JavaScript expression defining how files are partitioned and organized. Default is date-based. If blank, Stream will fall back to the event's __partition field value – if present – otherwise to each location's root directory."""
-
-    format_: Annotated[
-        Annotated[
-            Optional[OutputDatabricksDataFormat],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="format"),
-    ] = OutputDatabricksDataFormat.JSON
-    r"""Format of the output data"""
-
-    base_file_name: Annotated[Optional[str], pydantic.Field(alias="baseFileName")] = (
-        "`CriblOut`"
-    )
-    r"""JavaScript expression to define the output filename prefix (can be constant)"""
-
-    file_name_suffix: Annotated[
-        Optional[str], pydantic.Field(alias="fileNameSuffix")
-    ] = '`.${C.env["CRIBL_WORKER_ID"]}.${__format}${__compression === "gzip" ? ".gz" : ""}`'
-    r"""JavaScript expression to define the output filename suffix (can be constant). The `__format` variable refers to the value of the `Data format` field (`json` or `raw`). The `__compression` field refers to the kind of compression being used (`none` or `gzip`)."""
-
-    max_file_size_mb: Annotated[
-        Optional[float], pydantic.Field(alias="maxFileSizeMB")
-    ] = 32
-    r"""Maximum uncompressed output file size. Files of this size will be closed and moved to final output location."""
-
-    max_file_open_time_sec: Annotated[
-        Optional[float], pydantic.Field(alias="maxFileOpenTimeSec")
-    ] = 300
-    r"""Maximum amount of time to write to a file. Files open for longer than this will be closed and moved to final output location."""
-
-    max_file_idle_time_sec: Annotated[
-        Optional[float], pydantic.Field(alias="maxFileIdleTimeSec")
-    ] = 30
-    r"""Maximum amount of time to keep inactive files open. Files open for longer than this will be closed and moved to final output location."""
-
-    max_open_files: Annotated[Optional[float], pydantic.Field(alias="maxOpenFiles")] = (
-        100
-    )
-    r"""Maximum number of files to keep open concurrently. When exceeded, @{product} will close the oldest open files and move them to the final output location."""
-
-    header_line: Annotated[Optional[str], pydantic.Field(alias="headerLine")] = ""
-    r"""If set, this line will be written to the beginning of each output file"""
-
-    write_high_water_mark: Annotated[
-        Optional[float], pydantic.Field(alias="writeHighWaterMark")
-    ] = 64
-    r"""Buffer size used to write to a file"""
-
-    on_backpressure: Annotated[
-        Annotated[
-            Optional[OutputDatabricksBackpressureBehavior],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="onBackpressure"),
-    ] = OutputDatabricksBackpressureBehavior.BLOCK
-    r"""How to handle events when all receivers are exerting backpressure"""
-
-    deadletter_enabled: Annotated[
-        Optional[bool], pydantic.Field(alias="deadletterEnabled")
-    ] = False
-    r"""If a file fails to move to its final destination after the maximum number of retries, move it to a designated directory to prevent further errors"""
-
-    on_disk_full_backpressure: Annotated[
-        Annotated[
-            Optional[OutputDatabricksDiskSpaceProtection],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="onDiskFullBackpressure"),
-    ] = OutputDatabricksDiskSpaceProtection.BLOCK
-    r"""How to handle events when disk space is below the global 'Min free disk space' limit"""
-
-    scope: Optional[str] = "all-apis"
-    r"""OAuth scope for Unity Catalog authentication"""
-
-    catalog: Optional[str] = "main"
-    r"""Name of the catalog to use for the output"""
-
-    schema_: Annotated[Optional[str], pydantic.Field(alias="schema")] = "external"
-    r"""Name of the catalog schema to use for the output"""
-
-    events_volume_name: Annotated[
-        Optional[str], pydantic.Field(alias="eventsVolumeName")
-    ] = "events"
-    r"""Name of the events volume in Databricks"""
-
-    description: Optional[str] = None
-
-    compress: Annotated[
-        Optional[OutputDatabricksCompression], PlainValidator(validate_open_enum(False))
-    ] = OutputDatabricksCompression.GZIP
-    r"""Data compression format to apply to HTTP content before it is delivered"""
-
-    compression_level: Annotated[
-        Annotated[
-            Optional[OutputDatabricksCompressionLevel],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="compressionLevel"),
-    ] = OutputDatabricksCompressionLevel.BEST_SPEED
-    r"""Compression level to apply before moving files to final destination"""
-
-    automatic_schema: Annotated[
-        Optional[bool], pydantic.Field(alias="automaticSchema")
-    ] = False
-    r"""Automatically calculate the schema based on the events of each Parquet file generated"""
-
-    parquet_schema: Annotated[Optional[str], pydantic.Field(alias="parquetSchema")] = (
-        None
-    )
-    r"""To add a new schema, navigate to Processing > Knowledge > Parquet Schemas"""
-
-    parquet_version: Annotated[
-        Annotated[
-            Optional[OutputDatabricksParquetVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="parquetVersion"),
-    ] = OutputDatabricksParquetVersion.PARQUET_2_6
-    r"""Determines which data types are supported and how they are represented"""
-
-    parquet_data_page_version: Annotated[
-        Annotated[
-            Optional[OutputDatabricksDataPageVersion],
-            PlainValidator(validate_open_enum(False)),
-        ],
-        pydantic.Field(alias="parquetDataPageVersion"),
-    ] = OutputDatabricksDataPageVersion.DATA_PAGE_V2
-    r"""Serialization format of data pages. Note that some reader implementations use Data page V2's attributes to work more efficiently, while others ignore it."""
-
-    parquet_row_group_length: Annotated[
-        Optional[float], pydantic.Field(alias="parquetRowGroupLength")
-    ] = 10000
-    r"""The number of rows that every group will contain. The final group can contain a smaller number of rows."""
-
-    parquet_page_size: Annotated[
-        Optional[str], pydantic.Field(alias="parquetPageSize")
-    ] = "1MB"
-    r"""Target memory size for page segments, such as 1MB or 128MB. Generally, lower values improve reading speed, while higher values improve compression."""
-
-    should_log_invalid_rows: Annotated[
-        Optional[bool], pydantic.Field(alias="shouldLogInvalidRows")
-    ] = None
-    r"""Log up to 3 rows that @{product} skips due to data mismatch"""
-
-    key_value_metadata: Annotated[
-        Optional[List[OutputDatabricksKeyValueMetadatum]],
-        pydantic.Field(alias="keyValueMetadata"),
-    ] = None
-    r"""The metadata of files the Destination writes will include the properties you add here as key-value pairs. Useful for tagging. Examples: \"key\":\"OCSF Event Class\", \"value\":\"9001\" """
-
-    enable_statistics: Annotated[
-        Optional[bool], pydantic.Field(alias="enableStatistics")
-    ] = True
-    r"""Statistics profile an entire file in terms of minimum/maximum values within data, numbers of nulls, etc. You can use Parquet tools to view statistics."""
-
-    enable_write_page_index: Annotated[
-        Optional[bool], pydantic.Field(alias="enableWritePageIndex")
-    ] = True
-    r"""One page index contains statistics for one data page. Parquet readers use statistics to enable page skipping."""
-
-    enable_page_checksum: Annotated[
-        Optional[bool], pydantic.Field(alias="enablePageChecksum")
-    ] = False
-    r"""Parquet tools can use the checksum of a Parquet page to verify data integrity"""
-
-    empty_dir_cleanup_sec: Annotated[
-        Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
-    ] = 300
-    r"""How frequently, in seconds, to clean up empty directories"""
-
-    deadletter_path: Annotated[
-        Optional[str], pydantic.Field(alias="deadletterPath")
-    ] = "$CRIBL_HOME/state/outputs/dead-letter"
-    r"""Storage location for files that fail to reach their final destination after maximum retries are exceeded"""
-
-    max_retry_num: Annotated[Optional[float], pydantic.Field(alias="maxRetryNum")] = 20
-    r"""The maximum number of times a file will attempt to move to its final destination before being dead-lettered"""
-
-    @field_serializer("format_")
-    def serialize_format_(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksDataFormat(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("on_backpressure")
-    def serialize_on_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksBackpressureBehavior(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("on_disk_full_backpressure")
-    def serialize_on_disk_full_backpressure(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksDiskSpaceProtection(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compress")
-    def serialize_compress(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksCompression(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("compression_level")
-    def serialize_compression_level(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksCompressionLevel(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("parquet_version")
-    def serialize_parquet_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksParquetVersion(value)
-            except ValueError:
-                return value
-        return value
-
-    @field_serializer("parquet_data_page_version")
-    def serialize_parquet_data_page_version(self, value):
-        if isinstance(value, str):
-            try:
-                return models.OutputDatabricksDataPageVersion(value)
-            except ValueError:
-                return value
-        return value