cribl-control-plane 0.0.42__py3-none-any.whl → 0.0.44__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cribl-control-plane might be problematic; see the registry's advisory page for more details.

Files changed (87)
  1. cribl_control_plane/_hooks/clientcredentials.py +1 -1
  2. cribl_control_plane/_version.py +4 -4
  3. cribl_control_plane/acl.py +5 -3
  4. cribl_control_plane/auth_sdk.py +6 -3
  5. cribl_control_plane/basesdk.py +11 -1
  6. cribl_control_plane/commits.py +5 -3
  7. cribl_control_plane/destinations.py +6 -4
  8. cribl_control_plane/errors/__init__.py +15 -3
  9. cribl_control_plane/groups_configs.py +8 -3
  10. cribl_control_plane/groups_sdk.py +6 -4
  11. cribl_control_plane/models/__init__.py +401 -1
  12. cribl_control_plane/models/input.py +2 -2
  13. cribl_control_plane/models/inputappscope.py +13 -0
  14. cribl_control_plane/models/inputazureblob.py +13 -0
  15. cribl_control_plane/models/inputcollection.py +13 -0
  16. cribl_control_plane/models/inputconfluentcloud.py +20 -0
  17. cribl_control_plane/models/inputcribl.py +13 -0
  18. cribl_control_plane/models/inputcriblhttp.py +13 -0
  19. cribl_control_plane/models/inputcribllakehttp.py +38 -4
  20. cribl_control_plane/models/inputcriblmetrics.py +13 -0
  21. cribl_control_plane/models/inputcribltcp.py +13 -0
  22. cribl_control_plane/models/inputcrowdstrike.py +13 -0
  23. cribl_control_plane/models/inputdatadogagent.py +13 -0
  24. cribl_control_plane/models/inputdatagen.py +13 -0
  25. cribl_control_plane/models/inputedgeprometheus.py +13 -0
  26. cribl_control_plane/models/inputelastic.py +13 -0
  27. cribl_control_plane/models/inputeventhub.py +13 -0
  28. cribl_control_plane/models/inputexec.py +13 -0
  29. cribl_control_plane/models/inputfile.py +13 -0
  30. cribl_control_plane/models/inputfirehose.py +13 -0
  31. cribl_control_plane/models/inputgooglepubsub.py +13 -0
  32. cribl_control_plane/models/inputgrafana.py +26 -14
  33. cribl_control_plane/models/inputhttp.py +13 -0
  34. cribl_control_plane/models/inputhttpraw.py +13 -0
  35. cribl_control_plane/models/inputjournalfiles.py +13 -0
  36. cribl_control_plane/models/inputkafka.py +20 -0
  37. cribl_control_plane/models/inputkinesis.py +13 -0
  38. cribl_control_plane/models/inputkubeevents.py +13 -0
  39. cribl_control_plane/models/inputkubelogs.py +13 -0
  40. cribl_control_plane/models/inputkubemetrics.py +13 -0
  41. cribl_control_plane/models/inputloki.py +13 -7
  42. cribl_control_plane/models/inputmetrics.py +13 -0
  43. cribl_control_plane/models/inputmodeldriventelemetry.py +14 -0
  44. cribl_control_plane/models/inputmsk.py +13 -0
  45. cribl_control_plane/models/inputnetflow.py +13 -0
  46. cribl_control_plane/models/inputoffice365mgmt.py +13 -0
  47. cribl_control_plane/models/inputoffice365msgtrace.py +13 -0
  48. cribl_control_plane/models/inputoffice365service.py +13 -0
  49. cribl_control_plane/models/inputopentelemetry.py +13 -0
  50. cribl_control_plane/models/inputprometheus.py +13 -0
  51. cribl_control_plane/models/inputprometheusrw.py +13 -0
  52. cribl_control_plane/models/inputrawudp.py +13 -0
  53. cribl_control_plane/models/inputs3.py +13 -0
  54. cribl_control_plane/models/inputs3inventory.py +13 -0
  55. cribl_control_plane/models/inputsecuritylake.py +13 -0
  56. cribl_control_plane/models/inputsnmp.py +13 -0
  57. cribl_control_plane/models/inputsplunk.py +13 -0
  58. cribl_control_plane/models/inputsplunkhec.py +13 -0
  59. cribl_control_plane/models/inputsplunksearch.py +13 -0
  60. cribl_control_plane/models/inputsqs.py +13 -0
  61. cribl_control_plane/models/inputsyslog.py +26 -0
  62. cribl_control_plane/models/inputsystemmetrics.py +13 -0
  63. cribl_control_plane/models/inputsystemstate.py +13 -0
  64. cribl_control_plane/models/inputtcp.py +13 -0
  65. cribl_control_plane/models/inputtcpjson.py +13 -0
  66. cribl_control_plane/models/inputwef.py +13 -0
  67. cribl_control_plane/models/inputwindowsmetrics.py +13 -0
  68. cribl_control_plane/models/inputwineventlogs.py +13 -0
  69. cribl_control_plane/models/inputwiz.py +13 -0
  70. cribl_control_plane/models/inputwizwebhook.py +13 -0
  71. cribl_control_plane/models/inputzscalerhec.py +13 -0
  72. cribl_control_plane/models/output.py +18 -18
  73. cribl_control_plane/models/outputazuredataexplorer.py +7 -0
  74. cribl_control_plane/models/outputconfluentcloud.py +7 -0
  75. cribl_control_plane/models/outputgrafanacloud.py +0 -14
  76. cribl_control_plane/models/outputkafka.py +7 -0
  77. cribl_control_plane/models/outputloki.py +0 -7
  78. cribl_control_plane/models/outputsyslog.py +68 -0
  79. cribl_control_plane/nodes.py +5 -3
  80. cribl_control_plane/sdk.py +15 -2
  81. cribl_control_plane/sources.py +5 -3
  82. cribl_control_plane/utils/__init__.py +15 -3
  83. cribl_control_plane/utils/eventstreaming.py +10 -0
  84. cribl_control_plane/versions.py +11 -6
  85. {cribl_control_plane-0.0.42.dist-info → cribl_control_plane-0.0.44.dist-info}/METADATA +1 -1
  86. {cribl_control_plane-0.0.42.dist-info → cribl_control_plane-0.0.44.dist-info}/RECORD +87 -87
  87. {cribl_control_plane-0.0.42.dist-info → cribl_control_plane-0.0.44.dist-info}/WHEEL +0 -0
@@ -37,6 +37,14 @@ class InputTCPCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputTCPPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputTCPPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputTCPPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputTCPMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputTCPPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputTCPCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputTCPPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputTCPPq(BaseModel):
@@ -82,6 +91,10 @@ class InputTCPPq(BaseModel):
82
91
  compress: Optional[InputTCPCompression] = InputTCPCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputTCPPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class InputTCPMinimumTLSVersion(str, Enum):
87
100
  TL_SV1 = "TLSv1"
@@ -37,6 +37,14 @@ class InputTcpjsonCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputTcpjsonPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputTcpjsonPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputTcpjsonPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputTcpjsonMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputTcpjsonPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputTcpjsonCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputTcpjsonPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputTcpjsonPq(BaseModel):
@@ -82,6 +91,10 @@ class InputTcpjsonPq(BaseModel):
82
91
  compress: Optional[InputTcpjsonCompression] = InputTcpjsonCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputTcpjsonPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class InputTcpjsonMinimumTLSVersion(str, Enum):
87
100
  TL_SV1 = "TLSv1"
@@ -37,6 +37,14 @@ class InputWefCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputWefPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputWefPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputWefPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputWefMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWefPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputWefCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputWefPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputWefPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWefPq(BaseModel):
82
91
  compress: Optional[InputWefCompression] = InputWefCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputWefPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class InputWefAuthenticationMethod(str, Enum):
87
100
  r"""How to authenticate incoming client connections"""
@@ -37,6 +37,14 @@ class InputWindowsMetricsCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputWindowsMetricsPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputWindowsMetricsPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputWindowsMetricsPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputWindowsMetricsPqMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWindowsMetricsPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputWindowsMetricsCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputWindowsMetricsPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputWindowsMetricsPq(BaseModel):
@@ -84,6 +93,10 @@ class InputWindowsMetricsPq(BaseModel):
84
93
  )
85
94
  r"""Codec to use to compress the persisted data"""
86
95
 
96
+ pq_controls: Annotated[
97
+ Optional[InputWindowsMetricsPqControls], pydantic.Field(alias="pqControls")
98
+ ] = None
99
+
87
100
 
88
101
  class InputWindowsMetricsHostMode(str, Enum):
89
102
  r"""Select level of detail for host metrics"""
@@ -37,6 +37,14 @@ class InputWinEventLogsCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputWinEventLogsPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputWinEventLogsPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputWinEventLogsPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputWinEventLogsMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWinEventLogsPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputWinEventLogsCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputWinEventLogsPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputWinEventLogsPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWinEventLogsPq(BaseModel):
82
91
  compress: Optional[InputWinEventLogsCompression] = InputWinEventLogsCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputWinEventLogsPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class ReadMode(str, Enum):
87
100
  r"""Read all stored and future event logs, or only future events"""
@@ -37,6 +37,14 @@ class InputWizCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputWizPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputWizPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputWizPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputWizMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWizPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputWizCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputWizPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputWizPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWizPq(BaseModel):
82
91
  compress: Optional[InputWizCompression] = InputWizCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputWizPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class InputWizContentConfigTypedDict(TypedDict):
87
100
  content_type: str
@@ -37,6 +37,14 @@ class InputWizWebhookCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputWizWebhookPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputWizWebhookPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputWizWebhookPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputWizWebhookMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputWizWebhookPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputWizWebhookCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputWizWebhookPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputWizWebhookPq(BaseModel):
@@ -82,6 +91,10 @@ class InputWizWebhookPq(BaseModel):
82
91
  compress: Optional[InputWizWebhookCompression] = InputWizWebhookCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputWizWebhookPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class InputWizWebhookMinimumTLSVersion(str, Enum):
87
100
  TL_SV1 = "TLSv1"
@@ -37,6 +37,14 @@ class InputZscalerHecCompression(str, Enum):
37
37
  GZIP = "gzip"
38
38
 
39
39
 
40
+ class InputZscalerHecPqControlsTypedDict(TypedDict):
41
+ pass
42
+
43
+
44
+ class InputZscalerHecPqControls(BaseModel):
45
+ pass
46
+
47
+
40
48
  class InputZscalerHecPqTypedDict(TypedDict):
41
49
  mode: NotRequired[InputZscalerHecMode]
42
50
  r"""With Smart mode, PQ will write events to the filesystem only when it detects backpressure from the processing engine. With Always On mode, PQ will always write events directly to the queue before forwarding them to the processing engine."""
@@ -52,6 +60,7 @@ class InputZscalerHecPqTypedDict(TypedDict):
52
60
  r"""The location for the persistent queue files. To this field's value, the system will append: /<worker-id>/inputs/<input-id>"""
53
61
  compress: NotRequired[InputZscalerHecCompression]
54
62
  r"""Codec to use to compress the persisted data"""
63
+ pq_controls: NotRequired[InputZscalerHecPqControlsTypedDict]
55
64
 
56
65
 
57
66
  class InputZscalerHecPq(BaseModel):
@@ -82,6 +91,10 @@ class InputZscalerHecPq(BaseModel):
82
91
  compress: Optional[InputZscalerHecCompression] = InputZscalerHecCompression.NONE
83
92
  r"""Codec to use to compress the persisted data"""
84
93
 
94
+ pq_controls: Annotated[
95
+ Optional[InputZscalerHecPqControls], pydantic.Field(alias="pqControls")
96
+ ] = None
97
+
85
98
 
86
99
  class InputZscalerHecAuthenticationMethod(str, Enum):
87
100
  r"""Select Manual to enter an auth token directly, or select Secret to use a text secret to authenticate"""
@@ -90,8 +90,8 @@ OutputTypedDict = TypeAliasType(
90
90
  OutputDevnullTypedDict,
91
91
  OutputDefaultTypedDict,
92
92
  OutputRouterTypedDict,
93
- OutputNetflowTypedDict,
94
93
  OutputSnmpTypedDict,
94
+ OutputNetflowTypedDict,
95
95
  OutputDiskSpoolTypedDict,
96
96
  OutputRingTypedDict,
97
97
  OutputStatsdExtTypedDict,
@@ -99,19 +99,18 @@ OutputTypedDict = TypeAliasType(
99
99
  OutputStatsdTypedDict,
100
100
  OutputGooglePubsubTypedDict,
101
101
  OutputCriblTCPTypedDict,
102
- OutputSnsTypedDict,
103
102
  OutputSplunkTypedDict,
103
+ OutputSnsTypedDict,
104
104
  OutputCloudwatchTypedDict,
105
- OutputSyslogTypedDict,
106
105
  OutputAzureEventhubTypedDict,
107
106
  OutputWavefrontTypedDict,
108
107
  OutputSignalfxTypedDict,
109
108
  OutputHoneycombTypedDict,
110
109
  OutputSumoLogicTypedDict,
111
- OutputTcpjsonTypedDict,
110
+ OutputCrowdstrikeNextGenSiemTypedDict,
112
111
  OutputHumioHecTypedDict,
112
+ OutputTcpjsonTypedDict,
113
113
  OutputElasticCloudTypedDict,
114
- OutputCrowdstrikeNextGenSiemTypedDict,
115
114
  OutputKinesisTypedDict,
116
115
  OutputConfluentCloudTypedDict,
117
116
  OutputKafkaTypedDict,
@@ -119,21 +118,22 @@ OutputTypedDict = TypeAliasType(
119
118
  OutputNewrelicEventsTypedDict,
120
119
  OutputAzureLogsTypedDict,
121
120
  OutputSplunkLbTypedDict,
121
+ OutputSyslogTypedDict,
122
122
  OutputSqsTypedDict,
123
123
  OutputNewrelicTypedDict,
124
124
  OutputCriblHTTPTypedDict,
125
125
  OutputXsiamTypedDict,
126
126
  OutputFilesystemTypedDict,
127
127
  OutputDatasetTypedDict,
128
+ OutputLokiTypedDict,
128
129
  OutputSplunkHecTypedDict,
129
130
  OutputDynatraceHTTPTypedDict,
130
131
  OutputServiceNowTypedDict,
131
- OutputLokiTypedDict,
132
132
  OutputDynatraceOtlpTypedDict,
133
- OutputGoogleChronicleTypedDict,
134
133
  OutputElasticTypedDict,
135
- OutputDatadogTypedDict,
134
+ OutputGoogleChronicleTypedDict,
136
135
  OutputCriblLakeTypedDict,
136
+ OutputDatadogTypedDict,
137
137
  OutputPrometheusTypedDict,
138
138
  OutputMskTypedDict,
139
139
  OutputSentinelOneAiSiemTypedDict,
@@ -147,8 +147,8 @@ OutputTypedDict = TypeAliasType(
147
147
  OutputSecurityLakeTypedDict,
148
148
  OutputDlS3TypedDict,
149
149
  OutputS3TypedDict,
150
- OutputAzureDataExplorerTypedDict,
151
150
  OutputWebhookTypedDict,
151
+ OutputAzureDataExplorerTypedDict,
152
152
  OutputGoogleCloudLoggingTypedDict,
153
153
  OutputGrafanaCloudTypedDict,
154
154
  ],
@@ -161,8 +161,8 @@ Output = TypeAliasType(
161
161
  OutputDevnull,
162
162
  OutputDefault,
163
163
  OutputRouter,
164
- OutputNetflow,
165
164
  OutputSnmp,
165
+ OutputNetflow,
166
166
  OutputDiskSpool,
167
167
  OutputRing,
168
168
  OutputStatsdExt,
@@ -170,19 +170,18 @@ Output = TypeAliasType(
170
170
  OutputStatsd,
171
171
  OutputGooglePubsub,
172
172
  OutputCriblTCP,
173
- OutputSns,
174
173
  OutputSplunk,
174
+ OutputSns,
175
175
  OutputCloudwatch,
176
- OutputSyslog,
177
176
  OutputAzureEventhub,
178
177
  OutputWavefront,
179
178
  OutputSignalfx,
180
179
  OutputHoneycomb,
181
180
  OutputSumoLogic,
182
- OutputTcpjson,
181
+ OutputCrowdstrikeNextGenSiem,
183
182
  OutputHumioHec,
183
+ OutputTcpjson,
184
184
  OutputElasticCloud,
185
- OutputCrowdstrikeNextGenSiem,
186
185
  OutputKinesis,
187
186
  OutputConfluentCloud,
188
187
  OutputKafka,
@@ -190,21 +189,22 @@ Output = TypeAliasType(
190
189
  OutputNewrelicEvents,
191
190
  OutputAzureLogs,
192
191
  OutputSplunkLb,
192
+ OutputSyslog,
193
193
  OutputSqs,
194
194
  OutputNewrelic,
195
195
  OutputCriblHTTP,
196
196
  OutputXsiam,
197
197
  OutputFilesystem,
198
198
  OutputDataset,
199
+ OutputLoki,
199
200
  OutputSplunkHec,
200
201
  OutputDynatraceHTTP,
201
202
  OutputServiceNow,
202
- OutputLoki,
203
203
  OutputDynatraceOtlp,
204
- OutputGoogleChronicle,
205
204
  OutputElastic,
206
- OutputDatadog,
205
+ OutputGoogleChronicle,
207
206
  OutputCriblLake,
207
+ OutputDatadog,
208
208
  OutputPrometheus,
209
209
  OutputMsk,
210
210
  OutputSentinelOneAiSiem,
@@ -218,8 +218,8 @@ Output = TypeAliasType(
218
218
  OutputSecurityLake,
219
219
  OutputDlS3,
220
220
  OutputS3,
221
- OutputAzureDataExplorer,
222
221
  OutputWebhook,
222
+ OutputAzureDataExplorer,
223
223
  OutputGoogleCloudLogging,
224
224
  OutputGrafanaCloud,
225
225
  ],
@@ -336,6 +336,8 @@ class OutputAzureDataExplorerTypedDict(TypedDict):
336
336
  pq_mode: NotRequired[OutputAzureDataExplorerMode]
337
337
  r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
338
338
  pq_controls: NotRequired[OutputAzureDataExplorerPqControlsTypedDict]
339
+ empty_dir_cleanup_sec: NotRequired[float]
340
+ r"""How frequently, in seconds, to clean up empty directories"""
339
341
 
340
342
 
341
343
  class OutputAzureDataExplorer(BaseModel):
@@ -611,3 +613,8 @@ class OutputAzureDataExplorer(BaseModel):
611
613
  pq_controls: Annotated[
612
614
  Optional[OutputAzureDataExplorerPqControls], pydantic.Field(alias="pqControls")
613
615
  ] = None
616
+
617
+ empty_dir_cleanup_sec: Annotated[
618
+ Optional[float], pydantic.Field(alias="emptyDirCleanupSec")
619
+ ] = 300
620
+ r"""How frequently, in seconds, to clean up empty directories"""
@@ -294,6 +294,8 @@ class OutputConfluentCloudAuthenticationTypedDict(TypedDict):
294
294
 
295
295
  disabled: NotRequired[bool]
296
296
  mechanism: NotRequired[OutputConfluentCloudSASLMechanism]
297
+ oauth_enabled: NotRequired[bool]
298
+ r"""Enable OAuth authentication"""
297
299
 
298
300
 
299
301
  class OutputConfluentCloudAuthentication(BaseModel):
@@ -305,6 +307,11 @@ class OutputConfluentCloudAuthentication(BaseModel):
305
307
  OutputConfluentCloudSASLMechanism.PLAIN
306
308
  )
307
309
 
310
+ oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
311
+ False
312
+ )
313
+ r"""Enable OAuth authentication"""
314
+
308
315
 
309
316
  class OutputConfluentCloudBackpressureBehavior(str, Enum):
310
317
  r"""How to handle events when all receivers are exerting backpressure"""
@@ -284,8 +284,6 @@ class OutputGrafanaCloudGrafanaCloud2TypedDict(TypedDict):
284
284
  r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
285
285
  safe_headers: NotRequired[List[str]]
286
286
  r"""List of headers that are safe to log in plain text"""
287
- send_structured_metadata: NotRequired[bool]
288
- r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
289
287
  response_retry_settings: NotRequired[
290
288
  List[OutputGrafanaCloudResponseRetrySetting2TypedDict]
291
289
  ]
@@ -418,11 +416,6 @@ class OutputGrafanaCloudGrafanaCloud2(BaseModel):
418
416
  ] = None
419
417
  r"""List of headers that are safe to log in plain text"""
420
418
 
421
- send_structured_metadata: Annotated[
422
- Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
423
- ] = False
424
- r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
425
-
426
419
  response_retry_settings: Annotated[
427
420
  Optional[List[OutputGrafanaCloudResponseRetrySetting2]],
428
421
  pydantic.Field(alias="responseRetrySettings"),
@@ -760,8 +753,6 @@ class OutputGrafanaCloudGrafanaCloud1TypedDict(TypedDict):
760
753
  r"""Data to log when a request fails. All headers are redacted by default, unless listed as safe headers below."""
761
754
  safe_headers: NotRequired[List[str]]
762
755
  r"""List of headers that are safe to log in plain text"""
763
- send_structured_metadata: NotRequired[bool]
764
- r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
765
756
  response_retry_settings: NotRequired[
766
757
  List[OutputGrafanaCloudResponseRetrySetting1TypedDict]
767
758
  ]
@@ -896,11 +887,6 @@ class OutputGrafanaCloudGrafanaCloud1(BaseModel):
896
887
  ] = None
897
888
  r"""List of headers that are safe to log in plain text"""
898
889
 
899
- send_structured_metadata: Annotated[
900
- Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
901
- ] = False
902
- r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
903
-
904
890
  response_retry_settings: Annotated[
905
891
  Optional[List[OutputGrafanaCloudResponseRetrySetting1]],
906
892
  pydantic.Field(alias="responseRetrySettings"),
@@ -215,6 +215,8 @@ class OutputKafkaAuthenticationTypedDict(TypedDict):
215
215
 
216
216
  disabled: NotRequired[bool]
217
217
  mechanism: NotRequired[OutputKafkaSASLMechanism]
218
+ oauth_enabled: NotRequired[bool]
219
+ r"""Enable OAuth authentication"""
218
220
 
219
221
 
220
222
  class OutputKafkaAuthentication(BaseModel):
@@ -224,6 +226,11 @@ class OutputKafkaAuthentication(BaseModel):
224
226
 
225
227
  mechanism: Optional[OutputKafkaSASLMechanism] = OutputKafkaSASLMechanism.PLAIN
226
228
 
229
+ oauth_enabled: Annotated[Optional[bool], pydantic.Field(alias="oauthEnabled")] = (
230
+ False
231
+ )
232
+ r"""Enable OAuth authentication"""
233
+
227
234
 
228
235
  class OutputKafkaMinimumTLSVersion(str, Enum):
229
236
  TL_SV1 = "TLSv1"
@@ -200,8 +200,6 @@ class OutputLokiTypedDict(TypedDict):
200
200
  r"""Honor any Retry-After header that specifies a delay (in seconds) no longer than 180 seconds after the retry request. @{product} limits the delay to 180 seconds, even if the Retry-After header specifies a longer delay. When enabled, takes precedence over user-configured retry options. When disabled, all Retry-After headers are ignored."""
201
201
  enable_dynamic_headers: NotRequired[bool]
202
202
  r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
203
- send_structured_metadata: NotRequired[bool]
204
- r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
205
203
  on_backpressure: NotRequired[OutputLokiBackpressureBehavior]
206
204
  r"""How to handle events when all receivers are exerting backpressure"""
207
205
  total_memory_limit_kb: NotRequired[float]
@@ -344,11 +342,6 @@ class OutputLoki(BaseModel):
344
342
  ] = False
345
343
  r"""Add per-event HTTP headers from the __headers field to outgoing requests. Events with different headers are batched and sent separately."""
346
344
 
347
- send_structured_metadata: Annotated[
348
- Optional[bool], pydantic.Field(alias="sendStructuredMetadata")
349
- ] = False
350
- r"""Add structured metadata fields from __structuredMetadata to each log. Key-value pairs must be strings."""
351
-
352
345
  on_backpressure: Annotated[
353
346
  Optional[OutputLokiBackpressureBehavior], pydantic.Field(alias="onBackpressure")
354
347
  ] = OutputLokiBackpressureBehavior.BLOCK
@@ -186,6 +186,43 @@ class OutputSyslogPqControls(BaseModel):
186
186
  pass
187
187
 
188
188
 
189
+ class OutputSyslogTLS(str, Enum):
190
+ r"""Whether to inherit TLS configs from group setting or disable TLS"""
191
+
192
+ INHERIT = "inherit"
193
+ OFF = "off"
194
+
195
+
196
+ class OutputSyslogHostTypedDict(TypedDict):
197
+ host: str
198
+ r"""The hostname of the receiver"""
199
+ port: NotRequired[float]
200
+ r"""The port to connect to on the provided host"""
201
+ tls: NotRequired[OutputSyslogTLS]
202
+ r"""Whether to inherit TLS configs from group setting or disable TLS"""
203
+ servername: NotRequired[str]
204
+ r"""Servername to use if establishing a TLS connection. If not specified, defaults to connection host (if not an IP); otherwise, uses the global TLS settings."""
205
+ weight: NotRequired[float]
206
+ r"""Assign a weight (>0) to each endpoint to indicate its traffic-handling capability"""
207
+
208
+
209
+ class OutputSyslogHost(BaseModel):
210
+ host: str
211
+ r"""The hostname of the receiver"""
212
+
213
+ port: Optional[float] = 9997
214
+ r"""The port to connect to on the provided host"""
215
+
216
+ tls: Optional[OutputSyslogTLS] = OutputSyslogTLS.INHERIT
217
+ r"""Whether to inherit TLS configs from group setting or disable TLS"""
218
+
219
+ servername: Optional[str] = None
220
+ r"""Servername to use if establishing a TLS connection. If not specified, defaults to connection host (if not an IP); otherwise, uses the global TLS settings."""
221
+
222
+ weight: Optional[float] = 1
223
+ r"""Assign a weight (>0) to each endpoint to indicate its traffic-handling capability"""
224
+
225
+
189
226
  class OutputSyslogTypedDict(TypedDict):
190
227
  type: OutputSyslogType
191
228
  id: NotRequired[str]
@@ -247,6 +284,16 @@ class OutputSyslogTypedDict(TypedDict):
247
284
  pq_mode: NotRequired[OutputSyslogMode]
248
285
  r"""In Error mode, PQ writes events to the filesystem if the Destination is unavailable. In Backpressure mode, PQ writes events to the filesystem when it detects backpressure from the Destination. In Always On mode, PQ always writes events to the filesystem."""
249
286
  pq_controls: NotRequired[OutputSyslogPqControlsTypedDict]
287
+ dns_resolve_period_sec: NotRequired[float]
288
+ r"""The interval in which to re-resolve any hostnames and pick up destinations from A records"""
289
+ load_balance_stats_period_sec: NotRequired[float]
290
+ r"""How far back in time to keep traffic stats for load balancing purposes"""
291
+ max_concurrent_senders: NotRequired[float]
292
+ r"""Maximum number of concurrent connections (per Worker Process). A random set of IPs will be picked on every DNS resolution period. Use 0 for unlimited."""
293
+ exclude_self: NotRequired[bool]
294
+ r"""Exclude all IPs of the current host from the list of any resolved hostnames"""
295
+ hosts: NotRequired[List[OutputSyslogHostTypedDict]]
296
+ r"""Set of hosts to load-balance data to."""
250
297
 
251
298
 
252
299
  class OutputSyslog(BaseModel):
@@ -379,3 +426,24 @@ class OutputSyslog(BaseModel):
379
426
  pq_controls: Annotated[
380
427
  Optional[OutputSyslogPqControls], pydantic.Field(alias="pqControls")
381
428
  ] = None
429
+
430
+ dns_resolve_period_sec: Annotated[
431
+ Optional[float], pydantic.Field(alias="dnsResolvePeriodSec")
432
+ ] = 600
433
+ r"""The interval in which to re-resolve any hostnames and pick up destinations from A records"""
434
+
435
+ load_balance_stats_period_sec: Annotated[
436
+ Optional[float], pydantic.Field(alias="loadBalanceStatsPeriodSec")
437
+ ] = 300
438
+ r"""How far back in time to keep traffic stats for load balancing purposes"""
439
+
440
+ max_concurrent_senders: Annotated[
441
+ Optional[float], pydantic.Field(alias="maxConcurrentSenders")
442
+ ] = 0
443
+ r"""Maximum number of concurrent connections (per Worker Process). A random set of IPs will be picked on every DNS resolution period. Use 0 for unlimited."""
444
+
445
+ exclude_self: Annotated[Optional[bool], pydantic.Field(alias="excludeSelf")] = False
446
+ r"""Exclude all IPs of the current host from the list of any resolved hostnames"""
447
+
448
+ hosts: Optional[List[OutputSyslogHost]] = None
449
+ r"""Set of hosts to load-balance data to."""
@@ -14,13 +14,15 @@ from typing import Any, Mapping, Optional
14
14
  class Nodes(BaseSDK):
15
15
  summaries: Summaries
16
16
 
17
- def __init__(self, sdk_config: SDKConfiguration) -> None:
18
- BaseSDK.__init__(self, sdk_config)
17
+ def __init__(
18
+ self, sdk_config: SDKConfiguration, parent_ref: Optional[object] = None
19
+ ) -> None:
20
+ BaseSDK.__init__(self, sdk_config, parent_ref=parent_ref)
19
21
  self.sdk_configuration = sdk_config
20
22
  self._init_sdks()
21
23
 
22
24
  def _init_sdks(self):
23
- self.summaries = Summaries(self.sdk_configuration)
25
+ self.summaries = Summaries(self.sdk_configuration, parent_ref=self.parent_ref)
24
26
 
25
27
  def list(
26
28
  self,