catocli 3.0.14__py3-none-any.whl → 3.0.22__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of catocli might be problematic.
Files changed (192)
  1. catocli/Utils/clidriver.py +16 -8
  2. catocli/Utils/formatter_account_metrics.py +544 -0
  3. catocli/Utils/formatter_app_stats.py +184 -0
  4. catocli/Utils/formatter_app_stats_timeseries.py +377 -0
  5. catocli/Utils/formatter_events_timeseries.py +459 -0
  6. catocli/Utils/formatter_socket_port_metrics.py +189 -0
  7. catocli/Utils/formatter_socket_port_metrics_timeseries.py +339 -0
  8. catocli/Utils/formatter_utils.py +251 -0
  9. catocli/Utils/help_formatter.py +1 -1
  10. catocli/__init__.py +1 -1
  11. catocli/clisettings.json +37 -5
  12. catocli/parsers/custom/query_eventsFeed/README.md +94 -0
  13. catocli/parsers/custom/scim/README.md +346 -0
  14. catocli/parsers/custom/scim/scim_client.py +132 -26
  15. catocli/parsers/custom/scim/scim_commands.py +14 -56
  16. catocli/parsers/customParserApiClient.py +213 -65
  17. catocli/parsers/mutation_policy/__init__.py +405 -405
  18. catocli/parsers/mutation_site/__init__.py +15 -15
  19. catocli/parsers/mutation_sites/__init__.py +15 -15
  20. catocli/parsers/query_accountMetrics/README.md +99 -9
  21. catocli/parsers/query_accountMetrics/__init__.py +6 -0
  22. catocli/parsers/query_appStats/README.md +11 -11
  23. catocli/parsers/query_appStats/__init__.py +4 -2
  24. catocli/parsers/query_appStatsTimeSeries/README.md +10 -10
  25. catocli/parsers/query_appStatsTimeSeries/__init__.py +4 -2
  26. catocli/parsers/query_auditFeed/README.md +9 -9
  27. catocli/parsers/query_events/README.md +9 -9
  28. catocli/parsers/query_eventsTimeSeries/README.md +289 -9
  29. catocli/parsers/query_eventsTimeSeries/__init__.py +6 -0
  30. catocli/parsers/query_policy/__init__.py +42 -42
  31. catocli/parsers/query_socketPortMetrics/README.md +53 -9
  32. catocli/parsers/query_socketPortMetrics/__init__.py +6 -0
  33. catocli/parsers/query_socketPortMetricsTimeSeries/README.md +92 -9
  34. catocli/parsers/query_socketPortMetricsTimeSeries/__init__.py +4 -2
  35. {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/METADATA +1 -1
  36. {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/RECORD +187 -183
  37. {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/top_level.txt +0 -1
  38. models/mutation.policy.antiMalwareFileHash.addRule.json +20 -0
  39. models/mutation.policy.antiMalwareFileHash.addSection.json +103 -0
  40. models/mutation.policy.antiMalwareFileHash.createPolicyRevision.json +123 -0
  41. models/mutation.policy.antiMalwareFileHash.discardPolicyRevision.json +123 -0
  42. models/mutation.policy.antiMalwareFileHash.moveRule.json +20 -0
  43. models/mutation.policy.antiMalwareFileHash.moveSection.json +103 -0
  44. models/mutation.policy.antiMalwareFileHash.publishPolicyRevision.json +123 -0
  45. models/mutation.policy.antiMalwareFileHash.removeRule.json +20 -0
  46. models/mutation.policy.antiMalwareFileHash.removeSection.json +103 -0
  47. models/mutation.policy.antiMalwareFileHash.updatePolicy.json +123 -0
  48. models/mutation.policy.antiMalwareFileHash.updateRule.json +20 -0
  49. models/mutation.policy.antiMalwareFileHash.updateSection.json +103 -0
  50. models/mutation.policy.appTenantRestriction.addRule.json +20 -0
  51. models/mutation.policy.appTenantRestriction.addSection.json +103 -0
  52. models/mutation.policy.appTenantRestriction.createPolicyRevision.json +123 -0
  53. models/mutation.policy.appTenantRestriction.discardPolicyRevision.json +123 -0
  54. models/mutation.policy.appTenantRestriction.moveRule.json +20 -0
  55. models/mutation.policy.appTenantRestriction.moveSection.json +103 -0
  56. models/mutation.policy.appTenantRestriction.publishPolicyRevision.json +123 -0
  57. models/mutation.policy.appTenantRestriction.removeRule.json +20 -0
  58. models/mutation.policy.appTenantRestriction.removeSection.json +103 -0
  59. models/mutation.policy.appTenantRestriction.updatePolicy.json +123 -0
  60. models/mutation.policy.appTenantRestriction.updateRule.json +20 -0
  61. models/mutation.policy.appTenantRestriction.updateSection.json +103 -0
  62. models/mutation.policy.applicationControl.addRule.json +20 -0
  63. models/mutation.policy.applicationControl.addSection.json +103 -0
  64. models/mutation.policy.applicationControl.createPolicyRevision.json +123 -0
  65. models/mutation.policy.applicationControl.discardPolicyRevision.json +123 -0
  66. models/mutation.policy.applicationControl.moveRule.json +20 -0
  67. models/mutation.policy.applicationControl.moveSection.json +103 -0
  68. models/mutation.policy.applicationControl.publishPolicyRevision.json +123 -0
  69. models/mutation.policy.applicationControl.removeRule.json +20 -0
  70. models/mutation.policy.applicationControl.removeSection.json +103 -0
  71. models/mutation.policy.applicationControl.updatePolicy.json +123 -0
  72. models/mutation.policy.applicationControl.updateRule.json +20 -0
  73. models/mutation.policy.applicationControl.updateSection.json +103 -0
  74. models/mutation.policy.dynamicIpAllocation.addRule.json +20 -0
  75. models/mutation.policy.dynamicIpAllocation.addSection.json +103 -0
  76. models/mutation.policy.dynamicIpAllocation.createPolicyRevision.json +123 -0
  77. models/mutation.policy.dynamicIpAllocation.discardPolicyRevision.json +123 -0
  78. models/mutation.policy.dynamicIpAllocation.moveRule.json +20 -0
  79. models/mutation.policy.dynamicIpAllocation.moveSection.json +103 -0
  80. models/mutation.policy.dynamicIpAllocation.publishPolicyRevision.json +123 -0
  81. models/mutation.policy.dynamicIpAllocation.removeRule.json +20 -0
  82. models/mutation.policy.dynamicIpAllocation.removeSection.json +103 -0
  83. models/mutation.policy.dynamicIpAllocation.updatePolicy.json +123 -0
  84. models/mutation.policy.dynamicIpAllocation.updateRule.json +20 -0
  85. models/mutation.policy.dynamicIpAllocation.updateSection.json +103 -0
  86. models/mutation.policy.internetFirewall.addRule.json +20 -0
  87. models/mutation.policy.internetFirewall.addSection.json +103 -0
  88. models/mutation.policy.internetFirewall.createPolicyRevision.json +123 -0
  89. models/mutation.policy.internetFirewall.discardPolicyRevision.json +123 -0
  90. models/mutation.policy.internetFirewall.moveRule.json +20 -0
  91. models/mutation.policy.internetFirewall.moveSection.json +103 -0
  92. models/mutation.policy.internetFirewall.publishPolicyRevision.json +123 -0
  93. models/mutation.policy.internetFirewall.removeRule.json +20 -0
  94. models/mutation.policy.internetFirewall.removeSection.json +103 -0
  95. models/mutation.policy.internetFirewall.updatePolicy.json +123 -0
  96. models/mutation.policy.internetFirewall.updateRule.json +20 -0
  97. models/mutation.policy.internetFirewall.updateSection.json +103 -0
  98. models/mutation.policy.remotePortFwd.addRule.json +20 -0
  99. models/mutation.policy.remotePortFwd.addSection.json +103 -0
  100. models/mutation.policy.remotePortFwd.createPolicyRevision.json +123 -0
  101. models/mutation.policy.remotePortFwd.discardPolicyRevision.json +123 -0
  102. models/mutation.policy.remotePortFwd.moveRule.json +20 -0
  103. models/mutation.policy.remotePortFwd.moveSection.json +103 -0
  104. models/mutation.policy.remotePortFwd.publishPolicyRevision.json +123 -0
  105. models/mutation.policy.remotePortFwd.removeRule.json +20 -0
  106. models/mutation.policy.remotePortFwd.removeSection.json +103 -0
  107. models/mutation.policy.remotePortFwd.updatePolicy.json +123 -0
  108. models/mutation.policy.remotePortFwd.updateRule.json +20 -0
  109. models/mutation.policy.remotePortFwd.updateSection.json +103 -0
  110. models/mutation.policy.socketLan.addRule.json +40 -0
  111. models/mutation.policy.socketLan.addSection.json +103 -0
  112. models/mutation.policy.socketLan.createPolicyRevision.json +143 -0
  113. models/mutation.policy.socketLan.discardPolicyRevision.json +143 -0
  114. models/mutation.policy.socketLan.moveRule.json +40 -0
  115. models/mutation.policy.socketLan.moveSection.json +103 -0
  116. models/mutation.policy.socketLan.publishPolicyRevision.json +143 -0
  117. models/mutation.policy.socketLan.removeRule.json +40 -0
  118. models/mutation.policy.socketLan.removeSection.json +103 -0
  119. models/mutation.policy.socketLan.updatePolicy.json +143 -0
  120. models/mutation.policy.socketLan.updateRule.json +40 -0
  121. models/mutation.policy.socketLan.updateSection.json +103 -0
  122. models/mutation.policy.terminalServer.addRule.json +20 -0
  123. models/mutation.policy.terminalServer.addSection.json +103 -0
  124. models/mutation.policy.terminalServer.createPolicyRevision.json +123 -0
  125. models/mutation.policy.terminalServer.discardPolicyRevision.json +123 -0
  126. models/mutation.policy.terminalServer.moveRule.json +20 -0
  127. models/mutation.policy.terminalServer.moveSection.json +103 -0
  128. models/mutation.policy.terminalServer.publishPolicyRevision.json +123 -0
  129. models/mutation.policy.terminalServer.removeRule.json +20 -0
  130. models/mutation.policy.terminalServer.removeSection.json +103 -0
  131. models/mutation.policy.terminalServer.updatePolicy.json +123 -0
  132. models/mutation.policy.terminalServer.updateRule.json +20 -0
  133. models/mutation.policy.terminalServer.updateSection.json +103 -0
  134. models/mutation.policy.tlsInspect.addRule.json +20 -0
  135. models/mutation.policy.tlsInspect.addSection.json +103 -0
  136. models/mutation.policy.tlsInspect.createPolicyRevision.json +123 -0
  137. models/mutation.policy.tlsInspect.discardPolicyRevision.json +123 -0
  138. models/mutation.policy.tlsInspect.moveRule.json +20 -0
  139. models/mutation.policy.tlsInspect.moveSection.json +103 -0
  140. models/mutation.policy.tlsInspect.publishPolicyRevision.json +123 -0
  141. models/mutation.policy.tlsInspect.removeRule.json +20 -0
  142. models/mutation.policy.tlsInspect.removeSection.json +103 -0
  143. models/mutation.policy.tlsInspect.updatePolicy.json +123 -0
  144. models/mutation.policy.tlsInspect.updateRule.json +20 -0
  145. models/mutation.policy.tlsInspect.updateSection.json +103 -0
  146. models/mutation.policy.wanFirewall.addRule.json +20 -0
  147. models/mutation.policy.wanFirewall.addSection.json +103 -0
  148. models/mutation.policy.wanFirewall.createPolicyRevision.json +123 -0
  149. models/mutation.policy.wanFirewall.discardPolicyRevision.json +123 -0
  150. models/mutation.policy.wanFirewall.moveRule.json +20 -0
  151. models/mutation.policy.wanFirewall.moveSection.json +103 -0
  152. models/mutation.policy.wanFirewall.publishPolicyRevision.json +123 -0
  153. models/mutation.policy.wanFirewall.removeRule.json +20 -0
  154. models/mutation.policy.wanFirewall.removeSection.json +103 -0
  155. models/mutation.policy.wanFirewall.updatePolicy.json +123 -0
  156. models/mutation.policy.wanFirewall.updateRule.json +20 -0
  157. models/mutation.policy.wanFirewall.updateSection.json +103 -0
  158. models/mutation.policy.wanNetwork.addRule.json +20 -0
  159. models/mutation.policy.wanNetwork.addSection.json +103 -0
  160. models/mutation.policy.wanNetwork.createPolicyRevision.json +123 -0
  161. models/mutation.policy.wanNetwork.discardPolicyRevision.json +123 -0
  162. models/mutation.policy.wanNetwork.moveRule.json +20 -0
  163. models/mutation.policy.wanNetwork.moveSection.json +103 -0
  164. models/mutation.policy.wanNetwork.publishPolicyRevision.json +123 -0
  165. models/mutation.policy.wanNetwork.removeRule.json +20 -0
  166. models/mutation.policy.wanNetwork.removeSection.json +103 -0
  167. models/mutation.policy.wanNetwork.updatePolicy.json +123 -0
  168. models/mutation.policy.wanNetwork.updateRule.json +20 -0
  169. models/mutation.policy.wanNetwork.updateSection.json +103 -0
  170. models/mutation.xdr.analystFeedback.json +822 -87
  171. models/query.policy.antiMalwareFileHash.policy.json +123 -0
  172. models/query.policy.appTenantRestriction.policy.json +123 -0
  173. models/query.policy.applicationControl.policy.json +123 -0
  174. models/query.policy.dynamicIpAllocation.policy.json +123 -0
  175. models/query.policy.internetFirewall.policy.json +123 -0
  176. models/query.policy.remotePortFwd.policy.json +123 -0
  177. models/query.policy.socketLan.policy.json +143 -0
  178. models/query.policy.terminalServer.policy.json +123 -0
  179. models/query.policy.tlsInspect.policy.json +123 -0
  180. models/query.policy.wanFirewall.policy.json +123 -0
  181. models/query.policy.wanNetwork.policy.json +123 -0
  182. models/query.xdr.stories.json +822 -87
  183. models/query.xdr.story.json +822 -87
  184. schema/catolib.py +34 -17
  185. catocli/Utils/csv_formatter.py +0 -663
  186. scripts/catolib.py +0 -62
  187. scripts/export_if_rules_to_json.py +0 -188
  188. scripts/export_wf_rules_to_json.py +0 -111
  189. scripts/import_wf_rules_to_tfstate.py +0 -331
  190. {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/WHEEL +0 -0
  191. {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/entry_points.txt +0 -0
  192. {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/licenses/LICENSE +0 -0
catocli/Utils/formatter_events_timeseries.py
@@ -0,0 +1,459 @@
+#!/usr/bin/env python3
+"""
+Events TimeSeries Formatter for Cato CLI
+
+This module provides functions to format eventsTimeSeries API responses
+into JSON and CSV formats, with special handling for granularity multiplication
+when sum aggregation is used on appropriate fields.
+
+Key functionality:
+- Handles granularity multiplication for sum aggregations when appropriate
+- Excludes rate, percentage, and normalized fields from multiplication
+- Provides both JSON and CSV output formats
+"""
+
+import csv
+import io
+import json
+import re
+from datetime import datetime
+from typing import Dict, List, Any, Tuple
+
+# Import shared utility functions
+try:
+    from .formatter_utils import format_timestamp, parse_label_for_dimensions_and_measure
+except ImportError:
+    try:
+        from catocli.Utils.formatter_utils import format_timestamp, parse_label_for_dimensions_and_measure
+    except ImportError:
+        from formatter_utils import format_timestamp, parse_label_for_dimensions_and_measure
+
+
+def should_multiply_by_granularity(field_name: str, agg_type: str) -> bool:
+    """
+    Determine if a field with sum aggregation should be multiplied by granularity
+
+    Args:
+        field_name: The name of the field being aggregated
+        agg_type: The aggregation type (e.g., 'sum', 'avg', 'max')
+
+    Returns:
+        True if the field should be multiplied by granularity, False otherwise
+    """
+    # Only apply to sum aggregations
+    if agg_type.lower() != 'sum':
+        return False
+
+    # Fields that should NOT be multiplied by granularity even with sum aggregation
+    exclude_patterns = [
+        # Rate fields (already per-time-unit)
+        '_per_second', '_per_minute', '_per_hour', 'rate', 'bps', 'pps',
+        'bytes_per_second', 'packets_per_second',
+
+        # Percentage and ratio fields
+        'percent', 'percentage', 'ratio', '_pct', 'utilization',
+        'cpu_utilization', 'memory_usage_percent',
+
+        # Score and normalized values
+        'score', 'threat_score', 'confidence_level', 'risk_level',
+
+        # Statistical measures (already calculated)
+        'avg_', 'mean_', 'median_', 'p95_', 'p99_', 'percentile',
+        'avg_response_time', 'p95_latency',
+
+        # Unique/distinct counts
+        'distinct_', 'unique_', 'cardinality',
+        'distinct_users', 'unique_ips',
+
+        # State/status values
+        'status', 'state', 'health_score', 'connection_status'
+    ]
+
+    field_lower = field_name.lower()
+
+    # Check if field matches any exclusion pattern
+    for pattern in exclude_patterns:
+        if pattern in field_lower:
+            return False
+
+    # Default: multiply sum aggregations by granularity
+    return True
+
+
+def format_events_timeseries(response_data: Dict[str, Any], output_format: str = 'json') -> str:
+    """
+    Convert eventsTimeSeries JSON response to specified format (JSON or CSV)
+
+    Args:
+        response_data: JSON response from eventsTimeSeries query
+        output_format: 'json' or 'csv'
+
+    Returns:
+        Formatted string in the requested format, or None if no processable data
+    """
+    if output_format.lower() == 'csv':
+        return _format_events_timeseries_to_csv(response_data)
+    else:
+        # Default to JSON format with organized structure
+        return _format_events_timeseries_to_json(response_data)
+
+
+def _format_events_timeseries_to_json(response_data: Dict[str, Any]) -> str:
+    """
+    Convert eventsTimeSeries JSON response to organized JSON format with granularity multiplication
+
+    Args:
+        response_data: JSON response from eventsTimeSeries query
+
+    Returns:
+        JSON formatted string, or None if no processable data
+    """
+    if not response_data or not isinstance(response_data, dict):
+        return None
+
+    # Check for API errors
+    if 'errors' in response_data:
+        return None
+
+    if 'data' not in response_data or 'eventsTimeSeries' not in response_data['data']:
+        return None
+
+    events_ts = response_data['data']['eventsTimeSeries']
+    if events_ts is None:
+        return None
+
+    timeseries = events_ts.get('timeseries', [])
+    granularity = events_ts.get('granularity', 1)
+
+    if not timeseries:
+        return None
+
+    # Parse dimension information and measures from labels
+    parsed_series = []
+    all_timestamps = set()
+    all_dimensions = set()
+    all_measures = set()
+
+    for series in timeseries:
+        label = series.get('label', '')
+        data_points = series.get('data', [])
+        units = series.get('units', '')
+
+        # Get measure and aggregation type from key structure
+        key_info = series.get('key', {})
+        measure = key_info.get('measureFieldName', '')
+        dimensions = {}
+
+        # Extract aggregation type from label (e.g., "sum(event_count)")
+        agg_type = ''
+        if '(' in label and ')' in label:
+            agg_match = re.match(r'(\w+)\(', label)
+            if agg_match:
+                agg_type = agg_match.group(1)
+
+        # Extract dimensions from key.dimensions array
+        key_dimensions = key_info.get('dimensions', [])
+        for dim_info in key_dimensions:
+            if isinstance(dim_info, dict) and 'fieldName' in dim_info and 'value' in dim_info:
+                dimensions[dim_info['fieldName']] = dim_info['value']
+
+        # Fallback to label parsing if key method fails
+        if not measure and not dimensions:
+            measure, dimensions = parse_label_for_dimensions_and_measure(label)
+
+        # Determine if we should multiply by granularity
+        should_multiply = should_multiply_by_granularity(measure, agg_type)
+
+        # Create series entry with safe data parsing and granularity adjustment
+        data_dict = {}
+        for point in data_points:
+            if isinstance(point, (list, tuple)) and len(point) >= 2:
+                timestamp = int(point[0])
+                value = point[1]
+
+                # Apply granularity multiplication if appropriate
+                if should_multiply and value is not None and granularity > 1:
+                    try:
+                        computed_value = round(float(value) * granularity, 3)
+                        data_dict[timestamp] = {
+                            'original_value': value,
+                            'computed_value': computed_value,
+                            'granularity': granularity,
+                            'granularity_applied': True
+                        }
+                    except (ValueError, TypeError):
+                        data_dict[timestamp] = {
+                            'original_value': value,
+                            'computed_value': value,
+                            'granularity': granularity,
+                            'granularity_applied': False,
+                            'note': 'Could not convert to numeric for granularity adjustment'
+                        }
+                else:
+                    data_dict[timestamp] = {
+                        'original_value': value,
+                        'computed_value': value,
+                        'granularity': granularity,
+                        'granularity_applied': False,
+                        'note': 'Granularity not applied (field type or non-sum aggregation)'
+                    }
+
+                all_timestamps.add(timestamp)
+
+        series_entry = {
+            'label': label,
+            'measure': measure,
+            'aggregation_type': agg_type,
+            'dimensions': dimensions,
+            'granularity_multiplied': should_multiply,
+            'data_points': len(data_dict),
+            'time_range': {
+                'start': format_timestamp(min(data_dict.keys())) if data_dict else None,
+                'end': format_timestamp(max(data_dict.keys())) if data_dict else None
+            },
+            'data': data_dict
+        }
+        parsed_series.append(series_entry)
+
+        # Collect metadata
+        all_measures.add(measure)
+        all_dimensions.update(dimensions.keys())
+
+    # Organize the response
+    organized_data = {
+        "eventsTimeSeries": {
+            "summary": {
+                "total_series": len(parsed_series),
+                "total_timestamps": len(all_timestamps),
+                "granularity": granularity,
+                "time_range": {
+                    "start": format_timestamp(min(all_timestamps)) if all_timestamps else None,
+                    "end": format_timestamp(max(all_timestamps)) if all_timestamps else None
+                },
+                "measures": sorted(list(all_measures)),
+                "dimensions": sorted(list(all_dimensions)),
+                "granularity_note": "Sum aggregations on count fields are multiplied by granularity when appropriate"
+            },
+            "series": []
+        }
+    }
+
+    # Group series by dimension combinations
+    dimension_groups = {}
+    for series in parsed_series:
+        dim_key = tuple(sorted(series['dimensions'].items()))
+        if dim_key not in dimension_groups:
+            dimension_groups[dim_key] = {
+                'dimensions': series['dimensions'],
+                'measures': {},
+                'time_range': series['time_range']
+            }
+        dimension_groups[dim_key]['measures'][series['measure']] = {
+            'label': series['label'],
+            'aggregation_type': series['aggregation_type'],
+            'granularity_multiplied': series['granularity_multiplied'],
+            'data_points': series['data_points'],
+            'data': series['data']
+        }
+
+    # Convert to organized format
+    for dim_combo, group_data in dimension_groups.items():
+        series_data = {
+            'dimensions': group_data['dimensions'],
+            'time_range': group_data['time_range'],
+            'measures': {}
+        }
+
+        # Format each measure's data
+        for measure, measure_data in group_data['measures'].items():
+            formatted_data = {}
+            for timestamp, value_info in measure_data['data'].items():
+                timestamp_str = format_timestamp(timestamp)
+                formatted_data[timestamp_str] = value_info
+
+            series_data['measures'][measure] = {
+                'label': measure_data['label'],
+                'aggregation_type': measure_data['aggregation_type'],
+                'granularity_multiplied': measure_data['granularity_multiplied'],
+                'data_points': measure_data['data_points'],
+                'data': formatted_data
+            }
+
+        organized_data["eventsTimeSeries"]["series"].append(series_data)
+
+    return json.dumps(organized_data, indent=2)
+
+
+def _format_events_timeseries_to_csv(response_data: Dict[str, Any]) -> str:
+    """
+    Convert eventsTimeSeries JSON response to CSV format with granularity multiplication
+
+    Args:
+        response_data: JSON response from eventsTimeSeries query
+
+    Returns:
+        CSV formatted string in long format with one row per timestamp, or None if no processable data
+    """
+    if not response_data or 'data' not in response_data or 'eventsTimeSeries' not in response_data['data']:
+        return None
+
+    events_ts = response_data['data']['eventsTimeSeries']
+    if events_ts is None:
+        return None
+
+    timeseries = events_ts.get('timeseries', [])
+    granularity = events_ts.get('granularity', 1)
+
+    if not timeseries:
+        return None
+
+    # Parse dimension information and measures from labels
+    parsed_series = []
+    all_timestamps = set()
+
+    for series in timeseries:
+        label = series.get('label', '')
+        data_points = series.get('data', [])
+
+        # Get measure and aggregation type from key structure
+        key_info = series.get('key', {})
+        measure = key_info.get('measureFieldName', '')
+        dimensions = {}
+
+        # Extract aggregation type from label
+        agg_type = ''
+        if '(' in label and ')' in label:
+            agg_match = re.match(r'(\w+)\(', label)
+            if agg_match:
+                agg_type = agg_match.group(1)
+
+        # Extract dimensions from key.dimensions array
+        key_dimensions = key_info.get('dimensions', [])
+        for dim_info in key_dimensions:
+            if isinstance(dim_info, dict) and 'fieldName' in dim_info and 'value' in dim_info:
+                dimensions[dim_info['fieldName']] = dim_info['value']
+
+        # Fallback to label parsing if key method fails
+        if not measure and not dimensions:
+            measure, dimensions = parse_label_for_dimensions_and_measure(label)
+
+        # Determine if we should multiply by granularity
+        should_multiply = should_multiply_by_granularity(measure, agg_type)
+
+        # Create series entry with safe data parsing
+        data_dict = {}
+        for point in data_points:
+            if isinstance(point, (list, tuple)) and len(point) >= 2:
+                timestamp = int(point[0])
+                value = point[1]
+
+                # Apply granularity multiplication if appropriate
+                if should_multiply and value is not None and granularity > 1:
+                    try:
+                        computed_value = round(float(value) * granularity, 3)
+                        data_dict[timestamp] = computed_value
+                    except (ValueError, TypeError):
+                        data_dict[timestamp] = value
+                else:
+                    data_dict[timestamp] = value
+
+                all_timestamps.add(timestamp)
+
+        series_entry = {
+            'measure': measure,
+            'aggregation_type': agg_type,
+            'dimensions': dimensions,
+            'granularity_multiplied': should_multiply,
+            'data': data_dict
+        }
+        parsed_series.append(series_entry)
+
+    # Sort timestamps
+    sorted_timestamps = sorted(all_timestamps)
+
+    # Collect all data in long format (one row per timestamp and dimension combination)
+    rows = []
+
+    # Get all unique dimension combinations
+    dimension_combos = {}
+    for series in parsed_series:
+        dim_key = tuple(sorted(series['dimensions'].items()))
+        if dim_key not in dimension_combos:
+            dimension_combos[dim_key] = {}
+        dimension_combos[dim_key][series['measure']] = {
+            'data': series['data'],
+            'aggregation_type': series['aggregation_type'],
+            'granularity_multiplied': series['granularity_multiplied']
+        }
+
+    # Create rows for each timestamp and dimension combination
+    for dim_combo, measures_data in dimension_combos.items():
+        dim_dict = dict(dim_combo)
+
+        for timestamp in sorted_timestamps:
+            # Build row data for this timestamp
+            row_data = {
+                'timestamp_period': format_timestamp(timestamp),
+                'granularity': granularity
+            }
+
+            # Add dimension values
+            for key, value in dim_dict.items():
+                row_data[key] = value
+
+            # Add measure values for this timestamp
+            for measure, measure_info in measures_data.items():
+                value = measure_info['data'].get(timestamp, '')
+                agg_type = measure_info['aggregation_type']
+                granularity_applied = measure_info['granularity_multiplied']
+
+                # Add suffixes to indicate processing
+                if granularity_applied and granularity > 1:
+                    row_data[f'{measure}_computed'] = value
+                    row_data[f'{measure}_notes'] = f'Multiplied by granularity ({granularity}s) for {agg_type} aggregation'
+                else:
+                    row_data[measure] = value
+                    if agg_type == 'sum':
+                        row_data[f'{measure}_notes'] = f'No granularity adjustment (field type exclusion)'
+
+            rows.append(row_data)
+
+    if not rows:
+        return None
+
+    # Create CSV output
+    output = io.StringIO()
+    writer = csv.writer(output)
+
+    # Build header dynamically from all available columns
+    all_columns = set()
+    for row_data in rows:
+        all_columns.update(row_data.keys())
+
+    # Sort columns with timestamp_period first, then granularity, then dimensions, then measures
+    dimension_columns = []
+    measure_columns = []
+    note_columns = []
+
+    for col in sorted(all_columns):
+        if col in ['timestamp_period', 'granularity']:
+            continue  # Will be added first
+        elif col.endswith('_notes'):
+            note_columns.append(col)
+        elif col.endswith('_computed') or col in ['event_count', 'downstream', 'upstream', 'traffic']:
+            measure_columns.append(col)
+        else:
+            dimension_columns.append(col)
+
+    header = ['timestamp_period', 'granularity'] + sorted(dimension_columns) + sorted(measure_columns) + sorted(note_columns)
+    writer.writerow(header)
+
+    # Write data rows
+    for row_data in rows:
+        row = []
+        for col in header:
+            value = row_data.get(col, '')
+            row.append(value)
+        writer.writerow(row)
+
+    return output.getvalue()
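
The granularity handling above is easiest to see with a small input. The following is a minimal, hypothetical usage sketch, not part of the release: it assumes the module is importable as catocli.Utils.formatter_events_timeseries, the payload is synthetic and only populates the fields the formatter actually reads (data.eventsTimeSeries.granularity plus each series' label, key, and data), and format_timestamp comes from the new formatter_utils module, which is not shown in this hunk.

    # Hypothetical usage sketch (not part of the release): exercises the
    # exclusion logic and the CSV path of the new formatter.
    from catocli.Utils.formatter_events_timeseries import (
        format_events_timeseries,
        should_multiply_by_granularity,
    )

    # Rate-like fields and non-sum aggregations are never multiplied.
    assert should_multiply_by_granularity('event_count', 'sum') is True
    assert should_multiply_by_granularity('bytes_per_second', 'sum') is False
    assert should_multiply_by_granularity('event_count', 'avg') is False

    sample_response = {
        "data": {
            "eventsTimeSeries": {
                "granularity": 3600,  # bucket size in seconds (assumed)
                "timeseries": [
                    {
                        "label": "sum(event_count)",
                        "units": "count",
                        "key": {
                            "measureFieldName": "event_count",
                            "dimensions": [
                                {"fieldName": "event_type", "value": "Security"},
                            ],
                        },
                        # [timestamp, value] pairs, as the formatter expects
                        "data": [[1700000000000, 2.5], [1700003600000, 4.0]],
                    },
                ],
            }
        }
    }

    # With granularity 3600 and a sum aggregation, 2.5 is reported as
    # event_count_computed = 9000.0, plus an event_count_notes column.
    print(format_events_timeseries(sample_response, output_format='csv'))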
catocli/Utils/formatter_socket_port_metrics.py
@@ -0,0 +1,189 @@
+#!/usr/bin/env python3
+"""
+Socket Port Metrics Formatter for Cato CLI
+
+This module provides functions to format socketPortMetrics API responses
+into JSON and CSV formats, with special handling for field data
+and unit conversions.
+"""
+
+import csv
+import io
+import json
+from typing import Dict, List, Any
+
+# Import shared utility functions
+try:
+    from .formatter_utils import convert_bytes_to_mb, format_timestamp, is_bytes_measure, parse_label_for_dimensions_and_measure
+except ImportError:
+    try:
+        from catocli.Utils.formatter_utils import convert_bytes_to_mb, format_timestamp, is_bytes_measure, parse_label_for_dimensions_and_measure
+    except ImportError:
+        from formatter_utils import convert_bytes_to_mb, format_timestamp, is_bytes_measure, parse_label_for_dimensions_and_measure
+
+
+def format_socket_port_metrics(response_data: Dict[str, Any], output_format: str = 'json') -> str:
+    """
+    Convert socketPortMetrics JSON response to specified format (JSON or CSV)
+
+    Args:
+        response_data: JSON response from socketPortMetrics query
+        output_format: 'json' or 'csv'
+
+    Returns:
+        Formatted string in the requested format, or None if no processable data
+    """
+    if output_format.lower() == 'csv':
+        return _format_socket_port_metrics_to_csv(response_data)
+    else:
+        # Default to JSON format with organized structure
+        return _format_socket_port_metrics_to_json(response_data)
+
+
+def _format_socket_port_metrics_to_json(response_data: Dict[str, Any]) -> str:
+    """
+    Convert socketPortMetrics JSON response to organized JSON format
+
+    Args:
+        response_data: JSON response from socketPortMetrics query
+
+    Returns:
+        JSON formatted string, or None if no processable data
+    """
+    if not response_data or not isinstance(response_data, dict):
+        return None
+
+    # Check for API errors
+    if 'errors' in response_data:
+        return None
+
+    if 'data' not in response_data or 'socketPortMetrics' not in response_data['data']:
+        return None
+
+    socket_metrics = response_data['data']['socketPortMetrics']
+    if not socket_metrics or not isinstance(socket_metrics, dict):
+        return None
+
+    records = socket_metrics.get('records', [])
+
+    if not records:
+        return None
+
+    # Organize data in a more structured format
+    organized_data = {
+        "socketPortMetrics": {
+            "summary": {
+                "total_records": len(records),
+                "field_names": list(records[0].get('fieldsMap', {}).keys()) if records else [],
+                "data_types": records[0].get('fieldsUnitTypes', []) if records else []
+            },
+            "records": []
+        }
+    }
+
+    # Process each record
+    for record in records:
+        fields_map = record.get('fieldsMap', {})
+        record_unit_types = record.get('fieldsUnitTypes', [])
+
+        record_data = {}
+
+        for i, (field, value) in enumerate(fields_map.items()):
+            # Get unit type for this field
+            unit_type = record_unit_types[i] if i < len(record_unit_types) else "unknown"
+
+            # Add unit type information for bytes fields using shared utility
+            if is_bytes_measure(field, unit_type):
+                formatted_mb = convert_bytes_to_mb(value)
+                if formatted_mb and formatted_mb != str(value):
+                    record_data[field] = {
+                        "value": value,
+                        "formatted_mb": formatted_mb,
+                        "unit_type": "bytes"
+                    }
+                else:
+                    record_data[field] = {
+                        "value": value,
+                        "unit_type": "bytes"
+                    }
+            else:
+                record_data[field] = {
+                    "value": value,
+                    "unit_type": unit_type
+                }
+
+        organized_data["socketPortMetrics"]["records"].append(record_data)
+
+    return json.dumps(organized_data, indent=2)
+
+
+def _format_socket_port_metrics_to_csv(response_data: Dict[str, Any]) -> str:
+    """
+    Convert socketPortMetrics JSON response to CSV format
+
+    Args:
+        response_data: JSON response from socketPortMetrics query
+
+    Returns:
+        CSV formatted string, or None if no processable data
+    """
+    if not response_data or not isinstance(response_data, dict):
+        return None
+
+    # Check for API errors
+    if 'errors' in response_data:
+        return None
+
+    if 'data' not in response_data or 'socketPortMetrics' not in response_data['data']:
+        return None
+
+    socket_metrics = response_data['data']['socketPortMetrics']
+    if not socket_metrics or not isinstance(socket_metrics, dict):
+        return None
+
+    records = socket_metrics.get('records', [])
+
+    if not records:
+        return None
+
+    # Get all possible field names from the first record's fieldsMap
+    first_record = records[0]
+    field_names = list(first_record.get('fieldsMap', {}).keys())
+    field_unit_types = first_record.get('fieldsUnitTypes', [])
+
+    # Create CSV output
+    output = io.StringIO()
+    writer = csv.writer(output)
+
+    # Create headers with _mb suffix for bytes fields using shared utility
+    headers = []
+    for i, field_name in enumerate(field_names):
+        unit_type = field_unit_types[i] if i < len(field_unit_types) else "unknown"
+        if is_bytes_measure(field_name, unit_type):
+            headers.append(f'{field_name}_mb')
+        else:
+            headers.append(field_name)
+
+    # Write header
+    writer.writerow(headers)
+
+    # Write data rows
+    for record in records:
+        fields_map = record.get('fieldsMap', {})
+        record_unit_types = record.get('fieldsUnitTypes', [])
+        row = []
+
+        for i, field in enumerate(field_names):
+            value = fields_map.get(field, '')
+            unit_type = record_unit_types[i] if i < len(record_unit_types) else "unknown"
+
+            # Convert bytes to MB using shared utility function
+            if is_bytes_measure(field, unit_type):
+                formatted_value = convert_bytes_to_mb(value)
+                row.append(formatted_value if formatted_value else value)
+            else:
+                row.append(value)
+
+        writer.writerow(row)
+
+    return output.getvalue()
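
A similar minimal sketch for the socket port metrics formatter. The record below is synthetic: the field names and unit-type strings are illustrative guesses at the fieldsMap/fieldsUnitTypes shape, and is_bytes_measure/convert_bytes_to_mb live in the shared formatter_utils module added in this release (not shown in this hunk), so the exact converted output depends on their implementation.

    # Hypothetical usage sketch (not part of the release). fieldsUnitTypes
    # is positional, pairing with fieldsMap insertion order exactly as the
    # code above assumes (Python 3.7+ dicts preserve insertion order).
    from catocli.Utils.formatter_socket_port_metrics import format_socket_port_metrics

    sample_response = {
        "data": {
            "socketPortMetrics": {
                "records": [
                    {
                        "fieldsMap": {
                            "socket_interface": "LAN1",     # illustrative field
                            "bytes_downstream": "1048576",  # illustrative field
                        },
                        "fieldsUnitTypes": ["string", "bytes"],
                    },
                ]
            }
        }
    }

    # Assuming is_bytes_measure() flags the 'bytes' unit type, the CSV header
    # becomes: socket_interface,bytes_downstream_mb
    print(format_socket_port_metrics(sample_response, output_format='csv'))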