catocli 3.0.18-py3-none-any.whl → 3.0.24-py3-none-any.whl

This diff compares the contents of publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

This version of catocli might be problematic.

Files changed (43)
  1. catocli/Utils/clidriver.py +16 -8
  2. catocli/Utils/formatter_account_metrics.py +544 -0
  3. catocli/Utils/formatter_app_stats.py +184 -0
  4. catocli/Utils/formatter_app_stats_timeseries.py +377 -0
  5. catocli/Utils/formatter_events_timeseries.py +459 -0
  6. catocli/Utils/formatter_socket_port_metrics.py +189 -0
  7. catocli/Utils/formatter_socket_port_metrics_timeseries.py +339 -0
  8. catocli/Utils/formatter_utils.py +251 -0
  9. catocli/__init__.py +1 -1
  10. catocli/clisettings.json +37 -5
  11. catocli/parsers/customParserApiClient.py +211 -66
  12. catocli/parsers/mutation_policy/__init__.py +405 -405
  13. catocli/parsers/mutation_site/__init__.py +15 -15
  14. catocli/parsers/mutation_sites/__init__.py +15 -15
  15. catocli/parsers/query_accountMetrics/README.md +90 -0
  16. catocli/parsers/query_accountMetrics/__init__.py +6 -0
  17. catocli/parsers/query_appStats/README.md +2 -2
  18. catocli/parsers/query_appStats/__init__.py +4 -2
  19. catocli/parsers/query_appStatsTimeSeries/__init__.py +4 -2
  20. catocli/parsers/query_eventsTimeSeries/README.md +280 -0
  21. catocli/parsers/query_eventsTimeSeries/__init__.py +6 -0
  22. catocli/parsers/query_policy/__init__.py +42 -42
  23. catocli/parsers/query_socketPortMetrics/README.md +44 -0
  24. catocli/parsers/query_socketPortMetrics/__init__.py +6 -0
  25. catocli/parsers/query_socketPortMetricsTimeSeries/README.md +83 -0
  26. catocli/parsers/query_socketPortMetricsTimeSeries/__init__.py +4 -2
  27. catocli/parsers/utils/export_utils.py +6 -2
  28. catocli-3.0.24.dist-info/METADATA +184 -0
  29. {catocli-3.0.18.dist-info → catocli-3.0.24.dist-info}/RECORD +37 -35
  30. {catocli-3.0.18.dist-info → catocli-3.0.24.dist-info}/top_level.txt +0 -1
  31. models/mutation.xdr.analystFeedback.json +822 -87
  32. models/query.xdr.stories.json +822 -87
  33. models/query.xdr.story.json +822 -87
  34. schema/catolib.py +89 -64
  35. catocli/Utils/csv_formatter.py +0 -663
  36. catocli-3.0.18.dist-info/METADATA +0 -124
  37. scripts/catolib.py +0 -62
  38. scripts/export_if_rules_to_json.py +0 -188
  39. scripts/export_wf_rules_to_json.py +0 -111
  40. scripts/import_wf_rules_to_tfstate.py +0 -331
  41. {catocli-3.0.18.dist-info → catocli-3.0.24.dist-info}/WHEEL +0 -0
  42. {catocli-3.0.18.dist-info → catocli-3.0.24.dist-info}/entry_points.txt +0 -0
  43. {catocli-3.0.18.dist-info → catocli-3.0.24.dist-info}/licenses/LICENSE +0 -0
catocli/Utils/clidriver.py
@@ -48,22 +48,22 @@ from ..parsers.query_enterpriseDirectory import query_enterpriseDirectory_parse
 from ..parsers.query_devices import query_devices_parse
 from ..parsers.query_accountSnapshot import query_accountSnapshot_parse
 from ..parsers.query_catalogs import query_catalogs_parse
-from ..parsers.query_xdr import query_xdr_parse
 from ..parsers.query_site import query_site_parse
+from ..parsers.query_xdr import query_xdr_parse
 from ..parsers.query_groups import query_groups_parse
 from ..parsers.query_policy import query_policy_parse
 from ..parsers.mutation_xdr import mutation_xdr_parse
 from ..parsers.mutation_policy import mutation_policy_parse
 from ..parsers.mutation_site import mutation_site_parse
-from ..parsers.mutation_sites import mutation_sites_parse
 from ..parsers.mutation_container import mutation_container_parse
+from ..parsers.mutation_sites import mutation_sites_parse
+from ..parsers.mutation_admin import mutation_admin_parse
 from ..parsers.mutation_accountManagement import mutation_accountManagement_parse
 from ..parsers.mutation_sandbox import mutation_sandbox_parse
 from ..parsers.mutation_licensing import mutation_licensing_parse
 from ..parsers.mutation_hardware import mutation_hardware_parse
 from ..parsers.mutation_groups import mutation_groups_parse
 from ..parsers.mutation_enterpriseDirectory import mutation_enterpriseDirectory_parse
-from ..parsers.mutation_admin import mutation_admin_parse
 
 def show_version_info(args, configuration=None):
     print(f"catocli version {catocli.__version__}")
@@ -183,22 +183,22 @@ query_enterpriseDirectory_parser = query_enterpriseDirectory_parse(query_subparsers)
 query_devices_parser = query_devices_parse(query_subparsers)
 query_accountSnapshot_parser = query_accountSnapshot_parse(query_subparsers)
 query_catalogs_parser = query_catalogs_parse(query_subparsers)
-query_xdr_parser = query_xdr_parse(query_subparsers)
 query_site_parser = query_site_parse(query_subparsers)
+query_xdr_parser = query_xdr_parse(query_subparsers)
 query_groups_parser = query_groups_parse(query_subparsers)
 query_policy_parser = query_policy_parse(query_subparsers)
 mutation_xdr_parser = mutation_xdr_parse(mutation_subparsers)
 mutation_policy_parser = mutation_policy_parse(mutation_subparsers)
 mutation_site_parser = mutation_site_parse(mutation_subparsers)
-mutation_sites_parser = mutation_sites_parse(mutation_subparsers)
 mutation_container_parser = mutation_container_parse(mutation_subparsers)
+mutation_sites_parser = mutation_sites_parse(mutation_subparsers)
+mutation_admin_parser = mutation_admin_parse(mutation_subparsers)
 mutation_accountManagement_parser = mutation_accountManagement_parse(mutation_subparsers)
 mutation_sandbox_parser = mutation_sandbox_parse(mutation_subparsers)
 mutation_licensing_parser = mutation_licensing_parse(mutation_subparsers)
 mutation_hardware_parser = mutation_hardware_parse(mutation_subparsers)
 mutation_groups_parser = mutation_groups_parse(mutation_subparsers)
 mutation_enterpriseDirectory_parser = mutation_enterpriseDirectory_parse(mutation_subparsers)
-mutation_admin_parser = mutation_admin_parse(mutation_subparsers)
 
 
 def parse_headers(header_strings):
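
Both moves above are pure reordering: each *_parse helper registers one command family on a shared argparse subparsers object, so the order mainly determines how commands are listed in help output. A minimal sketch of that registration pattern (illustrative names, not the actual catocli internals):

import argparse

def query_example_parse(subparsers):
    # Register one sub-command on the shared subparsers object and return it.
    parser = subparsers.add_parser('example', help='Example query command')
    parser.add_argument('json_input', nargs='?', default='{}')
    return parser

root = argparse.ArgumentParser(prog='catocli')
query_subparsers = root.add_subparsers(dest='command')
query_example_parser = query_example_parse(query_subparsers)
print(root.parse_args(['example', '{"limit": 1}']))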
@@ -287,8 +287,16 @@ def main(args=None):
             # Print CSV output directly without JSON formatting
             print(response[0]["__csv_output__"], end='')
         else:
-            # Standard JSON output
-            print(json.dumps(response[0], sort_keys=True, indent=4))
+            # Handle different response formats more robustly
+            if isinstance(response, list) and len(response) > 0:
+                # Standard format: [data, status, headers]
+                print(json.dumps(response[0], sort_keys=True, indent=4))
+            elif isinstance(response, dict):
+                # Direct dict response
+                print(json.dumps(response, sort_keys=True, indent=4))
+            else:
+                # Fallback: print as-is
+                print(json.dumps(response, sort_keys=True, indent=4))
     except KeyboardInterrupt:
         print('Operation cancelled by user (Ctrl+C).')
         exit(130)  # Standard exit code for SIGINT
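
The main() change above drops the hard assumption that every API response arrives as a [data, status, headers] list. A standalone sketch of the new dispatch, with hypothetical sample responses (not real Cato API payloads):

import json

def print_response(response):
    # Mirrors the new output logic: accept a list, a dict, or anything JSON-serializable.
    if isinstance(response, list) and len(response) > 0:
        payload = response[0]  # standard format: [data, status, headers]
    elif isinstance(response, dict):
        payload = response  # direct dict response
    else:
        payload = response  # fallback: print as-is
    print(json.dumps(payload, sort_keys=True, indent=4))

print_response([{"data": {"ok": True}}, 200, {}])  # list-wrapped response
print_response({"data": {"ok": True}})             # bare dict response
print_response(None)                               # fallback (prints null)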
catocli/Utils/formatter_account_metrics.py (new file)
@@ -0,0 +1,544 @@
+#!/usr/bin/env python3
+"""
+Account Metrics Formatter for Cato CLI
+
+This module provides functions to format accountMetrics API responses
+into JSON and CSV formats, with special handling for hierarchical
+timeseries data.
+"""
+
+import csv
+import io
+import json
+from datetime import datetime
+from typing import Dict, List, Any, Optional, Set, Tuple
+
+# Import shared utility functions
+try:
+    from .formatter_utils import convert_bytes_to_mb, format_timestamp, is_bytes_measure
+except ImportError:
+    try:
+        from catocli.Utils.formatter_utils import convert_bytes_to_mb, format_timestamp, is_bytes_measure
+    except ImportError:
+        from formatter_utils import convert_bytes_to_mb, format_timestamp, is_bytes_measure
+
+
+def format_account_metrics(response_data: Dict[str, Any], output_format: str = 'json') -> str:
+    """
+    Convert accountMetrics JSON response to specified format (JSON or CSV)
+
+    Args:
+        response_data: JSON response from accountMetrics query
+        output_format: 'json' or 'csv'
+
+    Returns:
+        Formatted string in the requested format
+    """
+    if output_format.lower() == 'csv':
+        return _format_account_metrics_to_csv(response_data)
+    else:
+        # Default to JSON format with organized structure
+        return _format_account_metrics_to_json(response_data)
+
+
+def _format_account_metrics_to_json(response_data: Dict[str, Any]) -> str:
+    """
+    Convert accountMetrics JSON response to organized JSON format with hierarchical structure
+
+    Args:
+        response_data: JSON response from accountMetrics query
+
+    Returns:
+        JSON formatted string with organized sites, interfaces, and timeseries data
+    """
+    if not response_data or not isinstance(response_data, dict):
+        return json.dumps({"error": "Invalid response data"}, indent=2)
+
+    # Check for API errors
+    if 'errors' in response_data:
+        return json.dumps(response_data, indent=2)
+
+    if 'data' not in response_data or 'accountMetrics' not in response_data['data']:
+        return json.dumps({"error": "Invalid accountMetrics response structure"}, indent=2)
+
+    account_metrics = response_data['data']['accountMetrics']
+    if account_metrics is None:
+        return None
+    if not isinstance(account_metrics, dict):
+        return json.dumps({"error": "Invalid accountMetrics data"}, indent=2)
+
+    # Start building organized structure
+    organized_data = {
+        "accountMetrics": {
+            "metadata": {
+                "total_sites": 0,
+                "total_users": 0,
+                "total_interfaces": 0,
+                "has_sites": False,
+                "has_users": False,
+                "raw_structure": {
+                    "available_keys": list(account_metrics.keys())
+                }
+            },
+            "sites": [],
+            "users": []
+        }
+    }
+
+    # Extract sites and users, handling None cases
+    sites = account_metrics.get('sites', []) or []  # Handle None case
+    users = account_metrics.get('users', []) or []  # Handle None case
+
+    # Update metadata
+    organized_data["accountMetrics"]["metadata"]["total_sites"] = len(sites)
+    organized_data["accountMetrics"]["metadata"]["total_users"] = len(users)
+    organized_data["accountMetrics"]["metadata"]["has_sites"] = len(sites) > 0
+    organized_data["accountMetrics"]["metadata"]["has_users"] = len(users) > 0
+
+    total_interfaces = 0
+
+    # Process sites if present
+    for site in sites:
+        site_id = site.get('id', '')
+        site_info = site.get('info', {}) or {}  # Handle None case
+        site_name = site_info.get('name', '')
+        interfaces = site.get('interfaces', []) or []  # Handle None case
+        total_interfaces += len(interfaces)
+
+        site_data = {
+            'site_id': site_id,
+            'site_name': site_name,
+            'site_info': site_info,
+            'total_interfaces': len(interfaces),
+            'interfaces': []
+        }
+
+        # Process interfaces for this site
+        for interface in interfaces:
+            interface_info = interface.get('interfaceInfo', {}) or {}
+            interface_name = interface_info.get('name', '') or interface.get('name', '')
+            timeseries_list = interface.get('timeseries', []) or []
+            interface_metrics = interface.get('metrics', {}) or {}
+
+            interface_data = {
+                'interface_name': interface_name,
+                'interface_info': interface_info,
+                'total_timeseries': len(timeseries_list),
+                'interface_metrics': interface_metrics,
+                'timeseries_data': []
+            }
+
+            # Organize timeseries data by timestamp
+            timestamp_data = {}
+            info_fields = {}
+
+            for timeseries in timeseries_list:
+                label = timeseries.get('label', '')
+                units = timeseries.get('units', '')
+                data_points = timeseries.get('data', []) or []
+                info = timeseries.get('info', []) or []
+
+                # Store info fields
+                if info and len(info) >= 2:
+                    info_fields['info_site_id'] = str(info[0])
+                    info_fields['info_interface'] = str(info[1])
+
+                # Process each data point
+                for point in data_points:
+                    if isinstance(point, (list, tuple)) and len(point) >= 2:
+                        timestamp = int(point[0])
+                        value = point[1]
+                        timestamp_str = format_timestamp(timestamp)
+
+                        if timestamp_str not in timestamp_data:
+                            timestamp_data[timestamp_str] = {}
+
+                        # Convert bytes measures to MB and add appropriate suffix
+                        if is_bytes_measure(label, units) and value:
+                            try:
+                                converted_value = convert_bytes_to_mb(value)
+                                timestamp_data[timestamp_str][label] = {
+                                    'value': value,
+                                    'formatted_mb': converted_value,
+                                    'unit_type': 'bytes'
+                                }
+                            except (ValueError, ZeroDivisionError):
+                                timestamp_data[timestamp_str][label] = {
+                                    'value': value,
+                                    'unit_type': 'bytes'
+                                }
+                        else:
+                            timestamp_data[timestamp_str][label] = {
+                                'value': value,
+                                'unit_type': units or 'unknown'
+                            }
+
+            # Add timestamp data to interface
+            interface_data['info_fields'] = info_fields
+            interface_data['time_range'] = {
+                'start': min(timestamp_data.keys()) if timestamp_data else None,
+                'end': max(timestamp_data.keys()) if timestamp_data else None,
+                'total_timestamps': len(timestamp_data)
+            }
+            interface_data['metrics_by_timestamp'] = timestamp_data
+
+            site_data['interfaces'].append(interface_data)
+
+        organized_data["accountMetrics"]["sites"].append(site_data)
+
+    # Process users if present
+    for user in users:
+        user_id = user.get('id', '')
+        user_name = user.get('name', '')
+        user_metrics = user.get('metrics', {}) or {}
+        user_interfaces = user.get('interfaces', []) or []
+
+        user_data = {
+            'user_id': user_id,
+            'user_name': user_name,
+            'user_metrics': user_metrics,
+            'total_interfaces': len(user_interfaces),
+            'interfaces': []
+        }
+
+        # Process user interfaces if any
+        for interface in user_interfaces:
+            interface_name = interface.get('name', '')
+            timeseries_list = interface.get('timeseries', []) or []
+            interface_metrics = interface.get('metrics', {}) or {}
+
+            interface_data = {
+                'interface_name': interface_name,
+                'total_timeseries': len(timeseries_list),
+                'interface_metrics': interface_metrics,
+                'timeseries_data': []
+            }
+
+            # Organize timeseries data by timestamp
+            timestamp_data = {}
+            info_fields = {}
+
+            for timeseries in timeseries_list:
+                label = timeseries.get('label', '')
+                units = timeseries.get('units', '')
+                data_points = timeseries.get('data', []) or []
+                info = timeseries.get('info', []) or []
+
+                # Store info fields
+                if info and len(info) >= 2:
+                    info_fields['info_user_id'] = str(info[0])
+                    info_fields['info_interface'] = str(info[1])
+
+                # Process each data point
+                for point in data_points:
+                    if isinstance(point, (list, tuple)) and len(point) >= 2:
+                        timestamp = int(point[0])
+                        value = point[1]
+                        timestamp_str = format_timestamp(timestamp)
+
+                        if timestamp_str not in timestamp_data:
+                            timestamp_data[timestamp_str] = {}
+
+                        # Convert bytes measures to MB and add appropriate suffix
+                        if is_bytes_measure(label, units) and value:
+                            try:
+                                converted_value = convert_bytes_to_mb(value)
+                                timestamp_data[timestamp_str][label] = {
+                                    'value': value,
+                                    'formatted_mb': converted_value,
+                                    'unit_type': 'bytes'
+                                }
+                            except (ValueError, ZeroDivisionError):
+                                timestamp_data[timestamp_str][label] = {
+                                    'value': value,
+                                    'unit_type': 'bytes'
+                                }
+                        else:
+                            timestamp_data[timestamp_str][label] = {
+                                'value': value,
+                                'unit_type': units or 'unknown'
+                            }
+
+            # Add timestamp data to interface
+            interface_data['info_fields'] = info_fields
+            interface_data['time_range'] = {
+                'start': min(timestamp_data.keys()) if timestamp_data else None,
+                'end': max(timestamp_data.keys()) if timestamp_data else None,
+                'total_timestamps': len(timestamp_data)
+            }
+            interface_data['metrics_by_timestamp'] = timestamp_data
+
+            user_data['interfaces'].append(interface_data)
+
+        organized_data["accountMetrics"]["users"].append(user_data)
+
+    return json.dumps(organized_data, indent=2)
+
+
+def _format_account_metrics_to_csv(response_data: Dict[str, Any]) -> str:
+    """
+    Convert accountMetrics JSON response to CSV format
+
+    Args:
+        response_data: JSON response from accountMetrics query
+
+    Returns:
+        CSV formatted string with unique columns for each metric and bytes converted to MB
+    """
+    if not response_data or 'data' not in response_data or 'accountMetrics' not in response_data['data']:
+        return None
+
+    account_metrics = response_data['data']['accountMetrics']
+    # Handle the case where accountMetrics is None
+    if account_metrics is None:
+        return None
+
+    sites = account_metrics.get('sites', []) or []  # Handle None case
+    users = account_metrics.get('users', []) or []  # Handle None case
+
+    # Check if we have any data to process
+    if not sites and not users:
+        # Return None to indicate we should fall back to raw response
+        return None
+
+    # Define byte metrics that need conversion to MB
+    byte_metrics = {
+        'bytesDownstream', 'bytesTotal', 'bytesUpstream',
+        'bytesDownstreamMax', 'bytesUpstreamMax'
+    }
+
+    # First pass: collect all unique metric labels to create columns
+    all_metric_labels = set()
+    for site in sites:
+        interfaces = site.get('interfaces', [])
+        for interface in interfaces:
+            # Collect timeseries labels
+            timeseries_list = interface.get('timeseries', []) or []
+            for timeseries in timeseries_list:
+                metric_label = timeseries.get('label', '')
+                if metric_label in byte_metrics:
+                    all_metric_labels.add(f"{metric_label}_mb")
+                else:
+                    all_metric_labels.add(metric_label)
+
+            # Collect interface-level metrics (totals)
+            interface_metrics = interface.get('metrics', {})
+            for metric_key in interface_metrics.keys():
+                if metric_key in ['bytesDownstream', 'bytesUpstream', 'bytesTotal']:
+                    # Use consistent naming: {metric}_mb for both timeseries and interface totals
+                    all_metric_labels.add(f'{metric_key}_mb')
+                else:
+                    all_metric_labels.add(f'{metric_key}_total')
+
+    # Sort metric labels for consistent column ordering
+    sorted_metric_labels = sorted(all_metric_labels)
+
+    # Group data by timestamp and interface to create one row per timestamp
+    data_by_timestamp = {}
+
+    for site in sites:
+        site_id = site.get('id', '')
+        site_info = site.get('info', {}) or {}  # Handle None case
+        interfaces = site.get('interfaces', [])
+
+        for interface in interfaces:
+            interface_info = interface.get('interfaceInfo', {}) or {}
+            interface_name = interface_info.get('name', '') or interface.get('name', '')
+            timeseries_list = interface.get('timeseries', []) or []
+
+            # Extract interface-level metrics (totals for the entire period)
+            interface_metrics = interface.get('metrics', {})
+
+            # Process each timeseries for this interface
+            if timeseries_list:
+                for timeseries in timeseries_list:
+                    metric_label = timeseries.get('label', '')
+                    data_points = timeseries.get('data', [])
+
+                    # Determine the column name (with _mb suffix for byte metrics)
+                    if metric_label in byte_metrics:
+                        column_name = f"{metric_label}_mb"
+                    else:
+                        column_name = metric_label
+
+                    for timestamp, value in data_points:
+                        # Create unique key for each timestamp/interface combination
+                        key = (int(timestamp), interface_name, site_id)
+
+                        if key not in data_by_timestamp:
+                            data_by_timestamp[key] = {
+                                'timestamp_period': format_timestamp(int(timestamp)),
+                                'site_id': site_id,
+                                'site_name': site_info.get('name', ''),
+                                'interface_name': interface_name
+                            }
+                            # Initialize all metric columns to empty string
+                            for label in sorted_metric_labels:
+                                data_by_timestamp[key][label] = ''
+
+                            # Add interface-level metrics with byte conversion
+                            for metric_key, metric_value in interface_metrics.items():
+                                if metric_key in ['bytesDownstream', 'bytesUpstream', 'bytesTotal']:
+                                    # Convert bytes to MB for these specific metrics
+                                    mb_value = float(metric_value) / (1024 * 1024) if metric_value and metric_value != 0 else 0
+                                    column_name = f'{metric_key}_mb'
+                                    # If timeseries data exists for this metric, don't overwrite it
+                                    if not data_by_timestamp[key][column_name]:
+                                        data_by_timestamp[key][column_name] = f"{mb_value:.3f}".rstrip('0').rstrip('.')
+                                else:
+                                    # Add other interface metrics as-is
+                                    data_by_timestamp[key][f'{metric_key}_total'] = str(metric_value) if metric_value is not None else ''
+
+                        # Convert bytes to MB if it's a byte metric
+                        if metric_label in byte_metrics:
+                            # Convert bytes to MB (divide by 1,048,576 = 1024^2)
+                            mb_value = float(value) / (1024 * 1024) if value != 0 else 0
+                            data_by_timestamp[key][column_name] = f"{mb_value:.3f}".rstrip('0').rstrip('.')
+                        else:
+                            data_by_timestamp[key][column_name] = str(value)
+            else:
+                # No timeseries data, but we still want to create a row with interface metrics if they exist
+                if interface_metrics:
+                    # Use current time as placeholder timestamp since we have no timeseries data
+                    import time
+                    current_timestamp = int(time.time() * 1000)
+                    key = (current_timestamp, interface_name, site_id)
+
+                    data_by_timestamp[key] = {
+                        'timestamp_period': 'No timeseries data',
+                        'site_id': site_id,
+                        'site_name': site_info.get('name', ''),
+                        'interface_name': interface_name
+                    }
+                    # Initialize all metric columns to empty string
+                    for label in sorted_metric_labels:
+                        data_by_timestamp[key][label] = ''
+
+                    # Add interface-level metrics with byte conversion
+                    for metric_key, metric_value in interface_metrics.items():
+                        if metric_key in ['bytesDownstream', 'bytesUpstream', 'bytesTotal']:
+                            # Convert bytes to MB for these specific metrics
+                            mb_value = float(metric_value) / (1024 * 1024) if metric_value and metric_value != 0 else 0
+                            column_name = f'{metric_key}_mb'
+                            data_by_timestamp[key][column_name] = f"{mb_value:.3f}".rstrip('0').rstrip('.')
+                        else:
+                            # Add other interface metrics as-is
+                            data_by_timestamp[key][f'{metric_key}_total'] = str(metric_value) if metric_value is not None else ''
+
+    # Process user-level data if present
+    for user in users:
+        user_id = user.get('id', '')
+        user_name = user.get('name', '')
+        user_metrics = user.get('metrics', {})
+        user_interfaces = user.get('interfaces', [])
+
+        # Collect user-level metrics for the metric labels set
+        for metric_key in user_metrics.keys():
+            if metric_key in ['bytesDownstream', 'bytesUpstream', 'bytesTotal']:
+                all_metric_labels.add(f'{metric_key}_mb')
+            else:
+                all_metric_labels.add(metric_key)
+
+        # Process user interfaces (if any)
+        if user_interfaces:
+            for interface in user_interfaces:
+                interface_name = interface.get('name', '')
+                timeseries_list = interface.get('timeseries', []) or []
+                interface_metrics = interface.get('metrics', {})
+
+                # Add interface metrics to labels
+                for metric_key in interface_metrics.keys():
+                    if metric_key in ['bytesDownstream', 'bytesUpstream', 'bytesTotal']:
+                        all_metric_labels.add(f'{metric_key}_mb')
+                    else:
+                        all_metric_labels.add(f'{metric_key}_total')
+
+                # Process timeseries data if available
+                for timeseries in timeseries_list:
+                    metric_label = timeseries.get('label', '')
+                    data_points = timeseries.get('data', [])
+
+                    # Add to labels
+                    if metric_label in byte_metrics:
+                        all_metric_labels.add(f"{metric_label}_mb")
+                    else:
+                        all_metric_labels.add(metric_label)
+
+                    # Process data points
+                    for timestamp, value in data_points:
+                        key = (int(timestamp), f"user_{user_id}_{interface_name}", user_id)
+
+                        if key not in data_by_timestamp:
+                            data_by_timestamp[key] = {
+                                'timestamp_period': format_timestamp(int(timestamp)),
+                                'site_id': user_id,
+                                'site_name': user_name,
+                                'interface_name': f"user_{user_id}_{interface_name}"
+                            }
+                            # Initialize all metric columns
+                            for label in all_metric_labels:
+                                data_by_timestamp[key][label] = ''
+
+                        # Add the metric value
+                        if metric_label in byte_metrics:
+                            mb_value = float(value) / (1024 * 1024) if value != 0 else 0
+                            data_by_timestamp[key][f"{metric_label}_mb"] = f"{mb_value:.3f}".rstrip('0').rstrip('.')
+                        else:
+                            data_by_timestamp[key][metric_label] = str(value)
+        else:
+            # No interfaces, create a row with just user-level metrics
+            import time
+            current_timestamp = int(time.time() * 1000)
+            key = (current_timestamp, f"user_{user_id}", user_id)
+
+            data_by_timestamp[key] = {
+                'timestamp_period': 'User summary',
+                'site_id': user_id,
+                'site_name': user_name,
+                'interface_name': f"user_{user_id}"
+            }
+
+            # Re-sort metric labels after adding user metrics
+            sorted_metric_labels = sorted(all_metric_labels)
+
+            # Initialize all metric columns
+            for label in sorted_metric_labels:
+                data_by_timestamp[key][label] = ''
+
+            # Add user-level metrics
+            for metric_key, metric_value in user_metrics.items():
+                if metric_key in ['bytesDownstream', 'bytesUpstream', 'bytesTotal']:
+                    mb_value = float(metric_value) / (1024 * 1024) if metric_value and metric_value != 0 else 0
+                    data_by_timestamp[key][f'{metric_key}_mb'] = f"{mb_value:.3f}".rstrip('0').rstrip('.')
+                else:
+                    data_by_timestamp[key][metric_key] = str(metric_value) if metric_value is not None else ''
+
+    # Re-sort metric labels after processing all data
+    sorted_metric_labels = sorted(all_metric_labels)
+
+    # Convert to list and sort by timestamp
+    rows = list(data_by_timestamp.values())
+    rows.sort(key=lambda x: (x['timestamp_period'], x['interface_name'], x['site_id']))
+
+    if not rows:
+        # Return None to indicate we should fall back to raw response
+        return None
+
+    # Create CSV output
+    output = io.StringIO()
+    writer = csv.writer(output)
+
+    # Build header: basic columns first, then metric columns
+    basic_columns = ['timestamp_period', 'site_id', 'site_name', 'interface_name']
+    header = basic_columns + sorted_metric_labels
+    writer.writerow(header)
+
+    # Write data rows
+    for row_data in rows:
+        row = []
+        for col in header:
+            value = row_data.get(col, '')
+            row.append(value)
+        writer.writerow(row)
+
+    return output.getvalue()
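
For orientation, a hedged usage sketch of the new formatter. The payload below is hypothetical but follows the field names the code above reads (data.accountMetrics.sites[].interfaces[].timeseries[]); byte values are divided by 1,048,576 (1024^2) to get MB:

from catocli.Utils.formatter_account_metrics import format_account_metrics

# Hypothetical accountMetrics response shaped the way the formatter expects.
sample = {
    "data": {
        "accountMetrics": {
            "sites": [{
                "id": "1234",
                "info": {"name": "Branch-A"},
                "interfaces": [{
                    "interfaceInfo": {"name": "wan1"},
                    "metrics": {},
                    "timeseries": [{
                        "label": "bytesDownstream",
                        "units": "bytes",
                        # One point: (epoch ms, 2097152 bytes = 2 MB)
                        "data": [[1735689600000, 2097152]],
                    }],
                }],
            }],
            "users": [],
        }
    }
}

# CSV output should contain a bytesDownstream_mb column holding 2097152 / 1048576 = 2.
print(format_account_metrics(sample, output_format='csv'))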