catocli 3.0.14__py3-none-any.whl → 3.0.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of catocli has been flagged for review.
- catocli/Utils/clidriver.py +16 -8
- catocli/Utils/formatter_account_metrics.py +544 -0
- catocli/Utils/formatter_app_stats.py +184 -0
- catocli/Utils/formatter_app_stats_timeseries.py +377 -0
- catocli/Utils/formatter_events_timeseries.py +459 -0
- catocli/Utils/formatter_socket_port_metrics.py +189 -0
- catocli/Utils/formatter_socket_port_metrics_timeseries.py +339 -0
- catocli/Utils/formatter_utils.py +251 -0
- catocli/Utils/help_formatter.py +1 -1
- catocli/__init__.py +1 -1
- catocli/clisettings.json +37 -5
- catocli/parsers/custom/query_eventsFeed/README.md +94 -0
- catocli/parsers/custom/scim/README.md +346 -0
- catocli/parsers/custom/scim/scim_client.py +132 -26
- catocli/parsers/custom/scim/scim_commands.py +14 -56
- catocli/parsers/customParserApiClient.py +213 -65
- catocli/parsers/mutation_policy/__init__.py +405 -405
- catocli/parsers/mutation_site/__init__.py +15 -15
- catocli/parsers/mutation_sites/__init__.py +15 -15
- catocli/parsers/query_accountMetrics/README.md +99 -9
- catocli/parsers/query_accountMetrics/__init__.py +6 -0
- catocli/parsers/query_appStats/README.md +11 -11
- catocli/parsers/query_appStats/__init__.py +4 -2
- catocli/parsers/query_appStatsTimeSeries/README.md +10 -10
- catocli/parsers/query_appStatsTimeSeries/__init__.py +4 -2
- catocli/parsers/query_auditFeed/README.md +9 -9
- catocli/parsers/query_events/README.md +9 -9
- catocli/parsers/query_eventsTimeSeries/README.md +289 -9
- catocli/parsers/query_eventsTimeSeries/__init__.py +6 -0
- catocli/parsers/query_policy/__init__.py +42 -42
- catocli/parsers/query_socketPortMetrics/README.md +53 -9
- catocli/parsers/query_socketPortMetrics/__init__.py +6 -0
- catocli/parsers/query_socketPortMetricsTimeSeries/README.md +92 -9
- catocli/parsers/query_socketPortMetricsTimeSeries/__init__.py +4 -2
- {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/METADATA +1 -1
- {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/RECORD +187 -183
- {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/top_level.txt +0 -1
- models/mutation.policy.antiMalwareFileHash.addRule.json +20 -0
- models/mutation.policy.antiMalwareFileHash.addSection.json +103 -0
- models/mutation.policy.antiMalwareFileHash.createPolicyRevision.json +123 -0
- models/mutation.policy.antiMalwareFileHash.discardPolicyRevision.json +123 -0
- models/mutation.policy.antiMalwareFileHash.moveRule.json +20 -0
- models/mutation.policy.antiMalwareFileHash.moveSection.json +103 -0
- models/mutation.policy.antiMalwareFileHash.publishPolicyRevision.json +123 -0
- models/mutation.policy.antiMalwareFileHash.removeRule.json +20 -0
- models/mutation.policy.antiMalwareFileHash.removeSection.json +103 -0
- models/mutation.policy.antiMalwareFileHash.updatePolicy.json +123 -0
- models/mutation.policy.antiMalwareFileHash.updateRule.json +20 -0
- models/mutation.policy.antiMalwareFileHash.updateSection.json +103 -0
- models/mutation.policy.appTenantRestriction.addRule.json +20 -0
- models/mutation.policy.appTenantRestriction.addSection.json +103 -0
- models/mutation.policy.appTenantRestriction.createPolicyRevision.json +123 -0
- models/mutation.policy.appTenantRestriction.discardPolicyRevision.json +123 -0
- models/mutation.policy.appTenantRestriction.moveRule.json +20 -0
- models/mutation.policy.appTenantRestriction.moveSection.json +103 -0
- models/mutation.policy.appTenantRestriction.publishPolicyRevision.json +123 -0
- models/mutation.policy.appTenantRestriction.removeRule.json +20 -0
- models/mutation.policy.appTenantRestriction.removeSection.json +103 -0
- models/mutation.policy.appTenantRestriction.updatePolicy.json +123 -0
- models/mutation.policy.appTenantRestriction.updateRule.json +20 -0
- models/mutation.policy.appTenantRestriction.updateSection.json +103 -0
- models/mutation.policy.applicationControl.addRule.json +20 -0
- models/mutation.policy.applicationControl.addSection.json +103 -0
- models/mutation.policy.applicationControl.createPolicyRevision.json +123 -0
- models/mutation.policy.applicationControl.discardPolicyRevision.json +123 -0
- models/mutation.policy.applicationControl.moveRule.json +20 -0
- models/mutation.policy.applicationControl.moveSection.json +103 -0
- models/mutation.policy.applicationControl.publishPolicyRevision.json +123 -0
- models/mutation.policy.applicationControl.removeRule.json +20 -0
- models/mutation.policy.applicationControl.removeSection.json +103 -0
- models/mutation.policy.applicationControl.updatePolicy.json +123 -0
- models/mutation.policy.applicationControl.updateRule.json +20 -0
- models/mutation.policy.applicationControl.updateSection.json +103 -0
- models/mutation.policy.dynamicIpAllocation.addRule.json +20 -0
- models/mutation.policy.dynamicIpAllocation.addSection.json +103 -0
- models/mutation.policy.dynamicIpAllocation.createPolicyRevision.json +123 -0
- models/mutation.policy.dynamicIpAllocation.discardPolicyRevision.json +123 -0
- models/mutation.policy.dynamicIpAllocation.moveRule.json +20 -0
- models/mutation.policy.dynamicIpAllocation.moveSection.json +103 -0
- models/mutation.policy.dynamicIpAllocation.publishPolicyRevision.json +123 -0
- models/mutation.policy.dynamicIpAllocation.removeRule.json +20 -0
- models/mutation.policy.dynamicIpAllocation.removeSection.json +103 -0
- models/mutation.policy.dynamicIpAllocation.updatePolicy.json +123 -0
- models/mutation.policy.dynamicIpAllocation.updateRule.json +20 -0
- models/mutation.policy.dynamicIpAllocation.updateSection.json +103 -0
- models/mutation.policy.internetFirewall.addRule.json +20 -0
- models/mutation.policy.internetFirewall.addSection.json +103 -0
- models/mutation.policy.internetFirewall.createPolicyRevision.json +123 -0
- models/mutation.policy.internetFirewall.discardPolicyRevision.json +123 -0
- models/mutation.policy.internetFirewall.moveRule.json +20 -0
- models/mutation.policy.internetFirewall.moveSection.json +103 -0
- models/mutation.policy.internetFirewall.publishPolicyRevision.json +123 -0
- models/mutation.policy.internetFirewall.removeRule.json +20 -0
- models/mutation.policy.internetFirewall.removeSection.json +103 -0
- models/mutation.policy.internetFirewall.updatePolicy.json +123 -0
- models/mutation.policy.internetFirewall.updateRule.json +20 -0
- models/mutation.policy.internetFirewall.updateSection.json +103 -0
- models/mutation.policy.remotePortFwd.addRule.json +20 -0
- models/mutation.policy.remotePortFwd.addSection.json +103 -0
- models/mutation.policy.remotePortFwd.createPolicyRevision.json +123 -0
- models/mutation.policy.remotePortFwd.discardPolicyRevision.json +123 -0
- models/mutation.policy.remotePortFwd.moveRule.json +20 -0
- models/mutation.policy.remotePortFwd.moveSection.json +103 -0
- models/mutation.policy.remotePortFwd.publishPolicyRevision.json +123 -0
- models/mutation.policy.remotePortFwd.removeRule.json +20 -0
- models/mutation.policy.remotePortFwd.removeSection.json +103 -0
- models/mutation.policy.remotePortFwd.updatePolicy.json +123 -0
- models/mutation.policy.remotePortFwd.updateRule.json +20 -0
- models/mutation.policy.remotePortFwd.updateSection.json +103 -0
- models/mutation.policy.socketLan.addRule.json +40 -0
- models/mutation.policy.socketLan.addSection.json +103 -0
- models/mutation.policy.socketLan.createPolicyRevision.json +143 -0
- models/mutation.policy.socketLan.discardPolicyRevision.json +143 -0
- models/mutation.policy.socketLan.moveRule.json +40 -0
- models/mutation.policy.socketLan.moveSection.json +103 -0
- models/mutation.policy.socketLan.publishPolicyRevision.json +143 -0
- models/mutation.policy.socketLan.removeRule.json +40 -0
- models/mutation.policy.socketLan.removeSection.json +103 -0
- models/mutation.policy.socketLan.updatePolicy.json +143 -0
- models/mutation.policy.socketLan.updateRule.json +40 -0
- models/mutation.policy.socketLan.updateSection.json +103 -0
- models/mutation.policy.terminalServer.addRule.json +20 -0
- models/mutation.policy.terminalServer.addSection.json +103 -0
- models/mutation.policy.terminalServer.createPolicyRevision.json +123 -0
- models/mutation.policy.terminalServer.discardPolicyRevision.json +123 -0
- models/mutation.policy.terminalServer.moveRule.json +20 -0
- models/mutation.policy.terminalServer.moveSection.json +103 -0
- models/mutation.policy.terminalServer.publishPolicyRevision.json +123 -0
- models/mutation.policy.terminalServer.removeRule.json +20 -0
- models/mutation.policy.terminalServer.removeSection.json +103 -0
- models/mutation.policy.terminalServer.updatePolicy.json +123 -0
- models/mutation.policy.terminalServer.updateRule.json +20 -0
- models/mutation.policy.terminalServer.updateSection.json +103 -0
- models/mutation.policy.tlsInspect.addRule.json +20 -0
- models/mutation.policy.tlsInspect.addSection.json +103 -0
- models/mutation.policy.tlsInspect.createPolicyRevision.json +123 -0
- models/mutation.policy.tlsInspect.discardPolicyRevision.json +123 -0
- models/mutation.policy.tlsInspect.moveRule.json +20 -0
- models/mutation.policy.tlsInspect.moveSection.json +103 -0
- models/mutation.policy.tlsInspect.publishPolicyRevision.json +123 -0
- models/mutation.policy.tlsInspect.removeRule.json +20 -0
- models/mutation.policy.tlsInspect.removeSection.json +103 -0
- models/mutation.policy.tlsInspect.updatePolicy.json +123 -0
- models/mutation.policy.tlsInspect.updateRule.json +20 -0
- models/mutation.policy.tlsInspect.updateSection.json +103 -0
- models/mutation.policy.wanFirewall.addRule.json +20 -0
- models/mutation.policy.wanFirewall.addSection.json +103 -0
- models/mutation.policy.wanFirewall.createPolicyRevision.json +123 -0
- models/mutation.policy.wanFirewall.discardPolicyRevision.json +123 -0
- models/mutation.policy.wanFirewall.moveRule.json +20 -0
- models/mutation.policy.wanFirewall.moveSection.json +103 -0
- models/mutation.policy.wanFirewall.publishPolicyRevision.json +123 -0
- models/mutation.policy.wanFirewall.removeRule.json +20 -0
- models/mutation.policy.wanFirewall.removeSection.json +103 -0
- models/mutation.policy.wanFirewall.updatePolicy.json +123 -0
- models/mutation.policy.wanFirewall.updateRule.json +20 -0
- models/mutation.policy.wanFirewall.updateSection.json +103 -0
- models/mutation.policy.wanNetwork.addRule.json +20 -0
- models/mutation.policy.wanNetwork.addSection.json +103 -0
- models/mutation.policy.wanNetwork.createPolicyRevision.json +123 -0
- models/mutation.policy.wanNetwork.discardPolicyRevision.json +123 -0
- models/mutation.policy.wanNetwork.moveRule.json +20 -0
- models/mutation.policy.wanNetwork.moveSection.json +103 -0
- models/mutation.policy.wanNetwork.publishPolicyRevision.json +123 -0
- models/mutation.policy.wanNetwork.removeRule.json +20 -0
- models/mutation.policy.wanNetwork.removeSection.json +103 -0
- models/mutation.policy.wanNetwork.updatePolicy.json +123 -0
- models/mutation.policy.wanNetwork.updateRule.json +20 -0
- models/mutation.policy.wanNetwork.updateSection.json +103 -0
- models/mutation.xdr.analystFeedback.json +822 -87
- models/query.policy.antiMalwareFileHash.policy.json +123 -0
- models/query.policy.appTenantRestriction.policy.json +123 -0
- models/query.policy.applicationControl.policy.json +123 -0
- models/query.policy.dynamicIpAllocation.policy.json +123 -0
- models/query.policy.internetFirewall.policy.json +123 -0
- models/query.policy.remotePortFwd.policy.json +123 -0
- models/query.policy.socketLan.policy.json +143 -0
- models/query.policy.terminalServer.policy.json +123 -0
- models/query.policy.tlsInspect.policy.json +123 -0
- models/query.policy.wanFirewall.policy.json +123 -0
- models/query.policy.wanNetwork.policy.json +123 -0
- models/query.xdr.stories.json +822 -87
- models/query.xdr.story.json +822 -87
- schema/catolib.py +34 -17
- catocli/Utils/csv_formatter.py +0 -663
- scripts/catolib.py +0 -62
- scripts/export_if_rules_to_json.py +0 -188
- scripts/export_wf_rules_to_json.py +0 -111
- scripts/import_wf_rules_to_tfstate.py +0 -331
- {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/WHEEL +0 -0
- {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/entry_points.txt +0 -0
- {catocli-3.0.14.dist-info → catocli-3.0.22.dist-info}/licenses/LICENSE +0 -0

catocli/Utils/formatter_app_stats.py
@@ -0,0 +1,184 @@
#!/usr/bin/env python3
"""
App Stats Formatter for Cato CLI

This module provides functions to format appStats API responses
into JSON and CSV formats, with special handling for field data
and unit conversions.
"""

import csv
import io
import json
from typing import Dict, List, Any

# Import shared utility functions
try:
    from .formatter_utils import convert_bytes_to_mb
except ImportError:
    try:
        from catocli.Utils.formatter_utils import convert_bytes_to_mb
    except ImportError:
        from formatter_utils import convert_bytes_to_mb


def format_app_stats(response_data: Dict[str, Any], output_format: str = 'json') -> str:
    """
    Convert appStats JSON response to specified format (JSON or CSV)

    Args:
        response_data: JSON response from appStats query
        output_format: 'json' or 'csv'

    Returns:
        Formatted string in the requested format, or None if no processable data
    """
    if output_format.lower() == 'csv':
        return _format_app_stats_to_csv(response_data)
    else:
        # Default to JSON format with organized structure
        return _format_app_stats_to_json(response_data)


def _format_app_stats_to_json(response_data: Dict[str, Any]) -> str:
    """
    Convert appStats JSON response to organized JSON format

    Args:
        response_data: JSON response from appStats query

    Returns:
        JSON formatted string, or None if no processable data
    """
    if not response_data or not isinstance(response_data, dict):
        return None

    # Check for API errors
    if 'errors' in response_data:
        return None

    if 'data' not in response_data or 'appStats' not in response_data['data']:
        return None

    app_stats = response_data['data']['appStats']
    if not app_stats or not isinstance(app_stats, dict):
        return None

    records = app_stats.get('records', [])

    if not records:
        return None

    # Organize data in a more structured format
    organized_data = {
        "appStats": {
            "summary": {
                "total_records": len(records),
                "field_names": list(records[0].get('fieldsMap', {}).keys()) if records else [],
                "data_types": records[0].get('fieldsUnitTypes', []) if records else []
            },
            "records": []
        }
    }

    # Process each record
    for record in records:
        fields_map = record.get('fieldsMap', {})
        record_unit_types = record.get('fieldsUnitTypes', [])

        record_data = {}

        for i, (field, value) in enumerate(fields_map.items()):
            # Add unit type information for bytes fields
            if i < len(record_unit_types) and record_unit_types[i] == 'bytes':
                formatted_mb = convert_bytes_to_mb(value)
                if formatted_mb and formatted_mb != str(value):
                    record_data[field] = {
                        "value": value,
                        "formatted_mb": formatted_mb,
                        "unit_type": "bytes"
                    }
                else:
                    record_data[field] = {
                        "value": value,
                        "unit_type": "bytes"
                    }
            else:
                record_data[field] = {
                    "value": value,
                    "unit_type": record_unit_types[i] if i < len(record_unit_types) else "unknown"
                }

        organized_data["appStats"]["records"].append(record_data)

    return json.dumps(organized_data, indent=2)


def _format_app_stats_to_csv(response_data: Dict[str, Any]) -> str:
    """
    Convert appStats JSON response to CSV format

    Args:
        response_data: JSON response from appStats query

    Returns:
        CSV formatted string, or None if no processable data
    """
    if not response_data or not isinstance(response_data, dict):
        return None

    # Check for API errors
    if 'errors' in response_data:
        return None

    if 'data' not in response_data or 'appStats' not in response_data['data']:
        return None

    app_stats = response_data['data']['appStats']
    if not app_stats or not isinstance(app_stats, dict):
        return None

    records = app_stats.get('records', [])

    if not records:
        return None

    # Get all possible field names from the first record's fieldsMap
    first_record = records[0]
    field_names = list(first_record.get('fieldsMap', {}).keys())
    field_unit_types = first_record.get('fieldsUnitTypes', [])

    # Create CSV output
    output = io.StringIO()
    writer = csv.writer(output)

    # Create headers with _mb suffix for bytes fields
    headers = []
    for i, field_name in enumerate(field_names):
        if i < len(field_unit_types) and field_unit_types[i] == 'bytes':
            headers.append(f'{field_name}_mb')
        else:
            headers.append(field_name)

    # Write header
    writer.writerow(headers)

    # Write data rows
    for record in records:
        fields_map = record.get('fieldsMap', {})
        record_unit_types = record.get('fieldsUnitTypes', [])
        row = []

        for i, field in enumerate(field_names):
            value = fields_map.get(field, '')

            # Convert bytes to MB if the field type is bytes
            if i < len(record_unit_types) and record_unit_types[i] == 'bytes':
                formatted_value = convert_bytes_to_mb(value)
                row.append(formatted_value if formatted_value else value)
            else:
                row.append(value)

        writer.writerow(row)

    return output.getvalue()
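
For context, a minimal usage sketch of the new formatter (illustrative only, not part of the diff). The response shape mirrors the fieldsMap/fieldsUnitTypes structure the module reads; the application name and traffic value are invented:

# Hypothetical example: format a hand-built appStats-style response.
# Field names and values are illustrative, not from a real API call.
from catocli.Utils.formatter_app_stats import format_app_stats

sample_response = {
    "data": {
        "appStats": {
            "records": [
                {
                    "fieldsMap": {"application": "ExampleApp", "traffic": 1048576},
                    "fieldsUnitTypes": ["string", "bytes"],
                }
            ]
        }
    }
}

print(format_app_stats(sample_response, output_format="json"))  # organized JSON
print(format_app_stats(sample_response, output_format="csv"))   # header: application,traffic_mb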
catocli/Utils/formatter_app_stats_timeseries.py
@@ -0,0 +1,377 @@
#!/usr/bin/env python3
"""
App Stats Timeseries Formatter for Cato CLI

This module provides functions to format appStatsTimeSeries API responses
into JSON and CSV formats, with special handling for timeseries data
and unit conversions.
"""

import csv
import io
import json
import re
from datetime import datetime
from typing import Dict, List, Any, Tuple

# Import shared utility functions
try:
    from .formatter_utils import format_timestamp, parse_label_for_dimensions_and_measure
except ImportError:
    try:
        from catocli.Utils.formatter_utils import format_timestamp, parse_label_for_dimensions_and_measure
    except ImportError:
        from formatter_utils import format_timestamp, parse_label_for_dimensions_and_measure


def format_app_stats_timeseries(response_data: Dict[str, Any], output_format: str = 'json') -> str:
    """
    Convert appStatsTimeSeries JSON response to specified format (JSON or CSV)

    Args:
        response_data: JSON response from appStatsTimeSeries query
        output_format: 'json' or 'csv'

    Returns:
        Formatted string in the requested format, or None if no processable data
    """
    if output_format.lower() == 'csv':
        return _format_app_stats_timeseries_to_csv(response_data)
    else:
        # Default to JSON format with organized structure
        return _format_app_stats_timeseries_to_json(response_data)


def _format_app_stats_timeseries_to_json(response_data: Dict[str, Any]) -> str:
    """
    Convert appStatsTimeSeries JSON response to organized JSON format

    Args:
        response_data: JSON response from appStatsTimeSeries query

    Returns:
        JSON formatted string, or None if no processable data
    """
    if not response_data or not isinstance(response_data, dict):
        return None

    # Check for API errors
    if 'errors' in response_data:
        return None

    if 'data' not in response_data or 'appStatsTimeSeries' not in response_data['data']:
        return None

    app_stats_ts = response_data['data']['appStatsTimeSeries']
    if app_stats_ts is None:
        return None

    timeseries = app_stats_ts.get('timeseries', [])

    if not timeseries:
        return None

    # Parse dimension information and measures from labels
    parsed_series = []
    all_timestamps = set()
    all_dimensions = set()
    all_measures = set()

    for series in timeseries:
        label = series.get('label', '')
        data_points = series.get('data', [])
        units = series.get('units', '')

        # Get measure and dimensions from key structure (new API format)
        key_info = series.get('key', {})
        measure = key_info.get('measureFieldName', '')
        dimensions = {}

        # Extract dimensions from key.dimensions array
        key_dimensions = key_info.get('dimensions', [])
        for dim_info in key_dimensions:
            if isinstance(dim_info, dict) and 'fieldName' in dim_info and 'value' in dim_info:
                dimensions[dim_info['fieldName']] = dim_info['value']

        # Fall back to label parsing if the key structure yields nothing
        if not measure and not dimensions:
            measure, dimensions = parse_label_for_dimensions_and_measure(label)

        # Create series entry with safe data parsing
        data_dict = {}
        for point in data_points:
            if isinstance(point, (list, tuple)) and len(point) >= 2:
                timestamp = int(point[0])
                value = point[1]
                data_dict[timestamp] = value
                all_timestamps.add(timestamp)

        series_entry = {
            'label': label,
            'measure': measure,
            'dimensions': dimensions,
            'data_points': len(data_dict),
            'time_range': {
                'start': format_timestamp(min(data_dict.keys())) if data_dict else None,
                'end': format_timestamp(max(data_dict.keys())) if data_dict else None
            },
            'data': data_dict
        }
        parsed_series.append(series_entry)

        # Collect metadata
        all_measures.add(measure)
        all_dimensions.update(dimensions.keys())

    # Organize timeseries data by dimension combinations and timestamps
    organized_data = {
        "appStatsTimeSeries": {
            "summary": {
                "total_series": len(parsed_series),
                "total_timestamps": len(all_timestamps),
                "time_range": {
                    "start": format_timestamp(min(all_timestamps)) if all_timestamps else None,
                    "end": format_timestamp(max(all_timestamps)) if all_timestamps else None
                },
                "measures": sorted(list(all_measures)),
                "dimensions": sorted(list(all_dimensions))
            },
            "series": []
        }
    }

    # Group series by dimension combinations for better organization
    dimension_groups = {}
    for series in parsed_series:
        dim_key = tuple(sorted(series['dimensions'].items()))
        if dim_key not in dimension_groups:
            dimension_groups[dim_key] = {
                'dimensions': series['dimensions'],
                'measures': {},
                'time_range': series['time_range']
            }
        dimension_groups[dim_key]['measures'][series['measure']] = {
            'label': series['label'],
            'data_points': series['data_points'],
            'data': series['data']
        }

    # Convert to organized format
    for dim_combo, group_data in dimension_groups.items():
        series_data = {
            'dimensions': group_data['dimensions'],
            'time_range': group_data['time_range'],
            'measures': {}
        }

        # Organize measures with unit conversion for bytes data
        for measure, measure_data in group_data['measures'].items():
            formatted_data = {}
            for timestamp, value in measure_data['data'].items():
                timestamp_str = format_timestamp(timestamp)

                if measure in ['downstream', 'upstream', 'traffic'] and value:
                    try:
                        # Current bug in appStatsTimeSeries: values already arrive
                        # in MB even though the unit type is reported as bytes
                        mb_value = value
                        # mb_value = float(value) / 1048576
                        formatted_value = f"{mb_value:.3f}".rstrip('0').rstrip('.')
                        formatted_data[timestamp_str] = {
                            'value': value,
                            'formatted_mb': formatted_value,
                            'unit_type': 'mb'
                        }
                    except (ValueError, ZeroDivisionError):
                        formatted_data[timestamp_str] = {
                            'value': value,
                            'unit_type': 'mb'
                        }
                else:
                    formatted_data[timestamp_str] = {
                        'value': value,
                        'unit_type': 'unknown'
                    }

            series_data['measures'][measure] = {
                'label': measure_data['label'],
                'data_points': measure_data['data_points'],
                'data': formatted_data
            }

        organized_data["appStatsTimeSeries"]["series"].append(series_data)

    return json.dumps(organized_data, indent=2)


def _format_app_stats_timeseries_to_csv(response_data: Dict[str, Any]) -> str:
    """
    Convert appStatsTimeSeries JSON response to CSV format

    Args:
        response_data: JSON response from appStatsTimeSeries query

    Returns:
        CSV formatted string in long format with one row per timestamp, or None if no processable data
    """
    if not response_data or 'data' not in response_data or 'appStatsTimeSeries' not in response_data['data']:
        return None

    app_stats_ts = response_data['data']['appStatsTimeSeries']
    if app_stats_ts is None:
        return None

    timeseries = app_stats_ts.get('timeseries', [])

    if not timeseries:
        return None

    # Parse dimension information and measures from labels
    # Labels are like: "sum(traffic) for application_name='Google Applications', user_name='PM Analyst'"
    parsed_series = []
    all_timestamps = set()

    for series in timeseries:
        label = series.get('label', '')
        data_points = series.get('data', [])
        units = series.get('units', '')

        # Get measure and dimensions from key structure (new API format)
        key_info = series.get('key', {})
        measure = key_info.get('measureFieldName', '')
        dimensions = {}

        # Extract dimensions from key.dimensions array
        key_dimensions = key_info.get('dimensions', [])
        for dim_info in key_dimensions:
            if isinstance(dim_info, dict) and 'fieldName' in dim_info and 'value' in dim_info:
                dimensions[dim_info['fieldName']] = dim_info['value']

        # Fall back to label parsing if the key structure yields nothing
        if not measure and not dimensions:
            try:
                if ' for ' in label:
                    measure_part, dim_part = label.split(' for ', 1)
                    # Extract measure (e.g., "sum(traffic)")
                    if '(' in measure_part and ')' in measure_part:
                        measure = measure_part.split('(')[1].split(')')[0]

                    # Parse dimensions using regex for better handling of quoted values
                    dim_pattern = r'(\w+)=[\'"]*([^,\'"]+)[\'"]*'
                    matches = re.findall(dim_pattern, dim_part)
                    for key, value in matches:
                        dimensions[key.strip()] = value.strip()
                else:
                    # Fallback: use the whole label as measure
                    measure = label
            except Exception as e:
                print(f"DEBUG: Error processing series with label '{label}': {e}")
                continue

        # Create series entry with safe data parsing
        try:
            data_dict = {}
            for point in data_points:
                if isinstance(point, (list, tuple)) and len(point) >= 2:
                    data_dict[int(point[0])] = point[1]
                    all_timestamps.add(int(point[0]))

            series_entry = {
                'measure': measure,
                'dimensions': dimensions,
                'data': data_dict
            }
            parsed_series.append(series_entry)
        except Exception as e:
            print(f"DEBUG: Error processing series with label '{label}': {e}")
            continue

    # Sort timestamps
    sorted_timestamps = sorted(all_timestamps)

    # Collect all data in long format (one row per timestamp and dimension combination)
    rows = []

    # Get all unique dimension combinations
    dimension_combos = {}
    for series in parsed_series:
        try:
            dim_key = tuple(sorted(series['dimensions'].items()))
            if dim_key not in dimension_combos:
                dimension_combos[dim_key] = {}
            dimension_combos[dim_key][series['measure']] = series['data']
        except Exception as e:
            print(f"DEBUG: Error processing dimension combination for series: {e}")
            print(f"DEBUG: Series dimensions: {series.get('dimensions', {})}")
            continue

    # Create rows for each timestamp and dimension combination
    for dim_combo, measures_data in dimension_combos.items():
        dim_dict = dict(dim_combo)

        for timestamp in sorted_timestamps:
            # Build row data for this timestamp
            row_data = {
                'timestamp_period': format_timestamp(timestamp)
            }

            # Add dimension values
            for key, value in dim_dict.items():
                row_data[key] = value

            # Add measure values for this timestamp
            for measure, data in measures_data.items():
                value = data.get(timestamp, '')

                # Convert bytes measures to MB and add appropriate suffix
                if measure in ['downstream', 'upstream', 'traffic']:
                    if value:
                        try:
                            # Current bug in appStatsTimeSeries: values already arrive
                            # in MB even though the unit type is reported as bytes
                            # mb_value = float(value) / 1048576
                            mb_value = value
                            formatted_value = f"{mb_value:.3f}".rstrip('0').rstrip('.')
                            row_data[f'{measure}_mb'] = formatted_value
                        except (ValueError, ZeroDivisionError):
                            row_data[f'{measure}_mb'] = value
                    else:
                        row_data[f'{measure}_mb'] = value
                else:
                    row_data[measure] = value

            rows.append(row_data)

    if not rows:
        return None

    # Create CSV output
    output = io.StringIO()
    writer = csv.writer(output)

    # Build header dynamically from all available columns
    all_columns = set()
    for row_data in rows:
        all_columns.update(row_data.keys())

    # Sort columns with timestamp_period first, then dimensions, then measures
    dimension_columns = []
    measure_columns = []

    for col in sorted(all_columns):
        if col == 'timestamp_period':
            continue  # Will be added first
        elif col.endswith('_mb') or col in ['downstream', 'upstream', 'traffic']:
            measure_columns.append(col)
        else:
            dimension_columns.append(col)

    header = ['timestamp_period'] + sorted(dimension_columns) + sorted(measure_columns)
    writer.writerow(header)

    # Write data rows
    for row_data in rows:
        row = []
        for col in header:
            value = row_data.get(col, '')
            row.append(value)
        writer.writerow(row)

    return output.getvalue()
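
Likewise, a minimal usage sketch for the timeseries formatter (illustrative only, not part of the diff). It assumes the key/measureFieldName/dimensions structure parsed above; the epoch timestamps and values are invented, and the exact resolution format_timestamp expects (seconds vs. milliseconds) is defined in formatter_utils:

# Hypothetical example: format a hand-built appStatsTimeSeries-style response
# as long-format CSV (one row per timestamp per dimension combination).
# All names, timestamps, and values are illustrative.
from catocli.Utils.formatter_app_stats_timeseries import format_app_stats_timeseries

sample_response = {
    "data": {
        "appStatsTimeSeries": {
            "timeseries": [
                {
                    "label": "sum(traffic) for application_name='ExampleApp'",
                    "units": "bytes",
                    "key": {
                        "measureFieldName": "traffic",
                        "dimensions": [
                            {"fieldName": "application_name", "value": "ExampleApp"}
                        ],
                    },
                    "data": [[1700000000, 12.5], [1700000900, 8.0]],
                }
            ]
        }
    }
}

# Columns: timestamp_period, application_name, traffic_mb
print(format_app_stats_timeseries(sample_response, output_format="csv"))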