catocli 2.0.4-py3-none-any.whl → 2.0.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of catocli might be problematic.
- build/lib/catocli/Utils/clidriver.py +268 -0
- build/lib/catocli/Utils/profile_manager.py +188 -0
- build/lib/catocli/Utils/version_checker.py +192 -0
- build/lib/catocli/__init__.py +2 -0
- build/lib/catocli/__main__.py +12 -0
- build/lib/catocli/parsers/configure/__init__.py +115 -0
- build/lib/catocli/parsers/configure/configure.py +307 -0
- build/lib/catocli/parsers/custom/__init__.py +57 -0
- build/lib/catocli/parsers/custom/customLib.py +561 -0
- build/lib/catocli/parsers/custom/export_rules/__init__.py +42 -0
- build/lib/catocli/parsers/custom/export_rules/export_rules.py +234 -0
- build/lib/catocli/parsers/custom/export_sites/__init__.py +21 -0
- build/lib/catocli/parsers/custom/export_sites/export_sites.py +372 -0
- build/lib/catocli/parsers/custom/import_rules_to_tf/__init__.py +58 -0
- build/lib/catocli/parsers/custom/import_rules_to_tf/import_rules_to_tf.py +451 -0
- build/lib/catocli/parsers/custom/import_sites_to_tf/__init__.py +45 -0
- build/lib/catocli/parsers/custom/import_sites_to_tf/import_sites_to_tf.py +891 -0
- build/lib/catocli/parsers/mutation_accountManagement/__init__.py +48 -0
- build/lib/catocli/parsers/mutation_admin/__init__.py +48 -0
- build/lib/catocli/parsers/mutation_container/__init__.py +138 -0
- build/lib/catocli/parsers/mutation_hardware/__init__.py +22 -0
- build/lib/catocli/parsers/mutation_policy/__init__.py +1305 -0
- build/lib/catocli/parsers/mutation_sandbox/__init__.py +35 -0
- build/lib/catocli/parsers/mutation_site/__init__.py +373 -0
- build/lib/catocli/parsers/mutation_sites/__init__.py +373 -0
- build/lib/catocli/parsers/mutation_xdr/__init__.py +48 -0
- build/lib/catocli/parsers/parserApiClient.py +513 -0
- build/lib/catocli/parsers/query_accountBySubdomain/__init__.py +16 -0
- build/lib/catocli/parsers/query_accountManagement/__init__.py +16 -0
- build/lib/catocli/parsers/query_accountMetrics/__init__.py +16 -0
- build/lib/catocli/parsers/query_accountRoles/__init__.py +16 -0
- build/lib/catocli/parsers/query_accountSnapshot/__init__.py +16 -0
- build/lib/catocli/parsers/query_admin/__init__.py +16 -0
- build/lib/catocli/parsers/query_admins/__init__.py +16 -0
- build/lib/catocli/parsers/query_appStats/__init__.py +16 -0
- build/lib/catocli/parsers/query_appStatsTimeSeries/__init__.py +16 -0
- build/lib/catocli/parsers/query_auditFeed/__init__.py +16 -0
- build/lib/catocli/parsers/query_catalogs/__init__.py +16 -0
- build/lib/catocli/parsers/query_container/__init__.py +16 -0
- build/lib/catocli/parsers/query_devices/__init__.py +16 -0
- build/lib/catocli/parsers/query_entityLookup/__init__.py +16 -0
- build/lib/catocli/parsers/query_events/__init__.py +16 -0
- build/lib/catocli/parsers/query_eventsFeed/__init__.py +16 -0
- build/lib/catocli/parsers/query_eventsTimeSeries/__init__.py +16 -0
- build/lib/catocli/parsers/query_hardware/__init__.py +16 -0
- build/lib/catocli/parsers/query_hardwareManagement/__init__.py +16 -0
- build/lib/catocli/parsers/query_licensing/__init__.py +16 -0
- build/lib/catocli/parsers/query_policy/__init__.py +161 -0
- build/lib/catocli/parsers/query_sandbox/__init__.py +16 -0
- build/lib/catocli/parsers/query_site/__init__.py +100 -0
- build/lib/catocli/parsers/query_siteLocation/__init__.py +13 -0
- build/lib/catocli/parsers/query_subDomains/__init__.py +16 -0
- build/lib/catocli/parsers/query_xdr/__init__.py +35 -0
- build/lib/catocli/parsers/raw/__init__.py +12 -0
- build/lib/graphql_client/__init__.py +11 -0
- build/lib/graphql_client/api/__init__.py +3 -0
- build/lib/graphql_client/api/call_api.py +84 -0
- build/lib/graphql_client/api_client.py +192 -0
- build/lib/graphql_client/api_client_types.py +409 -0
- build/lib/graphql_client/configuration.py +232 -0
- build/lib/graphql_client/models/__init__.py +13 -0
- build/lib/graphql_client/models/no_schema.py +71 -0
- build/lib/schema/catolib.py +1141 -0
- build/lib/schema/importSchema.py +60 -0
- build/lib/schema/remove_policyid.py +89 -0
- build/lib/schema/remove_policyid_mutations.py +89 -0
- build/lib/scripts/catolib.py +62 -0
- build/lib/scripts/export_if_rules_to_json.py +188 -0
- build/lib/scripts/export_wf_rules_to_json.py +111 -0
- build/lib/scripts/import_wf_rules_to_tfstate.py +331 -0
- build/lib/vendor/certifi/__init__.py +4 -0
- build/lib/vendor/certifi/__main__.py +12 -0
- build/lib/vendor/certifi/core.py +114 -0
- build/lib/vendor/certifi/py.typed +0 -0
- build/lib/vendor/six.py +998 -0
- build/lib/vendor/urllib3/__init__.py +211 -0
- build/lib/vendor/urllib3/_base_connection.py +172 -0
- build/lib/vendor/urllib3/_collections.py +483 -0
- build/lib/vendor/urllib3/_request_methods.py +278 -0
- build/lib/vendor/urllib3/_version.py +16 -0
- build/lib/vendor/urllib3/connection.py +1033 -0
- build/lib/vendor/urllib3/connectionpool.py +1182 -0
- build/lib/vendor/urllib3/contrib/__init__.py +0 -0
- build/lib/vendor/urllib3/contrib/emscripten/__init__.py +18 -0
- build/lib/vendor/urllib3/contrib/emscripten/connection.py +254 -0
- build/lib/vendor/urllib3/contrib/emscripten/fetch.py +418 -0
- build/lib/vendor/urllib3/contrib/emscripten/request.py +22 -0
- build/lib/vendor/urllib3/contrib/emscripten/response.py +285 -0
- build/lib/vendor/urllib3/contrib/pyopenssl.py +552 -0
- build/lib/vendor/urllib3/contrib/socks.py +228 -0
- build/lib/vendor/urllib3/exceptions.py +321 -0
- build/lib/vendor/urllib3/fields.py +341 -0
- build/lib/vendor/urllib3/filepost.py +89 -0
- build/lib/vendor/urllib3/http2/__init__.py +53 -0
- build/lib/vendor/urllib3/http2/connection.py +356 -0
- build/lib/vendor/urllib3/http2/probe.py +87 -0
- build/lib/vendor/urllib3/poolmanager.py +637 -0
- build/lib/vendor/urllib3/py.typed +2 -0
- build/lib/vendor/urllib3/response.py +1265 -0
- build/lib/vendor/urllib3/util/__init__.py +42 -0
- build/lib/vendor/urllib3/util/connection.py +137 -0
- build/lib/vendor/urllib3/util/proxy.py +43 -0
- build/lib/vendor/urllib3/util/request.py +256 -0
- build/lib/vendor/urllib3/util/response.py +101 -0
- build/lib/vendor/urllib3/util/retry.py +533 -0
- build/lib/vendor/urllib3/util/ssl_.py +513 -0
- build/lib/vendor/urllib3/util/ssl_match_hostname.py +159 -0
- build/lib/vendor/urllib3/util/ssltransport.py +276 -0
- build/lib/vendor/urllib3/util/timeout.py +275 -0
- build/lib/vendor/urllib3/util/url.py +471 -0
- build/lib/vendor/urllib3/util/util.py +42 -0
- build/lib/vendor/urllib3/util/wait.py +124 -0
- catocli/Utils/clidriver.py +1 -4
- catocli/__init__.py +1 -1
- catocli/parsers/custom/export_sites/export_sites.py +2 -2
- catocli/parsers/mutation_container/__init__.py +116 -0
- catocli/parsers/mutation_container_fqdn/README.md +7 -0
- catocli/parsers/mutation_container_fqdn_addValues/README.md +17 -0
- catocli/parsers/mutation_container_fqdn_createFromFile/README.md +17 -0
- catocli/parsers/mutation_container_fqdn_removeValues/README.md +17 -0
- catocli/parsers/mutation_container_fqdn_updateFromFile/README.md +17 -0
- catocli/parsers/mutation_container_ipAddressRange/README.md +7 -0
- catocli/parsers/mutation_container_ipAddressRange_addValues/README.md +17 -0
- catocli/parsers/mutation_container_ipAddressRange_createFromFile/README.md +17 -0
- catocli/parsers/mutation_container_ipAddressRange_removeValues/README.md +17 -0
- catocli/parsers/mutation_container_ipAddressRange_updateFromFile/README.md +17 -0
- catocli/parsers/mutation_policy_internetFirewall_addRule/README.md +1 -1
- catocli/parsers/mutation_policy_internetFirewall_updateRule/README.md +1 -1
- catocli/parsers/mutation_policy_wanFirewall_addRule/README.md +1 -1
- catocli/parsers/mutation_policy_wanFirewall_updateRule/README.md +1 -1
- catocli/parsers/parserApiClient.py +25 -11
- catocli/parsers/query_policy/README.md +0 -17
- catocli/parsers/query_policy/__init__.py +153 -8
- catocli/parsers/query_policy_appTenantRestriction/README.md +7 -0
- catocli/parsers/query_policy_appTenantRestriction_policy/README.md +17 -0
- catocli/parsers/query_policy_dynamicIpAllocation/README.md +7 -0
- catocli/parsers/query_policy_dynamicIpAllocation_policy/README.md +17 -0
- catocli/parsers/query_policy_internetFirewall/README.md +7 -0
- catocli/parsers/query_policy_internetFirewall_policy/README.md +17 -0
- catocli/parsers/query_policy_remotePortFwd/README.md +7 -0
- catocli/parsers/query_policy_remotePortFwd_policy/README.md +17 -0
- catocli/parsers/query_policy_socketLan/README.md +7 -0
- catocli/parsers/query_policy_socketLan_policy/README.md +17 -0
- catocli/parsers/query_policy_terminalServer/README.md +7 -0
- catocli/parsers/query_policy_terminalServer_policy/README.md +17 -0
- catocli/parsers/query_policy_wanFirewall/README.md +7 -0
- catocli/parsers/query_policy_wanFirewall_policy/README.md +17 -0
- catocli/parsers/query_policy_wanNetwork/README.md +7 -0
- catocli/parsers/query_policy_wanNetwork_policy/README.md +17 -0
- catocli/parsers/query_site/README.md +0 -16
- catocli/parsers/query_site/__init__.py +92 -8
- catocli/parsers/query_site_availableVersionList/README.md +17 -0
- catocli/parsers/query_site_bgpPeer/README.md +17 -0
- catocli/parsers/query_site_bgpPeerList/README.md +17 -0
- catocli/parsers/query_site_cloudInterconnectConnectionConnectivity/README.md +17 -0
- catocli/parsers/query_site_cloudInterconnectPhysicalConnection/README.md +17 -0
- catocli/parsers/query_site_cloudInterconnectPhysicalConnectionId/README.md +17 -0
- catocli/parsers/query_site_siteBgpStatus/README.md +17 -0
- catocli/parsers/raw/README.md +0 -14
- catocli/parsers/raw/__init__.py +0 -2
- {catocli-2.0.4.dist-info → catocli-2.0.5.dist-info}/METADATA +1 -1
- {catocli-2.0.4.dist-info → catocli-2.0.5.dist-info}/RECORD +290 -120
- {catocli-2.0.4.dist-info → catocli-2.0.5.dist-info}/top_level.txt +1 -0
- graphql_client/api/call_api.py +4 -3
- models/mutation.container.fqdn.addValues.json +866 -0
- models/mutation.container.fqdn.createFromFile.json +819 -0
- models/mutation.container.fqdn.removeValues.json +866 -0
- models/mutation.container.fqdn.updateFromFile.json +1045 -0
- models/mutation.container.ipAddressRange.addValues.json +1020 -0
- models/mutation.container.ipAddressRange.createFromFile.json +819 -0
- models/mutation.container.ipAddressRange.removeValues.json +1020 -0
- models/mutation.container.ipAddressRange.updateFromFile.json +1045 -0
- models/mutation.policy.appTenantRestriction.addRule.json +8 -8
- models/mutation.policy.appTenantRestriction.addSection.json +1 -1
- models/mutation.policy.appTenantRestriction.createPolicyRevision.json +2 -2
- models/mutation.policy.appTenantRestriction.discardPolicyRevision.json +2 -2
- models/mutation.policy.appTenantRestriction.moveRule.json +2 -2
- models/mutation.policy.appTenantRestriction.moveSection.json +1 -1
- models/mutation.policy.appTenantRestriction.publishPolicyRevision.json +2 -2
- models/mutation.policy.appTenantRestriction.removeRule.json +2 -2
- models/mutation.policy.appTenantRestriction.removeSection.json +1 -1
- models/mutation.policy.appTenantRestriction.updatePolicy.json +2 -2
- models/mutation.policy.appTenantRestriction.updateRule.json +8 -8
- models/mutation.policy.appTenantRestriction.updateSection.json +1 -1
- models/mutation.policy.dynamicIpAllocation.addRule.json +1 -1
- models/mutation.policy.dynamicIpAllocation.addSection.json +1 -1
- models/mutation.policy.dynamicIpAllocation.createPolicyRevision.json +1 -1
- models/mutation.policy.dynamicIpAllocation.discardPolicyRevision.json +1 -1
- models/mutation.policy.dynamicIpAllocation.moveRule.json +1 -1
- models/mutation.policy.dynamicIpAllocation.moveSection.json +1 -1
- models/mutation.policy.dynamicIpAllocation.publishPolicyRevision.json +1 -1
- models/mutation.policy.dynamicIpAllocation.removeRule.json +1 -1
- models/mutation.policy.dynamicIpAllocation.removeSection.json +1 -1
- models/mutation.policy.dynamicIpAllocation.updatePolicy.json +1 -1
- models/mutation.policy.dynamicIpAllocation.updateRule.json +1 -1
- models/mutation.policy.dynamicIpAllocation.updateSection.json +1 -1
- models/mutation.policy.internetFirewall.addRule.json +502 -55
- models/mutation.policy.internetFirewall.addSection.json +1 -1
- models/mutation.policy.internetFirewall.createPolicyRevision.json +127 -10
- models/mutation.policy.internetFirewall.discardPolicyRevision.json +127 -10
- models/mutation.policy.internetFirewall.moveRule.json +127 -10
- models/mutation.policy.internetFirewall.moveSection.json +1 -1
- models/mutation.policy.internetFirewall.publishPolicyRevision.json +127 -10
- models/mutation.policy.internetFirewall.removeRule.json +127 -10
- models/mutation.policy.internetFirewall.removeSection.json +1 -1
- models/mutation.policy.internetFirewall.updatePolicy.json +127 -10
- models/mutation.policy.internetFirewall.updateRule.json +493 -55
- models/mutation.policy.internetFirewall.updateSection.json +1 -1
- models/mutation.policy.remotePortFwd.addRule.json +5 -5
- models/mutation.policy.remotePortFwd.addSection.json +1 -1
- models/mutation.policy.remotePortFwd.createPolicyRevision.json +2 -2
- models/mutation.policy.remotePortFwd.discardPolicyRevision.json +2 -2
- models/mutation.policy.remotePortFwd.moveRule.json +2 -2
- models/mutation.policy.remotePortFwd.moveSection.json +1 -1
- models/mutation.policy.remotePortFwd.publishPolicyRevision.json +2 -2
- models/mutation.policy.remotePortFwd.removeRule.json +2 -2
- models/mutation.policy.remotePortFwd.removeSection.json +1 -1
- models/mutation.policy.remotePortFwd.updatePolicy.json +2 -2
- models/mutation.policy.remotePortFwd.updateRule.json +5 -5
- models/mutation.policy.remotePortFwd.updateSection.json +1 -1
- models/mutation.policy.socketLan.addRule.json +16 -16
- models/mutation.policy.socketLan.addSection.json +1 -1
- models/mutation.policy.socketLan.createPolicyRevision.json +4 -4
- models/mutation.policy.socketLan.discardPolicyRevision.json +4 -4
- models/mutation.policy.socketLan.moveRule.json +4 -4
- models/mutation.policy.socketLan.moveSection.json +1 -1
- models/mutation.policy.socketLan.publishPolicyRevision.json +4 -4
- models/mutation.policy.socketLan.removeRule.json +4 -4
- models/mutation.policy.socketLan.removeSection.json +1 -1
- models/mutation.policy.socketLan.updatePolicy.json +4 -4
- models/mutation.policy.socketLan.updateRule.json +16 -16
- models/mutation.policy.socketLan.updateSection.json +1 -1
- models/mutation.policy.terminalServer.addRule.json +1 -1
- models/mutation.policy.terminalServer.addSection.json +1 -1
- models/mutation.policy.terminalServer.createPolicyRevision.json +1 -1
- models/mutation.policy.terminalServer.discardPolicyRevision.json +1 -1
- models/mutation.policy.terminalServer.moveRule.json +1 -1
- models/mutation.policy.terminalServer.moveSection.json +1 -1
- models/mutation.policy.terminalServer.publishPolicyRevision.json +1 -1
- models/mutation.policy.terminalServer.removeRule.json +1 -1
- models/mutation.policy.terminalServer.removeSection.json +1 -1
- models/mutation.policy.terminalServer.updatePolicy.json +1 -1
- models/mutation.policy.terminalServer.updateRule.json +1 -1
- models/mutation.policy.terminalServer.updateSection.json +1 -1
- models/mutation.policy.wanFirewall.addRule.json +500 -53
- models/mutation.policy.wanFirewall.addSection.json +1 -1
- models/mutation.policy.wanFirewall.createPolicyRevision.json +128 -11
- models/mutation.policy.wanFirewall.discardPolicyRevision.json +128 -11
- models/mutation.policy.wanFirewall.moveRule.json +128 -11
- models/mutation.policy.wanFirewall.moveSection.json +1 -1
- models/mutation.policy.wanFirewall.publishPolicyRevision.json +128 -11
- models/mutation.policy.wanFirewall.removeRule.json +128 -11
- models/mutation.policy.wanFirewall.removeSection.json +1 -1
- models/mutation.policy.wanFirewall.updatePolicy.json +128 -11
- models/mutation.policy.wanFirewall.updateRule.json +491 -53
- models/mutation.policy.wanFirewall.updateSection.json +1 -1
- models/mutation.policy.wanNetwork.addRule.json +13 -13
- models/mutation.policy.wanNetwork.addSection.json +1 -1
- models/mutation.policy.wanNetwork.createPolicyRevision.json +1 -1
- models/mutation.policy.wanNetwork.discardPolicyRevision.json +1 -1
- models/mutation.policy.wanNetwork.moveRule.json +1 -1
- models/mutation.policy.wanNetwork.moveSection.json +1 -1
- models/mutation.policy.wanNetwork.publishPolicyRevision.json +1 -1
- models/mutation.policy.wanNetwork.removeRule.json +1 -1
- models/mutation.policy.wanNetwork.removeSection.json +1 -1
- models/mutation.policy.wanNetwork.updatePolicy.json +1 -1
- models/mutation.policy.wanNetwork.updateRule.json +13 -13
- models/mutation.policy.wanNetwork.updateSection.json +1 -1
- models/query.policy.appTenantRestriction.policy.json +3086 -0
- models/query.policy.dynamicIpAllocation.policy.json +1934 -0
- models/query.policy.internetFirewall.policy.json +7833 -0
- models/query.policy.json +233 -0
- models/query.policy.remotePortFwd.policy.json +2387 -0
- models/query.policy.socketLan.policy.json +7140 -0
- models/query.policy.terminalServer.policy.json +1632 -0
- models/query.policy.wanFirewall.policy.json +9212 -0
- models/query.policy.wanNetwork.policy.json +8010 -0
- models/query.site.availableVersionList.json +365 -0
- models/query.site.bgpPeer.json +1917 -0
- models/query.site.bgpPeerList.json +2076 -0
- models/query.site.cloudInterconnectConnectionConnectivity.json +298 -0
- models/query.site.cloudInterconnectPhysicalConnection.json +728 -0
- models/query.site.cloudInterconnectPhysicalConnectionId.json +660 -0
- models/query.site.siteBgpStatus.json +869 -0
- schema/catolib.py +13 -6
- schema/remove_policyid.py +89 -0
- schema/remove_policyid_mutations.py +89 -0
- {catocli-2.0.4.dist-info → catocli-2.0.5.dist-info}/LICENSE +0 -0
- {catocli-2.0.4.dist-info → catocli-2.0.5.dist-info}/WHEEL +0 -0
- {catocli-2.0.4.dist-info → catocli-2.0.5.dist-info}/entry_points.txt +0 -0
build/lib/catocli/parsers/custom/export_rules/export_rules.py

@@ -0,0 +1,234 @@
import os
import json
import sys
from datetime import datetime
from graphql_client.api.call_api import ApiClient, CallApi
from graphql_client.api_client import ApiException
from ..customLib import writeDataToFile, makeCall, getAccountID

def strip_ids_recursive(data):
    """Recursively strip id attributes from data structure, but only from objects that contain only 'id' and 'name' keys"""
    try:
        if isinstance(data, dict):
            # Check if this dict should have its 'id' stripped
            # Only strip 'id' if the object contains only 'id' and 'name' keys
            dict_keys = set(data.keys())
            should_strip_id = dict_keys == {'id', 'name'} or dict_keys == {'name', 'id'}

            result = {}
            for k, v in data.items():
                if k == 'id' and should_strip_id:
                    # Skip this 'id' key only if this object contains only id and name
                    continue
                else:
                    # Keep the key and recursively process the value
                    result[k] = strip_ids_recursive(v)
            return result
        elif isinstance(data, list):
            return [strip_ids_recursive(item) for item in data]
        else:
            return data
    except Exception as e:
        print(f"Error in strip_ids_recursive: {e}, data type: {type(data)}, data: {str(data)[:100]}")
        raise

def export_if_rules_to_json(args, configuration):
    """
    Export Internet Firewall rules to JSON format
    Adapted from scripts/export_if_rules_to_json.py
    """
    try:
        account_id = getAccountID(args, configuration)
        policy_query = {
            "query": "query policy ( $accountId:ID! ) { policy ( accountId:$accountId ) { internetFirewall { policy { enabled rules { audit { updatedTime updatedBy } rule { id name description index section { id name } enabled source { ip host { id name } site { id name } subnet ipRange { from to } globalIpRange { id name } networkInterface { id name } siteNetworkSubnet { id name } floatingSubnet { id name } user { id name } usersGroup { id name } group { id name } systemGroup { id name } } connectionOrigin country { id name } device { id name } deviceOS deviceAttributes { category type model manufacturer os osVersion } destination { application { id name } customApp { id name } appCategory { id name } customCategory { id name } sanctionedAppsCategory { id name } country { id name } domain fqdn ip subnet ipRange { from to } globalIpRange { id name } remoteAsn containers { fqdnContainer { id name } ipAddressRangeContainer { id name } } } service { standard { id name } custom { port portRange { from to } protocol } } action tracking { event { enabled } alert { enabled frequency subscriptionGroup { id name } webhook { id name } mailingList { id name } } } schedule { activeOn customTimeframePolicySchedule: customTimeframe { from to } customRecurringPolicySchedule: customRecurring { from to days } } exceptions { name source { ip host { id name } site { id name } subnet ipRange { from to } globalIpRange { id name } networkInterface { id name } siteNetworkSubnet { id name } floatingSubnet { id name } user { id name } usersGroup { id name } group { id name } systemGroup { id name } } deviceOS country { id name } device { id name } deviceAttributes { category type model manufacturer os osVersion } destination { application { id name } customApp { id name } appCategory { id name } customCategory { id name } sanctionedAppsCategory { id name } country { id name } domain fqdn ip subnet ipRange { from to } globalIpRange { id name } remoteAsn containers { fqdnContainer { id name } ipAddressRangeContainer { id name } } } service { standard { id name } custom { port portRangeCustomService: portRange { from to } protocol } } connectionOrigin } } properties } sections { audit { updatedTime updatedBy } section { id name } properties } audit { publishedTime publishedBy } revision { id name description changes createdTime updatedTime } } } } }",
            "variables": {
                "accountId": account_id
            },
            "operationName": "policy"
        }
        all_ifw_rules = makeCall(args, configuration, policy_query)

        # Processing data to strip id attributes
        processed_data = strip_ids_recursive(all_ifw_rules)

        # Filter out rules with properties[0]=="SYSTEM"
        filtered_rules = []
        for rule_data in processed_data['data']['policy']['internetFirewall']['policy']['rules']:
            rule_properties = rule_data.get('properties', [])
            # Skip rules where the first property is "SYSTEM"
            if rule_properties and rule_properties[0] == "SYSTEM":
                if hasattr(args, 'verbose') and args.verbose:
                    print(f"Excluding SYSTEM rule: {rule_data['rule']['name']}")
            else:
                filtered_rules.append(rule_data)

        processed_data['data']['policy']['internetFirewall']['policy']['rules'] = filtered_rules

        # Add index_in_section to each rule
        # Handle empty section names by assigning a default section name
        section_counters = {}
        for rule_data in processed_data['data']['policy']['internetFirewall']['policy']['rules']:
            section_name = rule_data['rule']['section']['name']
            # If section name is empty, use "Default Section" as the section name
            if not section_name or section_name.strip() == "":
                section_name = "Default Section"
                rule_data['rule']['section']['name'] = section_name

            if section_name not in section_counters:
                section_counters[section_name] = 0
            section_counters[section_name] += 1
            rule_data['rule']['index_in_section'] = section_counters[section_name]

        # Create rules_in_sections array
        rules_in_sections = []
        for rule_data in processed_data['data']['policy']['internetFirewall']['policy']['rules']:
            rule_info = rule_data['rule']
            rules_in_sections.append({
                "index_in_section": rule_info['index_in_section'],
                "section_name": rule_info['section']['name'],
                "rule_name": rule_info['name']
            })
            rule_info.pop("index_in_section", None)
            rule_info.pop("index", None)
            # rule_info["enabled"] = True

        # Add rules_in_sections to the policy structure
        processed_data['data']['policy']['internetFirewall']['policy']['rules_in_sections'] = rules_in_sections

        # Reformat sections array to have index, section_id and section_name structure
        # Exclude the first section from export
        sections_with_ids = all_ifw_rules['data']['policy']['internetFirewall']['policy']['sections']
        processed_sections = []
        for index, section_data in enumerate(sections_with_ids):
            # print("sections_with_ids",json.dumps(section_data, indent=2))
            if index > 0: # Skip the first section (index 0)
                processed_sections.append({
                    "section_index": index,
                    "section_name": section_data['section']['name'],
                    "section_id": section_data['section']['id']
                })

        # Replace the original sections array with the reformatted one
        processed_data['data']['policy']['internetFirewall']['policy']['sections'] = processed_sections

        # Handle timestamp in filename if requested
        filename_template = "all_ifw_rules_and_sections_{account_id}.json"
        if hasattr(args, 'append_timestamp') and args.append_timestamp:
            timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
            filename_template = "all_ifw_rules_and_sections_{account_id}_" + timestamp + ".json"

        output_file = writeDataToFile(
            data=processed_data,
            args=args,
            account_id=account_id,
            default_filename_template=filename_template,
            default_directory="config_data"
        )

        return [{"success": True, "output_file": output_file, "account_id": account_id}]

    except Exception as e:
        print(f"ERROR: {str(e)}")
        return [{"success": False, "error": str(e)}]


def export_wf_rules_to_json(args, configuration):
    """
    Export WAN Firewall rules to JSON format
    Adapted from scripts/export_wf_rules_to_json.py
    """
    try:
        account_id = getAccountID(args, configuration)

        policy_query = {
            "query": "query policy ( $accountId:ID! ) { policy ( accountId:$accountId ) { wanFirewall { policy { enabled rules { audit { updatedTime updatedBy } rule { id name description index section { id name } enabled source { host { id name } site { id name } subnet ip ipRange { from to } globalIpRange { id name } networkInterface { id name } siteNetworkSubnet { id name } floatingSubnet { id name } user { id name } usersGroup { id name } group { id name } systemGroup { id name } } connectionOrigin country { id name } device { id name } deviceOS deviceAttributes { category type model manufacturer os osVersion } destination { host { id name } site { id name } subnet ip ipRange { from to } globalIpRange { id name } networkInterface { id name } siteNetworkSubnet { id name } floatingSubnet { id name } user { id name } usersGroup { id name } group { id name } systemGroup { id name } } application { application { id name } appCategory { id name } customApp { id name } customCategory { id name } sanctionedAppsCategory { id name } domain fqdn ip subnet ipRange { from to } globalIpRange { id name } } service { standard { id name } custom { port portRange { from to } protocol } } action tracking { event { enabled } alert { enabled frequency subscriptionGroup { id name } webhook { id name } mailingList { id name } } } schedule { activeOn customTimeframePolicySchedule: customTimeframe { from to } customRecurringPolicySchedule: customRecurring { from to days } } direction exceptions { name source { host { id name } site { id name } subnet ip ipRange { from to } globalIpRange { id name } networkInterface { id name } siteNetworkSubnet { id name } floatingSubnet { id name } user { id name } usersGroup { id name } group { id name } systemGroup { id name } } deviceOS destination { host { id name } site { id name } subnet ip ipRange { from to } globalIpRange { id name } networkInterface { id name } siteNetworkSubnet { id name } floatingSubnet { id name } user { id name } usersGroup { id name } group { id name } systemGroup { id name } } country { id name } device { id name } deviceAttributes { category type model manufacturer os osVersion } application { application { id name } appCategory { id name } customApp { id name } customCategory { id name } sanctionedAppsCategory { id name } domain fqdn ip subnet ipRange { from to } globalIpRange { id name } } service { standard { id name } custom { port portRangeCustomService: portRange { from to } protocol } } connectionOrigin direction } } properties } sections { audit { updatedTime updatedBy } section { id name } properties } audit { publishedTime publishedBy } revision { id name description changes createdTime updatedTime } } } } }",
            "variables": {
                "accountId": account_id
            },
            "operationName": "policy"
        }
        all_wf_rules = makeCall(args, configuration, policy_query)

        if not all_wf_rules or 'data' not in all_wf_rules:
            raise ValueError("Failed to retrieve data from API")

        # Processing data to strip id attributes
        processed_data = strip_ids_recursive(all_wf_rules)

        # Filter out rules with properties[0]=="SYSTEM"
        filtered_rules = []
        for rule_data in processed_data['data']['policy']['wanFirewall']['policy']['rules']:
            rule_properties = rule_data.get('properties', [])
            # Skip rules where the first property is "SYSTEM"
            if rule_properties and rule_properties[0] == "SYSTEM":
                if hasattr(args, 'verbose') and args.verbose:
                    print(f"Excluding SYSTEM rule: {rule_data['rule']['name']}")
            else:
                filtered_rules.append(rule_data)

        processed_data['data']['policy']['wanFirewall']['policy']['rules'] = filtered_rules

        # Add index_in_section to each rule
        # Handle empty section names by assigning a default section name
        section_counters = {}
        for rule_data in processed_data['data']['policy']['wanFirewall']['policy']['rules']:
            section_name = rule_data['rule']['section']['name']
            # If section name is empty, use "Default Section" as the section name
            if not section_name or section_name.strip() == "":
                section_name = "Default Section"
                rule_data['rule']['section']['name'] = section_name

            if section_name not in section_counters:
                section_counters[section_name] = 0
            section_counters[section_name] += 1
            rule_data['rule']['index_in_section'] = section_counters[section_name]

        # Create rules_in_sections array
        rules_in_sections = []
        for rule_data in processed_data['data']['policy']['wanFirewall']['policy']['rules']:
            rule_info = rule_data['rule']
            rules_in_sections.append({
                "index_in_section": rule_info['index_in_section'],
                "section_name": rule_info['section']['name'],
                "rule_name": rule_info['name']
            })
            rule_info.pop("index_in_section", None)
            rule_info.pop("index", None)
            # rule_info["enabled"] = True

        # Add rules_in_sections to the policy structure
        processed_data['data']['policy']['wanFirewall']['policy']['rules_in_sections'] = rules_in_sections

        # Reformat sections array to have index, section_id and section_name structure
        # Exclude the first section from export
        sections_with_ids = all_wf_rules['data']['policy']['wanFirewall']['policy']['sections']
        processed_sections = []
        for index, section_data in enumerate(sections_with_ids):
            processed_sections.append({
                "section_index": index+1,
                "section_name": section_data['section']['name'],
                "section_id": section_data['section']['id']
            })

        # Replace the original sections array with the reformatted one
        processed_data['data']['policy']['wanFirewall']['policy']['sections'] = processed_sections

        # Handle timestamp in filename if requested
        filename_template = "all_wf_rules_and_sections_{account_id}.json"
        if hasattr(args, 'append_timestamp') and args.append_timestamp:
            timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
            filename_template = "all_wf_rules_and_sections_{account_id}_" + timestamp + ".json"

        output_file = writeDataToFile(
            data=processed_data,
            args=args,
            account_id=account_id,
            default_filename_template=filename_template,
            default_directory="config_data"
        )

        return [{"success": True, "output_file": output_file, "account_id": account_id}]

    except Exception as e:
        print(f"ERROR: {str(e)}")
        return [{"success": False, "error": str(e)}]
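Note on the export_rules.py hunk above: strip_ids_recursive only removes the id key from objects whose keys are exactly id and name (the name-only references in the exported policy) and leaves every other id in place. A minimal sketch of that behavior, assuming the module is importable from the installed wheel under the path shown in the file list:

# Illustrative sketch, not part of the diff.
# Assumes catocli.parsers.custom.export_rules.export_rules is importable once the wheel is installed.
from catocli.parsers.custom.export_rules.export_rules import strip_ids_recursive

rule = {
    "id": "12345",                                              # kept: object has keys other than id/name
    "name": "Block P2P",
    "section": {"id": "67890", "name": "Default Section"},      # id dropped: keys are exactly {id, name}
    "source": {"host": [{"id": "1", "name": "host-a"}]},        # id dropped inside nested lists too
}

print(strip_ids_recursive(rule))
# {'id': '12345', 'name': 'Block P2P', 'section': {'name': 'Default Section'},
#  'source': {'host': [{'name': 'host-a'}]}}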
build/lib/catocli/parsers/custom/export_sites/__init__.py

@@ -0,0 +1,21 @@
import catocli.parsers.custom.export_sites.export_sites as export_sites

def export_sites_parse(subparsers):
    """Create export_sites command parsers"""

    # Create the socket_sites parser (direct command, no subparsers)
    socket_sites_parser = subparsers.add_parser(
        'socket_sites',
        help='Export consolidated site and socket data to JSON format',
        usage='catocli export socket_sites [-accountID <account_id>] [options]'
    )

    socket_sites_parser.add_argument('-accountID', help='Account ID to export data from (uses CATO_ACCOUNT_ID environment variable if not specified)', required=False)
    socket_sites_parser.add_argument('-siteIDs', help='Comma-separated list of site IDs to export (e.g., "132606,132964,133511")', required=False)
    socket_sites_parser.add_argument('--output-file-path', help='Full path including filename and extension for output file. If not specified, uses default: config_data/socket_site_data_{account_id}.json')
    socket_sites_parser.add_argument('--append-timestamp', action='store_true', help='Append timestamp to the filename after account ID (format: YYYY-MM-DD_HH-MM-SS)')
    socket_sites_parser.add_argument('-v', '--verbose', action='store_true', help='Verbose output')

    socket_sites_parser.set_defaults(func=export_sites.export_socket_site_to_json)

    return socket_sites_parser
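Note on the export_sites/__init__.py hunk above: export_sites_parse registers a socket_sites command on whatever argparse subparsers object it is handed and binds it to export_socket_site_to_json via set_defaults. A minimal wiring sketch, assuming the import path shown in the file list; the "export" parent parser here is an assumption for illustration, not catocli's actual parser tree:

# Illustrative sketch, not part of the diff.
import argparse
from catocli.parsers.custom.export_sites import export_sites_parse

root = argparse.ArgumentParser(prog="catocli")
# Hypothetical "export" parent command for the sketch; catocli builds its own hierarchy.
export_sub = root.add_subparsers(dest="command").add_parser("export").add_subparsers(dest="export_target")
export_sites_parse(export_sub)

args = root.parse_args(["export", "socket_sites", "-accountID", "12345", "--append-timestamp", "-v"])
print(args.accountID, args.append_timestamp, args.verbose)   # 12345 True True
print(args.func)   # export_socket_site_to_json; catocli would call this with (args, configuration)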
build/lib/catocli/parsers/custom/export_sites/export_sites.py

@@ -0,0 +1,372 @@
import os
import json
import traceback
import sys
from datetime import datetime
from graphql_client.api.call_api import ApiClient, CallApi
from graphql_client.api_client import ApiException
from ..customLib import writeDataToFile, makeCall, getAccountID

def export_socket_site_to_json(args, configuration):
    """
    Export consolidated site and socket data to JSON format
    """
    processed_data = {'sites':[]}
    warning_stats = {
        'missing_sites': 0,
        'missing_interfaces': 0,
        'missing_data': 0,
        'missing_interface_details': []
    }

    try:
        settings = {}
        with open(os.path.join(os.path.dirname(__file__), '../../../../settings.json'), 'r', encoding='utf-8') as f:
            settings = json.load(f)

        account_id = getAccountID(args, configuration)
        # Get account snapshot with siteIDs if provided
        # Get siteIDs from args if provided (comma-separated string)
        site_ids = []
        if hasattr(args, 'siteIDs') and args.siteIDs:
            # Parse comma-separated string into list, removing whitespace
            site_ids = [site_id.strip() for site_id in args.siteIDs.split(',') if site_id.strip()]
            if hasattr(args, 'verbose') and args.verbose:
                print(f"Filtering snapshot for site IDs: {site_ids}")

        ###############################################################
        ## Call APIs to retrieve sites, interface and network ranges ##
        ###############################################################
        snapshot_sites = getAccountSnapshot(args, configuration, account_id, site_ids)
        entity_network_interfaces = getEntityLookup(args, configuration, account_id, "networkInterface")
        entity_network_ranges = getEntityLookup(args, configuration, account_id, "siteRange")
        entity_sites = getEntityLookup(args, configuration, account_id, "site")

        ##################################################################
        ## Create processed_data object indexed by siteId with location ##
        ##################################################################
        for snapshot_site in snapshot_sites['data']['accountSnapshot']['sites']:
            cur_site = {
                'wan_interfaces': [],
                'lan_interfaces': [],
            }
            site_id = snapshot_site.get('id')
            connectionType = snapshot_site.get('infoSiteSnapshot', {}).get('connType', "")
            if connectionType not in settings["ignore_export_by_socket_type"]:
                cur_site['id'] = site_id
                cur_site['name'] = snapshot_site.get('infoSiteSnapshot', {}).get('name')
                cur_site['description'] = snapshot_site.get('infoSiteSnapshot', {}).get('description')
                cur_site['connectionType'] = connectionType
                cur_site['type'] = snapshot_site.get('infoSiteSnapshot', {}).get('type')
                cur_site = populateSiteLocationData(args, snapshot_site, cur_site)

                site_interfaces = snapshot_site.get('infoSiteSnapshot', {}).get('interfaces', [])
                for wan_ni in site_interfaces:
                    cur_wan_interface = {}
                    role = wan_ni.get('wanRoleInterfaceInfo', "")
                    interfaceName = wan_ni.get('id', "")
                    if role is not None and role[0:3] == "wan":
                        if interfaceName[0:3] in ("WAN", "USB", "LTE"):
                            cur_wan_interface['id'] = site_id+":"+ wan_ni.get('id', "")
                        else:
                            cur_wan_interface['id'] = site_id+":INT_"+ wan_ni.get('id', "")
                        cur_wan_interface['name'] = wan_ni.get('name', "")
                        cur_wan_interface['upstreamBandwidth'] = wan_ni.get('upstreamBandwidth', 0)
                        cur_wan_interface['downstreamBandwidth'] = wan_ni.get('downstreamBandwidth', 0)
                        cur_wan_interface['destType'] = wan_ni.get('destType', "")
                        cur_wan_interface['role'] = role
                        cur_site['wan_interfaces'].append(cur_wan_interface)

                if site_id:
                    processed_data['sites'].append(cur_site)

        ##################################################################################
        ## Process entity lookup LAN network interfaces adding to site object by site_id##
        ##################################################################################
        interface_map = {}
        for lan_ni in entity_network_interfaces:
            cur_lan_interface = {
                'network_ranges': [],
            }
            site_id = str(lan_ni.get("helperFields","").get('siteId', ""))
            id = str(lan_ni.get('entity', "").get('id', ""))
            interfaceName = lan_ni.get('helperFields', "").get('interfaceName', "")
            cur_lan_interface['id'] = id
            cur_lan_interface['name'] = interfaceName
            # Split interfaceName on " \ " and take the last element
            cur_lan_interface['index'] = lan_ni.get("helperFields","").get('interfaceId', "")
            cur_lan_interface['destType'] = lan_ni.get("helperFields","").get('destType', "")

            # Create a composite key for interface mapping that includes site_id
            interface_key = f"{site_id}_{interfaceName}"
            interface_map[interface_key] = id

            # Only add interface if the site exists in processed_data
            site_entry = next((site for site in processed_data['sites'] if site['id'] == site_id), None)
            if site_entry:
                site_entry['lan_interfaces'].append(cur_lan_interface)
            else:
                if hasattr(args, 'verbose') and args.verbose:
                    print(f"WARNING: Site {site_id} not found in snapshot data, skipping interface {interfaceName} ({id})")

        #############################################################################
        ## Process entity lookup network ranges populating by network interface id ##
        #############################################################################
        for range in entity_network_ranges:
            if hasattr(args, 'verbose') and args.verbose:
                print(f"Processing network range: {type(range)} - {range}")
            cur_range = {}
            helper_fields = range.get("helperFields", {})
            entity_data = range.get('entity', {})

            if hasattr(args, 'verbose') and args.verbose:
                print(f" helperFields type: {type(helper_fields)}, value: {helper_fields}")
                print(f" entity type: {type(entity_data)}, value: {entity_data}")

            range_id = entity_data.get('id', "")
            site_id = str(helper_fields.get('siteId', ""))
            interface_name = str(helper_fields.get('interfaceName', ""))
            # Use the composite key to lookup interface_id
            interface_key = f"{site_id}_{interface_name}"
            interface_id = str(interface_map.get(interface_key, ""))
            cur_range['id'] = range_id
            range_name = entity_data.get('name', "")
            if range_name and " \\ " in range_name:
                cur_range['rangeName'] = range_name.split(" \\ ").pop()
            else:
                cur_range['rangeName'] = range_name
            cur_range['name'] = range_name
            cur_range['subnet'] = helper_fields.get('subnet', "")
            cur_range['vlanTag'] = helper_fields.get('vlanTag', "")
            cur_range['microsegmentation'] = helper_fields.get('microsegmentation', "")

            # Safely add to processed_data with existence checks
            if site_id and interface_id and range_id:
                site_entry = next((site for site in processed_data['sites'] if site['id'] == site_id), None)
                if not site_entry:
                    # print(f"WARNING: Site ID {site_id} not found in processed_data")
                    warning_stats['missing_sites'] += 1
                    continue

                # Find the interface in the lan_interfaces array
                interface_entry = next((iface for iface in site_entry['lan_interfaces'] if iface['id'] == interface_id), None)
                if not interface_entry:
                    print(f"WARNING: Interface {interface_id} (name: {interface_name}) not found in site {site_id}. Range {range_id} will be skipped.")
                    warning_stats['missing_interfaces'] += 1
                    warning_stats['missing_interface_details'].append({
                        'interface_id': interface_id,
                        'interface_name': interface_name,
                        'site_id': site_id,
                        'range_id': range_id
                    })
                    if hasattr(args, 'verbose') and args.verbose:
                        available_interfaces = [iface['id'] for iface in site_entry['lan_interfaces']]
                        print(f" Available interfaces in site {site_id}: {available_interfaces}")
                        print(f" Looked up interface with key: {interface_key}")
                    continue
                interface_entry['network_ranges'].append(cur_range)
                if hasattr(args, 'verbose') and args.verbose:
                    print(f" Successfully added range {range_id} to site {site_id}, interface_name {interface_name} with interface_id {interface_id}")
            else:
                if not interface_id:
                    print(f"WARNING: Interface lookup failed for range {range_id}. Site: {site_id}, Interface name: {interface_name}, Lookup key: {interface_key}")
                    if hasattr(args, 'verbose') and args.verbose:
                        print(f" Available interface keys: {list(interface_map.keys())[:10]}...") # Show first 10 keys
                else:
                    print(f"WARNING: Missing required data for range: site_id={site_id}, interface_id={interface_id}, range_id={range_id}")
                warning_stats['missing_data'] += 1

        # Print warning summary
        total_warnings = warning_stats['missing_sites'] + warning_stats['missing_interfaces'] + warning_stats['missing_data']
        if total_warnings > 0:
            print(f"\n=== WARNING SUMMARY ===")
            print(f"Total warnings: {total_warnings}")
            print(f"- Missing sites: {warning_stats['missing_sites']}")
            print(f"- Missing interfaces: {warning_stats['missing_interfaces']}")
            print(f"- Missing data: {warning_stats['missing_data']}")

            if warning_stats['missing_interfaces'] > 0:
                print(f"\nMissing interface details:")
                unique_interfaces = {}
                for detail in warning_stats['missing_interface_details']:
                    key = f"{detail['interface_id']} ({detail['interface_name']})"
                    if key not in unique_interfaces:
                        unique_interfaces[key] = []
                    unique_interfaces[key].append(detail['site_id'])

                for interface, sites in unique_interfaces.items():
                    print(f" - Interface {interface} missing in sites: {', '.join(sites)}")

            print(f"\nThese warnings indicate network ranges that reference interfaces that don't exist in the site data.")
            print(f"This is usually caused by data inconsistencies and can be safely ignored if the export completes successfully.")
            print(f"=========================\n")

        # Handle timestamp in filename if requested
        filename_template = "socket_sites_{account_id}.json"
        if hasattr(args, 'append_timestamp') and args.append_timestamp:
            timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
            filename_template = "socket_sites_{account_id}_" + timestamp + ".json"

        # Write the processed data to file using the general-purpose function
        output_file = writeDataToFile(
            data=processed_data,
            args=args,
            account_id=account_id,
            default_filename_template=filename_template,
            default_directory="config_data"
        )

        return [{"success": True, "output_file": output_file, "account_id": account_id}]

    except Exception as e:
        # Get the current exception info
        exc_type, exc_value, exc_traceback = sys.exc_info()

        # Get the line number where the error occurred
        line_number = exc_traceback.tb_lineno
        filename = exc_traceback.tb_frame.f_code.co_filename
        function_name = exc_traceback.tb_frame.f_code.co_name

        # Get the full traceback as a string
        full_traceback = traceback.format_exc()

        # Create detailed error message
        error_details = {
            "error_type": exc_type.__name__,
            "error_message": str(exc_value),
            "line_number": line_number,
            "function_name": function_name,
            "filename": os.path.basename(filename),
            "full_traceback": full_traceback
        }

        # Print detailed error information
        print(f"ERROR: {exc_type.__name__}: {str(exc_value)}")
        print(f"Location: {os.path.basename(filename)}:{line_number} in {function_name}()")
        print(f"Full traceback:\n{full_traceback}")

        return [{"success": False, "error": str(e), "error_details": error_details}]


##########################################################################
########################### Helper functions #############################
##########################################################################

def populateSiteLocationData(args, site_data, cur_site):
    # Load site location data for timezone and state code lookups
    site_location_data = {}
    try:
        script_dir = os.path.dirname(os.path.abspath(__file__))
        models_dir = os.path.join(script_dir, '..', '..', '..', '..', 'models')
        location_file = os.path.join(models_dir, 'query.siteLocation.json')

        if os.path.exists(location_file):
            with open(location_file, 'r', encoding='utf-8') as f:
                site_location_data = json.load(f)
            if hasattr(args, 'verbose') and args.verbose:
                print(f"Loaded {len(site_location_data)} location entries from {location_file}")
        else:
            if hasattr(args, 'verbose') and args.verbose:
                print(f"Warning: Site location file not found at {location_file}")
    except Exception as e:
        if hasattr(args, 'verbose') and args.verbose:
            print(f"Warning: Could not load site location data: {e}")

    ## siteLocation attributes
    cur_site['site_location'] = {}
    cur_site['site_location']['address'] = site_data.get('infoSiteSnapshot', {}).get('address')
    cur_site['site_location']['city'] = site_data.get('infoSiteSnapshot', {}).get('cityName')
    cur_site['site_location']['stateName'] = site_data.get('infoSiteSnapshot', {}).get('countryStateName')
    cur_site['site_location']['countryCode'] = site_data.get('infoSiteSnapshot', {}).get('countryCode')
    cur_site['site_location']['countryName'] = site_data.get('infoSiteSnapshot', {}).get('countryName')

    # Look up timezone and state code from location data
    country_name = cur_site['site_location']['countryName']
    state_name = cur_site['site_location']['stateName']
    city = cur_site['site_location']['city']

    # Create lookup key based on available data
    if state_name:
        lookup_key = f"{country_name}___{state_name}___{city}"
    else:
        lookup_key = f"{country_name}___{city}"

    # Debug output for lookup
    if hasattr(args, 'verbose') and args.verbose:
        print(f"Site {cur_site['name']}: Looking up '{lookup_key}'")

    # Look up location details
    location_data = site_location_data.get(lookup_key, {})

    if hasattr(args, 'verbose') and args.verbose:
        if location_data:
            print(f" Found location data: {location_data}")
        else:
            print(f" No location data found for key: {lookup_key}")
            # Try to find similar keys for debugging
            similar_keys = [k for k in site_location_data.keys() if country_name in k and (not city or city in k)][:5]
            if similar_keys:
                print(f" Similar keys found: {similar_keys}")

    cur_site['stateCode'] = location_data.get('stateCode', None)

    # Get timezone - always use the 0 element in the timezones array
    timezones = location_data.get('timezone', [])
    cur_site['site_location']['timezone'] = timezones[0] if timezones else None
    return cur_site

def getEntityLookup(args, configuration, account_id, entity_type):
    """
    Helper function to get entity lookup data for a specific entity type
    """
    #################################
    ## Get entity lookup for sites ##
    #################################
    entity_query = {
        "query": "query entityLookup ( $accountID:ID! $type:EntityType! $sortInput:[SortInput] $lookupFilterInput:[LookupFilterInput] ) { entityLookup ( accountID:$accountID type:$type sort:$sortInput filters:$lookupFilterInput ) { items { entity { id name type } description helperFields } total } }",
        "variables": {
            "accountID": account_id,
            "type": entity_type
        },
        "operationName": "entityLookup"
    }
    response = makeCall(args, configuration, entity_query)

    # Check for GraphQL errors in snapshot response
    if 'errors' in response:
        error_messages = [error.get('message', 'Unknown error') for error in response['errors']]
        raise Exception(f"Snapshot API returned errors: {', '.join(error_messages)}")

    if not response or 'data' not in response or 'entityLookup' not in response['data']:
        raise ValueError("Failed to retrieve snapshot data from API")

    items = response['data']['entityLookup']['items']
    if items is None:
        items = []
        if hasattr(args, 'verbose') and args.verbose:
            print("No items found in entity lookup - "+ entity_type)
    return items

def getAccountSnapshot(args, configuration, account_id, site_ids=None):
    snapshot_query = {
        "query": "query accountSnapshot ( $siteIDs:[ID!] $accountID:ID ) { accountSnapshot ( accountID:$accountID ) { id sites ( siteIDs:$siteIDs ) { id protoId connectivityStatusSiteSnapshot: connectivityStatus haStatusSiteSnapshot: haStatus { readiness wanConnectivity keepalive socketVersion } operationalStatusSiteSnapshot: operationalStatus lastConnected connectedSince popName devices { id name identifier connected haRole interfaces { connected id name physicalPort naturalOrder popName previousPopID previousPopName tunnelConnectionReason tunnelUptime tunnelRemoteIP tunnelRemoteIPInfoInterfaceSnapshot: tunnelRemoteIPInfo { ip countryCode countryName city state provider latitude longitude } type infoInterfaceSnapshot: info { id name upstreamBandwidth downstreamBandwidth upstreamBandwidthMbpsPrecision downstreamBandwidthMbpsPrecision destType wanRole } cellularInterfaceInfoInterfaceSnapshot: cellularInterfaceInfo { networkType simSlotId modemStatus isModemConnected iccid imei operatorName isModemSuspended apn apnSelectionMethod signalStrength isRoamingAllowed simNumber disconnectionReason isSimSlot1Detected isSimSlot2Detected } } lastConnected lastDuration connectedSince lastPopID lastPopName recentConnections { duration interfaceName deviceName lastConnected popName remoteIP remoteIPInfoRecentConnection: remoteIPInfo { ip countryCode countryName city state provider latitude longitude } } type deviceUptime socketInfo { id serial isPrimary platformSocketInfo: platform version versionUpdateTime } interfacesLinkState { id up mediaIn linkSpeed duplex hasAddress hasInternet hasTunnel } osType osVersion version versionNumber releaseGroup mfaExpirationTime mfaCreationTime internalIP } infoSiteSnapshot: info { name type description countryCode region countryName countryStateName cityName address isHA connType creationTime interfaces { id name upstreamBandwidth downstreamBandwidth upstreamBandwidthMbpsPrecision downstreamBandwidthMbpsPrecision destType wanRoleInterfaceInfo: wanRole } sockets { id serial isPrimary platformSocketInfo: platform version versionUpdateTime } ipsec { isPrimary catoIP remoteIP ikeVersion } } hostCount altWanStatus } users { id connectivityStatusUserSnapshot: connectivityStatus operationalStatusUserSnapshot: operationalStatus name deviceName uptime lastConnected version versionNumber popID popName remoteIP remoteIPInfoUserSnapshot: remoteIPInfo { ip countryCode countryName city state provider latitude longitude } internalIP osType osVersion devices { id name identifier connected haRole interfaces { connected id name physicalPort naturalOrder popName previousPopID previousPopName tunnelConnectionReason tunnelUptime tunnelRemoteIP tunnelRemoteIPInfoInterfaceSnapshot: tunnelRemoteIPInfo { ip countryCode countryName city state provider latitude longitude } type infoInterfaceSnapshot: info { id name upstreamBandwidth downstreamBandwidth upstreamBandwidthMbpsPrecision downstreamBandwidthMbpsPrecision destType wanRole } cellularInterfaceInfoInterfaceSnapshot: cellularInterfaceInfo { networkType simSlotId modemStatus isModemConnected iccid imei operatorName isModemSuspended apn apnSelectionMethod signalStrength isRoamingAllowed simNumber disconnectionReason isSimSlot1Detected isSimSlot2Detected } } lastConnected lastDuration connectedSince lastPopID lastPopName recentConnections { duration interfaceName deviceName lastConnected popName remoteIP remoteIPInfoRecentConnection: remoteIPInfo { ip countryCode countryName city state provider latitude longitude } } type deviceUptime socketInfo { id serial isPrimary platformSocketInfo: platform version versionUpdateTime } interfacesLinkState { id up mediaIn linkSpeed duplex hasAddress hasInternet hasTunnel } osType osVersion version versionNumber releaseGroup mfaExpirationTime mfaCreationTime internalIP } connectedInOffice infoUserSnapshot: info { name status email creationTime phoneNumber origin authMethod } recentConnections { duration interfaceName deviceName lastConnected popName remoteIP remoteIPInfo { ip countryCode countryName city state provider latitude longitude } } } timestamp } }",
        "variables": {
            "accountID": account_id,
            "siteIDs": site_ids
        },
        "operationName": "accountSnapshot"
    }
    response = makeCall(args, configuration, snapshot_query)

    # Check for GraphQL errors in snapshot response
    if 'errors' in response:
        error_messages = [error.get('message', 'Unknown error') for error in response['errors']]
        raise Exception(f"Snapshot API returned errors: {', '.join(error_messages)}")

    if not response or 'data' not in response or 'accountSnapshot' not in response['data']:
        raise ValueError("Failed to retrieve snapshot data from API")

    if not response or 'sites' not in response['data']['accountSnapshot'] or response['data']['accountSnapshot']['sites'] is None:
        raise ValueError("No sites found in account snapshot data from API")

    return response
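Note on the export_sites.py hunk above: network ranges returned by entityLookup are joined back to LAN interfaces through a composite key, f"{site_id}_{interfaceName}", because interface names alone are not unique across sites. A self-contained sketch of that join with made-up values:

# Illustrative sketch, not part of the diff; data values are invented for the example.
interfaces = [
    {"site_id": "1001", "interface_id": "5", "interface_name": "LAN 01"},
    {"site_id": "1002", "interface_id": "9", "interface_name": "LAN 01"},  # same name, different site
]
ranges = [
    {"site_id": "1002", "interface_name": "LAN 01", "subnet": "10.2.0.0/24"},
]

# Same composite key shape as export_sites.py: "{site_id}_{interfaceName}" -> interface id
interface_map = {f"{i['site_id']}_{i['interface_name']}": i["interface_id"] for i in interfaces}

for r in ranges:
    key = f"{r['site_id']}_{r['interface_name']}"
    # Resolves to the interface of the correct site, not just the first "LAN 01" seen
    print(key, "->", interface_map.get(key, ""))   # 1002_LAN 01 -> 9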