catocli 2.0.1__py3-none-any.whl → 2.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of catocli might be problematic. Click here for more details.
- catocli/Utils/clidriver.py +41 -6
- catocli/__init__.py +1 -1
- catocli/parsers/custom/__init__.py +7 -5
- catocli/parsers/custom/customLib.py +490 -1
- catocli/parsers/custom/export_rules/__init__.py +5 -1
- catocli/parsers/custom/export_rules/export_rules.py +32 -183
- catocli/parsers/custom/export_sites/__init__.py +20 -0
- catocli/parsers/custom/export_sites/export_sites.py +365 -0
- catocli/parsers/custom/import_rules_to_tf/__init__.py +3 -3
- catocli/parsers/custom/import_rules_to_tf/import_rules_to_tf.py +20 -146
- catocli/parsers/custom/import_sites_to_tf/__init__.py +45 -0
- catocli/parsers/custom/import_sites_to_tf/import_sites_to_tf.py +891 -0
- catocli/parsers/mutation_accountManagement/__init__.py +18 -21
- catocli/parsers/mutation_admin/__init__.py +18 -21
- catocli/parsers/mutation_container/__init__.py +6 -7
- catocli/parsers/mutation_hardware/__init__.py +6 -7
- catocli/parsers/mutation_policy/__init__.py +666 -588
- catocli/parsers/mutation_policy_terminalServer/README.md +7 -0
- catocli/parsers/mutation_policy_terminalServer_addRule/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_addSection/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_createPolicyRevision/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_discardPolicyRevision/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_moveRule/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_moveSection/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_publishPolicyRevision/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_removeRule/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_removeSection/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_updatePolicy/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_updateRule/README.md +18 -0
- catocli/parsers/mutation_policy_terminalServer_updateSection/README.md +18 -0
- catocli/parsers/mutation_sandbox/__init__.py +12 -14
- catocli/parsers/mutation_site/__init__.py +189 -175
- catocli/parsers/mutation_site_addSocketAddOnCard/README.md +17 -0
- catocli/parsers/mutation_site_removeSocketAddOnCard/README.md +17 -0
- catocli/parsers/mutation_site_startSiteUpgrade/README.md +17 -0
- catocli/parsers/mutation_sites/__init__.py +189 -175
- catocli/parsers/mutation_sites_addSocketAddOnCard/README.md +17 -0
- catocli/parsers/mutation_sites_removeSocketAddOnCard/README.md +17 -0
- catocli/parsers/mutation_sites_startSiteUpgrade/README.md +17 -0
- catocli/parsers/mutation_xdr/__init__.py +18 -21
- catocli/parsers/parserApiClient.py +36 -11
- catocli/parsers/query_accountBySubdomain/__init__.py +6 -7
- catocli/parsers/query_accountManagement/__init__.py +6 -7
- catocli/parsers/query_accountMetrics/__init__.py +6 -7
- catocli/parsers/query_accountRoles/__init__.py +6 -7
- catocli/parsers/query_accountSnapshot/__init__.py +6 -7
- catocli/parsers/query_admin/__init__.py +6 -7
- catocli/parsers/query_admins/__init__.py +6 -7
- catocli/parsers/query_appStats/__init__.py +6 -7
- catocli/parsers/query_appStatsTimeSeries/__init__.py +6 -7
- catocli/parsers/query_auditFeed/__init__.py +6 -7
- catocli/parsers/query_catalogs/__init__.py +6 -7
- catocli/parsers/query_container/__init__.py +6 -7
- catocli/parsers/query_devices/README.md +2 -1
- catocli/parsers/query_devices/__init__.py +6 -7
- catocli/parsers/query_entityLookup/__init__.py +6 -7
- catocli/parsers/query_events/__init__.py +6 -7
- catocli/parsers/query_eventsFeed/README.md +1 -1
- catocli/parsers/query_eventsFeed/__init__.py +6 -7
- catocli/parsers/query_eventsTimeSeries/__init__.py +6 -7
- catocli/parsers/query_hardware/__init__.py +6 -7
- catocli/parsers/query_hardwareManagement/__init__.py +6 -7
- catocli/parsers/query_licensing/__init__.py +6 -7
- catocli/parsers/query_policy/README.md +2 -1
- catocli/parsers/query_policy/__init__.py +6 -7
- catocli/parsers/query_sandbox/__init__.py +6 -7
- catocli/parsers/query_site/README.md +2 -1
- catocli/parsers/query_site/__init__.py +6 -7
- catocli/parsers/query_siteLocation/__init__.py +4 -8
- catocli/parsers/query_subDomains/__init__.py +6 -7
- catocli/parsers/query_xdr/__init__.py +12 -14
- catocli/parsers/raw/README.md +4 -0
- catocli/parsers/raw/__init__.py +5 -2
- {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/METADATA +1 -1
- {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/RECORD +108 -67
- graphql_client/api/call_api.py +12 -6
- models/mutation.policy.remotePortFwd.updateRule.json +6 -6
- models/mutation.policy.terminalServer.addRule.json +2403 -0
- models/mutation.policy.terminalServer.addSection.json +1358 -0
- models/mutation.policy.terminalServer.createPolicyRevision.json +1873 -0
- models/mutation.policy.terminalServer.discardPolicyRevision.json +1807 -0
- models/mutation.policy.terminalServer.moveRule.json +1605 -0
- models/mutation.policy.terminalServer.moveSection.json +1259 -0
- models/mutation.policy.terminalServer.publishPolicyRevision.json +1864 -0
- models/mutation.policy.terminalServer.removeRule.json +1253 -0
- models/mutation.policy.terminalServer.removeSection.json +958 -0
- models/mutation.policy.terminalServer.updatePolicy.json +1883 -0
- models/mutation.policy.terminalServer.updateRule.json +2096 -0
- models/mutation.policy.terminalServer.updateSection.json +1111 -0
- models/mutation.site.addSocketAddOnCard.json +1050 -0
- models/mutation.site.removeSocketAddOnCard.json +786 -0
- models/mutation.site.startSiteUpgrade.json +802 -0
- models/mutation.sites.addSocketAddOnCard.json +1050 -0
- models/mutation.sites.removeSocketAddOnCard.json +786 -0
- models/mutation.sites.startSiteUpgrade.json +802 -0
- models/query.devices.json +311 -2
- models/query.events.json +48 -0
- models/query.eventsFeed.json +12 -0
- models/query.eventsTimeSeries.json +36 -0
- models/query.licensing.json +21815 -10093
- models/query.policy.json +1898 -305
- models/query.site.json +225 -0
- models/query.siteLocation.json +97190 -295396
- schema/catolib.py +63 -30
- {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/LICENSE +0 -0
- {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/WHEEL +0 -0
- {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/entry_points.txt +0 -0
- {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/top_level.txt +0 -0
catocli/Utils/clidriver.py
CHANGED
|
@@ -77,7 +77,7 @@ def show_version_info(args, configuration=None):
|
|
|
77
77
|
print("Unable to check for updates (check your internet connection)")
|
|
78
78
|
return [{"success": True, "current_version": catocli.__version__, "latest_version": latest_version if not args.current_only else None}]
|
|
79
79
|
|
|
80
|
-
def get_configuration():
|
|
80
|
+
def get_configuration(skip_api_key=False):
|
|
81
81
|
configuration = Configuration()
|
|
82
82
|
configuration.verify_ssl = False
|
|
83
83
|
configuration.debug = CATO_DEBUG
|
|
@@ -99,7 +99,9 @@ def get_configuration():
|
|
|
99
99
|
print(f"Run 'catocli configure set --profile {profile_name}' to update your credentials.")
|
|
100
100
|
exit(1)
|
|
101
101
|
|
|
102
|
-
|
|
102
|
+
# Only set API key if not using custom headers file
|
|
103
|
+
if not skip_api_key:
|
|
104
|
+
configuration.api_key["x-api-key"] = credentials['cato_token']
|
|
103
105
|
configuration.host = credentials['endpoint']
|
|
104
106
|
configuration.accountID = credentials['account_id']
|
|
105
107
|
|
|
@@ -119,6 +121,7 @@ https://github.com/catonetworks/cato-api-explorer
|
|
|
119
121
|
parser = argparse.ArgumentParser(prog='catocli', usage='%(prog)s <operationType> <operationName> [options]', description=defaultReadmeStr)
|
|
120
122
|
parser.add_argument('--version', action='version', version=catocli.__version__)
|
|
121
123
|
parser.add_argument('-H', '--header', action='append', dest='headers', help='Add custom headers in "Key: Value" format. Can be used multiple times.')
|
|
124
|
+
parser.add_argument('--headers-file', dest='headers_file', help='Load headers from a file. Each line should contain a header in "Key: Value" format.')
|
|
122
125
|
subparsers = parser.add_subparsers()
|
|
123
126
|
|
|
124
127
|
# Version command - enhanced with update checking
|
|
@@ -183,6 +186,28 @@ def parse_headers(header_strings):
|
|
|
183
186
|
headers[key.strip()] = value.strip()
|
|
184
187
|
return headers
|
|
185
188
|
|
|
189
|
+
def parse_headers_from_file(file_path):
|
|
190
|
+
headers = {}
|
|
191
|
+
try:
|
|
192
|
+
with open(file_path, 'r') as f:
|
|
193
|
+
for line_num, line in enumerate(f, 1):
|
|
194
|
+
line = line.strip()
|
|
195
|
+
if not line or line.startswith('#'):
|
|
196
|
+
# Skip empty lines and comments
|
|
197
|
+
continue
|
|
198
|
+
if ':' not in line:
|
|
199
|
+
print(f"ERROR: Invalid header format in {file_path} at line {line_num}: '{line}'. Use 'Key: Value' format.")
|
|
200
|
+
exit(1)
|
|
201
|
+
key, value = line.split(':', 1)
|
|
202
|
+
headers[key.strip()] = value.strip()
|
|
203
|
+
except FileNotFoundError:
|
|
204
|
+
print(f"ERROR: Headers file '{file_path}' not found.")
|
|
205
|
+
exit(1)
|
|
206
|
+
except IOError as e:
|
|
207
|
+
print(f"ERROR: Could not read headers file '{file_path}': {e}")
|
|
208
|
+
exit(1)
|
|
209
|
+
return headers
|
|
210
|
+
|
|
186
211
|
def main(args=None):
|
|
187
212
|
# Check if no arguments provided or help is requested
|
|
188
213
|
if args is None:
|
|
@@ -203,13 +228,20 @@ def main(args=None):
|
|
|
203
228
|
if hasattr(args, 'func') and hasattr(args.func, '__module__') and 'configure' in str(args.func.__module__):
|
|
204
229
|
response = args.func(args, None)
|
|
205
230
|
else:
|
|
231
|
+
# Check if using headers file to determine if we should skip API key
|
|
232
|
+
using_headers_file = hasattr(args, 'headers_file') and args.headers_file
|
|
233
|
+
|
|
206
234
|
# Get configuration from profiles
|
|
207
|
-
configuration = get_configuration()
|
|
235
|
+
configuration = get_configuration(skip_api_key=using_headers_file)
|
|
208
236
|
|
|
209
237
|
# Parse custom headers if provided
|
|
238
|
+
custom_headers = {}
|
|
210
239
|
if hasattr(args, 'headers') and args.headers:
|
|
211
|
-
custom_headers
|
|
212
|
-
|
|
240
|
+
custom_headers.update(parse_headers(args.headers))
|
|
241
|
+
if hasattr(args, 'headers_file') and args.headers_file:
|
|
242
|
+
custom_headers.update(parse_headers_from_file(args.headers_file))
|
|
243
|
+
if custom_headers:
|
|
244
|
+
configuration.custom_headers.update(custom_headers)
|
|
213
245
|
# Handle account ID override
|
|
214
246
|
if args.func.__name__ != "createRawRequest":
|
|
215
247
|
if hasattr(args, 'accountID') and args.accountID is not None:
|
|
@@ -224,6 +256,9 @@ def main(args=None):
|
|
|
224
256
|
else:
|
|
225
257
|
if response!=None:
|
|
226
258
|
print(json.dumps(response[0], sort_keys=True, indent=4))
|
|
259
|
+
except KeyboardInterrupt:
|
|
260
|
+
print("\n\nOperation interrupted by user (Ctrl+C). Exiting gracefully...")
|
|
261
|
+
exit(130) # Standard exit code for SIGINT
|
|
227
262
|
except Exception as e:
|
|
228
263
|
if isinstance(e, AttributeError):
|
|
229
264
|
print('Missing arguments. Usage: catocli <operation> -h')
|
|
@@ -233,4 +268,4 @@ def main(args=None):
|
|
|
233
268
|
else:
|
|
234
269
|
print('ERROR: ',e)
|
|
235
270
|
traceback.print_exc()
|
|
236
|
-
|
|
271
|
+
exit(1)
|
catocli/__init__.py
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
__version__ = "2.0.
|
|
1
|
+
__version__ = "2.0.3"
|
|
2
2
|
__cato_host__ = "https://api.catonetworks.com/api/v1/graphql2"
|
|
@@ -1,8 +1,10 @@
|
|
|
1
1
|
|
|
2
2
|
import catocli.parsers.custom.customLib as customLib
|
|
3
|
-
from catocli.parsers.custom.export_rules import
|
|
4
|
-
from catocli.parsers.custom.import_rules_to_tf import
|
|
3
|
+
from catocli.parsers.custom.export_rules import export_rules_parse
|
|
4
|
+
from catocli.parsers.custom.import_rules_to_tf import rule_import_parse
|
|
5
|
+
from catocli.parsers.custom.import_sites_to_tf import site_import_parse
|
|
5
6
|
from catocli.parsers.configure import configure_parse
|
|
7
|
+
from catocli.parsers.custom.export_sites import export_sites_parse
|
|
6
8
|
|
|
7
9
|
def custom_parse(subparsers):
|
|
8
10
|
entityTypes = ["account","admin","allocatedIP","any","availablePooledUsage","availableSiteUsage","dhcpRelayGroup","groupSubscription","host","lanFirewall","localRouting","location","mailingListSubscription","networkInterface","portProtocol","simpleService","site","siteRange","timezone","vpnUser","webhookSubscription"]
|
|
@@ -31,15 +33,15 @@ def custom_parse(subparsers):
|
|
|
31
33
|
item_list_parser.set_defaults(func=customLib.entityTypeList,operation_name=entity)
|
|
32
34
|
|
|
33
35
|
# Add additional custom parsers here
|
|
34
|
-
|
|
35
|
-
|
|
36
|
+
export_rules_parse(subparsers)
|
|
37
|
+
import_parser = rule_import_parse(subparsers)
|
|
38
|
+
site_import_parse(subparsers, import_parser)
|
|
36
39
|
configure_parse(subparsers)
|
|
37
40
|
|
|
38
41
|
def get_help_custom(path):
|
|
39
42
|
matchCmd = "catocli "+path.replace("_"," ")
|
|
40
43
|
import os
|
|
41
44
|
pwd = os.path.dirname(__file__)
|
|
42
|
-
# doc = path+"/README.md"
|
|
43
45
|
abs_path = os.path.join(pwd, "README.md")
|
|
44
46
|
new_line = "\nEXAMPLES:\n"
|
|
45
47
|
lines = open(abs_path, "r").readlines()
|
|
@@ -1,4 +1,8 @@
|
|
|
1
|
+
import os
|
|
1
2
|
import json
|
|
3
|
+
import subprocess
|
|
4
|
+
import glob
|
|
5
|
+
from pathlib import Path
|
|
2
6
|
from graphql_client.api.call_api import ApiClient, CallApi
|
|
3
7
|
from graphql_client.api_client import ApiException
|
|
4
8
|
import logging
|
|
@@ -16,7 +20,15 @@ def entityTypeList(args, configuration):
|
|
|
16
20
|
}
|
|
17
21
|
variablesObj = { "accountID": (params.get("accountID") if params.get("accountID") else params.get("accountId"))}
|
|
18
22
|
|
|
19
|
-
|
|
23
|
+
# Create the API client instance
|
|
24
|
+
api_client = ApiClient(configuration)
|
|
25
|
+
|
|
26
|
+
# Show masked API key in verbose mode (without affecting actual API calls)
|
|
27
|
+
if hasattr(args, 'verbose') and args.verbose and 'x-api-key' in api_client.configuration.api_key:
|
|
28
|
+
print(f"API Key (masked): ***MASKED***")
|
|
29
|
+
|
|
30
|
+
# Create the API instance
|
|
31
|
+
instance = CallApi(api_client)
|
|
20
32
|
operationName = params["operation_name"]
|
|
21
33
|
query = '''query entityLookup ( $type:EntityType! $accountID:ID! $search:String ) {
|
|
22
34
|
entityLookup ( accountID:$accountID type:$type search:$search ) {
|
|
@@ -70,3 +82,480 @@ def entityTypeList(args, configuration):
|
|
|
70
82
|
else:
|
|
71
83
|
print("ERROR: "+message,", ".join(invalidVars))
|
|
72
84
|
|
|
85
|
+
|
|
86
|
+
# def getEntityLookup(args, configuration, account_id, entity_type, indexIdName=None):
|
|
87
|
+
# """
|
|
88
|
+
# Get entity lookup data from the API and return entities indexed by entityID or custom ID from helperFields
|
|
89
|
+
|
|
90
|
+
# Args:
|
|
91
|
+
# args: Command line arguments containing verbose and other options
|
|
92
|
+
# configuration: API configuration object
|
|
93
|
+
# account_id: The account ID to use for the lookup
|
|
94
|
+
# entity_type: The type of entity to lookup (e.g., "site", "vpnUser", "host", etc.)
|
|
95
|
+
# indexIdName: Optional name of the ID attribute in helperFields to use for indexing instead of entity.id
|
|
96
|
+
|
|
97
|
+
# Returns:
|
|
98
|
+
# dict: A dictionary with entity IDs (or custom IDs) as keys and entity information as values
|
|
99
|
+
# Format: {"entityID1": {"id": "entityID1", "name": "entityName", "type": "entityType", "description": "desc", "indexId": "customID"}, ...}
|
|
100
|
+
# """
|
|
101
|
+
# # Define the entity lookup query
|
|
102
|
+
# entity_query = {
|
|
103
|
+
# "query": "query entityLookup ( $accountID:ID! $type:EntityType! $sortInput:[SortInput] $lookupFilterInput:[LookupFilterInput] ) { entityLookup ( accountID:$accountID type:$type sort:$sortInput filters:$lookupFilterInput ) { items { entity { id name type } description helperFields } total } }",
|
|
104
|
+
# "variables": {
|
|
105
|
+
# "accountID": account_id,
|
|
106
|
+
# "type": entity_type
|
|
107
|
+
# },
|
|
108
|
+
# "operationName": "entityLookup"
|
|
109
|
+
# }
|
|
110
|
+
|
|
111
|
+
# # Create API client instance with params
|
|
112
|
+
# # Create the API client instance
|
|
113
|
+
# entity_api_client = ApiClient(configuration)
|
|
114
|
+
|
|
115
|
+
# # Show masked API key in verbose mode (without affecting actual API calls)
|
|
116
|
+
# if hasattr(args, 'verbose') and args.verbose and 'x-api-key' in entity_api_client.configuration.api_key:
|
|
117
|
+
# print(f"Entity Lookup API Key (masked): ***MASKED***")
|
|
118
|
+
|
|
119
|
+
# # Create the API instance
|
|
120
|
+
# entity_query_instance = CallApi(entity_api_client)
|
|
121
|
+
# params = {
|
|
122
|
+
# 'v': hasattr(args, 'verbose') and args.verbose, # verbose mode
|
|
123
|
+
# 'f': 'json', # format
|
|
124
|
+
# 'p': False, # pretty print
|
|
125
|
+
# 't': False # test mode
|
|
126
|
+
# }
|
|
127
|
+
|
|
128
|
+
# try:
|
|
129
|
+
# # Call the entity lookup API
|
|
130
|
+
# entity_response = entity_query_instance.call_api(entity_query, params)
|
|
131
|
+
# entity_data = entity_response[0] if entity_response else {}
|
|
132
|
+
|
|
133
|
+
# # Show raw API response in verbose mode
|
|
134
|
+
# if hasattr(args, 'verbose') and args.verbose:
|
|
135
|
+
# print("\n" + "=" * 80)
|
|
136
|
+
# print(f"{entity_type.upper()} LOOKUP API RESPONSE:")
|
|
137
|
+
# print("=" * 80)
|
|
138
|
+
# print(json.dumps(entity_data, indent=2))
|
|
139
|
+
# print("=" * 80 + "\n")
|
|
140
|
+
|
|
141
|
+
# # Check for GraphQL errors in entity response
|
|
142
|
+
# if 'errors' in entity_data:
|
|
143
|
+
# error_messages = [error.get('message', 'Unknown error') for error in entity_data['errors']]
|
|
144
|
+
# raise Exception(f"{entity_type} lookup API returned errors: {', '.join(error_messages)}")
|
|
145
|
+
|
|
146
|
+
# if not entity_data or 'data' not in entity_data:
|
|
147
|
+
# raise ValueError(f"Failed to retrieve {entity_type} data from API")
|
|
148
|
+
|
|
149
|
+
# # Extract entity data and create indexed structure
|
|
150
|
+
# entities = {}
|
|
151
|
+
# entity_lookup = entity_data.get('data', {}).get('entityLookup', {})
|
|
152
|
+
# entity_items = entity_lookup.get('items', [])
|
|
153
|
+
|
|
154
|
+
# if hasattr(args, 'verbose') and args.verbose:
|
|
155
|
+
# print(f"Processing {len(entity_items)} {entity_type}s from entity lookup")
|
|
156
|
+
# if indexIdName:
|
|
157
|
+
# print(f"Using custom index field: {indexIdName}")
|
|
158
|
+
|
|
159
|
+
# for item in entity_items:
|
|
160
|
+
# entity = item.get('entity', {})
|
|
161
|
+
# entity_id = entity.get('id')
|
|
162
|
+
# helper_fields = item.get('helperFields', [])
|
|
163
|
+
|
|
164
|
+
# # Determine the index key to use
|
|
165
|
+
# index_key = entity_id # Default to entity ID
|
|
166
|
+
# custom_id = None
|
|
167
|
+
|
|
168
|
+
# if indexIdName and helper_fields:
|
|
169
|
+
# # Look for the custom ID in helperFields
|
|
170
|
+
# for field in helper_fields:
|
|
171
|
+
# if field.get('name') == indexIdName:
|
|
172
|
+
# custom_id = field.get('value')
|
|
173
|
+
# if custom_id:
|
|
174
|
+
# index_key = custom_id
|
|
175
|
+
# break
|
|
176
|
+
|
|
177
|
+
# if index_key:
|
|
178
|
+
# entity_data = {
|
|
179
|
+
# 'id': entity_id,
|
|
180
|
+
# 'name': entity.get('name', ''),
|
|
181
|
+
# 'type': entity.get('type', ''),
|
|
182
|
+
# 'description': item.get('description', ''),
|
|
183
|
+
# 'helperFields': helper_fields
|
|
184
|
+
# }
|
|
185
|
+
|
|
186
|
+
# # Add the custom index ID if it was found and used
|
|
187
|
+
# if custom_id and indexIdName:
|
|
188
|
+
# entity_data['indexId'] = custom_id
|
|
189
|
+
# entity_data['indexIdName'] = indexIdName
|
|
190
|
+
|
|
191
|
+
# entities[index_key] = entity_data
|
|
192
|
+
|
|
193
|
+
# if hasattr(args, 'verbose') and args.verbose and custom_id:
|
|
194
|
+
# print(f"Entity {entity_id} indexed by {indexIdName}: {custom_id}")
|
|
195
|
+
|
|
196
|
+
# if hasattr(args, 'verbose') and args.verbose:
|
|
197
|
+
# index_type = f"custom field '{indexIdName}'" if indexIdName else "entity ID"
|
|
198
|
+
# print(f"Successfully indexed {len(entities)} {entity_type}s by {index_type}")
|
|
199
|
+
|
|
200
|
+
# return entities
|
|
201
|
+
|
|
202
|
+
# except ApiException as e:
|
|
203
|
+
# raise Exception(f"{entity_type} lookup API call failed - {e}")
|
|
204
|
+
# except Exception as e:
|
|
205
|
+
# raise Exception(f"Unexpected error during {entity_type} lookup API call - {e}")
|
|
206
|
+
|
|
207
|
+
def makeCall(args, configuration, query):
|
|
208
|
+
# Create API client instance with params
|
|
209
|
+
instance = CallApi(ApiClient(configuration))
|
|
210
|
+
params = {
|
|
211
|
+
'v': hasattr(args, 'verbose') and args.verbose, # verbose mode
|
|
212
|
+
'f': 'json', # format
|
|
213
|
+
'p': False, # pretty print
|
|
214
|
+
't': False # test mode
|
|
215
|
+
}
|
|
216
|
+
|
|
217
|
+
try:
|
|
218
|
+
# Call the API directly
|
|
219
|
+
# NOTE: The API client (graphql_client/api_client_types.py lines 106-108)
|
|
220
|
+
# automatically prints error responses and exits on GraphQL errors.
|
|
221
|
+
# This means our custom error handling below may not be reached if there are GraphQL errors.
|
|
222
|
+
response = instance.call_api(query, params)
|
|
223
|
+
response = response[0] if response else {}
|
|
224
|
+
|
|
225
|
+
# Show raw API response in verbose mode
|
|
226
|
+
if hasattr(args, 'verbose') and args.verbose:
|
|
227
|
+
print("\n" + "=" * 80)
|
|
228
|
+
print("RAW API RESPONSE:")
|
|
229
|
+
print("=" * 80)
|
|
230
|
+
print(json.dumps(response, indent=2))
|
|
231
|
+
print("=" * 80 + "\n")
|
|
232
|
+
|
|
233
|
+
# Check for GraphQL errors first (may not be reached due to API client behavior)
|
|
234
|
+
if 'errors' in response:
|
|
235
|
+
error_messages = [error.get('message', 'Unknown error') for error in response['errors']]
|
|
236
|
+
raise Exception(f"API returned errors: {', '.join(error_messages)}")
|
|
237
|
+
|
|
238
|
+
if not response or 'data' not in response:
|
|
239
|
+
raise ValueError("Failed to retrieve data from API")
|
|
240
|
+
|
|
241
|
+
return response
|
|
242
|
+
|
|
243
|
+
except ApiException as e:
|
|
244
|
+
raise Exception(f"API call failed - {e}")
|
|
245
|
+
except Exception as e:
|
|
246
|
+
raise Exception(f"Unexpected error during API call - {e}")
|
|
247
|
+
|
|
248
|
+
def writeDataToFile(data, args, account_id=None, default_filename_template="data_{account_id}.json", default_directory="config_data"):
|
|
249
|
+
"""
|
|
250
|
+
Write data to a file with flexible output path configuration
|
|
251
|
+
|
|
252
|
+
Args:
|
|
253
|
+
data: The data to write to file (will be JSON serialized)
|
|
254
|
+
args: Command line arguments containing output_file_path and verbose options
|
|
255
|
+
account_id: Optional account ID for default filename generation
|
|
256
|
+
default_filename_template: Template for default filename (use {account_id} placeholder)
|
|
257
|
+
default_directory: Default directory for output files
|
|
258
|
+
|
|
259
|
+
Returns:
|
|
260
|
+
str: The path of the file that was written
|
|
261
|
+
|
|
262
|
+
Raises:
|
|
263
|
+
Exception: If file writing fails
|
|
264
|
+
"""
|
|
265
|
+
# Set up output file path
|
|
266
|
+
if hasattr(args, 'output_file_path') and args.output_file_path:
|
|
267
|
+
output_file = args.output_file_path
|
|
268
|
+
destination_dir = os.path.dirname(output_file)
|
|
269
|
+
if hasattr(args, 'verbose') and args.verbose:
|
|
270
|
+
print(f"Using output file path: {output_file}")
|
|
271
|
+
else:
|
|
272
|
+
destination_dir = default_directory
|
|
273
|
+
if account_id:
|
|
274
|
+
filename = default_filename_template.format(account_id=account_id)
|
|
275
|
+
else:
|
|
276
|
+
# If no account_id provided, remove the placeholder
|
|
277
|
+
filename = default_filename_template.replace("_{account_id}", "")
|
|
278
|
+
output_file = os.path.join(destination_dir, filename)
|
|
279
|
+
if hasattr(args, 'verbose') and args.verbose:
|
|
280
|
+
print(f"Using default path: {output_file}")
|
|
281
|
+
|
|
282
|
+
# Create destination directory if it doesn't exist
|
|
283
|
+
if destination_dir and not os.path.exists(destination_dir):
|
|
284
|
+
if hasattr(args, 'verbose') and args.verbose:
|
|
285
|
+
print(f"Creating directory: {destination_dir}")
|
|
286
|
+
os.makedirs(destination_dir)
|
|
287
|
+
|
|
288
|
+
try:
|
|
289
|
+
# Write the data to the file
|
|
290
|
+
with open(output_file, 'w', encoding='utf-8') as f:
|
|
291
|
+
json.dump(data, f, indent=4, ensure_ascii=False)
|
|
292
|
+
|
|
293
|
+
if hasattr(args, 'verbose') and args.verbose:
|
|
294
|
+
print(f"Successfully wrote data to: {output_file}")
|
|
295
|
+
|
|
296
|
+
return output_file
|
|
297
|
+
|
|
298
|
+
except Exception as e:
|
|
299
|
+
raise Exception(f"Failed to write data to file {output_file}: {str(e)}")
|
|
300
|
+
|
|
301
|
+
def getAccountID(args, configuration):
|
|
302
|
+
"""
|
|
303
|
+
Get the account ID from command line arguments, configuration, or environment variable.
|
|
304
|
+
|
|
305
|
+
Args:
|
|
306
|
+
args: Command line arguments
|
|
307
|
+
configuration: API configuration object
|
|
308
|
+
|
|
309
|
+
Returns:
|
|
310
|
+
str: The account ID to use for API calls
|
|
311
|
+
|
|
312
|
+
Raises:
|
|
313
|
+
ValueError: If no account ID is provided or found
|
|
314
|
+
"""
|
|
315
|
+
account_id = None
|
|
316
|
+
if hasattr(args, 'accountID') and args.accountID:
|
|
317
|
+
account_id = args.accountID
|
|
318
|
+
elif hasattr(configuration, 'accountID') and configuration.accountID:
|
|
319
|
+
account_id = configuration.accountID
|
|
320
|
+
else:
|
|
321
|
+
account_id = os.getenv('CATO_ACCOUNT_ID')
|
|
322
|
+
|
|
323
|
+
if not account_id:
|
|
324
|
+
raise ValueError("Account ID is required. Provide it using the -accountID flag or set CATO_ACCOUNT_ID environment variable.")
|
|
325
|
+
|
|
326
|
+
return account_id
|
|
327
|
+
|
|
328
|
+
def check_terraform_binary():
|
|
329
|
+
"""Check if terraform binary is available"""
|
|
330
|
+
try:
|
|
331
|
+
result = subprocess.run(['terraform', '--version'], capture_output=True, text=True)
|
|
332
|
+
if result.returncode == 0:
|
|
333
|
+
return True, result.stdout.strip().split('\n')[0]
|
|
334
|
+
else:
|
|
335
|
+
return False, "Terraform binary not found or not working"
|
|
336
|
+
except FileNotFoundError:
|
|
337
|
+
return False, "Terraform binary not found in PATH"
|
|
338
|
+
except Exception as e:
|
|
339
|
+
return False, f"Error checking terraform binary: {e}"
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def check_terraform_config_files():
|
|
343
|
+
"""Check if Terraform configuration files exist in current directory"""
|
|
344
|
+
tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
|
|
345
|
+
if tf_files:
|
|
346
|
+
return True, tf_files
|
|
347
|
+
else:
|
|
348
|
+
return False, []
|
|
349
|
+
|
|
350
|
+
|
|
351
|
+
def check_terraform_init():
|
|
352
|
+
"""Check if Terraform has been initialized"""
|
|
353
|
+
terraform_dir = Path('.terraform')
|
|
354
|
+
if terraform_dir.exists() and terraform_dir.is_dir():
|
|
355
|
+
# Check for providers
|
|
356
|
+
providers_dir = terraform_dir / 'providers'
|
|
357
|
+
if providers_dir.exists():
|
|
358
|
+
return True, "Terraform is initialized"
|
|
359
|
+
else:
|
|
360
|
+
return False, "Terraform directory exists but no providers found"
|
|
361
|
+
else:
|
|
362
|
+
return False, "Terraform not initialized (.terraform directory not found)"
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
def check_module_exists(module_name):
|
|
366
|
+
"""Check if the specified module exists in Terraform configuration"""
|
|
367
|
+
try:
|
|
368
|
+
# Remove 'module.' prefix if present
|
|
369
|
+
clean_module_name = module_name.replace('module.', '')
|
|
370
|
+
|
|
371
|
+
# Method 1: Check .tf files directly for module definitions
|
|
372
|
+
tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
|
|
373
|
+
for tf_file in tf_files:
|
|
374
|
+
try:
|
|
375
|
+
with open(tf_file, 'r') as f:
|
|
376
|
+
content = f.read()
|
|
377
|
+
# Look for module "module_name" blocks
|
|
378
|
+
if f'module "{clean_module_name}"' in content or f"module '{clean_module_name}'" in content:
|
|
379
|
+
return True, f"Module '{clean_module_name}' found in {tf_file}"
|
|
380
|
+
except Exception as e:
|
|
381
|
+
print(f"Warning: Could not read {tf_file}: {e}")
|
|
382
|
+
continue
|
|
383
|
+
|
|
384
|
+
# Method 2: Try terraform show -json as fallback
|
|
385
|
+
try:
|
|
386
|
+
result = subprocess.run(
|
|
387
|
+
['terraform', 'show', '-json'],
|
|
388
|
+
capture_output=True,
|
|
389
|
+
text=True,
|
|
390
|
+
cwd=Path.cwd()
|
|
391
|
+
)
|
|
392
|
+
|
|
393
|
+
if result.returncode == 0:
|
|
394
|
+
state_data = json.loads(result.stdout)
|
|
395
|
+
|
|
396
|
+
# Check if module exists in configuration
|
|
397
|
+
if 'configuration' in state_data and state_data['configuration']:
|
|
398
|
+
modules = state_data.get('configuration', {}).get('root_module', {}).get('module_calls', {})
|
|
399
|
+
if clean_module_name in modules:
|
|
400
|
+
return True, f"Module '{clean_module_name}' found in Terraform state"
|
|
401
|
+
|
|
402
|
+
# Also check in planned_values for modules
|
|
403
|
+
if 'planned_values' in state_data and state_data['planned_values']:
|
|
404
|
+
modules = state_data.get('planned_values', {}).get('root_module', {}).get('child_modules', [])
|
|
405
|
+
for module in modules:
|
|
406
|
+
module_addr = module.get('address', '')
|
|
407
|
+
if clean_module_name in module_addr:
|
|
408
|
+
return True, f"Module '{clean_module_name}' found in planned values"
|
|
409
|
+
except (subprocess.SubprocessError, json.JSONDecodeError) as e:
|
|
410
|
+
print(f"Warning: Could not check terraform state: {e}")
|
|
411
|
+
|
|
412
|
+
return False, f"Module '{clean_module_name}' not found in Terraform configuration files"
|
|
413
|
+
|
|
414
|
+
except Exception as e:
|
|
415
|
+
return False, f"Error checking module existence: {e}"
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
def validate_terraform_environment(module_name, verbose=False):
    """Run all pre-flight checks for the local Terraform environment.

    Verifies, in order: the terraform binary, presence of configuration
    files, initialization state, existence of *module_name*, and finally
    `terraform validate`. Raises Exception on the first failed check.

    Args:
        module_name: Terraform module the import will target.
        verbose: When True, echo each checker's full message instead of
            a short summary line.
    """

    def _report(detail, summary):
        # Verbose mode prints the checker's full message; otherwise a short summary.
        if verbose:
            print(f" {detail}")
        else:
            print(summary)

    print("\n Validating Terraform environment...")

    # 1. Terraform binary must be available on PATH.
    print("\n Checking Terraform binary...")
    has_terraform, terraform_msg = check_terraform_binary()
    if not has_terraform:
        raise Exception(f" Terraform not available: {terraform_msg}")
    _report(terraform_msg, " Terraform binary found")

    # 2. The working directory must contain configuration files.
    print("\n Checking Terraform configuration files...")
    has_config, config_files = check_terraform_config_files()
    if not has_config:
        raise Exception(" No Terraform configuration files (.tf or .tf.json) found in current directory")
    _report(
        f"Found {len(config_files)} configuration files: {', '.join(config_files)}",
        f" Found {len(config_files)} Terraform configuration files",
    )

    # 3. 'terraform init' must already have been run.
    print("\n Checking Terraform initialization...")
    is_initialized, init_msg = check_terraform_init()
    if not is_initialized:
        raise Exception(f" {init_msg}. Run 'terraform init' first.")
    _report(init_msg, " Terraform is initialized")

    # 4. The target module must be declared in the configuration.
    print(f"\n Checking if module '{module_name}' exists...")
    module_exists, module_msg = check_module_exists(module_name)
    if not module_exists:
        raise Exception(f" {module_msg}. Please add the module to your Terraform configuration first.")
    _report(module_msg, f" Module '{module_name}' found")

    # 5. 'terraform validate' confirms modules are installed and config is sound.
    print("\n Checking if modules are properly installed...")
    try:
        result = subprocess.run(
            ['terraform', 'validate'],
            capture_output=True,
            text=True,
            cwd=Path.cwd()
        )
    except subprocess.SubprocessError as e:
        raise Exception(f" Failed to validate Terraform configuration: {e}")

    if result.returncode != 0:
        error_output = result.stderr.strip()
        # Distinguish "modules not installed" from any other validation failure.
        if "module is not yet installed" in error_output or "Module not installed" in error_output:
            raise Exception(f" Terraform modules are not installed. Please run 'terraform init' to install all required modules.")
        raise Exception(f" Terraform validation failed:\n\n{error_output}")

    print(" All modules are properly installed")

    print("\n All Terraform environment checks passed!")
|
|
485
|
+
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
def check_terraform_config_files():
    """Report whether the current directory holds Terraform configuration.

    Returns:
        (bool, list[str]): whether any *.tf / *.tf.json files exist in the
        current working directory, and the matching filenames (empty list
        when none were found).
    """
    config_files = [*glob.glob('*.tf'), *glob.glob('*.tf.json')]
    return (True, config_files) if config_files else (False, [])
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
def check_terraform_init():
    """Determine whether 'terraform init' has been run in the current directory.

    Returns:
        (bool, str): initialization status and an explanatory message.
    """
    workdir = Path('.terraform')
    # No .terraform directory at all means init has never run here.
    if not (workdir.exists() and workdir.is_dir()):
        return False, "Terraform not initialized (.terraform directory not found)"
    # 'terraform init' creates a providers subdirectory when plugins install.
    if (workdir / 'providers').exists():
        return True, "Terraform is initialized"
    return False, "Terraform directory exists but no providers found"
|
|
509
|
+
|
|
510
|
+
|
|
511
|
+
def check_module_exists(module_name):
    """Check whether *module_name* is declared in the local Terraform configuration.

    First scans the *.tf / *.tf.json files in the current directory for a
    `module "<name>"` declaration, then falls back to `terraform show -json`.

    Args:
        module_name: Module name, with or without a leading "module." prefix.

    Returns:
        (bool, str): success flag and a human-readable explanation.
    """
    try:
        # Strip only a leading 'module.' prefix; str.replace('module.', '')
        # would also mangle names containing 'module.' elsewhere.
        prefix = 'module.'
        clean_module_name = module_name[len(prefix):] if module_name.startswith(prefix) else module_name

        # Method 1: Check .tf files directly for module definitions
        tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
        for tf_file in tf_files:
            try:
                with open(tf_file, 'r') as f:
                    content = f.read()
                if tf_file.endswith('.tf.json'):
                    # JSON syntax declares modules as {"module": {"<name>": {...}}},
                    # which the plain-text search below cannot match.
                    try:
                        parsed = json.loads(content)
                    except json.JSONDecodeError:
                        parsed = None
                    mod_block = parsed.get('module') if isinstance(parsed, dict) else None
                    if isinstance(mod_block, dict) and clean_module_name in mod_block:
                        return True, f"Module '{clean_module_name}' found in {tf_file}"
                # Look for module "module_name" blocks
                if f'module "{clean_module_name}"' in content or f"module '{clean_module_name}'" in content:
                    return True, f"Module '{clean_module_name}' found in {tf_file}"
            except Exception as e:
                print(f"Warning: Could not read {tf_file}: {e}")
                continue

        # Method 2: Try terraform show -json as fallback
        try:
            result = subprocess.run(
                ['terraform', 'show', '-json'],
                capture_output=True,
                text=True,
                cwd=Path.cwd()
            )

            if result.returncode == 0:
                state_data = json.loads(result.stdout)

                # Check if module exists in configuration
                if 'configuration' in state_data and state_data['configuration']:
                    modules = state_data.get('configuration', {}).get('root_module', {}).get('module_calls', {})
                    if clean_module_name in modules:
                        return True, f"Module '{clean_module_name}' found in Terraform state"

                # Also check in planned_values for modules
                if 'planned_values' in state_data and state_data['planned_values']:
                    modules = state_data.get('planned_values', {}).get('root_module', {}).get('child_modules', [])
                    for module in modules:
                        module_addr = module.get('address', '')
                        if clean_module_name in module_addr:
                            return True, f"Module '{clean_module_name}' found in planned values"
        # OSError covers a missing terraform binary (FileNotFoundError), which
        # previously escaped to the outer handler and masked the real outcome.
        except (subprocess.SubprocessError, OSError, json.JSONDecodeError) as e:
            print(f"Warning: Could not check terraform state: {e}")

        return False, f"Module '{clean_module_name}' not found in Terraform configuration files"

    except Exception as e:
        return False, f"Error checking module existence: {e}"
|
|
@@ -1,12 +1,16 @@
|
|
|
1
1
|
import catocli.parsers.custom.export_rules.export_rules as export_rules
|
|
2
2
|
|
|
3
|
-
def
|
|
3
|
+
def export_rules_parse(subparsers):
|
|
4
4
|
"""Create export command parsers"""
|
|
5
5
|
|
|
6
6
|
# Create the main export parser
|
|
7
7
|
export_parser = subparsers.add_parser('export', help='Export data to various formats', usage='catocli export <operation> [options]')
|
|
8
8
|
export_subparsers = export_parser.add_subparsers(description='valid export operations', help='additional help')
|
|
9
9
|
|
|
10
|
+
# Add sites export functionality
|
|
11
|
+
from catocli.parsers.custom.export_sites import export_sites_parse
|
|
12
|
+
export_sites_parse(export_subparsers)
|
|
13
|
+
|
|
10
14
|
# Add if_rules command
|
|
11
15
|
if_rules_parser = export_subparsers.add_parser(
|
|
12
16
|
'if_rules',
|