catocli 2.0.1__py3-none-any.whl → 2.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of catocli might be problematic. Click here for more details.

Files changed (108)
  1. catocli/Utils/clidriver.py +41 -6
  2. catocli/__init__.py +1 -1
  3. catocli/parsers/custom/__init__.py +7 -5
  4. catocli/parsers/custom/customLib.py +490 -1
  5. catocli/parsers/custom/export_rules/__init__.py +5 -1
  6. catocli/parsers/custom/export_rules/export_rules.py +32 -183
  7. catocli/parsers/custom/export_sites/__init__.py +20 -0
  8. catocli/parsers/custom/export_sites/export_sites.py +365 -0
  9. catocli/parsers/custom/import_rules_to_tf/__init__.py +3 -3
  10. catocli/parsers/custom/import_rules_to_tf/import_rules_to_tf.py +20 -146
  11. catocli/parsers/custom/import_sites_to_tf/__init__.py +45 -0
  12. catocli/parsers/custom/import_sites_to_tf/import_sites_to_tf.py +891 -0
  13. catocli/parsers/mutation_accountManagement/__init__.py +18 -21
  14. catocli/parsers/mutation_admin/__init__.py +18 -21
  15. catocli/parsers/mutation_container/__init__.py +6 -7
  16. catocli/parsers/mutation_hardware/__init__.py +6 -7
  17. catocli/parsers/mutation_policy/__init__.py +666 -588
  18. catocli/parsers/mutation_policy_terminalServer/README.md +7 -0
  19. catocli/parsers/mutation_policy_terminalServer_addRule/README.md +18 -0
  20. catocli/parsers/mutation_policy_terminalServer_addSection/README.md +18 -0
  21. catocli/parsers/mutation_policy_terminalServer_createPolicyRevision/README.md +18 -0
  22. catocli/parsers/mutation_policy_terminalServer_discardPolicyRevision/README.md +18 -0
  23. catocli/parsers/mutation_policy_terminalServer_moveRule/README.md +18 -0
  24. catocli/parsers/mutation_policy_terminalServer_moveSection/README.md +18 -0
  25. catocli/parsers/mutation_policy_terminalServer_publishPolicyRevision/README.md +18 -0
  26. catocli/parsers/mutation_policy_terminalServer_removeRule/README.md +18 -0
  27. catocli/parsers/mutation_policy_terminalServer_removeSection/README.md +18 -0
  28. catocli/parsers/mutation_policy_terminalServer_updatePolicy/README.md +18 -0
  29. catocli/parsers/mutation_policy_terminalServer_updateRule/README.md +18 -0
  30. catocli/parsers/mutation_policy_terminalServer_updateSection/README.md +18 -0
  31. catocli/parsers/mutation_sandbox/__init__.py +12 -14
  32. catocli/parsers/mutation_site/__init__.py +189 -175
  33. catocli/parsers/mutation_site_addSocketAddOnCard/README.md +17 -0
  34. catocli/parsers/mutation_site_removeSocketAddOnCard/README.md +17 -0
  35. catocli/parsers/mutation_site_startSiteUpgrade/README.md +17 -0
  36. catocli/parsers/mutation_sites/__init__.py +189 -175
  37. catocli/parsers/mutation_sites_addSocketAddOnCard/README.md +17 -0
  38. catocli/parsers/mutation_sites_removeSocketAddOnCard/README.md +17 -0
  39. catocli/parsers/mutation_sites_startSiteUpgrade/README.md +17 -0
  40. catocli/parsers/mutation_xdr/__init__.py +18 -21
  41. catocli/parsers/parserApiClient.py +36 -11
  42. catocli/parsers/query_accountBySubdomain/__init__.py +6 -7
  43. catocli/parsers/query_accountManagement/__init__.py +6 -7
  44. catocli/parsers/query_accountMetrics/__init__.py +6 -7
  45. catocli/parsers/query_accountRoles/__init__.py +6 -7
  46. catocli/parsers/query_accountSnapshot/__init__.py +6 -7
  47. catocli/parsers/query_admin/__init__.py +6 -7
  48. catocli/parsers/query_admins/__init__.py +6 -7
  49. catocli/parsers/query_appStats/__init__.py +6 -7
  50. catocli/parsers/query_appStatsTimeSeries/__init__.py +6 -7
  51. catocli/parsers/query_auditFeed/__init__.py +6 -7
  52. catocli/parsers/query_catalogs/__init__.py +6 -7
  53. catocli/parsers/query_container/__init__.py +6 -7
  54. catocli/parsers/query_devices/README.md +2 -1
  55. catocli/parsers/query_devices/__init__.py +6 -7
  56. catocli/parsers/query_entityLookup/__init__.py +6 -7
  57. catocli/parsers/query_events/__init__.py +6 -7
  58. catocli/parsers/query_eventsFeed/README.md +1 -1
  59. catocli/parsers/query_eventsFeed/__init__.py +6 -7
  60. catocli/parsers/query_eventsTimeSeries/__init__.py +6 -7
  61. catocli/parsers/query_hardware/__init__.py +6 -7
  62. catocli/parsers/query_hardwareManagement/__init__.py +6 -7
  63. catocli/parsers/query_licensing/__init__.py +6 -7
  64. catocli/parsers/query_policy/README.md +2 -1
  65. catocli/parsers/query_policy/__init__.py +6 -7
  66. catocli/parsers/query_sandbox/__init__.py +6 -7
  67. catocli/parsers/query_site/README.md +2 -1
  68. catocli/parsers/query_site/__init__.py +6 -7
  69. catocli/parsers/query_siteLocation/__init__.py +4 -8
  70. catocli/parsers/query_subDomains/__init__.py +6 -7
  71. catocli/parsers/query_xdr/__init__.py +12 -14
  72. catocli/parsers/raw/README.md +4 -0
  73. catocli/parsers/raw/__init__.py +5 -2
  74. {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/METADATA +1 -1
  75. {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/RECORD +108 -67
  76. graphql_client/api/call_api.py +12 -6
  77. models/mutation.policy.remotePortFwd.updateRule.json +6 -6
  78. models/mutation.policy.terminalServer.addRule.json +2403 -0
  79. models/mutation.policy.terminalServer.addSection.json +1358 -0
  80. models/mutation.policy.terminalServer.createPolicyRevision.json +1873 -0
  81. models/mutation.policy.terminalServer.discardPolicyRevision.json +1807 -0
  82. models/mutation.policy.terminalServer.moveRule.json +1605 -0
  83. models/mutation.policy.terminalServer.moveSection.json +1259 -0
  84. models/mutation.policy.terminalServer.publishPolicyRevision.json +1864 -0
  85. models/mutation.policy.terminalServer.removeRule.json +1253 -0
  86. models/mutation.policy.terminalServer.removeSection.json +958 -0
  87. models/mutation.policy.terminalServer.updatePolicy.json +1883 -0
  88. models/mutation.policy.terminalServer.updateRule.json +2096 -0
  89. models/mutation.policy.terminalServer.updateSection.json +1111 -0
  90. models/mutation.site.addSocketAddOnCard.json +1050 -0
  91. models/mutation.site.removeSocketAddOnCard.json +786 -0
  92. models/mutation.site.startSiteUpgrade.json +802 -0
  93. models/mutation.sites.addSocketAddOnCard.json +1050 -0
  94. models/mutation.sites.removeSocketAddOnCard.json +786 -0
  95. models/mutation.sites.startSiteUpgrade.json +802 -0
  96. models/query.devices.json +311 -2
  97. models/query.events.json +48 -0
  98. models/query.eventsFeed.json +12 -0
  99. models/query.eventsTimeSeries.json +36 -0
  100. models/query.licensing.json +21815 -10093
  101. models/query.policy.json +1898 -305
  102. models/query.site.json +225 -0
  103. models/query.siteLocation.json +97190 -295396
  104. schema/catolib.py +63 -30
  105. {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/LICENSE +0 -0
  106. {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/WHEEL +0 -0
  107. {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/entry_points.txt +0 -0
  108. {catocli-2.0.1.dist-info → catocli-2.0.3.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,365 @@
1
+ import os
2
+ import json
3
+ import traceback
4
+ import sys
5
+ from graphql_client.api.call_api import ApiClient, CallApi
6
+ from graphql_client.api_client import ApiException
7
+ from ..customLib import writeDataToFile, makeCall, getAccountID
8
+
9
def export_socket_site_to_json(args, configuration):
    """
    Export consolidated site and socket data to JSON format.

    Pulls the account snapshot plus entityLookup data (network interfaces,
    network ranges, sites), merges them into one structure indexed by site,
    and writes the result to a JSON file via writeDataToFile.

    Args:
        args: parsed CLI namespace; optionally carries `siteIDs`
            (comma-separated string) and `verbose`.
        configuration: API client configuration passed through to the
            GraphQL call helpers.

    Returns:
        A one-element list: [{"success": True, "output_file": ..., "account_id": ...}]
        on success, or [{"success": False, "error": ..., "error_details": ...}] on failure.
    """
    processed_data = {'sites': []}
    # Counters for data-consistency warnings reported at the end of the run.
    warning_stats = {
        'missing_sites': 0,
        'missing_interfaces': 0,
        'missing_data': 0,
        'missing_interface_details': []
    }

    try:
        settings = {}
        with open(os.path.join(os.path.dirname(__file__), '../../../../settings.json'), 'r', encoding='utf-8') as f:
            settings = json.load(f)

        account_id = getAccountID(args, configuration)
        # Get siteIDs from args if provided (comma-separated string)
        site_ids = []
        if hasattr(args, 'siteIDs') and args.siteIDs:
            # Parse comma-separated string into list, removing whitespace
            site_ids = [site_id.strip() for site_id in args.siteIDs.split(',') if site_id.strip()]
            if hasattr(args, 'verbose') and args.verbose:
                print(f"Filtering snapshot for site IDs: {site_ids}")

        ###############################################################
        ## Call APIs to retrieve sites, interface and network ranges ##
        ###############################################################
        snapshot_sites = getAccountSnapshot(args, configuration, account_id, site_ids)
        entity_network_interfaces = getEntityLookup(args, configuration, account_id, "networkInterface")
        entity_network_ranges = getEntityLookup(args, configuration, account_id, "siteRange")
        # NOTE: retained for its API side effect / future use; result is currently unused.
        entity_sites = getEntityLookup(args, configuration, account_id, "site")

        ##################################################################
        ## Create processed_data object indexed by siteId with location ##
        ##################################################################
        ignored_conn_types = settings["ignore_export_by_socket_type"]
        for snapshot_site in snapshot_sites['data']['accountSnapshot']['sites']:
            cur_site = {
                'wan_interfaces': [],
                'lan_interfaces': [],
            }
            site_id = snapshot_site.get('id')
            connectionType = snapshot_site.get('infoSiteSnapshot', {}).get('connType', "")
            if connectionType not in ignored_conn_types:
                cur_site['id'] = site_id
                cur_site['name'] = snapshot_site.get('infoSiteSnapshot', {}).get('name')
                cur_site['description'] = snapshot_site.get('infoSiteSnapshot', {}).get('description')
                cur_site['connectionType'] = connectionType
                cur_site['type'] = snapshot_site.get('infoSiteSnapshot', {}).get('type')
                cur_site = populateSiteLocationData(args, snapshot_site, cur_site)

                site_interfaces = snapshot_site.get('infoSiteSnapshot', {}).get('interfaces', [])
                for wan_ni in site_interfaces:
                    cur_wan_interface = {}
                    role = wan_ni.get('wanRoleInterfaceInfo', "")
                    if role is not None and role.startswith("wan"):
                        # BUG FIX: an unconditional assignment after this if/else used to
                        # overwrite the X1500-specific id with the ":INT_" form, making
                        # the branch dead. X1500 sockets use the raw interface id.
                        if connectionType == "SOCKET_X1500":
                            cur_wan_interface['id'] = site_id + ":" + wan_ni.get('id', "")
                        else:
                            cur_wan_interface['id'] = site_id + ":INT_" + wan_ni.get('id', "")
                        cur_wan_interface['name'] = wan_ni.get('name', "")
                        cur_wan_interface['upstreamBandwidth'] = wan_ni.get('upstreamBandwidth', 0)
                        cur_wan_interface['downstreamBandwidth'] = wan_ni.get('downstreamBandwidth', 0)
                        cur_wan_interface['destType'] = wan_ni.get('destType', "")
                        cur_wan_interface['role'] = role
                        cur_site['wan_interfaces'].append(cur_wan_interface)

                # BUG FIX: previously sites with ignored connection types were appended
                # too; they lacked an 'id' key and crashed the site['id'] lookups below
                # with KeyError. Only record sites that were actually populated.
                if site_id:
                    processed_data['sites'].append(cur_site)

        ##################################################################################
        ## Process entity lookup LAN network interfaces adding to site object by site_id##
        ##################################################################################
        interface_map = {}
        for lan_ni in entity_network_interfaces:
            cur_lan_interface = {
                'network_ranges': [],
            }
            # BUG FIX: defaults were "" (a str), so a missing 'helperFields'/'entity'
            # key caused AttributeError on the chained .get(); default to {} instead.
            helper_fields = lan_ni.get("helperFields", {}) or {}
            entity_data = lan_ni.get('entity', {}) or {}
            site_id = str(helper_fields.get('siteId', ""))
            iface_id = str(entity_data.get('id', ""))  # renamed: 'id' shadowed the builtin
            interfaceName = helper_fields.get('interfaceName', "")
            cur_lan_interface['id'] = iface_id
            cur_lan_interface['name'] = interfaceName
            cur_lan_interface['index'] = helper_fields.get('interfaceId', "")
            cur_lan_interface['destType'] = helper_fields.get('destType', "")

            # Create a composite key for interface mapping that includes site_id
            interface_key = f"{site_id}_{interfaceName}"
            interface_map[interface_key] = iface_id

            # Only add interface if the site exists in processed_data
            site_entry = next((site for site in processed_data['sites'] if site['id'] == site_id), None)
            if site_entry:
                site_entry['lan_interfaces'].append(cur_lan_interface)
            else:
                if hasattr(args, 'verbose') and args.verbose:
                    print(f"WARNING: Site {site_id} not found in snapshot data, skipping interface {interfaceName} ({iface_id})")

        #############################################################################
        ## Process entity lookup network ranges populating by network interface id ##
        #############################################################################
        for net_range in entity_network_ranges:  # renamed: 'range' shadowed the builtin
            if hasattr(args, 'verbose') and args.verbose:
                print(f"Processing network range: {type(net_range)} - {net_range}")
            cur_range = {}
            helper_fields = net_range.get("helperFields", {})
            entity_data = net_range.get('entity', {})

            if hasattr(args, 'verbose') and args.verbose:
                print(f" helperFields type: {type(helper_fields)}, value: {helper_fields}")
                print(f" entity type: {type(entity_data)}, value: {entity_data}")

            range_id = entity_data.get('id', "")
            site_id = str(helper_fields.get('siteId', ""))
            interface_name = str(helper_fields.get('interfaceName', ""))
            # Use the composite key to lookup interface_id
            interface_key = f"{site_id}_{interface_name}"
            interface_id = str(interface_map.get(interface_key, ""))
            cur_range['id'] = range_id
            range_name = entity_data.get('name', "")
            # Entity names come back as "Site \ Interface \ Range"; keep the leaf part.
            if range_name and " \\ " in range_name:
                cur_range['rangeName'] = range_name.split(" \\ ").pop()
            else:
                cur_range['rangeName'] = range_name
            cur_range['name'] = range_name
            cur_range['subnet'] = helper_fields.get('subnet', "")
            cur_range['vlanTag'] = helper_fields.get('vlanTag', "")
            cur_range['microsegmentation'] = helper_fields.get('microsegmentation', "")

            # Safely add to processed_data with existence checks
            if site_id and interface_id and range_id:
                site_entry = next((site for site in processed_data['sites'] if site['id'] == site_id), None)
                if not site_entry:
                    warning_stats['missing_sites'] += 1
                    continue

                # Find the interface in the lan_interfaces array
                interface_entry = next((iface for iface in site_entry['lan_interfaces'] if iface['id'] == interface_id), None)
                if not interface_entry:
                    print(f"WARNING: Interface {interface_id} (name: {interface_name}) not found in site {site_id}. Range {range_id} will be skipped.")
                    warning_stats['missing_interfaces'] += 1
                    warning_stats['missing_interface_details'].append({
                        'interface_id': interface_id,
                        'interface_name': interface_name,
                        'site_id': site_id,
                        'range_id': range_id
                    })
                    if hasattr(args, 'verbose') and args.verbose:
                        available_interfaces = [iface['id'] for iface in site_entry['lan_interfaces']]
                        print(f" Available interfaces in site {site_id}: {available_interfaces}")
                        print(f" Looked up interface with key: {interface_key}")
                    continue
                interface_entry['network_ranges'].append(cur_range)
                if hasattr(args, 'verbose') and args.verbose:
                    print(f" Successfully added range {range_id} to site {site_id}, interface_name {interface_name} with interface_id {interface_id}")
            else:
                if not interface_id:
                    print(f"WARNING: Interface lookup failed for range {range_id}. Site: {site_id}, Interface name: {interface_name}, Lookup key: {interface_key}")
                    if hasattr(args, 'verbose') and args.verbose:
                        print(f" Available interface keys: {list(interface_map.keys())[:10]}...")  # Show first 10 keys
                else:
                    print(f"WARNING: Missing required data for range: site_id={site_id}, interface_id={interface_id}, range_id={range_id}")
                warning_stats['missing_data'] += 1

        # Print warning summary
        total_warnings = warning_stats['missing_sites'] + warning_stats['missing_interfaces'] + warning_stats['missing_data']
        if total_warnings > 0:
            print(f"\n=== WARNING SUMMARY ===")
            print(f"Total warnings: {total_warnings}")
            print(f"- Missing sites: {warning_stats['missing_sites']}")
            print(f"- Missing interfaces: {warning_stats['missing_interfaces']}")
            print(f"- Missing data: {warning_stats['missing_data']}")

            if warning_stats['missing_interfaces'] > 0:
                print(f"\nMissing interface details:")
                # Group the affected site ids per unique interface for readable output.
                unique_interfaces = {}
                for detail in warning_stats['missing_interface_details']:
                    key = f"{detail['interface_id']} ({detail['interface_name']})"
                    if key not in unique_interfaces:
                        unique_interfaces[key] = []
                    unique_interfaces[key].append(detail['site_id'])

                for interface, sites in unique_interfaces.items():
                    print(f" - Interface {interface} missing in sites: {', '.join(sites)}")

            print(f"\nThese warnings indicate network ranges that reference interfaces that don't exist in the site data.")
            print(f"This is usually caused by data inconsistencies and can be safely ignored if the export completes successfully.")
            print(f"=========================\n")

        # Write the processed data to file using the general-purpose function
        output_file = writeDataToFile(
            data=processed_data,
            args=args,
            account_id=account_id,
            default_filename_template="socket_sites_{account_id}.json",
            default_directory="config_data"
        )

        return [{"success": True, "output_file": output_file, "account_id": account_id}]

    except Exception as e:
        # Capture where the failure happened for a detailed, actionable report.
        exc_type, exc_value, exc_traceback = sys.exc_info()

        # Get the line number where the error occurred
        line_number = exc_traceback.tb_lineno
        filename = exc_traceback.tb_frame.f_code.co_filename
        function_name = exc_traceback.tb_frame.f_code.co_name

        # Get the full traceback as a string
        full_traceback = traceback.format_exc()

        # Create detailed error message
        error_details = {
            "error_type": exc_type.__name__,
            "error_message": str(exc_value),
            "line_number": line_number,
            "function_name": function_name,
            "filename": os.path.basename(filename),
            "full_traceback": full_traceback
        }

        # Print detailed error information
        print(f"ERROR: {exc_type.__name__}: {str(exc_value)}")
        print(f"Location: {os.path.basename(filename)}:{line_number} in {function_name}()")
        print(f"Full traceback:\n{full_traceback}")

        return [{"success": False, "error": str(e), "error_details": error_details}]
242
+
243
+
244
+ ##########################################################################
245
+ ########################### Helper functions #############################
246
+ ##########################################################################
247
+
248
def populateSiteLocationData(args, site_data, cur_site):
    """
    Fill cur_site['site_location'] from the snapshot's infoSiteSnapshot block
    and enrich it with timezone / state code from the bundled
    models/query.siteLocation.json catalog. Returns the updated cur_site.
    """
    verbose = hasattr(args, 'verbose') and args.verbose

    # Best-effort load of the location catalog; missing/broken file is non-fatal.
    catalog = {}
    try:
        base_dir = os.path.dirname(os.path.abspath(__file__))
        models_dir = os.path.join(base_dir, '..', '..', '..', '..', 'models')
        location_file = os.path.join(models_dir, 'query.siteLocation.json')

        if os.path.exists(location_file):
            with open(location_file, 'r', encoding='utf-8') as fh:
                catalog = json.load(fh)
            if verbose:
                print(f"Loaded {len(catalog)} location entries from {location_file}")
        elif verbose:
            print(f"Warning: Site location file not found at {location_file}")
    except Exception as e:
        if verbose:
            print(f"Warning: Could not load site location data: {e}")

    # Copy the siteLocation attributes out of the snapshot info block.
    info = site_data.get('infoSiteSnapshot', {})
    cur_site['site_location'] = {
        'address': info.get('address'),
        'city': info.get('cityName'),
        'stateName': info.get('countryStateName'),
        'countryCode': info.get('countryCode'),
        'countryName': info.get('countryName'),
    }

    country_name = cur_site['site_location']['countryName']
    state_name = cur_site['site_location']['stateName']
    city = cur_site['site_location']['city']

    # Catalog keys are "country___state___city" or "country___city" when no state.
    lookup_key = (
        f"{country_name}___{state_name}___{city}" if state_name
        else f"{country_name}___{city}"
    )

    if verbose:
        print(f"Site {cur_site['name']}: Looking up '{lookup_key}'")

    location_data = catalog.get(lookup_key, {})

    if verbose:
        if location_data:
            print(f" Found location data: {location_data}")
        else:
            print(f" No location data found for key: {lookup_key}")
            # Surface near-miss keys to make catalog mismatches debuggable.
            similar_keys = [
                k for k in catalog.keys()
                if country_name in k and (not city or city in k)
            ][:5]
            if similar_keys:
                print(f" Similar keys found: {similar_keys}")

    cur_site['stateCode'] = location_data.get('stateCode', None)

    # Always take the first entry of the timezones array, if any.
    timezones = location_data.get('timezone', [])
    cur_site['site_location']['timezone'] = timezones[0] if timezones else None
    return cur_site
310
+
311
def getEntityLookup(args, configuration, account_id, entity_type):
    """
    Helper function to get entity lookup data for a specific entity type.

    Args:
        args: parsed CLI namespace (only `verbose` is consulted here).
        configuration: API client configuration forwarded to makeCall.
        account_id: Cato account ID used as the query's accountID variable.
        entity_type: EntityType value, e.g. "site", "siteRange", "networkInterface".

    Returns:
        The list of entityLookup items (empty list when the API returns none).

    Raises:
        Exception: when the GraphQL response carries an `errors` array.
        ValueError: when the response is missing the expected data envelope.
    """
    entity_query = {
        "query": "query entityLookup ( $accountID:ID! $type:EntityType! $sortInput:[SortInput] $lookupFilterInput:[LookupFilterInput] ) { entityLookup ( accountID:$accountID type:$type sort:$sortInput filters:$lookupFilterInput ) { items { entity { id name type } description helperFields } total } }",
        "variables": {
            "accountID": account_id,
            "type": entity_type
        },
        "operationName": "entityLookup"
    }
    response = makeCall(args, configuration, entity_query)

    # Surface GraphQL-level errors explicitly.
    # BUG FIX: messages previously said "Snapshot API" / "snapshot data"
    # (copy-paste from getAccountSnapshot), which misattributed failures.
    if 'errors' in response:
        error_messages = [error.get('message', 'Unknown error') for error in response['errors']]
        raise Exception(f"entityLookup API returned errors: {', '.join(error_messages)}")

    if not response or 'data' not in response or 'entityLookup' not in response['data']:
        raise ValueError("Failed to retrieve entityLookup data from API")

    items = response['data']['entityLookup']['items']
    if items is None:
        # The API returns null (not []) when there are no matches; normalize.
        items = []
        if hasattr(args, 'verbose') and args.verbose:
            print("No items found in entity lookup - " + entity_type)
    return items
342
+
343
def getAccountSnapshot(args, configuration, account_id, site_ids=None):
    """
    Fetch the accountSnapshot (sites, devices, users) for an account.

    Args:
        args: parsed CLI namespace, forwarded to makeCall.
        configuration: API client configuration forwarded to makeCall.
        account_id: Cato account ID used as the query's accountID variable.
        site_ids: optional list of site IDs to restrict the snapshot's
            `sites` selection; None/empty returns all sites.

    Returns:
        The full GraphQL response dict (caller reads
        response['data']['accountSnapshot']['sites']).

    Raises:
        Exception: when the GraphQL response carries an `errors` array.
        ValueError: when the data envelope or its `sites` list is missing.
    """
    snapshot_query = {
        "query": "query accountSnapshot ( $siteIDs:[ID!] $accountID:ID ) { accountSnapshot ( accountID:$accountID ) { id sites ( siteIDs:$siteIDs ) { id protoId connectivityStatusSiteSnapshot: connectivityStatus haStatusSiteSnapshot: haStatus { readiness wanConnectivity keepalive socketVersion } operationalStatusSiteSnapshot: operationalStatus lastConnected connectedSince popName devices { id name identifier connected haRole interfaces { connected id name physicalPort naturalOrder popName previousPopID previousPopName tunnelConnectionReason tunnelUptime tunnelRemoteIP tunnelRemoteIPInfoInterfaceSnapshot: tunnelRemoteIPInfo { ip countryCode countryName city state provider latitude longitude } type infoInterfaceSnapshot: info { id name upstreamBandwidth downstreamBandwidth upstreamBandwidthMbpsPrecision downstreamBandwidthMbpsPrecision destType wanRole } cellularInterfaceInfoInterfaceSnapshot: cellularInterfaceInfo { networkType simSlotId modemStatus isModemConnected iccid imei operatorName isModemSuspended apn apnSelectionMethod signalStrength isRoamingAllowed simNumber disconnectionReason isSimSlot1Detected isSimSlot2Detected } } lastConnected lastDuration connectedSince lastPopID lastPopName recentConnections { duration interfaceName deviceName lastConnected popName remoteIP remoteIPInfoRecentConnection: remoteIPInfo { ip countryCode countryName city state provider latitude longitude } } type deviceUptime socketInfo { id serial isPrimary platformSocketInfo: platform version versionUpdateTime } interfacesLinkState { id up mediaIn linkSpeed duplex hasAddress hasInternet hasTunnel } osType osVersion version versionNumber releaseGroup mfaExpirationTime mfaCreationTime internalIP } infoSiteSnapshot: info { name type description countryCode region countryName countryStateName cityName address isHA connType creationTime interfaces { id name upstreamBandwidth downstreamBandwidth upstreamBandwidthMbpsPrecision downstreamBandwidthMbpsPrecision destType wanRoleInterfaceInfo: wanRole } sockets { id serial isPrimary platformSocketInfo: platform version versionUpdateTime } ipsec { isPrimary catoIP remoteIP ikeVersion } } hostCount altWanStatus } users { id connectivityStatusUserSnapshot: connectivityStatus operationalStatusUserSnapshot: operationalStatus name deviceName uptime lastConnected version versionNumber popID popName remoteIP remoteIPInfoUserSnapshot: remoteIPInfo { ip countryCode countryName city state provider latitude longitude } internalIP osType osVersion devices { id name identifier connected haRole interfaces { connected id name physicalPort naturalOrder popName previousPopID previousPopName tunnelConnectionReason tunnelUptime tunnelRemoteIP tunnelRemoteIPInfoInterfaceSnapshot: tunnelRemoteIPInfo { ip countryCode countryName city state provider latitude longitude } type infoInterfaceSnapshot: info { id name upstreamBandwidth downstreamBandwidth upstreamBandwidthMbpsPrecision downstreamBandwidthMbpsPrecision destType wanRole } cellularInterfaceInfoInterfaceSnapshot: cellularInterfaceInfo { networkType simSlotId modemStatus isModemConnected iccid imei operatorName isModemSuspended apn apnSelectionMethod signalStrength isRoamingAllowed simNumber disconnectionReason isSimSlot1Detected isSimSlot2Detected } } lastConnected lastDuration connectedSince lastPopID lastPopName recentConnections { duration interfaceName deviceName lastConnected popName remoteIP remoteIPInfoRecentConnection: remoteIPInfo { ip countryCode countryName city state provider latitude longitude } } type deviceUptime socketInfo { id serial isPrimary platformSocketInfo: platform version versionUpdateTime } interfacesLinkState { id up mediaIn linkSpeed duplex hasAddress hasInternet hasTunnel } osType osVersion version versionNumber releaseGroup mfaExpirationTime mfaCreationTime internalIP } connectedInOffice infoUserSnapshot: info { name status email creationTime phoneNumber origin authMethod } recentConnections { duration interfaceName deviceName lastConnected popName remoteIP remoteIPInfo { ip countryCode countryName city state provider latitude longitude } } } timestamp } }",
        "variables": {
            "accountID": account_id,
            # siteIDs may be None/[] → the API returns all sites for the account.
            "siteIDs": site_ids
        },
        "operationName": "accountSnapshot"
    }
    response = makeCall(args, configuration, snapshot_query)

    # Check for GraphQL errors in snapshot response
    if 'errors' in response:
        error_messages = [error.get('message', 'Unknown error') for error in response['errors']]
        raise Exception(f"Snapshot API returned errors: {', '.join(error_messages)}")

    # Validate the data envelope before callers index into it.
    if not response or 'data' not in response or 'accountSnapshot' not in response['data']:
        raise ValueError("Failed to retrieve snapshot data from API")

    # The API can return a missing or null sites list; treat that as an error
    # because the export has nothing to process.
    if not response or 'sites' not in response['data']['accountSnapshot'] or response['data']['accountSnapshot']['sites'] is None:
        raise ValueError("No sites found in account snapshot data from API")

    return response
@@ -1,6 +1,6 @@
1
1
  import catocli.parsers.custom.import_rules_to_tf.import_rules_to_tf as import_rules_to_tf
2
2
 
3
- def import_parse(subparsers):
3
+ def rule_import_parse(subparsers):
4
4
  """Create import command parsers"""
5
5
 
6
6
  # Create the main import parser
@@ -11,7 +11,7 @@ def import_parse(subparsers):
11
11
  if_rules_parser = import_subparsers.add_parser(
12
12
  'if_rules_to_tf',
13
13
  help='Import Internet Firewall rules to Terraform state',
14
- usage='catocli import if_rules_to_tf <json_file> --module-name <module_name> [options]'
14
+ usage='catocli import if_rules_to_tf <json_file> --module-name <module_name> [options]\n\nexample: catocli import if_rules_to_tf config_data/all_wf_rules_and_sections.json --module-name module.if_rules'
15
15
  )
16
16
 
17
17
  if_rules_parser.add_argument('json_file', help='Path to the JSON file containing IFW rules and sections')
@@ -35,7 +35,7 @@ def import_parse(subparsers):
35
35
  wf_rules_parser = import_subparsers.add_parser(
36
36
  'wf_rules_to_tf',
37
37
  help='Import WAN Firewall rules to Terraform state',
38
- usage='catocli import wf_rules_to_tf <json_file> --module-name <module_name> [options]'
38
+ usage='catocli import wf_rules_to_tf <json_file> --module-name <module_name> [options]\n\nexample: catocli import wf_rules_to_tf config_data/all_wf_rules_and_sections.json --module-name module.wf_rules'
39
39
  )
40
40
 
41
41
  wf_rules_parser.add_argument('json_file', help='Path to the JSON file containing WF rules and sections')
@@ -13,6 +13,7 @@ import re
13
13
  import time
14
14
  import glob
15
15
  from pathlib import Path
16
+ from ..customLib import validate_terraform_environment
16
17
 
17
18
 
18
19
  def load_json_data(json_file):
@@ -60,14 +61,13 @@ def extract_rules_and_sections(policy_data):
60
61
  })
61
62
 
62
63
  # Extract sections
63
- section_ids = policy_data.get('section_ids', {})
64
64
  for section in policy_data.get('sections', []):
65
65
  if section.get('section_name'):
66
66
  sections.append({
67
67
  'section_name': section['section_name'],
68
68
  'section_index': section.get('section_index', 0),
69
- 'section_id': section_ids.get(section['section_name'], '')
70
- })
69
+ 'section_id': section.get('section_id', '')
70
+ })
71
71
  return rules, sections
72
72
 
73
73
 
@@ -123,8 +123,8 @@ def find_rule_index(rules, rule_name):
123
123
  return None
124
124
 
125
125
 
126
- def import_sections(sections, module_name, verbose=False,
127
- resource_type="cato_if_section", resource_name="sections"):
126
+ def import_sections(sections, module_name, resource_type,
127
+ resource_name="sections", verbose=False):
128
128
  """Import all sections"""
129
129
  print("\nStarting section imports...")
130
130
  total_sections = len(sections)
@@ -135,7 +135,7 @@ def import_sections(sections, module_name, verbose=False,
135
135
  section_id = section['section_id']
136
136
  section_name = section['section_name']
137
137
  section_index = section['section_index']
138
- resource_address = f'{module_name}.{resource_type}.{resource_name}["{str(section_name)}"]'
138
+ resource_address = f'{module_name}.{resource_type}.{resource_name}["{section_name}"]'
139
139
  print(f"\n[{i+1}/{total_sections}] Section: {section_name} (index: {section_index})")
140
140
 
141
141
  # For sections, we use the section name as the ID since that's how Cato identifies them
@@ -192,143 +192,6 @@ def import_rules(rules, module_name, verbose=False,
192
192
  return successful_imports, failed_imports
193
193
 
194
194
 
195
- def check_terraform_binary():
196
- """Check if terraform binary is available"""
197
- try:
198
- result = subprocess.run(['terraform', '--version'], capture_output=True, text=True)
199
- if result.returncode == 0:
200
- return True, result.stdout.strip().split('\n')[0]
201
- else:
202
- return False, "Terraform binary not found or not working"
203
- except FileNotFoundError:
204
- return False, "Terraform binary not found in PATH"
205
- except Exception as e:
206
- return False, f"Error checking terraform binary: {e}"
207
-
208
-
209
- def check_terraform_config_files():
210
- """Check if Terraform configuration files exist in current directory"""
211
- tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
212
- if tf_files:
213
- return True, tf_files
214
- else:
215
- return False, []
216
-
217
-
218
- def check_terraform_init():
219
- """Check if Terraform has been initialized"""
220
- terraform_dir = Path('.terraform')
221
- if terraform_dir.exists() and terraform_dir.is_dir():
222
- # Check for providers
223
- providers_dir = terraform_dir / 'providers'
224
- if providers_dir.exists():
225
- return True, "Terraform is initialized"
226
- else:
227
- return False, "Terraform directory exists but no providers found"
228
- else:
229
- return False, "Terraform not initialized (.terraform directory not found)"
230
-
231
-
232
- def check_module_exists(module_name):
233
- """Check if the specified module exists in Terraform configuration"""
234
- try:
235
- # Remove 'module.' prefix if present
236
- clean_module_name = module_name.replace('module.', '')
237
-
238
- # Method 1: Check .tf files directly for module definitions
239
- tf_files = glob.glob('*.tf') + glob.glob('*.tf.json')
240
- for tf_file in tf_files:
241
- try:
242
- with open(tf_file, 'r') as f:
243
- content = f.read()
244
- # Look for module "module_name" blocks
245
- if f'module "{clean_module_name}"' in content or f"module '{clean_module_name}'" in content:
246
- return True, f"Module '{clean_module_name}' found in {tf_file}"
247
- except Exception as e:
248
- print(f"Warning: Could not read {tf_file}: {e}")
249
- continue
250
-
251
- # Method 2: Try terraform show -json as fallback
252
- try:
253
- result = subprocess.run(
254
- ['terraform', 'show', '-json'],
255
- capture_output=True,
256
- text=True,
257
- cwd=Path.cwd()
258
- )
259
-
260
- if result.returncode == 0:
261
- state_data = json.loads(result.stdout)
262
-
263
- # Check if module exists in configuration
264
- if 'configuration' in state_data and state_data['configuration']:
265
- modules = state_data.get('configuration', {}).get('root_module', {}).get('module_calls', {})
266
- if clean_module_name in modules:
267
- return True, f"Module '{clean_module_name}' found in Terraform state"
268
-
269
- # Also check in planned_values for modules
270
- if 'planned_values' in state_data and state_data['planned_values']:
271
- modules = state_data.get('planned_values', {}).get('root_module', {}).get('child_modules', [])
272
- for module in modules:
273
- module_addr = module.get('address', '')
274
- if clean_module_name in module_addr:
275
- return True, f"Module '{clean_module_name}' found in planned values"
276
- except (subprocess.SubprocessError, json.JSONDecodeError) as e:
277
- print(f"Warning: Could not check terraform state: {e}")
278
-
279
- return False, f"Module '{clean_module_name}' not found in Terraform configuration files"
280
-
281
- except Exception as e:
282
- return False, f"Error checking module existence: {e}"
283
-
284
-
285
- def validate_terraform_environment(module_name, verbose=False):
286
- """Validate the complete Terraform environment"""
287
- print("\n Validating Terraform environment...")
288
-
289
- # 1. Check terraform binary
290
- print("\n Checking Terraform binary...")
291
- has_terraform, terraform_msg = check_terraform_binary()
292
- if not has_terraform:
293
- raise Exception(f" Terraform not available: {terraform_msg}")
294
- if verbose:
295
- print(f" {terraform_msg}")
296
- else:
297
- print(" Terraform binary found")
298
-
299
- # 2. Check for configuration files
300
- print("\n Checking Terraform configuration files...")
301
- has_config, config_files = check_terraform_config_files()
302
- if not has_config:
303
- raise Exception(" No Terraform configuration files (.tf or .tf.json) found in current directory")
304
- if verbose:
305
- print(f" Found {len(config_files)} configuration files: {', '.join(config_files)}")
306
- else:
307
- print(f" Found {len(config_files)} Terraform configuration files")
308
-
309
- # 3. Check if terraform is initialized
310
- print("\n Checking Terraform initialization...")
311
- is_initialized, init_msg = check_terraform_init()
312
- if not is_initialized:
313
- raise Exception(f" {init_msg}. Run 'terraform init' first.")
314
- if verbose:
315
- print(f" {init_msg}")
316
- else:
317
- print(" Terraform is initialized")
318
-
319
- # 4. Check if the specified module exists
320
- print(f"\n Checking if module '{module_name}' exists...")
321
- module_exists, module_msg = check_module_exists(module_name)
322
- if not module_exists:
323
- raise Exception(f" {module_msg}. Please add the module to your Terraform configuration first.")
324
- if verbose:
325
- print(f" {module_msg}")
326
- else:
327
- print(f" Module '{module_name}' found")
328
-
329
- print("\n All Terraform environment checks passed!")
330
-
331
-
332
195
  def import_if_rules_to_tf(args, configuration):
333
196
  """Main function to orchestrate the import process"""
334
197
  try:
@@ -376,7 +239,7 @@ def import_if_rules_to_tf(args, configuration):
376
239
 
377
240
  # Import sections first (if not skipped)
378
241
  if not args.rules_only and sections:
379
- successful, failed = import_sections(sections, module_name=args.module_name, verbose=args.verbose)
242
+ successful, failed = import_sections(sections, module_name=args.module_name, resource_type="cato_if_section", verbose=args.verbose)
380
243
  total_successful += successful
381
244
  total_failed += failed
382
245
 
@@ -439,7 +302,10 @@ def import_wf_sections(sections, module_name, verbose=False,
439
302
  section_id = section['section_id']
440
303
  section_name = section['section_name']
441
304
  section_index = section['section_index']
442
- resource_address = f'{module_name}.{resource_type}.{resource_name}["{str(section_name)}"]'
305
+ # Add module. prefix if not present
306
+ if not module_name.startswith('module.'):
307
+ module_name = f'module.{module_name}'
308
+ resource_address = f'{module_name}.{resource_type}.{resource_name}["{section_name}"]'
443
309
  print(f"\n[{i+1}/{total_sections}] Section: {section_name} (index: {section_index})")
444
310
 
445
311
  # For sections, we use the section name as the ID since that's how Cato identifies them
@@ -469,6 +335,10 @@ def import_wf_rules(rules, module_name, verbose=False,
469
335
  rule_index = find_rule_index(rules, rule_name)
470
336
  terraform_key = sanitize_name_for_terraform(rule_name)
471
337
 
338
+ # Add module. prefix if not present
339
+ if not module_name.startswith('module.'):
340
+ module_name = f'module.{module_name}'
341
+
472
342
  # Use array index syntax instead of rule ID
473
343
  resource_address = f'{module_name}.{resource_type}.{resource_name}["{str(rule_name)}"]'
474
344
  print(f"\n[{i+1}/{total_rules}] Rule: {rule_name} (index: {rule_index})")
@@ -519,8 +389,12 @@ def import_wf_rules_to_tf(args, configuration):
519
389
  print(" No rules or sections found. Exiting.")
520
390
  return [{"success": False, "error": "No rules or sections found"}]
521
391
 
392
+ # Add module. prefix if not present
393
+ module_name = args.module_name
394
+ if not module_name.startswith('module.'):
395
+ module_name = f'module.{module_name}'
522
396
  # Validate Terraform environment before proceeding
523
- validate_terraform_environment(args.module_name, verbose=args.verbose)
397
+ validate_terraform_environment(module_name, verbose=args.verbose)
524
398
 
525
399
  # Ask for confirmation (unless auto-approved)
526
400
  if not args.rules_only and not args.sections_only: