catocli 2.1.2-py3-none-any.whl → 2.1.4-py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their public registry. It is provided for informational purposes only.

This version of catocli has been flagged as potentially problematic.

Files changed (98)
  1. catocli/Utils/clidriver.py +18 -18
  2. catocli/Utils/cliutils.py +165 -0
  3. catocli/Utils/csv_formatter.py +652 -0
  4. catocli/__init__.py +1 -1
  5. catocli/parsers/custom/export_rules/__init__.py +0 -4
  6. catocli/parsers/custom/export_sites/__init__.py +4 -3
  7. catocli/parsers/custom/export_sites/export_sites.py +198 -55
  8. catocli/parsers/custom/import_sites_to_tf/import_sites_to_tf.py +473 -393
  9. catocli/parsers/customParserApiClient.py +444 -38
  10. catocli/parsers/custom_private/__init__.py +19 -13
  11. catocli/parsers/mutation_accountManagement/__init__.py +21 -0
  12. catocli/parsers/mutation_accountManagement_disableAccount/README.md +15 -0
  13. catocli/parsers/mutation_admin/__init__.py +12 -0
  14. catocli/parsers/mutation_container/__init__.py +18 -0
  15. catocli/parsers/mutation_enterpriseDirectory/__init__.py +8 -0
  16. catocli/parsers/mutation_groups/__init__.py +6 -0
  17. catocli/parsers/mutation_hardware/__init__.py +2 -0
  18. catocli/parsers/mutation_policy/__init__.py +378 -0
  19. catocli/parsers/mutation_policy_antiMalwareFileHash_addRule/README.md +20 -0
  20. catocli/parsers/mutation_policy_antiMalwareFileHash_addSection/README.md +20 -0
  21. catocli/parsers/mutation_policy_antiMalwareFileHash_createPolicyRevision/README.md +20 -0
  22. catocli/parsers/mutation_policy_antiMalwareFileHash_discardPolicyRevision/README.md +20 -0
  23. catocli/parsers/mutation_policy_antiMalwareFileHash_moveRule/README.md +20 -0
  24. catocli/parsers/mutation_policy_antiMalwareFileHash_moveSection/README.md +20 -0
  25. catocli/parsers/mutation_policy_antiMalwareFileHash_publishPolicyRevision/README.md +20 -0
  26. catocli/parsers/mutation_policy_antiMalwareFileHash_removeRule/README.md +20 -0
  27. catocli/parsers/mutation_policy_antiMalwareFileHash_removeSection/README.md +20 -0
  28. catocli/parsers/mutation_policy_antiMalwareFileHash_updatePolicy/README.md +20 -0
  29. catocli/parsers/mutation_policy_antiMalwareFileHash_updateRule/README.md +20 -0
  30. catocli/parsers/mutation_policy_antiMalwareFileHash_updateSection/README.md +20 -0
  31. catocli/parsers/mutation_sandbox/__init__.py +4 -0
  32. catocli/parsers/mutation_site/__init__.py +72 -0
  33. catocli/parsers/mutation_sites/__init__.py +72 -0
  34. catocli/parsers/mutation_xdr/__init__.py +6 -0
  35. catocli/parsers/query_accountBySubdomain/__init__.py +2 -0
  36. catocli/parsers/query_accountManagement/__init__.py +2 -0
  37. catocli/parsers/query_accountMetrics/__init__.py +6 -0
  38. catocli/parsers/query_accountRoles/__init__.py +2 -0
  39. catocli/parsers/query_accountSnapshot/__init__.py +2 -0
  40. catocli/parsers/query_admin/__init__.py +2 -0
  41. catocli/parsers/query_admins/__init__.py +2 -0
  42. catocli/parsers/query_appStats/__init__.py +6 -0
  43. catocli/parsers/query_appStatsTimeSeries/README.md +3 -0
  44. catocli/parsers/query_appStatsTimeSeries/__init__.py +6 -0
  45. catocli/parsers/query_auditFeed/__init__.py +2 -0
  46. catocli/parsers/query_catalogs/__init__.py +2 -0
  47. catocli/parsers/query_container/__init__.py +2 -0
  48. catocli/parsers/query_devices/README.md +1 -1
  49. catocli/parsers/query_devices/__init__.py +2 -0
  50. catocli/parsers/query_enterpriseDirectory/__init__.py +2 -0
  51. catocli/parsers/query_entityLookup/__init__.py +2 -0
  52. catocli/parsers/query_events/__init__.py +2 -0
  53. catocli/parsers/query_eventsFeed/__init__.py +2 -0
  54. catocli/parsers/query_eventsTimeSeries/__init__.py +2 -0
  55. catocli/parsers/query_groups/__init__.py +6 -0
  56. catocli/parsers/query_hardware/README.md +1 -1
  57. catocli/parsers/query_hardware/__init__.py +2 -0
  58. catocli/parsers/query_hardwareManagement/__init__.py +2 -0
  59. catocli/parsers/query_licensing/__init__.py +2 -0
  60. catocli/parsers/query_policy/__init__.py +37 -0
  61. catocli/parsers/query_policy_antiMalwareFileHash_policy/README.md +19 -0
  62. catocli/parsers/query_popLocations/__init__.py +2 -0
  63. catocli/parsers/query_sandbox/__init__.py +2 -0
  64. catocli/parsers/query_servicePrincipalAdmin/__init__.py +2 -0
  65. catocli/parsers/query_site/__init__.py +33 -0
  66. catocli/parsers/query_siteLocation/__init__.py +2 -0
  67. catocli/parsers/query_site_siteGeneralDetails/README.md +19 -0
  68. catocli/parsers/query_socketPortMetrics/__init__.py +2 -0
  69. catocli/parsers/query_socketPortMetricsTimeSeries/__init__.py +6 -0
  70. catocli/parsers/query_subDomains/__init__.py +2 -0
  71. catocli/parsers/query_xdr/__init__.py +4 -0
  72. catocli/parsers/raw/__init__.py +3 -1
  73. {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/METADATA +1 -1
  74. {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/RECORD +98 -66
  75. models/mutation.accountManagement.disableAccount.json +545 -0
  76. models/mutation.policy.antiMalwareFileHash.addRule.json +2068 -0
  77. models/mutation.policy.antiMalwareFileHash.addSection.json +1350 -0
  78. models/mutation.policy.antiMalwareFileHash.createPolicyRevision.json +1822 -0
  79. models/mutation.policy.antiMalwareFileHash.discardPolicyRevision.json +1758 -0
  80. models/mutation.policy.antiMalwareFileHash.moveRule.json +1552 -0
  81. models/mutation.policy.antiMalwareFileHash.moveSection.json +1251 -0
  82. models/mutation.policy.antiMalwareFileHash.publishPolicyRevision.json +1813 -0
  83. models/mutation.policy.antiMalwareFileHash.removeRule.json +1204 -0
  84. models/mutation.policy.antiMalwareFileHash.removeSection.json +954 -0
  85. models/mutation.policy.antiMalwareFileHash.updatePolicy.json +1834 -0
  86. models/mutation.policy.antiMalwareFileHash.updateRule.json +1757 -0
  87. models/mutation.policy.antiMalwareFileHash.updateSection.json +1105 -0
  88. models/mutation.site.updateSiteGeneralDetails.json +3 -3
  89. models/mutation.sites.updateSiteGeneralDetails.json +3 -3
  90. models/query.devices.json +249 -2
  91. models/query.hardware.json +224 -0
  92. models/query.policy.antiMalwareFileHash.policy.json +1583 -0
  93. models/query.site.siteGeneralDetails.json +899 -0
  94. schema/catolib.py +52 -14
  95. {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/WHEEL +0 -0
  96. {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/entry_points.txt +0 -0
  97. {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/licenses/LICENSE +0 -0
  98. {catocli-2.1.2.dist-info → catocli-2.1.4.dist-info}/top_level.txt +0 -0
@@ -24,7 +24,14 @@ import pprint
  import uuid
  import string
  from urllib3.filepost import encode_multipart_formdata
-
+ import base64
+ import hmac
+ import hashlib
+ import datetime
+ import ssl
+ import urllib.request
+ import urllib.error
+ import socket

  class CustomAPIClient:
      """Enhanced API Client with custom query generation capabilities"""
@@ -66,6 +73,10 @@ def createRequest(args, configuration):
          API response or error object
      """
      params = vars(args)
+
+     # Process output routing options
+     network_config, sentinel_config = process_output_options(args)
+
      instance = CallApi(ApiClient(configuration))
      operation_name = params["operation_name"]

@@ -75,6 +86,24 @@ def createRequest(args, configuration):
          print(f"ERROR: Failed to load operation model for {operation_name}: {e}")
          return None

+     # Load CSV configuration for this operation
+     csv_function = None
+     output_format = getattr(args, 'format', 'json') # Default to json if -f not provided
+
+     if output_format == 'csv':
+         try:
+             settings = loadJSON("clisettings.json")
+             csv_supported_operations = settings.get("queryOperationCsvOutput", {})
+             csv_function = csv_supported_operations.get(operation_name)
+         except Exception as e:
+             print(f"WARNING: Could not load CSV settings: {e}")
+             csv_function = None
+
+         if not csv_function:
+             print(f"ERROR: CSV output not supported for operation '{operation_name}'")
+             print(f"Supported CSV operations: {list(csv_supported_operations.keys()) if 'csv_supported_operations' in locals() else 'none'}")
+             return None
+
      variables_obj = {}

      # Parse JSON input with better error handling (including for -t flag)
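
Example (editor's note, not part of the package diff): the CSV branch above assumes that a clisettings.json file readable by loadJSON() maps operation names to formatter function names under a queryOperationCsvOutput key. The sketch below only illustrates that lookup; the operation and function names are hypothetical.

    # Stand-in for clisettings.json content; names are hypothetical.
    settings = {
        "queryOperationCsvOutput": {
            "query.appStats": "format_app_stats_csv",
            "query.devices": "format_devices_csv",
        }
    }
    csv_supported_operations = settings.get("queryOperationCsvOutput", {})
    csv_function = csv_supported_operations.get("query.appStats")  # -> "format_app_stats_csv"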
@@ -106,8 +135,8 @@ def createRequest(args, configuration):
      variables_obj["accountID"] = configuration.accountID

      # Validation logic
-     if params["t"]:
-         # Skip validation when using -t flag
+     if params["t"] or params.get("skip_validation", False):
+         # Skip validation when using -t flag or --skip-validation flag
          is_ok = True
      else:
          is_ok, invalid_vars, message = validateArgs(variables_obj, operation)
@@ -121,7 +150,104 @@ def createRequest(args, configuration):
          return None
      else:
          try:
-             return instance.call_api(body, params)
+             response = instance.call_api(body, params)
+
+             # Handle output routing if network or sentinel options are specified
+             if (network_config or sentinel_config) and response:
+                 # Get the response data
+                 response_data = response[0] if isinstance(response, list) and len(response) > 0 else response
+
+                 # Send to network endpoint if specified
+                 if network_config:
+                     send_events_to_network(response_data, network_config['host'], network_config['port'])
+
+                 # Send to Sentinel if specified
+                 if sentinel_config:
+                     # Convert response to JSON bytes for Sentinel
+                     json_data = json.dumps(response_data).encode('utf-8')
+                     result_code = post_sentinel_data(
+                         sentinel_config['customer_id'],
+                         sentinel_config['shared_key'],
+                         json_data
+                     )
+                     print(f"Sentinel API response code: {result_code}")
+
+             # Apply CSV formatting if requested
+             if output_format == 'csv' and csv_function and response:
+                 try:
+                     # Import the CSV formatter dynamically
+                     # Get the response data (handle both list and tuple responses)
+                     if isinstance(response, (list, tuple)) and len(response) > 0:
+                         response_data = response[0]
+                     else:
+                         response_data = response
+
+                     # Add Utils directory to path and import csv_formatter
+                     current_dir = os.path.dirname(os.path.abspath(__file__))
+                     utils_dir = os.path.join(os.path.dirname(current_dir), 'Utils')
+                     if utils_dir not in sys.path:
+                         sys.path.insert(0, utils_dir)
+
+                     # Import the csv_formatter module
+                     import csv_formatter
+
+                     # Call the appropriate CSV formatter function
+                     if hasattr(csv_formatter, csv_function):
+                         csv_formatter_func = getattr(csv_formatter, csv_function)
+                         csv_output = csv_formatter_func(response_data)
+
+                         if csv_output:
+                             # Determine output directory (reports) in current folder
+                             reports_dir = os.path.join(os.getcwd(), 'reports')
+                             if not os.path.exists(reports_dir):
+                                 os.makedirs(reports_dir)
+
+                             # Default filename is the operation name (second segment) lowercased
+                             op_base = operation_name.split('.')[-1].lower()
+                             default_filename = f"{op_base}.csv"
+                             filename = default_filename
+
+                             # Override filename if provided
+                             if hasattr(args, 'csv_filename') and getattr(args, 'csv_filename'):
+                                 filename = getattr(args, 'csv_filename')
+                                 # Ensure .csv extension
+                                 if not filename.lower().endswith('.csv'):
+                                     filename += '.csv'
+
+                             # Append timestamp if requested
+                             if hasattr(args, 'append_timestamp') and getattr(args, 'append_timestamp'):
+                                 ts = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+                                 name, ext = os.path.splitext(filename)
+                                 filename = f"{name}_{ts}{ext}"
+
+                             output_path = os.path.join(reports_dir, filename)
+
+                             # Write CSV to file
+                             try:
+                                 with open(output_path, 'w', encoding='utf-8', newline='') as f:
+                                     f.write(csv_output)
+                             except Exception as write_err:
+                                 print(f"ERROR: Failed to write CSV to file {output_path}: {write_err}")
+                                 # Fallback: return CSV to stdout behavior
+                                 return [{"__csv_output__": csv_output}]
+
+                             if params.get('v'):
+                                 print(f"Saved CSV report to: {output_path}")
+
+                             # Return structured response similar to export functions
+                             return [{"success": True, "output_file": output_path, "operation": operation_name}]
+                         else:
+                             print("WARNING: CSV formatter returned empty result")
+                             return response
+                     else:
+                         print(f"ERROR: CSV formatter function '{csv_function}' not found")
+                         return response
+                 except Exception as e:
+                     print(f"ERROR: Failed to format CSV output: {e}")
+                     return response
+
+             return response
+
          except ApiException as e:
              return e
  else:
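
Example (editor's note, not part of the package diff): a worked illustration of the report naming logic above, assuming an operation name of the form query.appStats and the --append-timestamp flag.

    import datetime
    import os

    operation_name = "query.appStats"                    # hypothetical operation
    op_base = operation_name.split('.')[-1].lower()      # "appstats"
    filename = f"{op_base}.csv"
    ts = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    name, ext = os.path.splitext(filename)
    print(os.path.join("reports", f"{name}_{ts}{ext}"))  # reports/appstats_<timestamp>.csv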
@@ -235,12 +361,149 @@ def querySiteLocation(args, configuration):
      return [response]


+ def process_output_options(args):
+     """
+     Process network streaming and sentinel output options
+
+     Returns:
+         tuple: (network_config, sentinel_config) where each is None or dict with parsed options
+     """
+     network_config = None
+     sentinel_config = None
+
+     # Process network options
+     if hasattr(args, 'stream_events') and args.stream_events is not None:
+         network_elements = args.stream_events.split(":")
+         if len(network_elements) != 2:
+             print("Error: -n value must be in the form of host:port")
+             sys.exit(1)
+
+         try:
+             host = network_elements[0]
+             port = int(network_elements[1])
+             network_config = {'host': host, 'port': port}
+         except ValueError:
+             print("Error: -n port must be a valid integer")
+             sys.exit(1)
+
+     # Process sentinel options
+     if hasattr(args, 'sentinel') and args.sentinel is not None:
+         sentinel_elements = args.sentinel.split(":")
+         if len(sentinel_elements) != 2:
+             print("Error: -z value must be in the form of customerid:sharedkey")
+             sys.exit(1)
+
+         customer_id = sentinel_elements[0]
+         shared_key = sentinel_elements[1]
+         sentinel_config = {'customer_id': customer_id, 'shared_key': shared_key}
+
+     return network_config, sentinel_config
+
+
+ def send_events_to_network(data, host, port):
+     """
+     Send events over network to host:port TCP
+
+     Args:
+         data: JSON data to send
+         host: Target hostname or IP
+         port: Target port number
+     """
+     try:
+         # Convert data to JSON string if it's not already
+         if isinstance(data, (dict, list)):
+             json_data = json.dumps(data)
+         else:
+             json_data = str(data)
+
+         # Create TCP socket and send data
+         with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+             sock.connect((host, port))
+             sock.sendall(json_data.encode('utf-8'))
+
+         print(f"Successfully sent data to {host}:{port}")
+
+     except socket.error as e:
+         print(f"Network error sending to {host}:{port}: {e}")
+         sys.exit(1)
+     except Exception as e:
+         print(f"Error sending data to network: {e}")
+         sys.exit(1)
+
+
+ def build_signature(customer_id, shared_key, date, content_length):
+     """
+     Build the API signature for Sentinel
+
+     Args:
+         customer_id: Azure customer ID
+         shared_key: Shared key for authentication
+         date: RFC1123 date string
+         content_length: Length of content being sent
+
+     Returns:
+         Authorization header value
+     """
+     x_headers = 'x-ms-date:' + date
+     string_to_hash = f"POST\n{content_length}\napplication/json\n{x_headers}\n/api/logs"
+     bytes_to_hash = bytes(string_to_hash, encoding="utf-8")
+     decoded_key = base64.b64decode(shared_key)
+     encoded_hash = base64.b64encode(hmac.new(decoded_key, bytes_to_hash, digestmod=hashlib.sha256).digest()).decode()
+     authorization = "SharedKey {}:{}".format(customer_id, encoded_hash)
+     return authorization
+
+
+ def post_sentinel_data(customer_id, shared_key, body):
+     """
+     Build and send a request to the POST API for Sentinel
+
+     Args:
+         customer_id: Azure customer ID
+         shared_key: Shared key for authentication
+         body: JSON data to send (as bytes)
+
+     Returns:
+         Response code from the API
+     """
+     rfc1123date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
+     content_length = len(body)
+     signature = build_signature(customer_id, shared_key, rfc1123date, content_length)
+
+     headers = {
+         'content-type': 'application/json',
+         'Authorization': signature,
+         'Log-Type': 'CatoEvents',
+         'Time-generated-field': 'event_timestamp',
+         'x-ms-date': rfc1123date
+     }
+
+     no_verify = ssl._create_unverified_context()
+
+     try:
+         request = urllib.request.Request(
+             url='https://' + customer_id + '.ods.opinsights.azure.com/api/logs?api-version=2016-04-01',
+             data=body,
+             headers=headers
+         )
+         response = urllib.request.urlopen(request, context=no_verify)
+         return response.code
+     except urllib.error.URLError as e:
+         print(f"Azure API ERROR:{e}")
+         sys.exit(1)
+     except OSError as e:
+         print(f"Azure API ERROR: {e}")
+         sys.exit(1)
+
+
  def createRawRequest(args, configuration):
      """
      Enhanced raw request handling with better error reporting
      """
      params = vars(args)

+     # Process output routing options
+     network_config, sentinel_config = process_output_options(args)
+
      # Handle endpoint override
      if hasattr(args, 'endpoint') and args.endpoint:
          configuration.host = args.endpoint
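
Example (editor's note, not part of the package diff): the -n/--stream-events and -z/--sentinel values consumed by process_output_options() are plain colon-separated pairs. The snippet mirrors that parsing with placeholder values; the attribute names match what the function reads from the parsed arguments.

    from argparse import Namespace

    args = Namespace(
        stream_events="siem.example.com:5514",                             # -n host:port
        sentinel="00000000-0000-0000-0000-000000000000:c2hhcmVkLWtleQ==",  # -z customerid:sharedkey
    )
    host, port = args.stream_events.split(":")
    customer_id, shared_key = args.sentinel.split(":")
    network_config = {'host': host, 'port': int(port)}   # {'host': 'siem.example.com', 'port': 5514}
    sentinel_config = {'customer_id': customer_id, 'shared_key': shared_key}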
@@ -278,7 +541,30 @@ def createRawRequest(args, configuration):
          return None
      else:
          try:
-             return instance.call_api(body, params)
+             response = instance.call_api(body, params)
+
+             # Handle output routing if network or sentinel options are specified
+             if (network_config or sentinel_config) and response:
+                 # Get the response data
+                 response_data = response[0] if isinstance(response, list) and len(response) > 0 else response
+
+                 # Send to network endpoint if specified
+                 if network_config:
+                     send_events_to_network(response_data, network_config['host'], network_config['port'])
+
+                 # Send to Sentinel if specified
+                 if sentinel_config:
+                     # Convert response to JSON bytes for Sentinel
+                     json_data = json.dumps(response_data).encode('utf-8')
+                     result_code = post_sentinel_data(
+                         sentinel_config['customer_id'],
+                         sentinel_config['shared_key'],
+                         json_data
+                     )
+                     print(f"Sentinel API response code: {result_code}")
+
+             return response
+
          except ApiException as e:
              print(f"ERROR: API request failed: {e}")
              return None
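
Example (editor's note, not part of the package diff): build_signature() and post_sentinel_data() follow the Azure Monitor HTTP Data Collector signing scheme, an HMAC-SHA256 over the method, content length, content type, x-ms-date header and /api/logs resource, keyed with the base64-decoded workspace key. A minimal sketch with throwaway credentials:

    import base64, hashlib, hmac

    customer_id = "00000000-0000-0000-0000-000000000000"            # placeholder workspace ID
    shared_key = base64.b64encode(b"dummy-workspace-key").decode()  # placeholder key
    date = "Mon, 01 Jan 2024 00:00:00 GMT"
    body = b'[{"event": "test"}]'

    string_to_hash = f"POST\n{len(body)}\napplication/json\nx-ms-date:{date}\n/api/logs"
    digest = hmac.new(base64.b64decode(shared_key),
                      string_to_hash.encode("utf-8"),
                      digestmod=hashlib.sha256).digest()
    authorization = "SharedKey {}:{}".format(customer_id, base64.b64encode(digest).decode())
    # -> value that post_sentinel_data() sends in the Authorization header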
@@ -392,30 +678,14 @@ def get_help(path):
  def validateArgs(variables_obj, operation):
      """
      Enhanced argument validation with detailed error reporting
+     Skip required field validation to allow any request to be sent
      """
      is_ok = True
      invalid_vars = []
      message = "Arguments are missing or have invalid values: "

-     # Check for invalid variable names
-     operation_args = operation.get("operationArgs", {})
-     for var_name in variables_obj:
-         if var_name not in operation_args:
-             is_ok = False
-             invalid_vars.append(f'"{var_name}"')
-             message = f"Invalid argument names. Expected: {', '.join(list(operation_args.keys()))}"
-
-     # Check for missing required variables
-     if is_ok:
-         for var_name, arg_info in operation_args.items():
-             if arg_info.get("required", False) and var_name not in variables_obj:
-                 is_ok = False
-                 invalid_vars.append(f'"{var_name}"')
-             elif var_name in variables_obj:
-                 value = variables_obj[var_name]
-                 if arg_info.get("required", False) and (value == "" or value is None):
-                     is_ok = False
-                     invalid_vars.append(f'"{var_name}":"{str(value)}"')
+     # Skip all validation - allow any request to be sent to the API
+     # This allows users to send any GraphQL request and see the API response directly

      return is_ok, invalid_vars, message

@@ -566,22 +836,46 @@ def renderArgsAndFields(response_arg_str, variables_obj, cur_operation, definiti
      response_arg_str += " {\n"
      response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, subfield['type']['definition'], operation_name, indent + "\t\t")
      if subfield['type']['definition'].get('possibleTypes'):
-         for possible_type_name in subfield['type']['definition']['possibleTypes']:
-             possible_type = subfield['type']['definition']['possibleTypes'][possible_type_name]
-             response_arg_str += f"{indent}\t\t... on {possible_type['name']} {{\n"
-             if possible_type.get('fields') or possible_type.get('inputFields'):
-                 response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, possible_type, operation_name, indent + "\t\t\t")
-             response_arg_str += f"{indent}\t\t}}\n"
+         possible_types = subfield['type']['definition']['possibleTypes']
+         # Handle both list and dict formats for possibleTypes
+         if isinstance(possible_types, list):
+             for possible_type in possible_types:
+                 if isinstance(possible_type, dict) and 'name' in possible_type:
+                     # Only create fragment if there are actually fields to render
+                     if possible_type.get('fields') or possible_type.get('inputFields'):
+                         response_arg_str += f"{indent}\t\t... on {possible_type['name']} {{\n"
+                         response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, possible_type, operation_name, indent + "\t\t\t")
+                         response_arg_str += f"{indent}\t\t}}\n"
+         elif isinstance(possible_types, dict):
+             for possible_type_name in possible_types:
+                 possible_type = possible_types[possible_type_name]
+                 # Only create fragment if there are actually fields to render
+                 if possible_type.get('fields') or possible_type.get('inputFields'):
+                     response_arg_str += f"{indent}\t\t... on {possible_type['name']} {{\n"
+                     response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, possible_type, operation_name, indent + "\t\t\t")
+                     response_arg_str += f"{indent}\t\t}}\n"
      response_arg_str += f"{indent}\t}}"
  elif subfield.get('type') and subfield['type'].get('definition') and subfield['type']['definition'].get('possibleTypes'):
      response_arg_str += " {\n"
      response_arg_str += f"{indent}\t\t__typename\n"
-     for possible_type_name in subfield['type']['definition']['possibleTypes']:
-         possible_type = subfield['type']['definition']['possibleTypes'][possible_type_name]
-         response_arg_str += f"{indent}\t\t... on {possible_type['name']} {{\n"
-         if possible_type.get('fields') or possible_type.get('inputFields'):
-             response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, possible_type, operation_name, indent + "\t\t\t")
-         response_arg_str += f"{indent}\t\t}}\n"
+     possible_types = subfield['type']['definition']['possibleTypes']
+     # Handle both list and dict formats for possibleTypes
+     if isinstance(possible_types, list):
+         for possible_type in possible_types:
+             if isinstance(possible_type, dict) and 'name' in possible_type:
+                 # Only create fragment if there are actually fields to render
+                 if possible_type.get('fields') or possible_type.get('inputFields'):
+                     response_arg_str += f"{indent}\t\t... on {possible_type['name']} {{\n"
+                     response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, possible_type, operation_name, indent + "\t\t\t")
+                     response_arg_str += f"{indent}\t\t}}\n"
+     elif isinstance(possible_types, dict):
+         for possible_type_name in possible_types:
+             possible_type = possible_types[possible_type_name]
+             # Only create fragment if there are actually fields to render
+             if possible_type.get('fields') or possible_type.get('inputFields'):
+                 response_arg_str += f"{indent}\t\t... on {possible_type['name']} {{\n"
+                 response_arg_str = renderArgsAndFields(response_arg_str, variables_obj, cur_operation, possible_type, operation_name, indent + "\t\t\t")
+                 response_arg_str += f"{indent}\t\t}}\n"
      response_arg_str += f"{indent}\t}}\n"
  response_arg_str += "\n"

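
Example (editor's note, not part of the package diff): the new branches exist because the bundled operation models can represent GraphQL union/interface members either as a list of type objects or as a dict keyed by type name; both shapes now render the same "... on Type { ... }" inline fragments. The type and field names below are simplified placeholders.

    possible_types_as_list = [
        {"name": "SiteRef", "fields": {"id": {}, "name": {}}},
        {"name": "GroupRef", "fields": {"id": {}}},
    ]
    possible_types_as_dict = {
        "SiteRef": {"name": "SiteRef", "fields": {"id": {}, "name": {}}},
        "GroupRef": {"name": "GroupRef", "fields": {"id": {}}},
    }
    # Either form produces fragments like:
    #   ... on SiteRef { ... }
    #   ... on GroupRef { ... }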
@@ -775,6 +1069,19 @@ def get_private_help(command_name, command_config):
      if 'paginationParam' in command_config:
          usage += f" (pagination: {command_config['paginationParam']})"

+     # Add examples section if available
+     if 'examples' in command_config and command_config['examples']:
+         usage += "\n\nEXAMPLES:\n"
+         for i, example in enumerate(command_config['examples']):
+             description = example.get('description', '')
+             command = example.get('command', '')
+
+             if description and command:
+                 usage += f"{description}:\n{command}\n"
+                 # Add a blank line between examples (except for the last one)
+                 if i < len(command_config['examples']) - 1:
+                     usage += "\n"
+
      return usage


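
Example (editor's note, not part of the package diff): the examples block is read from the per-command configuration in ~/.cato/settings.json. The exact schema is not shown in this diff; the sketch below only illustrates the keys get_private_help() reads, with placeholder description and command strings.

    command_config = {
        "paginationParam": "pageToken",   # hypothetical value
        "examples": [
            {"description": "Run the command for the default account",
             "command": "catocli <private-command>"},
            {"description": "Write the result as CSV with a timestamped name",
             "command": "catocli <private-command> -f csv --append-timestamp"},
        ],
    }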
@@ -998,7 +1305,13 @@ def createPrivateRequest(args, configuration):
          if hasattr(args, arg_name):
              arg_value = getattr(args, arg_name)
              if arg_value is not None:
-                 variables[arg_name] = arg_value
+                 # Handle type conversion based on argument configuration
+                 arg_type = arg.get('type', 'string')
+                 if arg_type == 'array' and not isinstance(arg_value, list):
+                     # Convert string to single-element array
+                     variables[arg_name] = [arg_value]
+                 else:
+                     variables[arg_name] = arg_value

      # Load the payload template
      try:
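
Example (editor's note, not part of the package diff): with this change an argument declared as type 'array' in the private command configuration is wrapped automatically when a single value is supplied. The argument name and value below are illustrative.

    arg = {"name": "siteIds", "type": "array"}   # hypothetical argument definition
    arg_value = "12345"                          # single value from the command line
    variables = {}
    if arg.get("type", "string") == "array" and not isinstance(arg_value, list):
        variables[arg["name"]] = [arg_value]     # -> {"siteIds": ["12345"]}
    else:
        variables[arg["name"]] = arg_value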
@@ -1020,7 +1333,100 @@ def createPrivateRequest(args, configuration):

      # Execute the GraphQL request
      try:
-         return sendPrivateGraphQLRequest(configuration, body, params)
+         response = sendPrivateGraphQLRequest(configuration, body, params)
+
+         # Handle CSV output if requested and configured
+         output_format = getattr(args, 'format', 'json') # Default to json if -f not provided
+         if output_format == 'csv' and 'csvOutputOperation' in private_config:
+             csv_operation = private_config['csvOutputOperation']
+
+             # Load CSV configuration from clisettings.json
+             try:
+                 settings = loadJSON("clisettings.json")
+                 csv_supported_operations = settings.get("queryOperationCsvOutput", {})
+                 csv_function = csv_supported_operations.get(csv_operation)
+             except Exception as e:
+                 print(f"WARNING: Could not load CSV settings: {e}")
+                 csv_function = None
+
+             if csv_function and response:
+                 try:
+                     # Get the response data (handle both list and tuple responses)
+                     if isinstance(response, (list, tuple)) and len(response) > 0:
+                         response_data = response[0]
+                     else:
+                         response_data = response
+
+                     # Add Utils directory to path and import csv_formatter
+                     current_dir = os.path.dirname(os.path.abspath(__file__))
+                     utils_dir = os.path.join(os.path.dirname(current_dir), 'Utils')
+                     if utils_dir not in sys.path:
+                         sys.path.insert(0, utils_dir)
+
+                     # Import the csv_formatter module
+                     import csv_formatter
+
+                     # Call the appropriate CSV formatter function
+                     if hasattr(csv_formatter, csv_function):
+                         csv_formatter_func = getattr(csv_formatter, csv_function)
+                         csv_output = csv_formatter_func(response_data)
+
+                         if csv_output:
+                             # Determine output directory (reports) in current folder
+                             reports_dir = os.path.join(os.getcwd(), 'reports')
+                             if not os.path.exists(reports_dir):
+                                 os.makedirs(reports_dir)
+
+                             # Default filename is the private command name lowercased
+                             default_filename = f"{private_command}.csv"
+                             filename = default_filename
+
+                             # Override filename if provided
+                             if hasattr(args, 'csv_filename') and getattr(args, 'csv_filename'):
+                                 filename = getattr(args, 'csv_filename')
+                                 # Ensure .csv extension
+                                 if not filename.lower().endswith('.csv'):
+                                     filename += '.csv'
+
+                             # Append timestamp if requested
+                             if hasattr(args, 'append_timestamp') and getattr(args, 'append_timestamp'):
+                                 ts = datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
+                                 name, ext = os.path.splitext(filename)
+                                 filename = f"{name}_{ts}{ext}"
+
+                             output_path = os.path.join(reports_dir, filename)
+
+                             # Write CSV to file
+                             try:
+                                 with open(output_path, 'w', encoding='utf-8', newline='') as f:
+                                     f.write(csv_output)
+                             except Exception as write_err:
+                                 print(f"ERROR: Failed to write CSV to file {output_path}: {write_err}")
+                                 # Fallback: return CSV to stdout behavior
+                                 return [{"__csv_output__": csv_output}]
+
+                             if params.get('v'):
+                                 print(f"Saved CSV report to: {output_path}")
+
+                             # Return structured response similar to export functions
+                             return [{"success": True, "output_file": output_path, "operation": csv_operation, "private_command": private_command}]
+                         else:
+                             print("WARNING: CSV formatter returned empty result")
+                             return response
+                     else:
+                         print(f"ERROR: CSV formatter function '{csv_function}' not found")
+                         return response
+                 except Exception as e:
+                     print(f"ERROR: Failed to format CSV output: {e}")
+                     return response
+             else:
+                 if not csv_function:
+                     print(f"ERROR: CSV output not supported for private command '{private_command}' with operation '{csv_operation}'")
+                     print(f"Available CSV operations: {list(csv_supported_operations.keys()) if 'csv_supported_operations' in locals() else 'none'}")
+                 return response
+
+         return response
+
      except Exception as e:
          return e

@@ -4,21 +4,9 @@ Private commands parser for custom GraphQL payloads
  Dynamically loads commands from ~/.cato/settings.json
  """

- import os
- import json
  import argparse
  from ..customParserApiClient import createPrivateRequest, get_private_help
-
-
- def load_private_settings():
-     """Load private settings from ~/.cato/settings.json"""
-     settings_file = os.path.expanduser("~/.cato/settings.json")
-     try:
-         with open(settings_file, 'r') as f:
-             settings = json.load(f)
-             return settings.get('privateCommands', {})
-     except (FileNotFoundError, json.JSONDecodeError, KeyError):
-         return {}
+ from ...Utils.cliutils import load_private_settings


  def private_parse(subparsers):
@@ -107,6 +95,24 @@ def create_private_command_parser(subparsers, command_name, command_config):
          help='Override the account ID from profile with this value.'
      )

+     # Add CSV output arguments (if the command supports CSV)
+     if 'csvOutputOperation' in command_config:
+         cmd_parser.add_argument(
+             '-f', '--format',
+             choices=['json', 'csv'],
+             default='json',
+             help='Output format (default: json)'
+         )
+         cmd_parser.add_argument(
+             '--csv-filename',
+             help=f'Override CSV file name (default: {command_name}.csv)'
+         )
+         cmd_parser.add_argument(
+             '--append-timestamp',
+             action='store_true',
+             help='Append timestamp to the CSV file name'
+         )
+
      # Add dynamic arguments based on command configuration (excluding accountId since it's handled above)
      if 'arguments' in command_config:
          for arg in command_config['arguments']:
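
Example (editor's note, not part of the package diff): a minimal sketch of how the new flags surface on a private command parser, using a hypothetical command name. argparse maps --csv-filename to args.csv_filename and --append-timestamp to args.append_timestamp, which is what createPrivateRequest() reads.

    import argparse

    parser = argparse.ArgumentParser(prog="catocli")
    subparsers = parser.add_subparsers()
    cmd_parser = subparsers.add_parser("example_report")   # hypothetical private command
    cmd_parser.add_argument('-f', '--format', choices=['json', 'csv'], default='json')
    cmd_parser.add_argument('--csv-filename')
    cmd_parser.add_argument('--append-timestamp', action='store_true')

    args = parser.parse_args(["example_report", "-f", "csv", "--append-timestamp"])
    # args.format == 'csv', args.append_timestamp is True, args.csv_filename is None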