pvw-cli 1.2.3-py3-none-any.whl → 1.2.5-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pvw-cli might be problematic.

@@ -168,7 +168,9 @@ class Entity(Endpoint):
     def entityReadUniqueAttribute(self, args):
         """Get entity by unique attributes (Official API: Get By Unique Attributes)"""
         self.method = "GET"
-        self.endpoint = ENDPOINTS["entity"]["get_by_unique_attributes"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["get_by_unique_attributes"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -180,7 +182,9 @@ class Entity(Endpoint):
     def entityReadBulkUniqueAttribute(self, args):
         """List entities by unique attributes (Official API: List By Unique Attributes)"""
         self.method = "GET"
-        self.endpoint = ENDPOINTS["entity"]["list_by_unique_attributes"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["list_by_unique_attributes"].format(
+            typeName=args["--typeName"]
+        )
         params = {
             **get_api_version_params("datamap"),
             "ignoreRelationships": str(args.get("--ignoreRelationships", False)).lower(),
@@ -197,7 +201,9 @@ class Entity(Endpoint):
     def entityUpdateUniqueAttribute(self, args):
         """Update entity by unique attributes (Official API: Update By Unique Attributes)"""
         self.method = "PUT"
-        self.endpoint = ENDPOINTS["entity"]["update_by_unique_attributes"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["update_by_unique_attributes"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -208,7 +214,9 @@ class Entity(Endpoint):
     def entityDeleteUniqueAttribute(self, args):
         """Delete entity by unique attributes (Official API: Delete By Unique Attribute)"""
         self.method = "DELETE"
-        self.endpoint = ENDPOINTS["entity"]["delete_by_unique_attribute"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["delete_by_unique_attribute"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -290,7 +298,9 @@ class Entity(Endpoint):
     def entityUpdateClassificationsByUniqueAttribute(self, args):
         """Update classifications to an entity by unique attribute (Official API: Update Classifications By Unique Attribute)"""
         self.method = "PUT"
-        self.endpoint = ENDPOINTS["entity"]["update_classifications_by_unique_attribute"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["update_classifications_by_unique_attribute"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -301,7 +311,9 @@ class Entity(Endpoint):
     def entityCreateClassificationsByUniqueAttribute(self, args):
         """Add classifications to an entity by unique attribute (Official API: Add Classifications By Unique Attribute)"""
         self.method = "POST"
-        self.endpoint = ENDPOINTS["entity"]["add_classifications_by_unique_attribute"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["add_classifications_by_unique_attribute"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -322,8 +334,25 @@ class Entity(Endpoint):
     def entityDeleteBusinessMetadata(self, args):
         """Remove business metadata from an entity (Official API: Remove Business Metadata)"""
         self.method = "DELETE"
-        self.endpoint = ENDPOINTS["entity"]["remove_business_metadata"].format(guid=args["--guid"][0])
-        self.params = {**get_api_version_params("datamap"), "businessMetadataName": args["--businessMetadataName"]}
+
+        # Support both --businessMetadataName (direct) and --payloadFile (from CLI)
+        if "--payloadFile" in args:
+            payload = get_json(args, "--payloadFile")
+            # Get the first business metadata name from the payload
+            business_metadata_names = list(payload.keys())
+            if not business_metadata_names:
+                raise ValueError("No business metadata names found in payload file")
+            business_metadata_name = business_metadata_names[0]
+        else:
+            business_metadata_name = args["--businessMetadataName"]
+
+        self.endpoint = ENDPOINTS["entity"]["remove_business_metadata"].format(
+            guid=args["--guid"][0]
+        )
+        self.params = {
+            **get_api_version_params("datamap"),
+            "businessMetadataName": business_metadata_name,
+        }
 
     @decorator
     def entityCreateBusinessMetadataAttributes(self, args):
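The new --payloadFile branch above takes the first top-level key of the JSON document as the business metadata name to delete. A hypothetical payload file illustrating that behaviour (the nested attribute values are ignored by the DELETE call):

    # Hypothetical --payloadFile contents; only the first top-level key is used.
    payload = {
        "DataQuality": {
            "completenessScore": 0.98,
            "lastProfiled": "2024-05-01"
        }
    }
    business_metadata_name = list(payload.keys())[0]  # -> "DataQuality"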
@@ -342,7 +371,10 @@ class Entity(Endpoint):
         self.endpoint = ENDPOINTS["entity"]["remove_business_metadata_attributes"].format(
             guid=args["--guid"][0], businessMetadataName=args["--businessMetadataName"]
         )
-        self.params = {**get_api_version_params("datamap"), "businessMetadataAttributes": args["--attributes"]}
+        self.params = {
+            **get_api_version_params("datamap"),
+            "businessMetadataAttributes": args["--attributes"],
+        }
 
     @decorator
     def entityImportBusinessMetadata(self, args):
@@ -359,6 +391,23 @@ class Entity(Endpoint):
         self.endpoint = ENDPOINTS["entity"]["business_metadata_template"]
         self.params = get_api_version_params("datamap")
 
+    # Aliases for CLI compatibility
+    def entityAddOrUpdateBusinessMetadata(self, args):
+        """Alias for entityCreateBusinessMetadata"""
+        return self.entityCreateBusinessMetadata(args)
+
+    def entityAddOrUpdateBusinessMetadataAttributes(self, args):
+        """Alias for entityCreateBusinessMetadataAttributes"""
+        return self.entityCreateBusinessMetadataAttributes(args)
+
+    def entityRemoveBusinessMetadata(self, args):
+        """Alias for entityDeleteBusinessMetadata"""
+        return self.entityDeleteBusinessMetadata(args)
+
+    def entityRemoveBusinessMetadataAttributes(self, args):
+        """Alias for entityDeleteBusinessMetadataAttributes"""
+        return self.entityDeleteBusinessMetadataAttributes(args)
+
     # === LABEL OPERATIONS ===
 
     @decorator
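These aliases simply forward to the existing implementations, so either spelling resolves to the same request. A quick sketch of the equivalence, assuming an already-constructed Entity client (construction is outside this diff):

    # Both calls build the identical request; the alias adds no behaviour of its own.
    args = {
        "--guid": ["ea3412c3-7387-4bc1-9923-11f6f6f60000"],  # illustrative GUID
        "--businessMetadataName": "DataQuality",             # illustrative name
    }
    entity.entityRemoveBusinessMetadata(args)   # alias
    entity.entityDeleteBusinessMetadata(args)   # original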
@@ -391,7 +440,9 @@ class Entity(Endpoint):
     def entityCreateLabelsByUniqueAttribute(self, args):
         """Add labels to an entity by unique attribute (Official API: Add Labels By Unique Attribute)"""
         self.method = "POST"
-        self.endpoint = ENDPOINTS["entity"]["add_labels_by_unique_attribute"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["add_labels_by_unique_attribute"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -402,7 +453,9 @@ class Entity(Endpoint):
     def entityUpdateLabelsByUniqueAttribute(self, args):
         """Set labels to an entity by unique attribute (Official API: Set Labels By Unique Attribute)"""
         self.method = "PUT"
-        self.endpoint = ENDPOINTS["entity"]["set_labels_by_unique_attribute"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["set_labels_by_unique_attribute"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -413,7 +466,9 @@ class Entity(Endpoint):
     def entityDeleteLabelsByUniqueAttribute(self, args):
         """Remove labels from an entity by unique attribute (Official API: Remove Labels By Unique Attribute)"""
         self.method = "DELETE"
-        self.endpoint = ENDPOINTS["entity"]["remove_labels_by_unique_attribute"].format(typeName=args["--typeName"])
+        self.endpoint = ENDPOINTS["entity"]["remove_labels_by_unique_attribute"].format(
+            typeName=args["--typeName"]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "attr:qualifiedName": args["--qualifiedName"],
@@ -440,7 +495,7 @@ class Entity(Endpoint):
         self.params = {
             **get_api_version_params("datamap"),
             "limit": args.get("--limit", 100),
-            "offset": args.get("--offset", 0)
+            "offset": args.get("--offset", 0),
         }
 
     @decorator
@@ -452,7 +507,7 @@ class Entity(Endpoint):
             **get_api_version_params("datamap"),
             "startTime": args.get("--startTime"),
             "endTime": args.get("--endTime"),
-            "auditAction": args.get("--auditAction")
+            "auditAction": args.get("--auditAction"),
         }
 
     @decorator
@@ -467,11 +522,13 @@ class Entity(Endpoint):
     def entityReadDependencies(self, args):
         """Get entity dependencies for given GUID (Advanced API: Get Entity Dependencies)"""
         self.method = "GET"
-        self.endpoint = ENDPOINTS["entity"]["get_entity_dependencies"].format(guid=args["--guid"][0])
+        self.endpoint = ENDPOINTS["entity"]["get_entity_dependencies"].format(
+            guid=args["--guid"][0]
+        )
         self.params = {
             **get_api_version_params("datamap"),
             "direction": args.get("--direction", "both"),
-            "depth": args.get("--depth", 1)
+            "depth": args.get("--depth", 1),
         }
 
     @decorator
@@ -483,7 +540,7 @@ class Entity(Endpoint):
             **get_api_version_params("datamap"),
             "startTime": args.get("--startTime"),
             "endTime": args.get("--endTime"),
-            "aggregation": args.get("--aggregation", "daily")
+            "aggregation": args.get("--aggregation", "daily"),
         }
 
     # === LEGACY COMPATIBILITY METHODS ===
@@ -209,20 +209,29 @@ class Lineage(Endpoint):
         # Read CSV file
         df = pd.read_csv(csv_file)
 
-        # Validate required columns
-        required_columns = ['source_qualified_name', 'target_qualified_name']
-        missing_columns = [col for col in required_columns if col not in df.columns]
-        if missing_columns:
-            raise ValueError(f"Missing required columns: {missing_columns}")
+        # Determine which format is being used (GUID-based or qualified name-based)
+        has_guid_columns = 'source_entity_guid' in df.columns and 'target_entity_guid' in df.columns
+        has_qn_columns = 'source_qualified_name' in df.columns and 'target_qualified_name' in df.columns
+
+        if not has_guid_columns and not has_qn_columns:
+            raise ValueError(
+                "CSV must contain either (source_entity_guid, target_entity_guid) "
+                "or (source_qualified_name, target_qualified_name) columns"
+            )
 
         # Generate lineage entities and relationships
         lineage_entities = []
         lineage_relationships = []
 
-        for _, row in df.iterrows():
+        for idx, row in df.iterrows():
             # Create process entity for each lineage relationship
             process_guid = str(uuid.uuid4())
-            process_name = row.get('process_name', f"Process_{datetime.now().strftime('%Y%m%d_%H%M%S')}")
+            process_name = row.get('process_name', f"Process_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{idx}")
+
+            # Clean GUIDs if present (remove guid= prefix and quotes)
+            if has_guid_columns:
+                source_guid = str(row['source_entity_guid']).strip().replace('guid=', '').strip('"')
+                target_guid = str(row['target_entity_guid']).strip().replace('guid=', '').strip('"')
 
             # Process entity
             process_entity = {
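The loader now accepts either GUID columns or qualified-name columns, and it tolerates GUID cells that were exported with a guid= prefix and surrounding quotes. A small sketch of the same normalisation applied to an illustrative cell value:

    # Mirrors the cleaning step above (value is illustrative).
    raw = ' guid="ea3412c3-7387-4bc1-9923-11f6f6f60000" '
    clean = raw.strip().replace('guid=', '').strip('"')
    # -> 'ea3412c3-7387-4bc1-9923-11f6f6f60000'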
@@ -231,8 +240,8 @@ class Lineage(Endpoint):
                 "attributes": {
                     "qualifiedName": f"{process_name}@{args.get('--cluster', 'default')}",
                     "name": process_name,
-                    "description": row.get('description', ''),
-                    "owner": row.get('owner', ''),
+                    "description": str(row.get('description', '')),
+                    "owner": str(row.get('owner', '')),
                 },
                 "classifications": [],
                 "meanings": []
@@ -241,7 +250,7 @@ class Lineage(Endpoint):
             # Add custom attributes if present
             custom_attrs = ['confidence_score', 'metadata', 'tags']
             for attr in custom_attrs:
-                if attr in row and pd.notna(row[attr]):
+                if attr in row and pd.notna(row[attr]) and str(row[attr]).strip():
                     if attr == 'tags':
                         process_entity["attributes"][attr] = str(row[attr]).split(',')
                     elif attr == 'metadata':
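For the optional columns, a tags cell is split on commas into a list, while the metadata branch continues past this hunk; the added str(...).strip() guard now also skips cells that are empty or whitespace-only. A short illustration with hypothetical cell values:

    import pandas as pd

    tags_cell = "pii, finance"   # hypothetical 'tags' value
    blank_cell = "   "           # whitespace-only value, now skipped

    str(tags_cell).split(',')                               # ['pii', ' finance']
    pd.notna(blank_cell) and bool(str(blank_cell).strip())  # False, so the guard rejects it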
@@ -254,41 +263,74 @@ class Lineage(Endpoint):
 
             lineage_entities.append(process_entity)
 
-            # Input relationship (source -> process)
-            input_relationship = {
-                "guid": str(uuid.uuid4()),
-                "typeName": "Process",
-                "end1": {
-                    "guid": "-1",  # Will be resolved by qualified name
-                    "typeName": row.get('source_type', 'DataSet'),
-                    "uniqueAttributes": {
-                        "qualifiedName": row['source_qualified_name']
-                    }
-                },
-                "end2": {
-                    "guid": process_guid,
-                    "typeName": "Process"
-                },
-                "label": "inputToProcesses"
-            }
+            # Determine relationship type
+            relationship_type = str(row.get('relationship_type', 'Process')).strip() or 'Process'
 
-            # Output relationship (process -> target)
-            output_relationship = {
-                "guid": str(uuid.uuid4()),
-                "typeName": "Process",
-                "end1": {
-                    "guid": process_guid,
-                    "typeName": "Process"
-                },
-                "end2": {
-                    "guid": "-1",  # Will be resolved by qualified name
-                    "typeName": row.get('target_type', 'DataSet'),
-                    "uniqueAttributes": {
-                        "qualifiedName": row['target_qualified_name']
-                    }
-                },
-                "label": "outputFromProcesses"
-            }
+            # Input relationship (source -> process)
+            if has_guid_columns:
+                input_relationship = {
+                    "guid": str(uuid.uuid4()),
+                    "typeName": relationship_type,
+                    "end1": {
+                        "guid": source_guid,
+                        "typeName": row.get('source_type', 'DataSet')
+                    },
+                    "end2": {
+                        "guid": process_guid,
+                        "typeName": "Process"
+                    },
+                    "label": "inputToProcesses"
+                }
+
+                # Output relationship (process -> target)
+                output_relationship = {
+                    "guid": str(uuid.uuid4()),
+                    "typeName": relationship_type,
+                    "end1": {
+                        "guid": process_guid,
+                        "typeName": "Process"
+                    },
+                    "end2": {
+                        "guid": target_guid,
+                        "typeName": row.get('target_type', 'DataSet')
+                    },
+                    "label": "outputFromProcesses"
+                }
+            else:
+                # Use qualified names
+                input_relationship = {
+                    "guid": str(uuid.uuid4()),
+                    "typeName": relationship_type,
+                    "end1": {
+                        "guid": "-1",
+                        "typeName": row.get('source_type', 'DataSet'),
+                        "uniqueAttributes": {
+                            "qualifiedName": row['source_qualified_name']
+                        }
+                    },
+                    "end2": {
+                        "guid": process_guid,
+                        "typeName": "Process"
+                    },
+                    "label": "inputToProcesses"
+                }
+
+                output_relationship = {
+                    "guid": str(uuid.uuid4()),
+                    "typeName": relationship_type,
+                    "end1": {
+                        "guid": process_guid,
+                        "typeName": "Process"
+                    },
+                    "end2": {
+                        "guid": "-1",
+                        "typeName": row.get('target_type', 'DataSet'),
+                        "uniqueAttributes": {
+                            "qualifiedName": row['target_qualified_name']
+                        }
+                    },
+                    "label": "outputFromProcesses"
+                }
 
             lineage_relationships.extend([input_relationship, output_relationship])
 
@@ -298,6 +340,126 @@ class Lineage(Endpoint):
             "referredEntities": {}
         }
 
+    # === CSV LINEAGE OPERATIONS ===
+
+    @decorator
+    def lineageCSVProcess(self, args):
+        """Process CSV file and create lineage relationships"""
+        csv_file = args.get("csv_file") or args.get("--csv-file")
+        if not csv_file:
+            raise ValueError("CSV file path is required")
+
+        # Process CSV and create lineage payload
+        lineage_data = self._process_csv_lineage(csv_file, args)
+
+        # Create lineage using the API
+        self.method = "POST"
+        self.endpoint = ENDPOINTS["lineage"]["create_lineage"]
+        self.params = get_api_version_params("datamap")
+        self.payload = lineage_data
+
+        # Return the payload for inspection (actual API call handled by decorator)
+        return lineage_data
+
+    def lineageCSVValidate(self, args):
+        """Validate CSV lineage file format (no API call)"""
+        import pandas as pd
+
+        csv_file = args.get("csv_file") or args.get("--csv-file")
+        if not csv_file:
+            return {"success": False, "error": "CSV file path is required"}
+
+        try:
+            # Read CSV
+            df = pd.read_csv(csv_file)
+
+            # Check required columns
+            required_columns = ['source_entity_guid', 'target_entity_guid']
+            missing_columns = [col for col in required_columns if col not in df.columns]
+
+            if missing_columns:
+                return {
+                    "success": False,
+                    "error": f"Missing required columns: {', '.join(missing_columns)}",
+                    "expected_columns": required_columns
+                }
+
+            # Validate GUIDs format
+            import re
+            guid_pattern = re.compile(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', re.IGNORECASE)
+
+            invalid_guids = []
+            for idx, row in df.iterrows():
+                source_guid = str(row['source_entity_guid']).strip()
+                target_guid = str(row['target_entity_guid']).strip()
+
+                # Remove guid= prefix if present
+                source_guid = source_guid.replace('guid=', '').strip('"')
+                target_guid = target_guid.replace('guid=', '').strip('"')
+
+                if not guid_pattern.match(source_guid):
+                    invalid_guids.append(f"Row {int(idx) + 1}: Invalid source GUID '{source_guid}'")
+                if not guid_pattern.match(target_guid):
+                    invalid_guids.append(f"Row {int(idx) + 1}: Invalid target GUID '{target_guid}'")
+
+            if invalid_guids:
+                return {
+                    "success": False,
+                    "error": "Invalid GUID format(s) found",
+                    "details": invalid_guids
+                }
+
+            return {
+                "success": True,
+                "rows": len(df),
+                "columns": list(df.columns)
+            }
+
+        except Exception as e:
+            return {"success": False, "error": str(e)}
+
+    def lineageCSVSample(self, args):
+        """Generate sample CSV lineage file (no API call)"""
+        sample_data = """source_entity_guid,target_entity_guid,relationship_type,process_name,description,confidence_score,owner,metadata
+ea3412c3-7387-4bc1-9923-11f6f6f60000,2d21eba5-b08b-4571-b31d-7bf6f6f60000,Process,ETL_Customer_Transform,Transform customer data,0.95,data-engineering,"{""tool"": ""Azure Data Factory""}"
+2d21eba5-b08b-4571-b31d-7bf6f6f60000,4fae348b-e960-42f7-834c-38f6f6f60000,Process,Customer_Address_Join,Join customer with address,0.90,data-engineering,"{""tool"": ""Databricks""}"
+"""
+        output_file = args.get("--output-file") or args.get("output_file") or "lineage_sample.csv"
+
+        try:
+            with open(output_file, 'w', encoding='utf-8') as f:
+                f.write(sample_data)
+
+            return {
+                "success": True,
+                "file": output_file,
+                "message": f"Sample CSV file created: {output_file}"
+            }
+        except Exception as e:
+            return {"success": False, "error": str(e)}
+
+    def lineageCSVTemplates(self, args):
+        """Get available CSV lineage templates (no API call)"""
+        templates = {
+            "basic": {
+                "columns": ["source_entity_guid", "target_entity_guid", "relationship_type", "process_name"],
+                "description": "Basic lineage with source, target, and process name"
+            },
+            "detailed": {
+                "columns": ["source_entity_guid", "target_entity_guid", "relationship_type", "process_name", "description", "confidence_score", "owner", "metadata"],
+                "description": "Detailed lineage with additional metadata"
+            },
+            "qualified_names": {
+                "columns": ["source_qualified_name", "target_qualified_name", "source_type", "target_type", "process_name", "description"],
+                "description": "Lineage using qualified names instead of GUIDs"
+            }
+        }
+
+        return {
+            "templates": templates,
+            "recommended": "detailed"
+        }
+
     # === LINEAGE ANALYTICS AND REPORTING ===
 
     @decorator
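Note that lineageCSVValidate only checks the GUID-based layout; it strips any guid= prefix and quotes, then matches the usual 8-4-4-4-12 hex pattern case-insensitively. A standalone sketch of the same check against a GUID from the bundled sample and an obviously invalid value:

    import re

    GUID_RE = re.compile(
        r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$',
        re.IGNORECASE,
    )

    for value in ('guid="ea3412c3-7387-4bc1-9923-11f6f6f60000"', 'not-a-guid'):
        cleaned = value.replace('guid=', '').strip('"')
        print(value, '->', bool(GUID_RE.match(cleaned)))
    # -> True for the sample GUID, False for 'not-a-guid'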
@@ -120,6 +120,37 @@ class Types(Endpoint):
         self.endpoint = ENDPOINTS["types"]["get_business_metadata_def_by_name"].format(name=args["--name"])
         self.params = get_api_version_params("datamap")
 
+    @decorator
+    def createBusinessMetadataDef(self, args):
+        """Create business metadata definition (Official API: Create Business Metadata Definition)"""
+        self.method = "POST"
+        self.endpoint = ENDPOINTS["types"]["bulk_create"]
+        self.params = get_api_version_params("datamap")
+        self.payload = get_json(args, "--payloadFile")
+
+    @decorator
+    def updateBusinessMetadataDef(self, args):
+        """Update business metadata definition (Official API: Update Business Metadata Definition)"""
+        self.method = "PUT"
+        self.endpoint = ENDPOINTS["types"]["bulk_update"]
+        self.params = get_api_version_params("datamap")
+        self.payload = get_json(args, "--payloadFile")
+
+    @decorator
+    def deleteBusinessMetadataDef(self, args):
+        """Delete business metadata definition by name (Official API: Delete Business Metadata Definition)"""
+        self.method = "DELETE"
+        self.endpoint = ENDPOINTS["types"]["bulk_delete"]
+        self.params = get_api_version_params("datamap")
+        # Construct payload with businessMetadataDefs array containing the name to delete
+        self.payload = {
+            "businessMetadataDefs": [
+                {
+                    "name": args["--name"]
+                }
+            ]
+        }
+
     # === CLASSIFICATION DEFINITIONS ===
 
     @decorator
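createBusinessMetadataDef and updateBusinessMetadataDef post whatever typedefs document --payloadFile points at. A hedged example of what such a file might contain, following the usual Atlas businessMetadataDefs shape (the specific names and attribute options here are illustrative, not taken from this diff):

    # Illustrative --payloadFile contents for createBusinessMetadataDef (assumed Atlas typedef shape).
    payload = {
        "businessMetadataDefs": [
            {
                "name": "DataQuality",
                "description": "Data quality scores maintained by the stewardship team",
                "attributeDefs": [
                    {
                        "name": "completenessScore",
                        "typeName": "float",
                        "isOptional": True,
                        "options": {
                            "applicableEntityTypes": '["DataSet"]',
                            "maxStrLength": "50"
                        }
                    }
                ]
            }
        ]
    }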