pvw-cli 1.0.14__py3-none-any.whl → 1.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of pvw-cli might be problematic.

purviewcli/__init__.py CHANGED
@@ -1,4 +1,4 @@
- __version__ = "1.0.14"
+ __version__ = "1.2.0"

  # Import main client modules
  from .client import *
purviewcli/cli/entity.py CHANGED
@@ -1689,44 +1689,150 @@ def bulk_update_csv(ctx, csv_file, batch_size, dry_run, error_csv):
  return

  df = pd.read_csv(csv_file)
- if "guid" not in df.columns:
- console.print("[red][X] CSV must contain 'guid' column[/red]")
+ if df.empty:
+ console.print("[yellow]No rows found in CSV. Exiting.[/yellow]")
  return
+
  entity_client = Entity()
  total = len(df)
  success, failed = 0, 0
  errors = []
  failed_rows = []
+
+ # Determine mode:
+ # - If CSV has both 'typeName' and 'qualifiedName' -> map rows to Purview entities and call bulk create-or-update
+ # - Else if CSV has 'guid' -> build guid-based payloads (preferred for partial attribute updates)
+ has_type_qn = ("typeName" in df.columns and "qualifiedName" in df.columns)
+ has_guid = "guid" in df.columns
+
  for i in range(0, total, batch_size):
- batch = df.iloc[i:i+batch_size]
- payload = {
- "entities": [
- {col: row[col] for col in batch.columns if pd.notnull(row[col])}
- for _, row in batch.iterrows()
- ]
- }
- if dry_run:
- console.print(f"[blue]DRY RUN: Would update batch {i//batch_size+1} with {len(batch)} entities[/blue]")
- continue
- with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False) as tmpf:
- json.dump(payload, tmpf, indent=2)
- tmpf.flush()
- payload_file = tmpf.name
- try:
- args = {"--payloadFile": payload_file}
- result = entity_client.entityBulkUpdate(args)
- if result and (not isinstance(result, dict) or result.get("status") != "error"):
- success += len(batch)
- else:
+ batch = df.iloc[i : i + batch_size]
+
+ if has_type_qn:
+ # Map flat rows to Purview entity objects using helper
+ from purviewcli.client._entity import map_flat_entity_to_purview_entity
+
+ entities = [map_flat_entity_to_purview_entity(row) for _, row in batch.iterrows()]
+ payload = {"entities": entities}
+
+ if dry_run:
+ console.print(f"[blue]DRY RUN: Would bulk-create/update batch {i//batch_size+1} with {len(batch)} entities[/blue]")
+ continue
+
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False, encoding="utf-8") as tmpf:
+ json.dump(payload, tmpf, indent=2)
+ tmpf.flush()
+ payload_file = tmpf.name
+
+ try:
+ args = {"--payloadFile": payload_file}
+ result = entity_client.entityCreateBulk(args)
+ if result and (not isinstance(result, dict) or result.get("status") != "error"):
+ success += len(batch)
+ else:
+ failed += len(batch)
+ errors.append(f"Batch {i//batch_size+1}: {result}")
+ failed_rows.extend(batch.to_dict(orient="records"))
+ except Exception as e:
  failed += len(batch)
- errors.append(f"Batch {i//batch_size+1}: {result}")
+ errors.append(f"Batch {i//batch_size+1}: {str(e)}")
  failed_rows.extend(batch.to_dict(orient="records"))
- except Exception as e:
- failed += len(batch)
- errors.append(f"Batch {i//batch_size+1}: {str(e)}")
- failed_rows.extend(batch.to_dict(orient="records"))
- finally:
- os.remove(payload_file)
+ finally:
+ try:
+ os.remove(payload_file)
+ except Exception:
+ pass
+
+ elif has_guid:
+ # Build guid-based updates. If the CSV contains only guid + attr columns, we'll attempt to perform
+ # partial attribute updates by calling entityPartialUpdateAttribute where possible.
+ # If a row contains multiple attributes, we will call entityCreateBulk with a payload containing
+ # the guid and attributes (server supports bulk create-or-update by guid in some endpoints).
+
+ # Normalize rows into dicts
+ rows = [row.to_dict() for _, row in batch.iterrows()]
+
+ # Attempt to detect single-attribute update pattern: columns [guid, attrName, attrValue]
+ if set(["guid", "attrName", "attrValue"]).issubset(set(batch.columns)):
+ # perform per-guid partial updates in batch
+ for r in rows:
+ guid = str(r.get("guid"))
+ attr_name = r.get("attrName")
+ attr_value = r.get("attrValue")
+ if pd.isna(guid) or pd.isna(attr_name):
+ failed += 1
+ failed_rows.append(r)
+ continue
+ if dry_run:
+ console.print(f"[blue]DRY RUN: Would update GUID {guid} set {attr_name}={attr_value}[/blue]")
+ success += 1
+ continue
+ try:
+ args = {"--guid": [guid], "--attrName": attr_name, "--attrValue": attr_value}
+ result = entity_client.entityPartialUpdateAttribute(args)
+ if result and (not isinstance(result, dict) or result.get("status") != "error"):
+ success += 1
+ else:
+ failed += 1
+ errors.append(f"GUID {guid}: {result}")
+ failed_rows.append(r)
+ except Exception as e:
+ failed += 1
+ errors.append(f"GUID {guid}: {str(e)}")
+ failed_rows.append(r)
+
+ else:
+ # Fallback: call bulk create-or-update with guid included in each entity object.
+ # Map each row into an entity dict keeping non-null columns.
+ entities = []
+ for r in rows:
+ if pd.isna(r.get("guid")):
+ failed_rows.append(r)
+ failed += 1
+ continue
+ ent = {k: v for k, v in r.items() if pd.notnull(v)}
+ # ensure guid is string under top-level 'guid' field for server bulk endpoints
+ ent["guid"] = str(ent.get("guid"))
+ entities.append(ent)
+
+ if not entities:
+ continue
+
+ payload = {"entities": entities}
+ if dry_run:
+ console.print(f"[blue]DRY RUN: Would bulk-update (by guid) batch {i//batch_size+1} with {len(entities)} entities[/blue]")
+ success += len(entities)
+ continue
+
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".json", delete=False, encoding="utf-8") as tmpf:
+ json.dump(payload, tmpf, indent=2)
+ tmpf.flush()
+ payload_file = tmpf.name
+
+ try:
+ args = {"--payloadFile": payload_file}
+ # Use the create-or-update bulk endpoint - server will use guid when present
+ result = entity_client.entityCreateBulk(args)
+ if result and (not isinstance(result, dict) or result.get("status") != "error"):
+ success += len(entities)
+ else:
+ failed += len(entities)
+ errors.append(f"Batch {i//batch_size+1}: {result}")
+ failed_rows.extend(batch.to_dict(orient="records"))
+ except Exception as e:
+ failed += len(entities)
+ errors.append(f"Batch {i//batch_size+1}: {str(e)}")
+ failed_rows.extend(batch.to_dict(orient="records"))
+ finally:
+ try:
+ os.remove(payload_file)
+ except Exception:
+ pass
+
+ else:
+ console.print(f"[red][X] CSV must contain either (typeName and qualifiedName) or guid column[/red]")
+ return
+
  console.print(f"[green][OK] Bulk update completed. Success: {success}, Failed: {failed}[/green]")
  if errors:
  console.print("[red]Errors:[/red]")
@@ -1734,7 +1840,7 @@ def bulk_update_csv(ctx, csv_file, batch_size, dry_run, error_csv):
  console.print(f"[red]- {err}[/red]")
  if error_csv and failed_rows:
  pd.DataFrame(failed_rows).to_csv(error_csv, index=False)
- console.print(f"[yellow][X] Failed rows written to {error_csv}[/yellow]")
+ console.print(f"[yellow]WARNING: Failed rows written to {error_csv}[/yellow]")
  except Exception as e:
  console.print(f"[red][X] Error executing entity bulk-update-csv: {str(e)}[/red]")

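To make the new branching concrete, here is a minimal pandas-only sketch of how the rewritten command distinguishes the two accepted CSV layouts; the column names come from the diff above, while the file path and GUID values are illustrative placeholders:

    import pandas as pd

    # Layout A: 'typeName' + 'qualifiedName' columns -> rows are mapped to full
    # entity objects and sent through the bulk create-or-update call (entityCreateBulk).
    layout_a = pd.DataFrame([
        {"typeName": "DataSet", "qualifiedName": "https://example.org/data/sales.csv", "description": "demo"},
    ])

    # Layout B: 'guid' (here with 'attrName'/'attrValue') -> guid-based updates,
    # using entityPartialUpdateAttribute for the single-attribute pattern.
    layout_b = pd.DataFrame([
        {"guid": "11111111-2222-3333-4444-555555555555", "attrName": "description", "attrValue": "demo"},
    ])

    for name, df in [("A", layout_a), ("B", layout_b)]:
        has_type_qn = "typeName" in df.columns and "qualifiedName" in df.columns
        has_guid = "guid" in df.columns
        mode = "bulk create-or-update" if has_type_qn else "guid-based update" if has_guid else "rejected"
        print(f"layout {name}: {mode}")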
@@ -1774,7 +1880,7 @@ def bulk_delete_csv(ctx, csv_file, batch_size, dry_run, error_csv):
  continue
  try:
  args = {"--guid": guids}
- result = entity_client.entityBulkDelete(args)
+ result = entity_client.entityDeleteBulk(args)
  if result and (not isinstance(result, dict) or result.get("status") != "error"):
  success += len(guids)
  else:
purviewcli/cli/unified_catalog.py CHANGED
@@ -813,6 +813,7 @@ def term():
  @click.option("--name", required=True, help="Name of the glossary term")
  @click.option("--description", required=False, default="", help="Rich text description of the term")
  @click.option("--domain-id", required=True, help="Governance domain ID")
+ @click.option("--parent-id", required=False, help="Parent term ID (for hierarchical terms)")
  @click.option(
  "--status",
  required=False,
@@ -834,7 +835,7 @@ def term():
  )
  @click.option("--resource-name", required=False, help="Resource name for additional reading (can be specified multiple times)", multiple=True)
  @click.option("--resource-url", required=False, help="Resource URL for additional reading (can be specified multiple times)", multiple=True)
- def create(name, description, domain_id, status, acronym, owner_id, resource_name, resource_url):
+ def create(name, description, domain_id, parent_id, status, acronym, owner_id, resource_name, resource_url):
  """Create a new Unified Catalog term (Governance Domain term)."""
  try:
  client = UnifiedCatalogClient()
@@ -847,6 +848,8 @@ def create(name, description, domain_id, status, acronym, owner_id, resource_nam
  "--status": [status],
  }

+ if parent_id:
+ args["--parent-id"] = [parent_id]
  if acronym:
  args["--acronym"] = list(acronym)
  if owner_id:
@@ -1037,6 +1040,7 @@ def delete(term_id, force):
  @click.option("--name", required=False, help="Name of the glossary term")
  @click.option("--description", required=False, help="Rich text description of the term")
  @click.option("--domain-id", required=False, help="Governance domain ID")
+ @click.option("--parent-id", required=False, help="Parent term ID (for hierarchical terms)")
  @click.option(
  "--status",
  required=False,
@@ -1059,7 +1063,7 @@ def delete(term_id, force):
  @click.option("--resource-url", required=False, help="Resource URL for additional reading (can be specified multiple times, replaces existing)", multiple=True)
  @click.option("--add-acronym", required=False, help="Add acronym to existing ones (can be specified multiple times)", multiple=True)
  @click.option("--add-owner-id", required=False, help="Add owner to existing ones (can be specified multiple times)", multiple=True)
- def update(term_id, name, description, domain_id, status, acronym, owner_id, resource_name, resource_url, add_acronym, add_owner_id):
+ def update(term_id, name, description, domain_id, parent_id, status, acronym, owner_id, resource_name, resource_url, add_acronym, add_owner_id):
  """Update an existing Unified Catalog term."""
  try:
  client = UnifiedCatalogClient()
@@ -1073,6 +1077,8 @@ def update(term_id, name, description, domain_id, status, acronym, owner_id, res
  args["--description"] = [description]
  if domain_id:
  args["--governance-domain-id"] = [domain_id]
+ if parent_id:
+ args["--parent-id"] = [parent_id]
  if status:
  args["--status"] = [status]

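For reference, a small sketch of the args dictionary the updated update() command assembles once --parent-id is supplied; the key names and list-wrapping follow the diff above, and the values are placeholders:

    # Placeholder values; the keys and list-wrapping follow the update() code above.
    parent_id = "parent-term-guid"
    status = "Published"

    args = {}
    if parent_id:
        args["--parent-id"] = [parent_id]
    if status:
        args["--status"] = [status]

    print(args)  # {'--parent-id': ['parent-term-guid'], '--status': ['Published']}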
@@ -1386,7 +1392,7 @@ def update_terms_from_csv(csv_file, dry_run):
  """Bulk update glossary terms from a CSV file.

  CSV Format:
- term_id,name,description,status,acronyms,owner_ids,add_acronyms,add_owner_ids
+ term_id,name,description,status,parent_id,acronyms,owner_ids,add_acronyms,add_owner_ids

  Required:
  - term_id: The ID of the term to update
@@ -1395,15 +1401,16 @@ def update_terms_from_csv(csv_file, dry_run):
  - name: New term name (replaces existing)
  - description: New description (replaces existing)
  - status: New status (Draft, Published, Archived)
+ - parent_id: Parent term ID for hierarchical relationships (replaces existing)
  - acronyms: New acronyms separated by semicolons (replaces all existing)
  - owner_ids: New owner IDs separated by semicolons (replaces all existing)
  - add_acronyms: Acronyms to add separated by semicolons (preserves existing)
  - add_owner_ids: Owner IDs to add separated by semicolons (preserves existing)

  Example CSV:
- term_id,name,description,status,add_acronyms,add_owner_ids
- abc-123,,Updated description,Published,API;REST,user1@company.com
- def-456,New Name,,,SQL,
+ term_id,name,description,status,parent_id,add_acronyms,add_owner_ids
+ abc-123,,Updated description,Published,parent-term-guid,API;REST,user1@company.com
+ def-456,New Name,,,parent-term-guid,SQL,
  """
  import csv

@@ -1440,6 +1447,8 @@ def update_terms_from_csv(csv_file, dry_run):
  changes.append(f"desc: {update['description'][:50]}...")
  if update.get('status', '').strip():
  changes.append(f"status: {update['status']}")
+ if update.get('parent_id', '').strip():
+ changes.append(f"parent: {update['parent_id'][:20]}...")
  if update.get('acronyms', '').strip():
  changes.append(f"acronyms: {update['acronyms']}")
  if update.get('add_acronyms', '').strip():
@@ -1479,6 +1488,8 @@ def update_terms_from_csv(csv_file, dry_run):
  args['--description'] = [update['description'].strip()]
  if update.get('status', '').strip():
  args['--status'] = [update['status'].strip()]
+ if update.get('parent_id', '').strip():
+ args['--parent-id'] = [update['parent_id'].strip()]
  if update.get('acronyms', '').strip():
  args['--acronym'] = [a.strip() for a in update['acronyms'].split(';') if a.strip()]
  if update.get('owner_ids', '').strip():
@@ -1537,6 +1548,7 @@ def update_terms_from_json(json_file, dry_run):
  "name": "New Name", // Optional: Replace name
  "description": "New description", // Optional: Replace description
  "status": "Published", // Optional: Change status
+ "parent_id": "parent-term-guid", // Optional: Set parent term (hierarchical)
  "acronyms": ["API", "REST"], // Optional: Replace all acronyms
  "owner_ids": ["user@company.com"], // Optional: Replace all owners
  "add_acronyms": ["SQL"], // Optional: Add acronyms (preserves existing)
@@ -1599,6 +1611,8 @@ def update_terms_from_json(json_file, dry_run):
  args['--description'] = [update['description']]
  if update.get('status'):
  args['--status'] = [update['status']]
+ if update.get('parent_id'):
+ args['--parent-id'] = [update['parent_id']]
  if update.get('acronyms'):
  args['--acronym'] = update['acronyms'] if isinstance(update['acronyms'], list) else [update['acronyms']]
  if update.get('owner_ids'):
purviewcli/client/_entity.py CHANGED
@@ -19,6 +19,41 @@ from .endpoint import Endpoint, decorator, get_json, no_api_call_decorator
  from .endpoints import ENDPOINTS, get_api_version_params


+ def map_flat_entity_to_purview_entity(row):
+ """Map a flat row (pandas Series or dict) into a Purview entity dict.
+
+ Expected minimal input: { 'typeName': 'DataSet', 'qualifiedName': '...','attr1': 'v', ... }
+ Produces: { 'typeName': ..., 'attributes': { 'qualifiedName': ..., 'attr1': 'v', ... } }
+ """
+ try:
+ data = row.to_dict()
+ except Exception:
+ data = dict(row)
+
+ # pop typeName
+ type_name = data.pop("typeName", None)
+
+ # build attributes, skipping null-like values
+ attrs = {}
+ from math import isnan
+
+ for k, v in data.items():
+ # skip empty column names
+ if k is None or (isinstance(k, str) and k.strip() == ""):
+ continue
+ # treat NaN/None as missing
+ try:
+ if v is None:
+ continue
+ if isinstance(v, float) and isnan(v):
+ continue
+ except Exception:
+ pass
+ attrs[k] = v
+
+ return {"typeName": type_name, "attributes": attrs}
+
+
  class Entity(Endpoint):
  """Entity Management Operations - Complete Official API Implementation with 100% Coverage"""

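A brief usage sketch of the new helper, assuming the 1.2.0 package and pandas are installed; the attribute values are illustrative, and the output shape follows the docstring above:

    import pandas as pd
    from purviewcli.client._entity import map_flat_entity_to_purview_entity

    # A flat row as produced by pd.read_csv: typeName + qualifiedName + arbitrary attribute columns.
    row = pd.Series({
        "typeName": "DataSet",
        "qualifiedName": "https://example.org/storage/container/sales.csv",
        "description": "Daily sales export",
        "owner": None,  # null-like values are dropped from 'attributes'
    })

    entity = map_flat_entity_to_purview_entity(row)
    print(entity)
    # {'typeName': 'DataSet',
    #  'attributes': {'qualifiedName': 'https://example.org/storage/container/sales.csv',
    #                 'description': 'Daily sales export'}}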
purviewcli/client/_unified_catalog.py CHANGED
@@ -411,6 +411,11 @@ class UnifiedCatalogClient(Endpoint):
  "status": status,
  }

+ # Add parent_id if provided
+ parent_id = args.get("--parent-id", [""])[0]
+ if parent_id:
+ payload["parentId"] = parent_id
+
  # Add optional fields
  if owners:
  payload["contacts"] = {"owner": owners}
@@ -450,6 +455,8 @@ class UnifiedCatalogClient(Endpoint):
  payload["description"] = args.get("--description", [""])[0]
  if args.get("--governance-domain-id"):
  payload["domain"] = args["--governance-domain-id"][0]
+ if args.get("--parent-id"):
+ payload["parentId"] = args["--parent-id"][0]
  if args.get("--status"):
  payload["status"] = args["--status"][0]

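Picking up where the CLI args leave off, a minimal sketch of how the update-path change shapes the request body; only the payload keys shown in the diff are used, and the values are placeholders:

    # Placeholder values; the mapping mirrors the update path in the diff above.
    args = {
        "--description": ["A party that purchases goods or services."],
        "--governance-domain-id": ["<domain-guid>"],
        "--parent-id": ["parent-term-guid"],
        "--status": ["Published"],
    }

    payload = {}
    if args.get("--description"):
        payload["description"] = args.get("--description", [""])[0]
    if args.get("--governance-domain-id"):
        payload["domain"] = args["--governance-domain-id"][0]
    if args.get("--parent-id"):
        payload["parentId"] = args["--parent-id"][0]
    if args.get("--status"):
        payload["status"] = args["--status"][0]

    print(payload)
    # {'description': '...', 'domain': '<domain-guid>', 'parentId': 'parent-term-guid', 'status': 'Published'}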
pvw_cli-1.0.14.dist-info/METADATA → pvw_cli-1.2.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: pvw-cli
- Version: 1.0.14
+ Version: 1.2.0
  Summary: Microsoft Purview CLI with comprehensive automation capabilities
  Author-email: AYOUB KEBAILI <keayoub@msn.com>
  Maintainer-email: AYOUB KEBAILI <keayoub@msn.com>
@@ -56,7 +56,7 @@ Requires-Dist: pytest-asyncio>=0.20.0; extra == "test"
  Requires-Dist: pytest-cov>=4.0.0; extra == "test"
  Requires-Dist: requests-mock>=1.9.0; extra == "test"

- # PURVIEW CLI v1.0.14 - Microsoft Purview Automation & Data Governance
+ # PURVIEW CLI v1.2.0 - Microsoft Purview Automation & Data Governance

  > **LATEST UPDATE (October 2025):**
  > - **NEW: Bulk Term Import/Export** - Import multiple terms from CSV/JSON with dry-run support
@@ -72,7 +72,7 @@ Requires-Dist: requests-mock>=1.9.0; extra == "test"

  ## What is PVW CLI?

- **PVW CLI v1.0.14** is a modern, full-featured command-line interface and Python library for Microsoft Purview. It enables automation and management of *all major Purview APIs* including:
+ **PVW CLI v1.2.0** is a modern, full-featured command-line interface and Python library for Microsoft Purview. It enables automation and management of *all major Purview APIs* including:

  - **Unified Catalog (UC) Management** - Complete governance domains, glossary terms, data products, OKRs, CDEs
  - **Bulk Operations** - Import/export terms from CSV/JSON, bulk delete scripts with progress tracking
@@ -164,7 +164,7 @@ For more advanced usage, see the documentation in `doc/` or the project docs: ht

  ## Overview

- **PVW CLI v1.0.14** is a modern command-line interface and Python library for Microsoft Purview, enabling:
+ **PVW CLI v1.2.0** is a modern command-line interface and Python library for Microsoft Purview, enabling:

  - Advanced data catalog search and discovery
  - Bulk import/export of entities, glossary terms, and lineage
@@ -1203,6 +1203,6 @@ See [LICENSE](LICENSE) file for details.

  ---

- **PVW CLI v1.0.14 empowers data engineers, stewards, and architects to automate, scale, and enhance their Microsoft Purview experience with powerful command-line and programmatic capabilities.**
+ **PVW CLI v1.2.0 empowers data engineers, stewards, and architects to automate, scale, and enhance their Microsoft Purview experience with powerful command-line and programmatic capabilities.**

  **Latest Features:** Bulk term import/export, PowerShell integration, multiple output formats, and comprehensive bulk delete scripts with beautiful progress tracking.
pvw_cli-1.0.14.dist-info/RECORD → pvw_cli-1.2.0.dist-info/RECORD
@@ -1,11 +1,11 @@
- purviewcli/__init__.py,sha256=3T2hEotAV7XiN0xccqTrMd32xoAYF1VtnjccW5jcbco,415
+ purviewcli/__init__.py,sha256=JSVtflE3uFle2j8Tk2W2LGlEtezs_ftOB8kLvpWxEPI,414
  purviewcli/__main__.py,sha256=n_PFo1PjW8L1OKCNLsW0vlVSo8tzac_saEYYLTu93iQ,372
  purviewcli/cli/__init__.py,sha256=UGMctZaXXsV2l2ycnmhTgyksH81_JBQjAPq3oRF2Dqk,56
  purviewcli/cli/account.py,sha256=Z_bwhKriMQpoBicOORM64wpQ1MJ94QG7jGKiaG-D_r8,7092
  purviewcli/cli/cli.py,sha256=XAXUvh5B0_TdnVQVSHdknqXZPbPWGBhNyeVPHGF1-QY,5801
  purviewcli/cli/collections.py,sha256=hdSadZ-yecx3-UHPvlEAe_booHqM71DnkekH1fL4oM0,18596
  purviewcli/cli/domain.py,sha256=XQr4Y3nxVCwuPWIie69475MM6uz4YAcQ_SRTuqnIJB4,20895
- purviewcli/cli/entity.py,sha256=e4Zhd6YGui3dJHbLDRAj6Lio9L14J6Nnf1-iFi0HCkw,89257
+ purviewcli/cli/entity.py,sha256=CMUnzYyRbHUQFGa_XLo2Yq189WcomZ16OyiSDzMvN4A,94842
  purviewcli/cli/glossary.py,sha256=6oTU7qPkUFkty4TPBPc8c1w2tbn1HicQ5qSxrP5vHFE,25696
  purviewcli/cli/health.py,sha256=_vuJgoLiihkKeU_L6bTzrCY2o9_gWqQS6gMXJ4EZyCA,10549
  purviewcli/cli/insight.py,sha256=dJYgTC9jQz62YnpDGUGj4ClXcdDsrGDr4sEvZbq90yE,3853
@@ -17,13 +17,13 @@ purviewcli/cli/scan.py,sha256=91iKDH8iVNJKndJAisrKx3J4HRoPH2qfmxguLZH3xHY,13807
  purviewcli/cli/search.py,sha256=B0Ae3-9JCTKICOkJrYS29tMiFTuLJzlx0ISW_23OHF0,13599
  purviewcli/cli/share.py,sha256=QRZhHM59RxdYqXOjSYLfVRZmjwMg4Y-bWxMSQVTQiIE,20197
  purviewcli/cli/types.py,sha256=zo_8rAqDQ1vqi5y-dBh_sVY6i16UaJLLx_vBJBfZrrw,23729
- purviewcli/cli/unified_catalog.py,sha256=1HOncX6XjN5H1EtNMjoSFOdpkhaKB2iAnGoyE2ivhgo,73820
+ purviewcli/cli/unified_catalog.py,sha256=ZQn5GsHsfXCrjzTPnfNA6gfGBOD5AH2m7uauGZt-oZs,74753
  purviewcli/cli/workflow.py,sha256=c1Gmlffbs7DV_rBw7LIunyc2PukcRiV1Sv5ifnQBZD4,14735
  purviewcli/client/__init__.py,sha256=qjhTkXkgxlNUY3R1HkrT_Znt03-2d8JDolPVOeVv2xI,37
  purviewcli/client/_account.py,sha256=5lacA7vvjGBLHUDRjFR7B5E8eN6T07rctVDRXR9JFTY,12397
  purviewcli/client/_collections.py,sha256=17ma6aD6gftIe-Nhwy96TPE42O6qp0hmw84xei4VPpo,12101
  purviewcli/client/_domain.py,sha256=Yt4RsIoGWz6ND9Ii4CtoGM6leEAL_KNXYcp6AK9KMqs,3744
- purviewcli/client/_entity.py,sha256=6yU_j0RSGUF-xzzOhLaensm6ulkmrQPWkSar6pycC04,20601
+ purviewcli/client/_entity.py,sha256=oGvnHpI8bFX7VOyH_aqgoGU37hnMqAjsXPEEBtk29vQ,21608
  purviewcli/client/_glossary.py,sha256=7kB3RXVOCCR1RGlaALmr_BwN6S76-xCoyVqD5ZMzt-k,20985
  purviewcli/client/_health.py,sha256=bKX2PChw2OB2kD_ZEfWlDxc093qQX3ai31YCQQ2VaKg,5921
  purviewcli/client/_insight.py,sha256=2KX6dVAkyFFMLuQ02Ko2J7WhARKlCLhSgtFkjFJxZ7c,16031
@@ -35,7 +35,7 @@ purviewcli/client/_scan.py,sha256=2atEBD-kKWtFuBSWh2P0cwp42gfg7qgwWq-072QZMs4,15
  purviewcli/client/_search.py,sha256=vUDgjZtnNkHaCqsCXPp1Drq9Kknrkid17RNSXZhi1yw,11890
  purviewcli/client/_share.py,sha256=vKENIhePuzi3WQazNfv5U9y-6yxRk222zrFA-SGh1pc,10494
  purviewcli/client/_types.py,sha256=ONa3wh1F02QOVy51UGq54121TkqRcWczdXIvNqPIFU0,15454
- purviewcli/client/_unified_catalog.py,sha256=S2t7hIA_wmaZPRVHkIgBkixqKpO1YRduGfIwoz0hkcg,39760
+ purviewcli/client/_unified_catalog.py,sha256=WDpl68NJXfMJ5OIlING0wrfXwbOLwbnGRAFL9WPZnCE,40017
  purviewcli/client/_workflow.py,sha256=po5lomq07s3d7IAzZ5ved5JO6SsBU_JUA4lQSObdJR4,17904
  purviewcli/client/api_client.py,sha256=rNRUhkmZhoCHKWhUvZFUXEWj5eC8LSSnXYYNMb5lHNM,22964
  purviewcli/client/business_rules.py,sha256=VR4QqOE1Pg0nFjqAE-zbt-KqIenvzImLU-TBLki9bYc,27560
@@ -53,8 +53,8 @@ purviewcli/client/settings.py,sha256=nYdnYurTZsgv9vcgljnzVxLPtYVl9q6IplqOzi1aRvI
  purviewcli/client/sync_client.py,sha256=gwCqesJTNaXn1Q-j57O95R9mn3fIOhdP4sc8jBaBcYw,9493
  purviewcli/plugins/__init__.py,sha256=rpt3OhFt_wSE_o8Ga8AXvw1pqkdBxLmjrhYtE_-LuJo,29
  purviewcli/plugins/plugin_system.py,sha256=C-_dL4FUj90o1JS7Saxkpov6fz0GIF5PFhZTYwqBkWE,26774
- pvw_cli-1.0.14.dist-info/METADATA,sha256=ExhctUBzpS4X5IEtdt6d7bPqtuh20AK5ZNGpkI7kUXw,39127
- pvw_cli-1.0.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- pvw_cli-1.0.14.dist-info/entry_points.txt,sha256=VI6AAbc6sWahOCX7sn_lhJIr9OiJM0pHF7rmw1YVGlE,82
- pvw_cli-1.0.14.dist-info/top_level.txt,sha256=LrADzPoKwF1xY0pGKpWauyOVruHCIWKCkT7cwIl6IuI,11
- pvw_cli-1.0.14.dist-info/RECORD,,
+ pvw_cli-1.2.0.dist-info/METADATA,sha256=3NxGHKofl6xqOZXegUTzuTzT-AsdNYJw0gNpMj4kxdw,39122
+ pvw_cli-1.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ pvw_cli-1.2.0.dist-info/entry_points.txt,sha256=VI6AAbc6sWahOCX7sn_lhJIr9OiJM0pHF7rmw1YVGlE,82
+ pvw_cli-1.2.0.dist-info/top_level.txt,sha256=LrADzPoKwF1xY0pGKpWauyOVruHCIWKCkT7cwIl6IuI,11
+ pvw_cli-1.2.0.dist-info/RECORD,,