acryl-datahub 1.1.0.5rc7__py3-none-any.whl → 1.1.0.5rc9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of acryl-datahub might be problematic.

Files changed (59)
  1. {acryl_datahub-1.1.0.5rc7.dist-info → acryl_datahub-1.1.0.5rc9.dist-info}/METADATA +2620 -2622
  2. {acryl_datahub-1.1.0.5rc7.dist-info → acryl_datahub-1.1.0.5rc9.dist-info}/RECORD +59 -59
  3. datahub/_version.py +1 -1
  4. datahub/cli/check_cli.py +0 -7
  5. datahub/cli/cli_utils.py +73 -0
  6. datahub/cli/delete_cli.py +0 -6
  7. datahub/cli/docker_check.py +107 -12
  8. datahub/cli/docker_cli.py +148 -228
  9. datahub/cli/exists_cli.py +0 -4
  10. datahub/cli/get_cli.py +0 -4
  11. datahub/cli/ingest_cli.py +1 -20
  12. datahub/cli/put_cli.py +0 -6
  13. datahub/cli/quickstart_versioning.py +50 -5
  14. datahub/cli/specific/assertions_cli.py +0 -6
  15. datahub/cli/specific/datacontract_cli.py +0 -6
  16. datahub/cli/specific/dataproduct_cli.py +0 -22
  17. datahub/cli/specific/dataset_cli.py +0 -11
  18. datahub/cli/specific/forms_cli.py +0 -6
  19. datahub/cli/specific/group_cli.py +0 -4
  20. datahub/cli/specific/structuredproperties_cli.py +0 -7
  21. datahub/cli/specific/user_cli.py +0 -4
  22. datahub/cli/state_cli.py +0 -4
  23. datahub/cli/timeline_cli.py +0 -4
  24. datahub/entrypoints.py +4 -3
  25. datahub/ingestion/autogenerated/capability_summary.json +88 -23
  26. datahub/ingestion/extractor/schema_util.py +13 -4
  27. datahub/ingestion/graph/client.py +2 -2
  28. datahub/ingestion/run/pipeline.py +43 -0
  29. datahub/ingestion/source/bigquery_v2/bigquery.py +9 -1
  30. datahub/ingestion/source/datahub/datahub_database_reader.py +1 -2
  31. datahub/ingestion/source/dremio/dremio_source.py +1 -4
  32. datahub/ingestion/source/gcs/gcs_source.py +9 -1
  33. datahub/ingestion/source/identity/okta.py +0 -13
  34. datahub/ingestion/source/powerbi/powerbi.py +0 -5
  35. datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py +0 -1
  36. datahub/ingestion/source/powerbi_report_server/report_server.py +0 -23
  37. datahub/ingestion/source/sigma/sigma.py +6 -1
  38. datahub/ingestion/source/snowflake/snowflake_config.py +11 -0
  39. datahub/ingestion/source/snowflake/snowflake_queries.py +100 -58
  40. datahub/ingestion/source/snowflake/snowflake_v2.py +11 -1
  41. datahub/ingestion/source/snowflake/stored_proc_lineage.py +1 -1
  42. datahub/ingestion/source/sql/hive_metastore.py +0 -10
  43. datahub/ingestion/source/sql/sql_common.py +8 -0
  44. datahub/ingestion/source/sql/teradata.py +993 -234
  45. datahub/ingestion/source/sql/vertica.py +0 -4
  46. datahub/ingestion/source/sql_queries.py +2 -2
  47. datahub/ingestion/source/superset.py +56 -1
  48. datahub/ingestion/source/tableau/tableau.py +40 -34
  49. datahub/ingestion/source/tableau/tableau_constant.py +0 -2
  50. datahub/ingestion/source/unity/source.py +9 -1
  51. datahub/sdk/lineage_client.py +2 -2
  52. datahub/sql_parsing/sql_parsing_aggregator.py +21 -12
  53. datahub/sql_parsing/sqlglot_lineage.py +40 -15
  54. datahub/upgrade/upgrade.py +46 -13
  55. datahub/utilities/server_config_util.py +8 -0
  56. {acryl_datahub-1.1.0.5rc7.dist-info → acryl_datahub-1.1.0.5rc9.dist-info}/WHEEL +0 -0
  57. {acryl_datahub-1.1.0.5rc7.dist-info → acryl_datahub-1.1.0.5rc9.dist-info}/entry_points.txt +0 -0
  58. {acryl_datahub-1.1.0.5rc7.dist-info → acryl_datahub-1.1.0.5rc9.dist-info}/licenses/LICENSE +0 -0
  59. {acryl_datahub-1.1.0.5rc7.dist-info → acryl_datahub-1.1.0.5rc9.dist-info}/top_level.txt +0 -0
datahub/cli/ingest_cli.py CHANGED
@@ -22,9 +22,7 @@ from datahub.ingestion.graph.config import ClientMode
 from datahub.ingestion.run.connection import ConnectionManager
 from datahub.ingestion.run.pipeline import Pipeline
 from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade
 from datahub.utilities.ingest_utils import deploy_source_vars
-from datahub.utilities.perf_timer import PerfTimer

 logger = logging.getLogger(__name__)

@@ -178,14 +176,7 @@ def run(
         no_progress=no_progress,
         raw_config=raw_pipeline_config,
     )
-    with PerfTimer() as timer:
-        ret = run_pipeline_to_completion(pipeline)
-
-        # The main ingestion has completed. If it was successful, potentially show an upgrade nudge message.
-        if ret == 0:
-            upgrade.check_upgrade_post(
-                main_method_runtime=timer.elapsed_seconds(), graph=pipeline.ctx.graph
-            )
+    ret = run_pipeline_to_completion(pipeline)

     if ret:
         sys.exit(ret)
@@ -193,8 +184,6 @@ def run(


 @ingest.command()
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 @click.option(
     "-n",
     "--name",
@@ -385,8 +374,6 @@ def mcps(path: str) -> None:
 @click.option(
     "--source", type=str, default=None, help="Filter by ingestion source name."
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def list_source_runs(page_offset: int, page_size: int, urn: str, source: str) -> None:
     """
     List ingestion source runs with their details, optionally filtered by URN or source.
@@ -514,8 +501,6 @@ def list_source_runs(page_offset: int, page_size: int, urn: str, source: str) -> None:
     default=False,
     help="If enabled, will list ingestion runs which have been soft deleted",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def list_runs(page_offset: int, page_size: int, include_soft_deletes: bool) -> None:
     """List recent ingestion runs to datahub"""

@@ -564,8 +549,6 @@ def list_runs(page_offset: int, page_size: int, include_soft_deletes: bool) -> None:
     help="If enabled, will include aspects that have been soft deleted",
 )
 @click.option("-a", "--show-aspect", required=False, is_flag=True)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def show(
     run_id: str, start: int, count: int, include_soft_deletes: bool, show_aspect: bool
 ) -> None:
@@ -614,8 +597,6 @@ def show(
     default="./rollback-reports",
     help="Path to directory where rollback reports will be saved to",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def rollback(
     run_id: str, force: bool, dry_run: bool, safe: bool, report_dir: str
 ) -> None:
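
The same two decorators are stripped from commands across all of the CLI modules below. Combined with the entrypoints.py change at the end of this diff, the before/after pattern looks roughly like this (a sketch; my_command is a hypothetical command, not one from the package):

    # Before: every command repeated the same two decorators.
    @ingest.command()
    @upgrade.check_upgrade       # nudges the user when a newer CLI version exists
    @telemetry.with_telemetry()  # records anonymous usage telemetry
    def my_command() -> None: ...

    # After: commands are declared bare; entrypoints.py calls
    # enable_auto_decorators(datahub) once to apply both behaviors
    # to every command in the CLI tree.
    @ingest.command()
    def my_command() -> None: ...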
datahub/cli/put_cli.py CHANGED
@@ -14,8 +14,6 @@ from datahub.metadata.schema_classes import (
     PlatformTypeClass,
     SystemMetadataClass,
 )
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade
 from datahub.utilities.urns.data_platform_urn import DataPlatformUrn
 from datahub.utilities.urns.urn import guess_entity_type

@@ -44,8 +42,6 @@ def put() -> None:
     required=False,
     help="Run ID into which we should log the aspect.",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def aspect(urn: str, aspect: str, aspect_data: str, run_id: Optional[str]) -> None:
     """Update a single aspect of an entity"""

@@ -75,8 +71,6 @@ def aspect(urn: str, aspect: str, aspect_data: str, run_id: Optional[str]) -> None:

 @put.command()
 @click.pass_context
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 @click.option(
     "--name",
     type=str,
datahub/cli/quickstart_versioning.py CHANGED
@@ -6,17 +6,40 @@ import re
 from typing import Dict, Optional

 import click
+import packaging
 import requests
 import yaml
 from packaging.version import parse
 from pydantic import BaseModel

+from datahub._version import nice_version_name
+
 logger = logging.getLogger(__name__)

 LOCAL_QUICKSTART_MAPPING_FILE = os.environ.get("FORCE_LOCAL_QUICKSTART_MAPPING", "")
 DEFAULT_LOCAL_CONFIG_PATH = "~/.datahub/quickstart/quickstart_version_mapping.yaml"
 DEFAULT_REMOTE_CONFIG_PATH = "https://raw.githubusercontent.com/datahub-project/datahub/master/docker/quickstart/quickstart_version_mapping.yaml"

+MINIMUM_SUPPORTED_VERSION = "v1.1.0"
+
+
+def get_minimum_supported_version_message(version: str) -> str:
+    MINIMUM_SUPPORTED_VERSION_MESSAGE = f"""
+    DataHub CLI Version Compatibility Issue
+
+    You're trying to install DataHub server version {version} which is not supported by this CLI version.
+
+    This CLI (version {nice_version_name()}) only supports installing DataHub server versions {MINIMUM_SUPPORTED_VERSION} and above.
+
+    To install older server versions:
+    1. Uninstall current CLI: pip uninstall acryl-datahub
+    2. Install older CLI: pip install acryl-datahub==1.1
+    3. Run quickstart with your desired version: datahub docker quickstart --version <version>
+
+    For more information: https://docs.datahub.com/docs/quickstart#install-datahub-server
+    """
+    return MINIMUM_SUPPORTED_VERSION_MESSAGE
+

 class QuickstartExecutionPlan(BaseModel):
     composefile_git_ref: str
@@ -126,15 +149,25 @@ class QuickstartVersionMappingConfig(BaseModel):
                 mysql_tag=str(mysql_tag),
             ),
         )
+
+        if not is_minimum_supported_version(requested_version):
+            click.secho(
+                get_minimum_supported_version_message(version=requested_version),
+                fg="red",
+            )
+            raise click.ClickException("Minimum supported version not met")
+
         # new CLI version is downloading the composefile corresponding to the requested version
-        # if the version is older than v0.10.1, it doesn't contain the setup job labels and the
-        # the checks will fail, so in those cases we pick the composefile from v0.10.1 which contains
-        # the setup job labels
+        # if the version is older than <MINIMUM_SUPPORTED_VERSION>, it doesn't contain the
+        # docker compose based resolved compose file. In those cases, we pick up the composefile from
+        # MINIMUM_SUPPORTED_VERSION which contains the compose file.
         if _is_it_a_version(result.composefile_git_ref):
-            if parse("v0.10.1") > parse(result.composefile_git_ref):
+            if (
+                parse("v1.2.0") > parse(result.composefile_git_ref)
+            ):  # TODO: Once we decide what the new version that supports profile based compose.
                 # The merge commit where the labels were added
                 # https://github.com/datahub-project/datahub/pull/7473
-                result.composefile_git_ref = "1d3339276129a7cb8385c07a958fcc93acda3b4e"
+                result.composefile_git_ref = "1d3339276129a7cb8385c07a958fcc93acda3b4e"  # TODO update after compose file is merged

         return result

@@ -148,3 +181,15 @@ def save_quickstart_config(
     with open(path, "w") as f:
         yaml.dump(config.dict(), f)
     logger.info(f"Saved quickstart config to {path}.")
+
+
+def is_minimum_supported_version(version: str) -> bool:
+    if not _is_it_a_version(version):
+        return True
+
+    requested_version = packaging.version.parse(version)
+    minimum_supported_version = packaging.version.parse(MINIMUM_SUPPORTED_VERSION)
+    if requested_version < minimum_supported_version:
+        return False
+
+    return True
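
A quick illustration of the new version gate, using only the helpers added above (a sketch; the version strings are arbitrary examples, and it assumes _is_it_a_version does not recognize git refs like "master" as versions):

    from datahub.cli.quickstart_versioning import is_minimum_supported_version

    assert is_minimum_supported_version("v1.1.0") is True    # exactly the minimum
    assert is_minimum_supported_version("v1.2.3") is True    # newer than the minimum
    assert is_minimum_supported_version("v0.13.0") is False  # older: quickstart aborts
    assert is_minimum_supported_version("master") is True    # non-version refs are not gated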
datahub/cli/specific/assertions_cli.py CHANGED
@@ -17,8 +17,6 @@ from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
 from datahub.integrations.assertion.registry import ASSERTION_PLATFORMS
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -33,8 +31,6 @@ def assertions() -> None:

 @assertions.command()
 @click.option("-f", "--file", required=True, type=click.Path(exists=True))
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: str) -> None:
     """Upsert (create or update) a set of assertions in DataHub."""

@@ -71,8 +67,6 @@ def upsert(file: str) -> None:
     default=[],
     help="Platform-specific extra key-value inputs in form key=value",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def compile(
     file: str, platform: str, output_to: Optional[str], extras: List[str]
 ) -> None:
datahub/cli/specific/datacontract_cli.py CHANGED
@@ -7,8 +7,6 @@ from click_default_group import DefaultGroup
 from datahub.api.entities.datacontract.datacontract import DataContract
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -21,8 +19,6 @@ def datacontract() -> None:

 @datacontract.command()
 @click.option("-f", "--file", required=True, type=click.Path(exists=True))
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: str) -> None:
     """Upsert (create or update) a Data Contract in DataHub."""

@@ -59,8 +55,6 @@ def upsert(file: str) -> None:
     help="The file containing the data contract definition",
 )
 @click.option("--hard/--soft", required=False, is_flag=True, default=False)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def delete(urn: Optional[str], file: Optional[str], hard: bool) -> None:
     """Delete a Data Contract in DataHub. Defaults to a soft-delete. Use --hard to completely erase metadata."""

datahub/cli/specific/dataproduct_cli.py CHANGED
@@ -23,8 +23,6 @@ from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
 from datahub.ingestion.graph.config import ClientMode
 from datahub.metadata.schema_classes import OwnerClass, OwnershipTypeClass
 from datahub.specific.dataproduct import DataProductPatchBuilder
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade
 from datahub.utilities.urns.urn import Urn

 logger = logging.getLogger(__name__)
@@ -129,8 +127,6 @@ def mutate(file: Path, validate_assets: bool, external_url: str, upsert: bool) -> None:
     "--validate-assets/--no-validate-assets", required=False, is_flag=True, default=True
 )
 @click.option("--external-url", required=False, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def update(file: Path, validate_assets: bool, external_url: str) -> None:
     """Create or Update a Data Product in DataHub. Use upsert if you want to apply partial updates."""

@@ -145,8 +141,6 @@ def update(file: Path, validate_assets: bool, external_url: str) -> None:
     "--validate-assets/--no-validate-assets", required=False, is_flag=True, default=True
 )
 @click.option("--external-url", required=False, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: Path, validate_assets: bool, external_url: str) -> None:
     """Upsert attributes to a Data Product in DataHub."""

@@ -158,8 +152,6 @@ def upsert(file: Path, validate_assets: bool, external_url: str) -> None:
 )
 @click.option("-f", "--file", required=True, type=click.Path(exists=True))
 @click.option("--update", required=False, is_flag=True, default=False)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def diff(file: Path, update: bool) -> None:
     """Diff a Data Product file with its twin in DataHub"""

@@ -205,8 +197,6 @@ def diff(file: Path, update: bool) -> None:
     help="The file containing the data product definition",
 )
 @click.option("--hard/--soft", required=False, is_flag=True, default=False)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def delete(urn: str, file: Path, hard: bool) -> None:
     """Delete a Data Product in DataHub. Defaults to a soft-delete. Use --hard to completely erase metadata."""

@@ -241,8 +231,6 @@ def delete(urn: str, file: Path, hard: bool) -> None:
 )
 @click.option("--urn", required=True, type=str)
 @click.option("--to-file", required=False, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def get(urn: str, to_file: str) -> None:
     """Get a Data Product from DataHub"""

@@ -278,8 +266,6 @@ def get(urn: str, to_file: str) -> None:
     type=click.Path(exists=True),
     help="A markdown file that contains documentation for this data product",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def set_description(urn: str, description: str, md_file: Path) -> None:
     """Set description for a Data Product in DataHub"""

@@ -329,8 +315,6 @@ def set_description(urn: str, description: str, md_file: Path) -> None:
     ),
     default=OwnershipTypeClass.TECHNICAL_OWNER,
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def add_owner(urn: str, owner: str, owner_type: str) -> None:
     """Add owner for a Data Product in DataHub"""

@@ -352,8 +336,6 @@ def add_owner(urn: str, owner: str, owner_type: str) -> None:
 @dataproduct.command(name="remove_owner", help="Remove an owner from a Data Product")
 @click.option("--urn", required=True, type=str)
 @click.argument("owner_urn", required=True, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def remove_owner(urn: str, owner_urn: str) -> None:
     """Remove owner for a Data Product in DataHub"""

@@ -374,8 +356,6 @@ def remove_owner(urn: str, owner_urn: str) -> None:
 @click.option(
     "--validate-assets/--no-validate-assets", required=False, is_flag=True, default=True
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def add_asset(urn: str, asset: str, validate_assets: bool) -> None:
     """Add asset for a Data Product in DataHub"""

@@ -401,8 +381,6 @@ def add_asset(urn: str, asset: str, validate_assets: bool) -> None:
 @click.option(
     "--validate-assets/--no-validate-assets", required=False, is_flag=True, default=True
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def remove_asset(urn: str, asset: str, validate_assets: bool) -> None:
     """Remove asset for a Data Product in DataHub"""

datahub/cli/specific/dataset_cli.py CHANGED
@@ -14,8 +14,6 @@ from datahub.emitter.mcp import MetadataChangeProposalWrapper
 from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
 from datahub.ingestion.graph.config import ClientMode
 from datahub.metadata.com.linkedin.pegasus2avro.common import Siblings
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -33,8 +31,6 @@ def dataset() -> None:
 @click.option(
     "-n", "--dry-run", type=bool, is_flag=True, default=False, help="Perform a dry run"
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: Path, dry_run: bool) -> None:
     """Upsert attributes to a Dataset in DataHub."""
     # Call the sync command with to_datahub=True to perform the upsert operation
@@ -47,8 +43,6 @@ def upsert(file: Path, dry_run: bool) -> None:
 )
 @click.option("--urn", required=True, type=str)
 @click.option("--to-file", required=False, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def get(urn: str, to_file: str) -> None:
     """Get a Dataset from DataHub"""

@@ -77,7 +71,6 @@ def get(urn: str, to_file: str) -> None:
     help="URN of secondary sibling(s)",
     multiple=True,
 )
-@telemetry.with_telemetry()
 def add_sibling(urn: str, sibling_urns: Tuple[str]) -> None:
     all_urns = set()
     all_urns.add(urn)
@@ -117,8 +110,6 @@ def _get_existing_siblings(graph: DataHubGraph, urn: str) -> Set[str]:
 @click.option("--lintCheck", required=False, is_flag=True)
 @click.option("--lintFix", required=False, is_flag=True)
 @click.argument("file", type=click.Path(exists=True))
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def file(lintcheck: bool, lintfix: bool, file: str) -> None:
     """Operate on a Dataset file"""

@@ -174,8 +165,6 @@ def file(lintcheck: bool, lintfix: bool, file: str) -> None:
 @click.option(
     "-n", "--dry-run", type=bool, is_flag=True, default=False, help="Perform a dry run"
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def sync(file: str, to_datahub: bool, dry_run: bool) -> None:
     """Sync a Dataset file to/from DataHub"""

datahub/cli/specific/forms_cli.py CHANGED
@@ -8,8 +8,6 @@ from click_default_group import DefaultGroup
 from datahub.api.entities.forms.forms import Forms
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -24,8 +22,6 @@ def forms() -> None:
     name="upsert",
 )
 @click.option("-f", "--file", required=True, type=click.Path(exists=True))
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: Path) -> None:
     """Upsert forms in DataHub."""

@@ -37,8 +33,6 @@ def upsert(file: Path) -> None:
 )
 @click.option("--urn", required=True, type=str)
 @click.option("--to-file", required=False, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def get(urn: str, to_file: str) -> None:
     """Get form from DataHub"""
     with get_default_graph(ClientMode.CLI) as graph:
datahub/cli/specific/group_cli.py CHANGED
@@ -11,8 +11,6 @@ from datahub.api.entities.corpgroup.corpgroup import (
 from datahub.cli.specific.file_loader import load_file
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -34,8 +32,6 @@ def group() -> None:
     default=False,
     help="When set, writes to the editable section of the metadata graph, overwriting writes from the UI",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: Path, override_editable: bool) -> None:
     """Create or Update a Group with embedded Users"""

datahub/cli/specific/structuredproperties_cli.py CHANGED
@@ -12,8 +12,6 @@ from datahub.api.entities.structuredproperties.structuredproperties import (
 )
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade
 from datahub.utilities.urns.urn import Urn

 logger = logging.getLogger(__name__)
@@ -29,8 +27,6 @@ def properties() -> None:
     name="upsert",
 )
 @click.option("-f", "--file", required=True, type=click.Path(exists=True))
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: Path) -> None:
     """Upsert structured properties in DataHub."""

@@ -43,8 +39,6 @@ def upsert(file: Path) -> None:
 )
 @click.option("--urn", required=True, type=str)
 @click.option("--to-file", required=False, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def get(urn: str, to_file: str) -> None:
     """Get structured properties from DataHub"""
     urn = Urn.make_structured_property_urn(urn)
@@ -71,7 +65,6 @@ def get(urn: str, to_file: str) -> None:
 )
 @click.option("--details/--no-details", is_flag=True, default=True)
 @click.option("--to-file", required=False, type=str)
-@telemetry.with_telemetry()
 def list(details: bool, to_file: str) -> None:
     """List structured properties in DataHub"""

datahub/cli/specific/user_cli.py CHANGED
@@ -9,8 +9,6 @@ from datahub.api.entities.corpuser.corpuser import CorpUser, CorpUserGenerationConfig
 from datahub.cli.specific.file_loader import load_file
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -32,8 +30,6 @@ def user() -> None:
     is_flag=True,
     help="Use this flag to overwrite the information that is set via the UI",
 )
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def upsert(file: Path, override_editable: bool) -> None:
     """Create or Update a User in DataHub"""

datahub/cli/state_cli.py CHANGED
@@ -6,8 +6,6 @@ from click_default_group import DefaultGroup

 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade

 logger = logging.getLogger(__name__)

@@ -21,8 +19,6 @@ def state() -> None:
 @state.command()
 @click.option("--pipeline-name", required=True, type=str)
 @click.option("--platform", required=True, type=str)
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def inspect(pipeline_name: str, platform: str) -> None:
     """
     Get the latest stateful ingestion state for a given pipeline.
datahub/cli/timeline_cli.py CHANGED
@@ -10,8 +10,6 @@ from requests import Response
 from datahub.emitter.mce_builder import dataset_urn_to_key, schema_field_urn_to_key
 from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
-from datahub.upgrade import upgrade
 from datahub.utilities.urns.urn import Urn

 logger = logging.getLogger(__name__)
@@ -129,8 +127,6 @@ def get_timeline(
 )
 @click.option("--raw", type=bool, is_flag=True, help="Show the raw diff")
 @click.pass_context
-@upgrade.check_upgrade
-@telemetry.with_telemetry()
 def timeline(
     ctx: Any,
     urn: str,
datahub/entrypoints.py CHANGED
@@ -10,6 +10,7 @@ import click
 import datahub._version as datahub_version
 from datahub.cli.check_cli import check
 from datahub.cli.cli_utils import (
+    enable_auto_decorators,
     fixup_gms_url,
     generate_access_token,
     make_shim_command,
@@ -38,7 +39,6 @@ from datahub.cli.timeline_cli import timeline
 from datahub.configuration.common import should_show_stack_trace
 from datahub.ingestion.graph.client import get_default_graph
 from datahub.ingestion.graph.config import ClientMode
-from datahub.telemetry import telemetry
 from datahub.utilities._custom_package_loader import model_version_name
 from datahub.utilities.logging_manager import configure_logging
 from datahub.utilities.server_config_util import get_gms_config
@@ -111,7 +111,6 @@ def datahub(
     default=False,
     help="If passed will show server config. Assumes datahub init has happened.",
 )
-@telemetry.with_telemetry()
 def version(include_server: bool = False) -> None:
     """Print version number and exit."""

@@ -131,7 +130,6 @@ def version(include_server: bool = False) -> None:
     default=False,
     help="If passed then uses password to initialise token.",
 )
-@telemetry.with_telemetry()
 def init(use_password: bool = False) -> None:
     """Configure which datahub instance to connect to"""

@@ -218,6 +216,9 @@ except ImportError as e:
     make_shim_command("actions", "run `pip install acryl-datahub-actions`")
 )

+# Adding telemetry and upgrade decorators to all commands
+enable_auto_decorators(datahub)
+

 def main(**kwargs):
     # We use threads in a variety of places within our CLI. The multiprocessing
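
The enable_auto_decorators helper itself lives in datahub/cli/cli_utils.py (+73 lines, not expanded in this diff), so its implementation is not shown here. A minimal sketch of how such a helper could work, hypothetical and for illustration only: walk the click command tree and wrap each leaf command's callback with the two decorators that were previously applied by hand.

    import click

    from datahub.telemetry import telemetry
    from datahub.upgrade import upgrade


    def enable_auto_decorators(group: click.Group) -> None:
        # Hypothetical sketch, not the actual cli_utils implementation:
        # recursively wrap every command callback with the telemetry and
        # upgrade decorators removed from the individual CLI modules above.
        for command in group.commands.values():
            if isinstance(command, click.Group):
                enable_auto_decorators(command)  # recurse into subgroups
            elif command.callback is not None:
                command.callback = upgrade.check_upgrade(
                    telemetry.with_telemetry()(command.callback)
                )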