dvt-core 0.59.0a51__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbt/__init__.py +7 -0
- dbt/_pydantic_shim.py +26 -0
- dbt/artifacts/__init__.py +0 -0
- dbt/artifacts/exceptions/__init__.py +1 -0
- dbt/artifacts/exceptions/schemas.py +31 -0
- dbt/artifacts/resources/__init__.py +116 -0
- dbt/artifacts/resources/base.py +67 -0
- dbt/artifacts/resources/types.py +93 -0
- dbt/artifacts/resources/v1/analysis.py +10 -0
- dbt/artifacts/resources/v1/catalog.py +23 -0
- dbt/artifacts/resources/v1/components.py +274 -0
- dbt/artifacts/resources/v1/config.py +277 -0
- dbt/artifacts/resources/v1/documentation.py +11 -0
- dbt/artifacts/resources/v1/exposure.py +51 -0
- dbt/artifacts/resources/v1/function.py +52 -0
- dbt/artifacts/resources/v1/generic_test.py +31 -0
- dbt/artifacts/resources/v1/group.py +21 -0
- dbt/artifacts/resources/v1/hook.py +11 -0
- dbt/artifacts/resources/v1/macro.py +29 -0
- dbt/artifacts/resources/v1/metric.py +172 -0
- dbt/artifacts/resources/v1/model.py +145 -0
- dbt/artifacts/resources/v1/owner.py +10 -0
- dbt/artifacts/resources/v1/saved_query.py +111 -0
- dbt/artifacts/resources/v1/seed.py +41 -0
- dbt/artifacts/resources/v1/semantic_layer_components.py +72 -0
- dbt/artifacts/resources/v1/semantic_model.py +314 -0
- dbt/artifacts/resources/v1/singular_test.py +14 -0
- dbt/artifacts/resources/v1/snapshot.py +91 -0
- dbt/artifacts/resources/v1/source_definition.py +84 -0
- dbt/artifacts/resources/v1/sql_operation.py +10 -0
- dbt/artifacts/resources/v1/unit_test_definition.py +77 -0
- dbt/artifacts/schemas/__init__.py +0 -0
- dbt/artifacts/schemas/base.py +191 -0
- dbt/artifacts/schemas/batch_results.py +24 -0
- dbt/artifacts/schemas/catalog/__init__.py +11 -0
- dbt/artifacts/schemas/catalog/v1/__init__.py +0 -0
- dbt/artifacts/schemas/catalog/v1/catalog.py +59 -0
- dbt/artifacts/schemas/freshness/__init__.py +1 -0
- dbt/artifacts/schemas/freshness/v3/__init__.py +0 -0
- dbt/artifacts/schemas/freshness/v3/freshness.py +158 -0
- dbt/artifacts/schemas/manifest/__init__.py +2 -0
- dbt/artifacts/schemas/manifest/v12/__init__.py +0 -0
- dbt/artifacts/schemas/manifest/v12/manifest.py +211 -0
- dbt/artifacts/schemas/results.py +147 -0
- dbt/artifacts/schemas/run/__init__.py +2 -0
- dbt/artifacts/schemas/run/v5/__init__.py +0 -0
- dbt/artifacts/schemas/run/v5/run.py +184 -0
- dbt/artifacts/schemas/upgrades/__init__.py +4 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest.py +174 -0
- dbt/artifacts/schemas/upgrades/upgrade_manifest_dbt_version.py +2 -0
- dbt/artifacts/utils/validation.py +153 -0
- dbt/cli/__init__.py +1 -0
- dbt/cli/context.py +17 -0
- dbt/cli/exceptions.py +57 -0
- dbt/cli/flags.py +560 -0
- dbt/cli/main.py +2660 -0
- dbt/cli/option_types.py +121 -0
- dbt/cli/options.py +80 -0
- dbt/cli/params.py +844 -0
- dbt/cli/requires.py +490 -0
- dbt/cli/resolvers.py +60 -0
- dbt/cli/types.py +40 -0
- dbt/clients/__init__.py +0 -0
- dbt/clients/checked_load.py +83 -0
- dbt/clients/git.py +164 -0
- dbt/clients/jinja.py +206 -0
- dbt/clients/jinja_static.py +245 -0
- dbt/clients/registry.py +192 -0
- dbt/clients/yaml_helper.py +68 -0
- dbt/compilation.py +876 -0
- dbt/compute/__init__.py +14 -0
- dbt/compute/engines/__init__.py +12 -0
- dbt/compute/engines/spark_engine.py +642 -0
- dbt/compute/federated_executor.py +1080 -0
- dbt/compute/filter_pushdown.py +273 -0
- dbt/compute/jar_provisioning.py +273 -0
- dbt/compute/java_compat.py +689 -0
- dbt/compute/jdbc_utils.py +1252 -0
- dbt/compute/metadata/__init__.py +63 -0
- dbt/compute/metadata/adapters_registry.py +370 -0
- dbt/compute/metadata/catalog_store.py +1036 -0
- dbt/compute/metadata/registry.py +674 -0
- dbt/compute/metadata/store.py +1020 -0
- dbt/compute/smart_selector.py +377 -0
- dbt/compute/spark_logger.py +272 -0
- dbt/compute/strategies/__init__.py +55 -0
- dbt/compute/strategies/base.py +165 -0
- dbt/compute/strategies/dataproc.py +207 -0
- dbt/compute/strategies/emr.py +203 -0
- dbt/compute/strategies/local.py +472 -0
- dbt/compute/strategies/standalone.py +262 -0
- dbt/config/__init__.py +4 -0
- dbt/config/catalogs.py +94 -0
- dbt/config/compute.py +513 -0
- dbt/config/dvt_profile.py +408 -0
- dbt/config/profile.py +422 -0
- dbt/config/project.py +888 -0
- dbt/config/project_utils.py +48 -0
- dbt/config/renderer.py +231 -0
- dbt/config/runtime.py +564 -0
- dbt/config/selectors.py +208 -0
- dbt/config/utils.py +77 -0
- dbt/constants.py +28 -0
- dbt/context/__init__.py +0 -0
- dbt/context/base.py +745 -0
- dbt/context/configured.py +135 -0
- dbt/context/context_config.py +382 -0
- dbt/context/docs.py +82 -0
- dbt/context/exceptions_jinja.py +178 -0
- dbt/context/macro_resolver.py +195 -0
- dbt/context/macros.py +171 -0
- dbt/context/manifest.py +72 -0
- dbt/context/providers.py +2249 -0
- dbt/context/query_header.py +13 -0
- dbt/context/secret.py +58 -0
- dbt/context/target.py +74 -0
- dbt/contracts/__init__.py +0 -0
- dbt/contracts/files.py +413 -0
- dbt/contracts/graph/__init__.py +0 -0
- dbt/contracts/graph/manifest.py +1904 -0
- dbt/contracts/graph/metrics.py +97 -0
- dbt/contracts/graph/model_config.py +70 -0
- dbt/contracts/graph/node_args.py +42 -0
- dbt/contracts/graph/nodes.py +1806 -0
- dbt/contracts/graph/semantic_manifest.py +232 -0
- dbt/contracts/graph/unparsed.py +811 -0
- dbt/contracts/project.py +419 -0
- dbt/contracts/results.py +53 -0
- dbt/contracts/selection.py +23 -0
- dbt/contracts/sql.py +85 -0
- dbt/contracts/state.py +68 -0
- dbt/contracts/util.py +46 -0
- dbt/deprecations.py +348 -0
- dbt/deps/__init__.py +0 -0
- dbt/deps/base.py +152 -0
- dbt/deps/git.py +195 -0
- dbt/deps/local.py +79 -0
- dbt/deps/registry.py +130 -0
- dbt/deps/resolver.py +149 -0
- dbt/deps/tarball.py +120 -0
- dbt/docs/source/_ext/dbt_click.py +119 -0
- dbt/docs/source/conf.py +32 -0
- dbt/env_vars.py +64 -0
- dbt/event_time/event_time.py +40 -0
- dbt/event_time/sample_window.py +60 -0
- dbt/events/__init__.py +15 -0
- dbt/events/base_types.py +36 -0
- dbt/events/core_types_pb2.py +2 -0
- dbt/events/logging.py +108 -0
- dbt/events/types.py +2516 -0
- dbt/exceptions.py +1486 -0
- dbt/flags.py +89 -0
- dbt/graph/__init__.py +11 -0
- dbt/graph/cli.py +249 -0
- dbt/graph/graph.py +172 -0
- dbt/graph/queue.py +214 -0
- dbt/graph/selector.py +374 -0
- dbt/graph/selector_methods.py +975 -0
- dbt/graph/selector_spec.py +222 -0
- dbt/graph/thread_pool.py +18 -0
- dbt/hooks.py +21 -0
- dbt/include/README.md +49 -0
- dbt/include/__init__.py +3 -0
- dbt/include/data/adapters_registry.duckdb +0 -0
- dbt/include/data/build_comprehensive_registry.py +1254 -0
- dbt/include/data/build_registry.py +242 -0
- dbt/include/data/csv/adapter_queries.csv +33 -0
- dbt/include/data/csv/syntax_rules.csv +9 -0
- dbt/include/data/csv/type_mappings_bigquery.csv +28 -0
- dbt/include/data/csv/type_mappings_databricks.csv +30 -0
- dbt/include/data/csv/type_mappings_mysql.csv +40 -0
- dbt/include/data/csv/type_mappings_oracle.csv +30 -0
- dbt/include/data/csv/type_mappings_postgres.csv +56 -0
- dbt/include/data/csv/type_mappings_redshift.csv +33 -0
- dbt/include/data/csv/type_mappings_snowflake.csv +38 -0
- dbt/include/data/csv/type_mappings_sqlserver.csv +35 -0
- dbt/include/dvt_starter_project/README.md +15 -0
- dbt/include/dvt_starter_project/__init__.py +3 -0
- dbt/include/dvt_starter_project/analyses/PLACEHOLDER +0 -0
- dbt/include/dvt_starter_project/dvt_project.yml +39 -0
- dbt/include/dvt_starter_project/logs/PLACEHOLDER +0 -0
- dbt/include/dvt_starter_project/macros/PLACEHOLDER +0 -0
- dbt/include/dvt_starter_project/models/example/my_first_dbt_model.sql +27 -0
- dbt/include/dvt_starter_project/models/example/my_second_dbt_model.sql +6 -0
- dbt/include/dvt_starter_project/models/example/schema.yml +21 -0
- dbt/include/dvt_starter_project/seeds/PLACEHOLDER +0 -0
- dbt/include/dvt_starter_project/snapshots/PLACEHOLDER +0 -0
- dbt/include/dvt_starter_project/tests/PLACEHOLDER +0 -0
- dbt/internal_deprecations.py +26 -0
- dbt/jsonschemas/__init__.py +3 -0
- dbt/jsonschemas/jsonschemas.py +309 -0
- dbt/jsonschemas/project/0.0.110.json +4717 -0
- dbt/jsonschemas/project/0.0.85.json +2015 -0
- dbt/jsonschemas/resources/0.0.110.json +2636 -0
- dbt/jsonschemas/resources/0.0.85.json +2536 -0
- dbt/jsonschemas/resources/latest.json +6773 -0
- dbt/links.py +4 -0
- dbt/materializations/__init__.py +0 -0
- dbt/materializations/incremental/__init__.py +0 -0
- dbt/materializations/incremental/microbatch.py +236 -0
- dbt/mp_context.py +8 -0
- dbt/node_types.py +37 -0
- dbt/parser/__init__.py +23 -0
- dbt/parser/analysis.py +21 -0
- dbt/parser/base.py +548 -0
- dbt/parser/common.py +266 -0
- dbt/parser/docs.py +52 -0
- dbt/parser/fixtures.py +51 -0
- dbt/parser/functions.py +30 -0
- dbt/parser/generic_test.py +100 -0
- dbt/parser/generic_test_builders.py +333 -0
- dbt/parser/hooks.py +122 -0
- dbt/parser/macros.py +137 -0
- dbt/parser/manifest.py +2208 -0
- dbt/parser/models.py +573 -0
- dbt/parser/partial.py +1178 -0
- dbt/parser/read_files.py +445 -0
- dbt/parser/schema_generic_tests.py +422 -0
- dbt/parser/schema_renderer.py +111 -0
- dbt/parser/schema_yaml_readers.py +935 -0
- dbt/parser/schemas.py +1466 -0
- dbt/parser/search.py +149 -0
- dbt/parser/seeds.py +28 -0
- dbt/parser/singular_test.py +20 -0
- dbt/parser/snapshots.py +44 -0
- dbt/parser/sources.py +558 -0
- dbt/parser/sql.py +62 -0
- dbt/parser/unit_tests.py +621 -0
- dbt/plugins/__init__.py +20 -0
- dbt/plugins/contracts.py +9 -0
- dbt/plugins/exceptions.py +2 -0
- dbt/plugins/manager.py +163 -0
- dbt/plugins/manifest.py +21 -0
- dbt/profiler.py +20 -0
- dbt/py.typed +1 -0
- dbt/query_analyzer.py +410 -0
- dbt/runners/__init__.py +2 -0
- dbt/runners/exposure_runner.py +7 -0
- dbt/runners/no_op_runner.py +45 -0
- dbt/runners/saved_query_runner.py +7 -0
- dbt/selected_resources.py +8 -0
- dbt/task/__init__.py +0 -0
- dbt/task/base.py +506 -0
- dbt/task/build.py +197 -0
- dbt/task/clean.py +56 -0
- dbt/task/clone.py +161 -0
- dbt/task/compile.py +150 -0
- dbt/task/compute.py +458 -0
- dbt/task/debug.py +513 -0
- dbt/task/deps.py +280 -0
- dbt/task/docs/__init__.py +3 -0
- dbt/task/docs/api/__init__.py +23 -0
- dbt/task/docs/api/catalog.py +204 -0
- dbt/task/docs/api/lineage.py +234 -0
- dbt/task/docs/api/profile.py +204 -0
- dbt/task/docs/api/spark.py +186 -0
- dbt/task/docs/generate.py +1002 -0
- dbt/task/docs/index.html +250 -0
- dbt/task/docs/serve.py +174 -0
- dbt/task/dvt_output.py +509 -0
- dbt/task/dvt_run.py +282 -0
- dbt/task/dvt_seed.py +806 -0
- dbt/task/freshness.py +322 -0
- dbt/task/function.py +121 -0
- dbt/task/group_lookup.py +46 -0
- dbt/task/init.py +1022 -0
- dbt/task/java.py +316 -0
- dbt/task/list.py +236 -0
- dbt/task/metadata.py +804 -0
- dbt/task/migrate.py +714 -0
- dbt/task/printer.py +175 -0
- dbt/task/profile.py +1489 -0
- dbt/task/profile_serve.py +662 -0
- dbt/task/retract.py +441 -0
- dbt/task/retry.py +175 -0
- dbt/task/run.py +1647 -0
- dbt/task/run_operation.py +141 -0
- dbt/task/runnable.py +758 -0
- dbt/task/seed.py +103 -0
- dbt/task/show.py +149 -0
- dbt/task/snapshot.py +56 -0
- dbt/task/spark.py +414 -0
- dbt/task/sql.py +110 -0
- dbt/task/target_sync.py +814 -0
- dbt/task/test.py +464 -0
- dbt/tests/fixtures/__init__.py +1 -0
- dbt/tests/fixtures/project.py +620 -0
- dbt/tests/util.py +651 -0
- dbt/tracking.py +529 -0
- dbt/utils/__init__.py +3 -0
- dbt/utils/artifact_upload.py +151 -0
- dbt/utils/utils.py +408 -0
- dbt/version.py +271 -0
- dvt_cli/__init__.py +158 -0
- dvt_core-0.59.0a51.dist-info/METADATA +288 -0
- dvt_core-0.59.0a51.dist-info/RECORD +299 -0
- dvt_core-0.59.0a51.dist-info/WHEEL +5 -0
- dvt_core-0.59.0a51.dist-info/entry_points.txt +2 -0
- dvt_core-0.59.0a51.dist-info/top_level.txt +2 -0
dbt/task/retract.py
ADDED
@@ -0,0 +1,441 @@

```python
# =============================================================================
# DVT Retract Task
# =============================================================================
# Drops all materialized models from target databases.
#
# Usage:
#   dvt retract                    # Drop all materialized models
#   dvt retract --dry-run          # Preview what would be dropped
#   dvt retract --select "model*"  # Drop matching models only
#   dvt retract --exclude "dim_*"  # Exclude matching models
#
# DVT v0.58.1: Added reverse DAG order support
# DVT v0.59.0a29: Removed CASCADE - reverse DAG order handles dependencies
# =============================================================================

from __future__ import annotations

import json
import fnmatch
import time
from pathlib import Path
from typing import Any, Dict, List, Optional, Set, Tuple

from dbt.cli.flags import Flags
from dbt.config import RuntimeConfig
from dbt.contracts.graph.manifest import Manifest
from dbt.task.base import BaseTask

# Try to import Rich for beautiful output
try:
    from rich.console import Console
    from rich.table import Table
    from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, MofNCompleteColumn, TimeElapsedColumn
    from rich.panel import Panel
    from rich import box
    HAS_RICH = True
except ImportError:
    HAS_RICH = False


class RetractTask(BaseTask):
    """
    Task to drop materialized models from target databases.

    This task:
    1. Reads the manifest to find all materialized models
    2. Builds dependency graph and orders models in REVERSE DAG order
    3. Groups models by their target adapter
    4. Drops each model's relation (table/view)
    5. Supports --dry-run, --select, and --exclude flags

    DVT v0.58.1 Changes:
    - Follows reverse DAG order (drop dependents first, then dependencies)

    DVT v0.59.0a29 Changes:
    - Removed CASCADE - reverse DAG order already ensures dependents are dropped first
    """

    def __init__(self, args: Flags, config: RuntimeConfig, manifest: Manifest):
        super().__init__(args)
        self.config = config
        self.manifest = manifest
        self.dry_run = getattr(args, 'DRY_RUN', False)
        self._console = None
        self._use_rich = HAS_RICH
        if self._use_rich:
            self._console = Console()

    def _print(self, message: str, style: str = None):
        """Print with optional Rich styling."""
        if self._use_rich and style:
            self._console.print(f"[{style}]{message}[/{style}]")
        elif self._use_rich:
            self._console.print(message)
        else:
            print(message)

    def run(self) -> Tuple[bool, Dict[str, Any]]:
        """Execute the retract task."""
        start_time = time.time()

        # Header
        self._print("")
        if self.dry_run:
            if self._use_rich:
                self._console.print(Panel(
                    "[bold cyan]Preview of models that would be dropped[/bold cyan]\n"
                    "[dim]Using reverse DAG order (dependents first)[/dim]",
                    title="[bold cyan]DVT RETRACT (DRY RUN)[/bold cyan]",
                    border_style="cyan",
                    box=box.DOUBLE,
                ))
            else:
                self._print("=" * 60)
                self._print(" DVT RETRACT (DRY RUN)")
                self._print(" Preview of models that would be dropped")
                self._print("=" * 60)
        else:
            if self._use_rich:
                self._console.print(Panel(
                    "[bold red]Dropping materialized models from databases[/bold red]\n"
                    "[dim]Using reverse DAG order (dependents dropped first)[/dim]",
                    title="[bold red]DVT RETRACT[/bold red]",
                    border_style="red",
                    box=box.DOUBLE,
                ))
            else:
                self._print("=" * 60)
                self._print(" DVT RETRACT")
                self._print(" Dropping materialized models from databases")
                self._print("=" * 60)
        self._print("")

        # Get models to retract
        models = self._get_models_to_retract()

        if not models:
            self._print("No materialized models found to retract.", "yellow")
            return True, {"dropped": [], "failed": [], "skipped": []}

        # Filter by --select and --exclude
        models = self._filter_models(models)

        if not models:
            self._print("No models match the selection criteria.", "yellow")
            return True, {"dropped": [], "failed": [], "skipped": []}

        # Sort in REVERSE DAG order (dependents first, then dependencies)
        models = self._sort_reverse_dag_order(models)

        # Group by target
        models_by_target = self._group_models_by_target(models)

        dropped = []
        failed = []
        skipped = []

        # Calculate total for progress bar
        total_models = sum(len(m) for m in models_by_target.values())

        if self._use_rich and not self.dry_run:
            # Use progress bar for actual drops
            with Progress(
                SpinnerColumn(),
                TextColumn("[bold blue]{task.description}"),
                BarColumn(bar_width=40),
                MofNCompleteColumn(),
                TimeElapsedColumn(),
                console=self._console,
            ) as progress:
                task = progress.add_task("[cyan]Dropping models...", total=total_models)

                for target_name, target_models in models_by_target.items():
                    for model in target_models:
                        model_name = model.get("name")
                        relation_type = model.get("relation_type", "table")
                        schema = model.get("schema")
                        database = model.get("database")

                        # Build relation identifier
                        if database:
                            relation_id = f"{database}.{schema}.{model_name}"
                        else:
                            relation_id = f"{schema}.{model_name}"

                        progress.update(task, description=f"[cyan]Dropping[/cyan] [bold]{relation_id}[/bold]")

                        success = self._drop_relation(target_name, model, use_cascade=True)
                        if success:
                            dropped.append({"name": model_name, "target": target_name, "type": relation_type})
                        else:
                            failed.append({"name": model_name, "target": target_name, "error": "Drop failed"})

                        progress.advance(task)
        else:
            # Process each target (dry run or non-Rich)
            for target_name, target_models in models_by_target.items():
                self._print(f"\n Target: {target_name}", "bold")
                self._print(" " + "-" * 40)

                for model in target_models:
                    model_name = model.get("name")
                    relation_type = model.get("relation_type", "table")
                    schema = model.get("schema")
                    database = model.get("database")

                    # Build relation identifier
                    if database:
                        relation_id = f"{database}.{schema}.{model_name}"
                    else:
                        relation_id = f"{schema}.{model_name}"

                    if self.dry_run:
                        self._print(f" [would drop] {relation_id} ({relation_type})", "dim cyan")
                        dropped.append({"name": model_name, "target": target_name, "type": relation_type})
                    else:
                        success = self._drop_relation(target_name, model, use_cascade=False)
                        if success:
                            self._print(f" [green]OK[/green] Dropped {relation_id} ({relation_type})", "green")
                            dropped.append({"name": model_name, "target": target_name, "type": relation_type})
                        else:
                            self._print(f" [red]FAIL[/red] Failed to drop {relation_id}", "red")
                            failed.append({"name": model_name, "target": target_name, "error": "Drop failed"})

        # Summary
        elapsed = time.time() - start_time
        self._print("")

        if self._use_rich:
            # Rich summary panel
            if self.dry_run:
                summary_text = f"[bold cyan]{len(dropped)} models would be dropped[/bold cyan]"
                border_color = "cyan"
            elif failed:
                summary_text = f"[bold green]Dropped: {len(dropped)}[/bold green] | [bold red]Failed: {len(failed)}[/bold red]"
                border_color = "yellow"
            else:
                summary_text = f"[bold green]Successfully dropped {len(dropped)} models[/bold green]"
                border_color = "green"

            self._console.print(Panel(
                f"{summary_text}\n[dim]Time: {elapsed:.2f}s[/dim]",
                title="[bold]Summary[/bold]",
                border_style=border_color,
                box=box.ROUNDED,
            ))
        else:
            self._print("=" * 60)
            if self.dry_run:
                self._print(f" DRY RUN: {len(dropped)} models would be dropped")
            else:
                if failed:
                    self._print(f" Dropped: {len(dropped)} | Failed: {len(failed)}")
                else:
                    self._print(f" Successfully dropped {len(dropped)} models")
            self._print(f" Time: {elapsed:.2f}s")
            self._print("=" * 60)

        self._print("")

        success = len(failed) == 0
        return success, {"dropped": dropped, "failed": failed, "skipped": skipped}

    def _get_models_to_retract(self) -> List[Dict[str, Any]]:
        """Get list of materialized models from manifest."""
        models = []

        for node_id, node in self.manifest.nodes.items():
            # Only process model nodes
            if not node_id.startswith("model."):
                continue

            # Skip ephemeral models (not materialized)
            materialization = getattr(node.config, 'materialized', 'view')
            if materialization == 'ephemeral':
                continue

            # Determine relation type
            if materialization == 'table':
                relation_type = 'table'
            elif materialization == 'incremental':
                relation_type = 'table'
            elif materialization == 'view':
                relation_type = 'view'
            else:
                relation_type = 'table'  # Default to table for custom materializations

            # Get target - use model config target override or default
            target = getattr(node.config, 'target', None) or self.config.target_name

            # Get dependencies (models this model depends on)
            depends_on = []
            if hasattr(node, 'depends_on') and hasattr(node.depends_on, 'nodes'):
                depends_on = [
                    dep for dep in node.depends_on.nodes
                    if dep.startswith('model.')
                ]

            # Get model info
            model_info = {
                "name": node.name,
                "unique_id": node_id,
                "schema": node.schema,
                "database": getattr(node, 'database', None),
                "relation_type": relation_type,
                "target": target,
                "materialization": materialization,
                "depends_on": depends_on,
            }
            models.append(model_info)

        return models

    def _sort_reverse_dag_order(self, models: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """
        Sort models in REVERSE DAG order.

        Models that depend on others should be dropped FIRST,
        then their dependencies. This ensures we can drop without
        foreign key or view dependency errors.

        Uses topological sort reversed (Kahn's algorithm).
        """
        # Build lookup and dependency graph
        model_lookup = {m["unique_id"]: m for m in models}
        model_ids = set(model_lookup.keys())

        # Build reverse dependency graph (who depends on me?)
        # For reverse DAG order, we want to drop dependents before dependencies
        dependents: Dict[str, Set[str]] = {uid: set() for uid in model_ids}
        in_degree: Dict[str, int] = {uid: 0 for uid in model_ids}

        for model in models:
            uid = model["unique_id"]
            for dep in model.get("depends_on", []):
                if dep in model_ids:
                    # dep is a dependency of uid
                    # In reverse order, uid should come BEFORE dep
                    dependents[dep].add(uid)
                    in_degree[uid] += 1

        # Kahn's algorithm for topological sort (reversed)
        # Start with models that have no dependencies (in_degree = 0)
        # These are the "leaf" nodes in the normal DAG, which should be dropped first
        queue = [uid for uid, deg in in_degree.items() if deg == 0]
        result = []

        while queue:
            # Get next model with no remaining dependencies
            current = queue.pop(0)
            result.append(model_lookup[current])

            # For each model that depends on current
            for dependent in dependents[current]:
                in_degree[dependent] -= 1
                if in_degree[dependent] == 0:
                    queue.append(dependent)

        # Handle cycles (shouldn't happen in well-formed DAGs)
        remaining = [m for m in models if m["unique_id"] not in {r["unique_id"] for r in result}]
        result.extend(remaining)

        # Reverse the result so dependents come first
        # (models at the "top" of the DAG - those with many dependents - should be dropped last)
        result.reverse()

        return result

    def _filter_models(self, models: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Filter models by --select and --exclude patterns."""
        select_patterns = getattr(self.args, 'SELECT', None) or []
        exclude_patterns = getattr(self.args, 'EXCLUDE', None) or []

        # Flatten if nested
        if select_patterns:
            select_patterns = [p for ps in select_patterns for p in (ps if isinstance(ps, (list, tuple)) else [ps])]
        if exclude_patterns:
            exclude_patterns = [p for ps in exclude_patterns for p in (ps if isinstance(ps, (list, tuple)) else [ps])]

        filtered = []
        for model in models:
            model_name = model.get("name", "")

            # Check if matches any exclude pattern
            if exclude_patterns:
                excluded = any(fnmatch.fnmatch(model_name, p) for p in exclude_patterns)
                if excluded:
                    continue

            # Check if matches any select pattern (if provided)
            if select_patterns:
                selected = any(fnmatch.fnmatch(model_name, p) for p in select_patterns)
                if not selected:
                    continue

            filtered.append(model)

        return filtered

    def _group_models_by_target(self, models: List[Dict[str, Any]]) -> Dict[str, List[Dict[str, Any]]]:
        """Group models by their target adapter, preserving order."""
        grouped = {}
        for model in models:
            target = model.get("target", self.config.target_name)
            if target not in grouped:
                grouped[target] = []
            grouped[target].append(model)
        return grouped

    def _drop_relation(self, target_name: str, model: Dict[str, Any], use_cascade: bool = False) -> bool:
        """
        Drop a relation from the database.

        Args:
            target_name: Name of the target adapter
            model: Model info dictionary
            use_cascade: Whether to use DROP ... CASCADE (default: False since v0.59.0a29)

        Returns:
            True if successful, False otherwise
        """
        try:
            from dbt.adapters.factory import get_adapter

            # Get adapter for target
            adapter = get_adapter(self.config)

            # Build DROP statement
            model_name = model.get("name")
            schema = model.get("schema")
            database = model.get("database")
            relation_type = model.get("relation_type", "table").upper()

            # Build qualified name with proper quoting
            if database:
                qualified_name = f'"{database}"."{schema}"."{model_name}"'
            else:
                qualified_name = f'"{schema}"."{model_name}"'

            # Execute DROP with CASCADE
            cascade_clause = " CASCADE" if use_cascade else ""
            drop_sql = f"DROP {relation_type} IF EXISTS {qualified_name}{cascade_clause}"

            with adapter.connection_named("retract"):
                adapter.execute(drop_sql, auto_begin=True, fetch=False)
                adapter.commit_if_has_connection()

            return True

        except Exception as e:
            # Log error but don't fail the entire task
            if self._use_rich:
                self._console.print(f" [dim red]Error: {str(e)[:80]}[/dim red]")
            return False

    def interpret_results(self, results: Tuple[bool, Dict[str, Any]]) -> bool:
        """Interpret task results."""
        if isinstance(results, tuple):
            success, data = results
            return success
        return bool(results)
```
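The drop order is the heart of this task, so here is a standalone sketch of the same idea (illustrative only, not part of the wheel; the model ids are invented): a Kahn-style topological sort whose output is reversed so the most-dependent model is dropped first.

```python
from typing import Any, Dict, List, Set

def reverse_dag_order(models: List[Dict[str, Any]]) -> List[str]:
    """Return unique_ids ordered so dependents are dropped before their dependencies."""
    ids = {m["unique_id"] for m in models}
    dependents: Dict[str, Set[str]] = {uid: set() for uid in ids}
    in_degree: Dict[str, int] = {uid: 0 for uid in ids}
    for m in models:
        for dep in m["depends_on"]:
            if dep in ids:
                dependents[dep].add(m["unique_id"])  # dep is built before m
                in_degree[m["unique_id"]] += 1

    # Kahn's algorithm: emit models in forward build order ...
    queue = [uid for uid, deg in in_degree.items() if deg == 0]
    order: List[str] = []
    while queue:
        current = queue.pop(0)
        order.append(current)
        for dependent in dependents[current]:
            in_degree[dependent] -= 1
            if in_degree[dependent] == 0:
                queue.append(dependent)

    # ... then reverse it, so models that nothing else depends on come first.
    order.reverse()
    return order

# Hypothetical three-model chain: stg_orders -> orders -> orders_mart
toy_models = [
    {"unique_id": "model.demo.stg_orders", "depends_on": []},
    {"unique_id": "model.demo.orders", "depends_on": ["model.demo.stg_orders"]},
    {"unique_id": "model.demo.orders_mart", "depends_on": ["model.demo.orders"]},
]

print(reverse_dag_order(toy_models))
# ['model.demo.orders_mart', 'model.demo.orders', 'model.demo.stg_orders']
```

Because dependents always precede their dependencies in this order, a plain `DROP ... IF EXISTS` is usually sufficient, which matches the v0.59.0a29 note about removing CASCADE.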
dbt/task/retry.py
ADDED
@@ -0,0 +1,175 @@

```python
from pathlib import Path

from click import get_current_context
from click.core import ParameterSource

from dbt.artifacts.schemas.results import NodeStatus
from dbt.cli.flags import Flags
from dbt.cli.types import Command as CliCommand
from dbt.config import RuntimeConfig
from dbt.constants import RUN_RESULTS_FILE_NAME
from dbt.contracts.state import load_result_state
from dbt.flags import get_flags, set_flags
from dbt.graph import GraphQueue
from dbt.parser.manifest import parse_manifest
from dbt.task.base import ConfiguredTask
from dbt.task.build import BuildTask
from dbt.task.clone import CloneTask
from dbt.task.compile import CompileTask
from dbt.task.docs.generate import GenerateTask
from dbt.task.run import RunTask
from dbt.task.run_operation import RunOperationTask
from dbt.task.seed import SeedTask
from dbt.task.snapshot import SnapshotTask
from dbt.task.test import TestTask
from dbt_common.exceptions import DbtRuntimeError

RETRYABLE_STATUSES = {
    NodeStatus.Error,
    NodeStatus.Fail,
    NodeStatus.Skipped,
    NodeStatus.RuntimeErr,
    NodeStatus.PartialSuccess,
}
IGNORE_PARENT_FLAGS = {
    "log_path",
    "output_path",
    "profiles_dir",
    "profiles_dir_exists_false",
    "project_dir",
    "defer_state",
    "deprecated_state",
    "target_path",
    "warn_error",
}

ALLOW_CLI_OVERRIDE_FLAGS = {"vars", "threads"}

TASK_DICT = {
    "build": BuildTask,
    "compile": CompileTask,
    "clone": CloneTask,
    "generate": GenerateTask,
    "seed": SeedTask,
    "snapshot": SnapshotTask,
    "test": TestTask,
    "run": RunTask,
    "run-operation": RunOperationTask,
}

CMD_DICT = {
    "build": CliCommand.BUILD,
    "compile": CliCommand.COMPILE,
    "clone": CliCommand.CLONE,
    "generate": CliCommand.DOCS_GENERATE,
    "seed": CliCommand.SEED,
    "snapshot": CliCommand.SNAPSHOT,
    "test": CliCommand.TEST,
    "run": CliCommand.RUN,
    "run-operation": CliCommand.RUN_OPERATION,
}


class RetryTask(ConfiguredTask):
    def __init__(self, args: Flags, config: RuntimeConfig) -> None:
        # load previous run results
        state_path = args.state or config.target_path
        self.previous_results = load_result_state(
            Path(config.project_root) / Path(state_path) / RUN_RESULTS_FILE_NAME
        )
        if not self.previous_results:
            raise DbtRuntimeError(
                f"Could not find previous run in '{state_path}' target directory"
            )
        self.previous_args = self.previous_results.args
        self.previous_command_name = self.previous_args.get("which")

        # Resolve flags and config
        if args.warn_error:
            RETRYABLE_STATUSES.add(NodeStatus.Warn)

        cli_command = CMD_DICT.get(self.previous_command_name)  # type: ignore
        # Remove these args when their default values are present, otherwise they'll raise an exception
        args_to_remove = {
            "show": lambda x: True,
            "resource_types": lambda x: x == [],
            "warn_error_options": lambda x: x == {"warn": [], "error": [], "silence": []},
        }
        for k, v in args_to_remove.items():
            if k in self.previous_args and v(self.previous_args[k]):
                del self.previous_args[k]
        previous_args = {
            k: v for k, v in self.previous_args.items() if k not in IGNORE_PARENT_FLAGS
        }
        click_context = get_current_context()
        current_args = {
            k: v
            for k, v in args.__dict__.items()
            if k in IGNORE_PARENT_FLAGS
            or (
                click_context.get_parameter_source(k) == ParameterSource.COMMANDLINE
                and k in ALLOW_CLI_OVERRIDE_FLAGS
            )
        }
        combined_args = {**previous_args, **current_args}
        retry_flags = Flags.from_dict(cli_command, combined_args)  # type: ignore
        set_flags(retry_flags)
        retry_config = RuntimeConfig.from_args(args=retry_flags)

        # Parse manifest using resolved config/flags
        manifest = parse_manifest(retry_config, False, True, retry_flags.write_json, [])  # type: ignore
        super().__init__(args, retry_config, manifest)
        self.task_class = TASK_DICT.get(self.previous_command_name)  # type: ignore

    def run(self):
        unique_ids = {
            result.unique_id
            for result in self.previous_results.results
            if result.status in RETRYABLE_STATUSES
            and not (
                self.previous_command_name != "run-operation"
                and result.unique_id.startswith("operation.")
            )
        }

        # We need this so that re-running of a microbatch model will only rerun
        # batches that previously failed. Note _explicitly_ do not pass the
        # batch info if there were _no_ successful batches previously. This is
        # because passing the batch info _forces_ the microbatch process into
        # _incremental_ model, and it may be that we need to be in full refresh
        # mode which is only handled if previous_batch_results _isn't_ passed for a node
        batch_map = {
            result.unique_id: result.batch_results
            for result in self.previous_results.results
            if result.batch_results is not None
            and len(result.batch_results.successful) != 0
            and len(result.batch_results.failed) > 0
            and not (
                self.previous_command_name != "run-operation"
                and result.unique_id.startswith("operation.")
            )
        }

        class TaskWrapper(self.task_class):
            def get_graph_queue(self):
                new_graph = self.graph.get_subset_graph(unique_ids)
                return GraphQueue(
                    new_graph.graph,
                    self.manifest,
                    unique_ids,
                )

        task = TaskWrapper(
            get_flags(),
            self.config,
            self.manifest,
        )

        if self.task_class == RunTask:
            task.batch_map = batch_map

        return_value = task.run()
        return return_value

    def interpret_results(self, *args, **kwargs):
        return self.task_class.interpret_results(*args, **kwargs)
```