acryl-datahub 1.2.0.1rc1 → 1.2.0.2 (py3-none-any.whl)

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of acryl-datahub might be problematic.

Files changed (54)
  1. {acryl_datahub-1.2.0.1rc1.dist-info → acryl_datahub-1.2.0.2.dist-info}/METADATA +2525 -2523
  2. {acryl_datahub-1.2.0.1rc1.dist-info → acryl_datahub-1.2.0.2.dist-info}/RECORD +54 -46
  3. datahub/_version.py +1 -1
  4. datahub/api/entities/dataset/dataset.py +13 -1
  5. datahub/emitter/rest_emitter.py +3 -1
  6. datahub/ingestion/autogenerated/capability_summary.json +97 -6
  7. datahub/ingestion/source/abs/source.py +5 -29
  8. datahub/ingestion/source/aws/glue.py +8 -0
  9. datahub/ingestion/source/cassandra/cassandra.py +5 -7
  10. datahub/ingestion/source/common/subtypes.py +2 -0
  11. datahub/ingestion/source/data_lake_common/data_lake_utils.py +37 -0
  12. datahub/ingestion/source/datahub/datahub_source.py +3 -0
  13. datahub/ingestion/source/dbt/dbt_common.py +69 -2
  14. datahub/ingestion/source/delta_lake/source.py +1 -0
  15. datahub/ingestion/source/ge_data_profiler.py +9 -1
  16. datahub/ingestion/source/grafana/entity_mcp_builder.py +272 -0
  17. datahub/ingestion/source/grafana/field_utils.py +307 -0
  18. datahub/ingestion/source/grafana/grafana_api.py +142 -0
  19. datahub/ingestion/source/grafana/grafana_config.py +104 -0
  20. datahub/ingestion/source/grafana/grafana_source.py +522 -84
  21. datahub/ingestion/source/grafana/lineage.py +202 -0
  22. datahub/ingestion/source/grafana/models.py +120 -0
  23. datahub/ingestion/source/grafana/report.py +91 -0
  24. datahub/ingestion/source/grafana/types.py +16 -0
  25. datahub/ingestion/source/hex/hex.py +8 -0
  26. datahub/ingestion/source/looker/looker_common.py +40 -4
  27. datahub/ingestion/source/looker/looker_source.py +9 -0
  28. datahub/ingestion/source/looker/lookml_source.py +8 -0
  29. datahub/ingestion/source/mongodb.py +11 -1
  30. datahub/ingestion/source/redshift/redshift.py +8 -1
  31. datahub/ingestion/source/s3/source.py +14 -34
  32. datahub/ingestion/source/sql/athena.py +8 -2
  33. datahub/ingestion/source/sql/clickhouse.py +9 -0
  34. datahub/ingestion/source/sql/postgres.py +190 -1
  35. datahub/ingestion/source/sql_queries.py +111 -76
  36. datahub/ingestion/source/unity/proxy.py +8 -8
  37. datahub/metadata/_internal_schema_classes.py +96 -0
  38. datahub/metadata/com/linkedin/pegasus2avro/module/__init__.py +2 -0
  39. datahub/metadata/schema.avsc +69 -0
  40. datahub/metadata/schemas/CorpUserSettings.avsc +10 -1
  41. datahub/metadata/schemas/DataHubPageModuleProperties.avsc +42 -0
  42. datahub/metadata/schemas/MetadataChangeEvent.avsc +18 -0
  43. datahub/metadata/schemas/MetadataChangeLog.avsc +62 -44
  44. datahub/metadata/schemas/MetadataChangeProposal.avsc +61 -0
  45. datahub/metadata/schemas/SystemMetadata.avsc +61 -0
  46. datahub/sdk/dataset.py +44 -0
  47. datahub/sdk/search_filters.py +84 -15
  48. datahub/sql_parsing/sql_parsing_aggregator.py +6 -0
  49. datahub/telemetry/telemetry.py +4 -1
  50. datahub/upgrade/upgrade.py +5 -3
  51. {acryl_datahub-1.2.0.1rc1.dist-info → acryl_datahub-1.2.0.2.dist-info}/WHEEL +0 -0
  52. {acryl_datahub-1.2.0.1rc1.dist-info → acryl_datahub-1.2.0.2.dist-info}/entry_points.txt +0 -0
  53. {acryl_datahub-1.2.0.1rc1.dist-info → acryl_datahub-1.2.0.2.dist-info}/licenses/LICENSE +0 -0
  54. {acryl_datahub-1.2.0.1rc1.dist-info → acryl_datahub-1.2.0.2.dist-info}/top_level.txt +0 -0
datahub/ingestion/source/grafana/field_utils.py (new file)
@@ -0,0 +1,307 @@
+ import logging
+ from typing import Any, Dict, List, Optional, Union
+
+ from datahub.ingestion.graph.client import DataHubGraph
+ from datahub.ingestion.source.grafana.models import Panel
+ from datahub.metadata.schema_classes import (
+     NumberTypeClass,
+     SchemaFieldClass,
+     SchemaFieldDataTypeClass,
+     StringTypeClass,
+     TimeTypeClass,
+ )
+ from datahub.sql_parsing.sqlglot_lineage import (
+     create_lineage_sql_parsed_result,
+     infer_output_schema,
+ )
+
+ logger = logging.getLogger(__name__)
+
+
+ def extract_sql_column_fields(target: Dict[str, Any]) -> List[SchemaFieldClass]:
+     """Extract fields from SQL-style columns."""
+     fields = []
+     for col in target.get("sql", {}).get("columns", []):
+         for param in col.get("parameters", []):
+             if param.get("type") == "column" and param.get("name"):
+                 field_type: Union[NumberTypeClass, StringTypeClass, TimeTypeClass] = (
+                     TimeTypeClass()
+                     if col["type"] == "time"
+                     else NumberTypeClass()
+                     if col["type"] == "number"
+                     else StringTypeClass()
+                 )
+                 fields.append(
+                     SchemaFieldClass(
+                         fieldPath=param["name"],
+                         type=SchemaFieldDataTypeClass(type=field_type),
+                         nativeDataType=col["type"],
+                     )
+                 )
+     return fields
+
+
+ def extract_prometheus_fields(target: Dict[str, Any]) -> List[SchemaFieldClass]:
+     """Extract fields from Prometheus expressions."""
+     expr = target.get("expr")
+     if expr:
+         legend = target.get("legendFormat", expr)
+         return [
+             SchemaFieldClass(
+                 fieldPath=legend,
+                 type=SchemaFieldDataTypeClass(type=NumberTypeClass()),
+                 nativeDataType="prometheus_metric",
+             )
+         ]
+     return []
+
+
+ def extract_raw_sql_fields(
+     target: Dict[str, Any],
+     panel: Optional[Panel] = None,
+     connection_to_platform_map: Optional[Dict[str, Any]] = None,
+     graph: Optional[DataHubGraph] = None,
+     report: Optional[Any] = None,
+ ) -> List[SchemaFieldClass]:
+     """Extract fields from raw SQL queries using DataHub's SQL parsing."""
+     raw_sql = target.get("rawSql", "")
+     if not raw_sql:
+         return []
+
+     # Determine upstream platform and environment from datasource mapping
+     platform = "unknown"
+     env = "PROD"
+     default_db = None
+     default_schema = None
+     platform_instance = None
+     schema_aware = False
+
+     if panel and panel.datasource_ref and connection_to_platform_map:
+         ds_type = panel.datasource_ref.type or "unknown"
+         ds_uid = panel.datasource_ref.uid or "unknown"
+
+         # Try to find mapping by datasource UID first, then by type
+         platform_config = connection_to_platform_map.get(
+             ds_uid
+         ) or connection_to_platform_map.get(ds_type)
+
+         if platform_config:
+             platform = platform_config.platform
+             env = getattr(platform_config, "env", env)
+             default_db = getattr(platform_config, "database", None)
+             default_schema = getattr(platform_config, "database_schema", None)
+             platform_instance = getattr(platform_config, "platform_instance", None)
+
+     # Enable schema-aware parsing if we have platform mapping and graph access
+     if graph and platform != "unknown":
+         schema_aware = True
+
+     # Track SQL parsing attempt
+     if report:
+         report.report_sql_parsing_attempt()
+
+     try:
+         # Use DataHub's standard SQL parsing approach
+         sql_parsing_result = create_lineage_sql_parsed_result(
+             query=raw_sql,
+             default_db=default_db,
+             default_schema=default_schema,
+             platform=platform,
+             platform_instance=platform_instance,
+             env=env,
+             schema_aware=schema_aware,
+             graph=graph,
+         )
+
+         # Extract the output schema from the parsing result
+         output_schema = infer_output_schema(sql_parsing_result)
+
+         if output_schema:
+             if report:
+                 report.report_sql_parsing_success()
+             return output_schema
+         else:
+             # If sqlglot parsing succeeds but no schema is inferred,
+             # fall back to basic parsing
+             logger.debug(f"No schema inferred from SQL: {raw_sql}")
+             fallback_result = _extract_raw_sql_fields_fallback(target)
+             if fallback_result and report:
+                 report.report_sql_parsing_success()
+             elif report:
+                 report.report_sql_parsing_failure()
+             return fallback_result
+
+     except Exception as e:
+         logger.debug(f"Failed to parse SQL with DataHub parser: {raw_sql}, error: {e}")
+         if report:
+             report.report_sql_parsing_failure()
+         # Fallback to basic parsing for backwards compatibility
+         return _extract_raw_sql_fields_fallback(target)
+
+
+ def _extract_raw_sql_fields_fallback(target: Dict[str, Any]) -> List[SchemaFieldClass]:
+     """Fallback basic SQL parsing for when sqlglot fails."""
+     raw_sql = target.get("rawSql", "").lower()
+     if not raw_sql:
+         return []
+
+     try:
+         sql = raw_sql.lower()
+         select_start = sql.index("select") + 6  # len("select")
+         from_start = sql.index("from")
+         select_part = sql[select_start:from_start].strip()
+
+         # Split by comma, handling nested parentheses
+         columns = []
+         current_column = ""
+         paren_count = 0
+
+         for char in select_part:
+             if char == "," and paren_count == 0:
+                 if current_column.strip():
+                     columns.append(current_column.strip())
+                 current_column = ""
+             else:
+                 if char == "(":
+                     paren_count += 1
+                 elif char == ")":
+                     paren_count -= 1
+                 current_column += char
+
+         if current_column.strip():
+             columns.append(current_column.strip())
+
+         # For each column, extract the alias if it exists
+         fields = []
+         for col in columns:
+             # Check for alias with 'AS' keyword
+             if " as " in col:
+                 field_name = col.split(" as ")[-1].strip()
+             else:
+                 # If no alias, use the last part after last space
+                 # This handles both simple columns and function calls without alias
+                 field_name = col.split()[-1].strip()
+
+             # Clean up any remaining quotes or parentheses
+             field_name = field_name.strip("\"'()")
+
+             fields.append(
+                 SchemaFieldClass(
+                     fieldPath=field_name,
+                     type=SchemaFieldDataTypeClass(type=StringTypeClass()),
+                     nativeDataType="sql_column",
+                 )
+             )
+
+         return fields
+
+     except (IndexError, ValueError, StopIteration) as e:
+         logger.warning(f"Failed to parse SQL: {target.get('rawSql')}, error: {e}")
+         return []
+
+
+ def extract_fields_from_panel(
+     panel: Panel,
+     connection_to_platform_map: Optional[Dict[str, Any]] = None,
+     graph: Optional[DataHubGraph] = None,
+     report: Optional[Any] = None,
+ ) -> List[SchemaFieldClass]:
+     """Extract all fields from a panel."""
+     fields = []
+     fields.extend(
+         extract_fields_from_targets(
+             panel.query_targets, panel, connection_to_platform_map, graph, report
+         )
+     )
+     fields.extend(get_fields_from_field_config(panel.field_config))
+     fields.extend(get_fields_from_transformations(panel.transformations))
+
+     # Track schema field extraction
+     if report:
+         if fields:
+             report.report_schema_fields_extracted()
+         else:
+             report.report_no_schema_fields()
+
+     return fields
+
+
+ def extract_fields_from_targets(
+     targets: List[Dict[str, Any]],
+     panel: Optional[Panel] = None,
+     connection_to_platform_map: Optional[Dict[str, Any]] = None,
+     graph: Optional[DataHubGraph] = None,
+     report: Optional[Any] = None,
+ ) -> List[SchemaFieldClass]:
+     """Extract fields from panel targets."""
+     fields = []
+     for target in targets:
+         fields.extend(extract_sql_column_fields(target))
+         fields.extend(extract_prometheus_fields(target))
+         fields.extend(
+             extract_raw_sql_fields(
+                 target, panel, connection_to_platform_map, graph, report
+             )
+         )
+         fields.extend(extract_time_format_fields(target))
+     return fields
+
+
+ def extract_time_format_fields(target: Dict[str, Any]) -> List[SchemaFieldClass]:
+     """Extract fields from time series and table formats."""
+     if target.get("format") in {"time_series", "table"}:
+         return [
+             SchemaFieldClass(
+                 fieldPath="time",
+                 type=SchemaFieldDataTypeClass(type=TimeTypeClass()),
+                 nativeDataType="timestamp",
+             )
+         ]
+     return []
+
+
+ def get_fields_from_field_config(
+     field_config: Dict[str, Any],
+ ) -> List[SchemaFieldClass]:
+     """Extract fields from field configuration."""
+     fields = []
+     defaults = field_config.get("defaults", {})
+     unit = defaults.get("unit")
+     if unit:
+         fields.append(
+             SchemaFieldClass(
+                 fieldPath=f"value_{unit}",
+                 type=SchemaFieldDataTypeClass(type=NumberTypeClass()),
+                 nativeDataType="value",
+             )
+         )
+     for override in field_config.get("overrides", []):
+         if override.get("matcher", {}).get("id") == "byName":
+             field_name = override.get("matcher", {}).get("options")
+             if field_name:
+                 fields.append(
+                     SchemaFieldClass(
+                         fieldPath=field_name,
+                         type=SchemaFieldDataTypeClass(type=NumberTypeClass()),
+                         nativeDataType="metric",
+                     )
+                 )
+     return fields
+
+
+ def get_fields_from_transformations(
+     transformations: List[Dict[str, Any]],
+ ) -> List[SchemaFieldClass]:
+     """Extract fields from transformations."""
+     fields = []
+     for transform in transformations:
+         if transform.get("type") == "organize":
+             for field_name in transform.get("options", {}).get("indexByName", {}):
+                 fields.append(
+                     SchemaFieldClass(
+                         fieldPath=field_name,
+                         type=SchemaFieldDataTypeClass(type=StringTypeClass()),
+                         nativeDataType="transformed",
+                     )
+                 )
+     return fields
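
Because the extractors above operate on plain Grafana target dictionaries, they can be exercised without a live Grafana instance. A minimal sketch, where the target structure is a hypothetical example shaped like the dicts the functions read, not data taken from this diff:

from datahub.ingestion.source.grafana.field_utils import (
    extract_sql_column_fields,
    extract_time_format_fields,
)

# Hypothetical SQL-builder target with a time column and a number column.
target = {
    "format": "table",
    "sql": {
        "columns": [
            {"type": "time", "parameters": [{"type": "column", "name": "created_at"}]},
            {"type": "number", "parameters": [{"type": "column", "name": "revenue"}]},
        ]
    },
}

# Yields created_at (time) and revenue (number) from the columns, plus a
# synthetic "time" field because the target format is "table".
fields = extract_sql_column_fields(target) + extract_time_format_fields(target)
for field in fields:
    print(field.fieldPath, field.nativeDataType)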
datahub/ingestion/source/grafana/grafana_api.py (new file)
@@ -0,0 +1,142 @@
+ """API client for Grafana metadata extraction"""
+
+ import logging
+ from typing import Dict, List, Optional, Union
+
+ import requests
+ import urllib3.exceptions
+ from pydantic import SecretStr
+
+ from datahub.ingestion.source.grafana.models import Dashboard, Folder
+ from datahub.ingestion.source.grafana.report import GrafanaSourceReport
+
+ logger = logging.getLogger(__name__)
+
+
+ class GrafanaAPIClient:
+     """Client for making requests to the Grafana API"""
+
+     def __init__(
+         self,
+         base_url: str,
+         token: SecretStr,
+         verify_ssl: bool,
+         page_size: int,
+         report: GrafanaSourceReport,
+     ) -> None:
+         self.base_url = base_url
+         self.verify_ssl = verify_ssl
+         self.page_size = page_size
+         self.report = report
+         self.session = self._create_session(token)
+
+     def _create_session(self, token: SecretStr) -> requests.Session:
+         session = requests.Session()
+         session.headers.update(
+             {
+                 "Authorization": f"Bearer {token.get_secret_value()}",
+                 "Accept": "application/json",
+                 "Content-Type": "application/json",
+             }
+         )
+         session.verify = self.verify_ssl
+
+         # If SSL verification is disabled, suppress the warnings
+         if not self.verify_ssl:
+             urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
+             self.report.warning(
+                 title="SSL Configuration Warning",
+                 message="SSL Verification is recommended.",
+             )
+
+         return session
+
+     def get_folders(self) -> List[Folder]:
+         """Fetch all folders from Grafana with pagination."""
+         folders: List[Folder] = []
+         page = 1
+         per_page = self.page_size
+
+         while True:
+             try:
+                 response = self.session.get(
+                     f"{self.base_url}/api/folders",
+                     params={"page": page, "limit": per_page},
+                 )
+                 response.raise_for_status()
+
+                 batch = response.json()
+                 if not batch:
+                     break
+
+                 folders.extend(Folder.parse_obj(folder) for folder in batch)
+                 page += 1
+             except requests.exceptions.RequestException as e:
+                 self.report.report_failure(
+                     title="Folder Fetch Error",
+                     message="Failed to fetch folders on page",
+                     context=str(page),
+                     exc=e,
+                 )
+                 self.report.report_permission_warning()  # Likely a permission issue
+                 break
+
+         return folders
+
+     def get_dashboard(self, uid: str) -> Optional[Dashboard]:
+         """Fetch a specific dashboard by UID"""
+         try:
+             response = self.session.get(f"{self.base_url}/api/dashboards/uid/{uid}")
+             response.raise_for_status()
+             return Dashboard.parse_obj(response.json())
+         except requests.exceptions.RequestException as e:
+             self.report.warning(
+                 title="Dashboard Fetch Error",
+                 message="Failed to fetch dashboard",
+                 context=uid,
+                 exc=e,
+             )
+             if e.response is not None and e.response.status_code in (401, 403):
+                 self.report.report_permission_warning()
+             return None
+
+     def get_dashboards(self) -> List[Dashboard]:
+         """Fetch all dashboards from the search endpoint with pagination."""
+         dashboards: List[Dashboard] = []
+         page = 1
+         per_page = self.page_size
+
+         while True:
+             try:
+                 params: Dict[str, Union[str, int]] = {
+                     "type": "dash-db",
+                     "page": page,
+                     "limit": per_page,
+                 }
+                 response = self.session.get(
+                     f"{self.base_url}/api/search",
+                     params=params,
+                 )
+                 response.raise_for_status()
+
+                 batch = response.json()
+                 if not batch:
+                     break
+
+                 for result in batch:
+                     dashboard = self.get_dashboard(result["uid"])
+                     if dashboard:
+                         dashboards.append(dashboard)
+                 page += 1
+             except requests.exceptions.RequestException as e:
+                 self.report.report_failure(
+                     title="Dashboard Search Error",
+                     message="Failed to fetch dashboards on page",
+                     context=str(page),
+                     exc=e,
+                 )
+                 if e.response is not None and e.response.status_code in (401, 403):
+                     self.report.report_permission_warning()
+                 break
+
+         return dashboards
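
For context, the client above could be exercised directly along these lines. This is a minimal sketch under two assumptions not shown in this diff: that GrafanaSourceReport can be constructed with no arguments, and that the token is a valid Grafana service-account token; the URL and token values are hypothetical.

from pydantic import SecretStr

from datahub.ingestion.source.grafana.grafana_api import GrafanaAPIClient
from datahub.ingestion.source.grafana.report import GrafanaSourceReport

# Assumption: GrafanaSourceReport() takes no constructor arguments.
report = GrafanaSourceReport()
client = GrafanaAPIClient(
    base_url="https://grafana.example.com",  # hypothetical instance
    token=SecretStr("glsa_example_token"),   # hypothetical service-account token
    verify_ssl=True,
    page_size=100,
    report=report,
)

# Both calls page through the API until an empty batch is returned;
# permission errors are recorded on the report rather than raised.
folders = client.get_folders()
dashboards = client.get_dashboards()
print(f"{len(folders)} folders, {len(dashboards)} dashboards")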
datahub/ingestion/source/grafana/grafana_config.py (new file)
@@ -0,0 +1,104 @@
+ from typing import Dict, Optional
+
+ from pydantic import Field, SecretStr, validator
+
+ from datahub.configuration.common import AllowDenyPattern
+ from datahub.configuration.source_common import (
+     DatasetLineageProviderConfigBase,
+     EnvConfigMixin,
+     PlatformInstanceConfigMixin,
+ )
+ from datahub.ingestion.source.state.stateful_ingestion_base import (
+     StatefulIngestionConfigBase,
+ )
+ from datahub.utilities import config_clean
+
+
+ class PlatformConnectionConfig(
+     EnvConfigMixin,
+     PlatformInstanceConfigMixin,
+ ):
+     """Platform connection configuration for mapping Grafana datasources to their actual platforms."""
+
+     platform: str = Field(
+         description="The platform name (e.g., 'postgres', 'mysql', 'snowflake')"
+     )
+     database: Optional[str] = Field(default=None, description="Default database name")
+     database_schema: Optional[str] = Field(
+         default=None, description="Default schema name"
+     )
+
+
+ class GrafanaSourceConfig(
+     DatasetLineageProviderConfigBase,
+     StatefulIngestionConfigBase,
+     PlatformInstanceConfigMixin,
+     EnvConfigMixin,
+ ):
+     """Configuration for Grafana source"""
+
+     platform: str = Field(default="grafana", hidden_from_docs=True)
+     url: str = Field(
+         description="Grafana URL in the format http://your-grafana-instance with no trailing slash"
+     )
+     service_account_token: SecretStr = Field(
+         description="Service account token for Grafana"
+     )
+     verify_ssl: bool = Field(
+         default=True,
+         description="Whether to verify SSL certificates when connecting to Grafana",
+     )
+
+     # API pagination configuration
+     page_size: int = Field(
+         default=100,
+         description="Number of items to fetch per API call when paginating through folders and dashboards",
+     )
+
+     # Extraction mode configuration
+     basic_mode: bool = Field(
+         default=False,
+         description="Enable basic extraction mode for users with limited permissions. "
+         "In basic mode, only dashboard metadata is extracted without detailed panel information, "
+         "lineage, or folder hierarchy. This requires only basic dashboard read permissions.",
+     )
+
+     # Content filtering
+     dashboard_pattern: AllowDenyPattern = Field(
+         default=AllowDenyPattern.allow_all(),
+         description="Regex pattern to filter dashboards for ingestion",
+     )
+     folder_pattern: AllowDenyPattern = Field(
+         default=AllowDenyPattern.allow_all(),
+         description="Regex pattern to filter folders for ingestion",
+     )
+
+     # Feature toggles
+     ingest_tags: bool = Field(
+         default=True, description="Whether to ingest dashboard and chart tags"
+     )
+     ingest_owners: bool = Field(
+         default=True, description="Whether to ingest dashboard ownership information"
+     )
+
+     include_lineage: bool = Field(
+         default=True,
+         description="Whether to extract lineage between charts and data sources. "
+         "When enabled, the source will parse SQL queries and datasource configurations "
+         "to build lineage relationships.",
+     )
+     include_column_lineage: bool = Field(
+         default=True,
+         description="Whether to extract column-level lineage from SQL queries. "
+         "Only applicable when include_lineage is enabled.",
+     )
+
+     # Platform connection mappings
+     connection_to_platform_map: Dict[str, PlatformConnectionConfig] = Field(
+         default_factory=dict,
+         description="Map of Grafana datasource types/UIDs to platform connection configs for lineage extraction",
+     )
+
+     @validator("url", allow_reuse=True)
+     def remove_trailing_slash(cls, v):
+         return config_clean.remove_trailing_slashes(v)
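
As a rough illustration of how connection_to_platform_map ties a Grafana datasource to an upstream platform for lineage, a config could be built as below. This is a sketch with hypothetical values throughout (URL, token, datasource UID, database names); it only assumes the fields shown in the class above and pydantic v1's parse_obj.

from datahub.ingestion.source.grafana.grafana_config import GrafanaSourceConfig

# Hypothetical recipe values; parse_obj runs the validators, including the
# trailing-slash cleanup applied to `url`.
config = GrafanaSourceConfig.parse_obj(
    {
        "url": "https://grafana.example.com/",
        "service_account_token": "glsa_example_token",
        "page_size": 50,
        "dashboard_pattern": {"deny": ["^test_.*"]},
        "connection_to_platform_map": {
            "my-postgres-uid": {  # Grafana datasource UID (hypothetical)
                "platform": "postgres",
                "database": "analytics",
                "database_schema": "public",
            }
        },
    }
)

# The url validator strips the trailing slash.
assert config.url == "https://grafana.example.com"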