disdrodb 0.1.4__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (135)
  1. disdrodb/__init__.py +1 -5
  2. disdrodb/_version.py +2 -2
  3. disdrodb/accessor/methods.py +14 -3
  4. disdrodb/api/checks.py +10 -0
  5. disdrodb/api/create_directories.py +0 -2
  6. disdrodb/api/io.py +14 -17
  7. disdrodb/api/path.py +42 -77
  8. disdrodb/api/search.py +89 -23
  9. disdrodb/cli/disdrodb_create_summary.py +11 -1
  10. disdrodb/cli/disdrodb_create_summary_station.py +10 -0
  11. disdrodb/cli/disdrodb_run_l0.py +1 -1
  12. disdrodb/cli/disdrodb_run_l0a.py +1 -1
  13. disdrodb/cli/disdrodb_run_l0b.py +1 -1
  14. disdrodb/cli/disdrodb_run_l0c.py +1 -1
  15. disdrodb/cli/disdrodb_run_l1.py +1 -1
  16. disdrodb/cli/disdrodb_run_l2e.py +1 -1
  17. disdrodb/cli/disdrodb_run_l2m.py +1 -1
  18. disdrodb/configs.py +30 -83
  19. disdrodb/constants.py +4 -3
  20. disdrodb/data_transfer/download_data.py +4 -2
  21. disdrodb/docs.py +2 -2
  22. disdrodb/etc/products/L1/1MIN.yaml +13 -0
  23. disdrodb/etc/products/L1/LPM/1MIN.yaml +13 -0
  24. disdrodb/etc/products/L1/PARSIVEL/1MIN.yaml +13 -0
  25. disdrodb/etc/products/L1/PARSIVEL2/1MIN.yaml +13 -0
  26. disdrodb/etc/products/L1/PWS100/1MIN.yaml +13 -0
  27. disdrodb/etc/products/L1/RD80/1MIN.yaml +13 -0
  28. disdrodb/etc/products/L1/SWS250/1MIN.yaml +13 -0
  29. disdrodb/etc/products/L1/global.yaml +7 -1
  30. disdrodb/etc/products/L2E/10MIN.yaml +1 -12
  31. disdrodb/etc/products/L2E/5MIN.yaml +1 -0
  32. disdrodb/etc/products/L2E/global.yaml +1 -1
  33. disdrodb/etc/products/L2M/MODELS/GAMMA_GS_ND_MAE.yaml +6 -0
  34. disdrodb/etc/products/L2M/{GAMMA_ML.yaml → MODELS/GAMMA_ML.yaml} +1 -1
  35. disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_LOG_ND_MAE.yaml +6 -0
  36. disdrodb/etc/products/L2M/MODELS/LOGNORMAL_GS_ND_MAE.yaml +6 -0
  37. disdrodb/etc/products/L2M/MODELS/LOGNORMAL_ML.yaml +8 -0
  38. disdrodb/etc/products/L2M/MODELS/NGAMMA_GS_R_MAE.yaml +6 -0
  39. disdrodb/etc/products/L2M/global.yaml +11 -3
  40. disdrodb/l0/check_configs.py +49 -16
  41. disdrodb/l0/configs/LPM/l0a_encodings.yml +2 -2
  42. disdrodb/l0/configs/LPM/l0b_cf_attrs.yml +2 -2
  43. disdrodb/l0/configs/LPM/l0b_encodings.yml +2 -2
  44. disdrodb/l0/configs/LPM/raw_data_format.yml +2 -2
  45. disdrodb/l0/configs/PARSIVEL/l0b_encodings.yml +1 -1
  46. disdrodb/l0/configs/PWS100/l0b_encodings.yml +1 -0
  47. disdrodb/l0/configs/SWS250/bins_diameter.yml +108 -0
  48. disdrodb/l0/configs/SWS250/bins_velocity.yml +83 -0
  49. disdrodb/l0/configs/SWS250/l0a_encodings.yml +18 -0
  50. disdrodb/l0/configs/SWS250/l0b_cf_attrs.yml +72 -0
  51. disdrodb/l0/configs/SWS250/l0b_encodings.yml +155 -0
  52. disdrodb/l0/configs/SWS250/raw_data_format.yml +148 -0
  53. disdrodb/l0/l0_reader.py +2 -2
  54. disdrodb/l0/l0b_processing.py +70 -15
  55. disdrodb/l0/l0c_processing.py +7 -3
  56. disdrodb/l0/readers/LPM/ARM/ARM_LPM.py +1 -1
  57. disdrodb/l0/readers/LPM/AUSTRALIA/MELBOURNE_2007_LPM.py +2 -2
  58. disdrodb/l0/readers/LPM/BELGIUM/ULIEGE.py +256 -0
  59. disdrodb/l0/readers/LPM/BRAZIL/CHUVA_LPM.py +2 -2
  60. disdrodb/l0/readers/LPM/BRAZIL/GOAMAZON_LPM.py +2 -2
  61. disdrodb/l0/readers/LPM/GERMANY/DWD.py +491 -0
  62. disdrodb/l0/readers/LPM/ITALY/GID_LPM.py +2 -2
  63. disdrodb/l0/readers/LPM/ITALY/GID_LPM_W.py +2 -2
  64. disdrodb/l0/readers/LPM/KIT/CHWALA.py +2 -2
  65. disdrodb/l0/readers/LPM/SLOVENIA/ARSO.py +107 -12
  66. disdrodb/l0/readers/LPM/SLOVENIA/UL.py +3 -3
  67. disdrodb/l0/readers/LPM/SWITZERLAND/INNERERIZ_LPM.py +2 -2
  68. disdrodb/l0/readers/PARSIVEL/BASQUECOUNTRY/EUSKALMET_OTT.py +227 -0
  69. disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/LPVEX.py +1 -1
  70. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010.py +5 -14
  71. disdrodb/l0/readers/PARSIVEL/NCAR/VORTEX2_2010_UF.py +8 -17
  72. disdrodb/l0/readers/PARSIVEL/SLOVENIA/UL.py +117 -8
  73. disdrodb/l0/readers/PARSIVEL2/BASQUECOUNTRY/EUSKALMET_OTT2.py +232 -0
  74. disdrodb/l0/readers/PARSIVEL2/BRAZIL/CHUVA_PARSIVEL2.py +10 -14
  75. disdrodb/l0/readers/PARSIVEL2/BRAZIL/GOAMAZON_PARSIVEL2.py +10 -14
  76. disdrodb/l0/readers/PARSIVEL2/DENMARK/DTU.py +8 -14
  77. disdrodb/l0/readers/PARSIVEL2/DENMARK/EROSION_raw.py +382 -0
  78. disdrodb/l0/readers/PARSIVEL2/FINLAND/FMI_PARSIVEL2.py +4 -0
  79. disdrodb/l0/readers/PARSIVEL2/FRANCE/OSUG.py +1 -1
  80. disdrodb/l0/readers/PARSIVEL2/GREECE/NOA.py +127 -0
  81. disdrodb/l0/readers/PARSIVEL2/ITALY/HYDROX.py +239 -0
  82. disdrodb/l0/readers/PARSIVEL2/NCAR/FARM_PARSIVEL2.py +5 -11
  83. disdrodb/l0/readers/PARSIVEL2/NCAR/PERILS_MIPS.py +4 -17
  84. disdrodb/l0/readers/PARSIVEL2/NCAR/RELAMPAGO_PARSIVEL2.py +5 -14
  85. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_PJ.py +10 -13
  86. disdrodb/l0/readers/PARSIVEL2/NCAR/SNOWIE_SB.py +10 -13
  87. disdrodb/l0/readers/PARSIVEL2/PHILIPPINES/PAGASA.py +232 -0
  88. disdrodb/l0/readers/PARSIVEL2/SPAIN/CENER.py +6 -18
  89. disdrodb/l0/readers/PARSIVEL2/{NASA/LPVEX.py → SPAIN/GRANADA.py} +46 -35
  90. disdrodb/l0/readers/PARSIVEL2/SWEDEN/SMHI.py +189 -0
  91. disdrodb/l0/readers/PARSIVEL2/USA/{C3WE.py → CW3E.py} +10 -28
  92. disdrodb/l0/readers/PWS100/AUSTRIA/HOAL.py +321 -0
  93. disdrodb/l0/readers/SW250/BELGIUM/KMI.py +239 -0
  94. disdrodb/l1/beard_model.py +31 -129
  95. disdrodb/l1/fall_velocity.py +136 -83
  96. disdrodb/l1/filters.py +25 -28
  97. disdrodb/l1/processing.py +16 -17
  98. disdrodb/l1/resampling.py +101 -38
  99. disdrodb/l1_env/routines.py +46 -17
  100. disdrodb/l2/empirical_dsd.py +6 -0
  101. disdrodb/l2/processing.py +6 -5
  102. disdrodb/metadata/geolocation.py +0 -2
  103. disdrodb/metadata/search.py +3 -4
  104. disdrodb/psd/fitting.py +16 -13
  105. disdrodb/routines/l0.py +2 -2
  106. disdrodb/routines/l1.py +173 -60
  107. disdrodb/routines/l2.py +148 -284
  108. disdrodb/routines/options.py +345 -0
  109. disdrodb/routines/wrappers.py +14 -1
  110. disdrodb/scattering/axis_ratio.py +90 -84
  111. disdrodb/scattering/permittivity.py +6 -0
  112. disdrodb/summary/routines.py +735 -670
  113. disdrodb/utils/archiving.py +51 -44
  114. disdrodb/utils/attrs.py +3 -1
  115. disdrodb/utils/dask.py +4 -4
  116. disdrodb/utils/dict.py +33 -0
  117. disdrodb/utils/encoding.py +6 -1
  118. disdrodb/utils/routines.py +9 -8
  119. disdrodb/utils/time.py +11 -3
  120. disdrodb/viz/__init__.py +0 -13
  121. disdrodb/viz/plots.py +231 -1
  122. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/METADATA +2 -1
  123. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/RECORD +135 -103
  124. /disdrodb/etc/products/L2M/{NGAMMA_GS_LOG_ND_MAE.yaml → MODELS/NGAMMA_GS_LOG_ND_MAE.yaml} +0 -0
  125. /disdrodb/etc/products/L2M/{NGAMMA_GS_ND_MAE.yaml → MODELS/NGAMMA_GS_ND_MAE.yaml} +0 -0
  126. /disdrodb/etc/products/L2M/{NGAMMA_GS_Z_MAE.yaml → MODELS/NGAMMA_GS_Z_MAE.yaml} +0 -0
  127. /disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/IFLOODS.py +0 -0
  128. /disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/MC3E.py +0 -0
  129. /disdrodb/l0/readers/PARSIVEL/{GPM → NASA}/PIERS.py +0 -0
  130. /disdrodb/l0/readers/PARSIVEL2/{GPM → NASA}/GCPEX.py +0 -0
  131. /disdrodb/l0/readers/PARSIVEL2/{GPM → NASA}/NSSTC.py +0 -0
  132. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/WHEEL +0 -0
  133. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/entry_points.txt +0 -0
  134. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/licenses/LICENSE +0 -0
  135. {disdrodb-0.1.4.dist-info → disdrodb-0.2.0.dist-info}/top_level.txt +0 -0
disdrodb/routines/l1.py CHANGED
@@ -24,13 +24,14 @@ import os
 import time
 from typing import Optional
 
-import xarray as xr
+import pandas as pd
 
 from disdrodb.api.checks import check_station_inputs
 from disdrodb.api.create_directories import (
     create_logs_directory,
     create_product_directory,
 )
+from disdrodb.api.io import open_netcdf_files
 from disdrodb.api.path import (
     define_file_folder_path,
     define_l1_filename,
@@ -40,9 +41,11 @@ from disdrodb.configs import (
     get_data_archive_dir,
     get_folder_partitioning,
     get_metadata_archive_dir,
-    get_product_options,
 )
 from disdrodb.l1.processing import generate_l1
+from disdrodb.l1.resampling import resample_dataset
+from disdrodb.metadata.reader import read_station_metadata
+from disdrodb.routines.options import L1ProcessingOptions
 from disdrodb.utils.dask import execute_tasks_safely
 from disdrodb.utils.decorators import delayed_if_parallel, single_threaded_if_parallel
 
@@ -52,20 +55,37 @@ from disdrodb.utils.logger import (
     log_info,
 )
 from disdrodb.utils.routines import run_product_generation, try_get_required_filepaths
+from disdrodb.utils.time import (
+    ensure_sample_interval_in_seconds,
+)
 from disdrodb.utils.writer import write_product
 
 logger = logging.getLogger(__name__)
 
 
+def define_l1_logs_filename(campaign_name, station_name, start_time, end_time, temporal_resolution):
+    """Define L1 logs filename."""
+    starting_time = pd.to_datetime(start_time).strftime("%Y%m%d%H%M%S")
+    ending_time = pd.to_datetime(end_time).strftime("%Y%m%d%H%M%S")
+    logs_filename = f"L1.{temporal_resolution}.{campaign_name}.{station_name}.s{starting_time}.e{ending_time}"
+    return logs_filename
+
+
 @delayed_if_parallel
 @single_threaded_if_parallel
 def _generate_l1(
-    filepath,
+    start_time,
+    end_time,
+    filepaths,
     data_dir,
     logs_dir,
     logs_filename,
+    folder_partitioning,
     campaign_name,
     station_name,
+    # L1 options
+    temporal_resolution,
+    product_options,
     # Processing options
     force,
     verbose,
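
The helper added above encodes the processed event window in the log filename. A quick sketch of the resulting pattern (argument values are hypothetical):

    define_l1_logs_filename(
        campaign_name="HYMEX",
        station_name="10",
        start_time="2024-01-01 00:00:00",
        end_time="2024-01-31 23:59:59",
        temporal_resolution="1MIN",
    )
    # -> 'L1.1MIN.HYMEX.10.s20240101000000.e20240131235959'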
@@ -107,29 +127,57 @@ def _generate_l1(
 
     # Define product processing function
     def core(
-        filepath,
+        filepaths,
+        start_time,
+        end_time,
         campaign_name,
         station_name,
+        # Processing options
+        # logger,
+        # verbose,
+        force,
+        # Product options
+        temporal_resolution,
+        product_options,
+        # Archiving arguments
         data_dir,
         folder_partitioning,
     ):
         """Define L1 product processing."""
-        # Retrieve L1 configurations
-        l1_options = get_product_options("L1").get("product_options")  # TODO: MOVE OUTSIDE
+        # Open the L0C netCDF files
+        ds = open_netcdf_files(
+            filepaths,
+            start_time=start_time,
+            end_time=end_time,
+            variables=["raw_drop_number", "time_qc"],
+            parallel=False,
+            compute=True,
+        )
+
+        # Define sample interval in seconds
+        sample_interval = ensure_sample_interval_in_seconds(ds["sample_interval"]).to_numpy().item()
 
-        # Open the raw netCDF
-        with xr.open_dataset(filepath, chunks=-1, decode_timedelta=False, cache=False) as ds:
-            ds = ds[["raw_drop_number"]].load()
+        # Resample dataset
+        ds = resample_dataset(
+            ds=ds,
+            sample_interval=sample_interval,
+            temporal_resolution=temporal_resolution,
+        )
 
         # Produce L1 dataset
-        ds = generate_l1(ds=ds, **l1_options)
+        ds = generate_l1(ds=ds, **product_options)
 
         # Ensure at least 1 timestep available
         if ds["time"].size <= 1:
             return None
 
         # Write L1 netCDF4 dataset
-        filename = define_l1_filename(ds, campaign_name=campaign_name, station_name=station_name)
+        filename = define_l1_filename(
+            ds,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            temporal_resolution=temporal_resolution,
+        )
         folder_path = define_file_folder_path(ds, dir_path=data_dir, folder_partitioning=folder_partitioning)
         filepath = os.path.join(folder_path, filename)
         write_product(ds, filepath=filepath, force=force)
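
The new core pipeline aggregates the raw drop spectra to the target temporal resolution before generating the L1 product. A minimal sketch of the kind of aggregation involved, assuming resample_dataset sums drop counts over the coarser time bins (the actual logic lives in disdrodb.l1.resampling):

    import numpy as np
    import pandas as pd
    import xarray as xr

    # 30 one-minute drop counts, aggregated to a 10 min resolution
    time = pd.date_range("2024-01-01", periods=30, freq="60s")
    ds = xr.Dataset(
        {"raw_drop_number": ("time", np.random.poisson(5, size=30))},
        coords={"time": time},
    )
    ds_10min = ds.resample(time="10min").sum()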
@@ -139,13 +187,23 @@ def _generate_l1(
 
     # Define product processing function kwargs
     core_func_kwargs = dict(  # noqa: C408
-        filepath=filepath,
+        filepaths=filepaths,
+        start_time=start_time,
+        end_time=end_time,
         campaign_name=campaign_name,
         station_name=station_name,
+        # Processing options
+        # verbose=verbose,
+        force=force,
+        # Product options
+        temporal_resolution=temporal_resolution,
+        product_options=product_options,
         # Archiving options
         data_dir=data_dir,
         folder_partitioning=folder_partitioning,
     )
+
+    # TODO: Inspect core arguments: pass logger, verbose, folder_partitioning if present?
     # Run product generation
     logger_filepath = run_product_generation(
         product=product,
@@ -231,15 +289,6 @@ def run_l1_station(
         station_name=station_name,
     )
 
-    # Define logs directory
-    logs_dir = create_logs_directory(
-        product=product,
-        data_archive_dir=data_archive_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-    )
-
     # ------------------------------------------------------------------------.
     # Start processing
     if verbose:
@@ -247,18 +296,6 @@ def run_l1_station(
         msg = f"{product} processing of station {station_name} has started."
         log_info(logger=logger, msg=msg, verbose=verbose)
 
-    # ------------------------------------------------------------------------.
-    # Create directory structure
-    data_dir = create_product_directory(
-        data_archive_dir=data_archive_dir,
-        metadata_archive_dir=metadata_archive_dir,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-        product=product,
-        force=force,
-    )
-
     # -------------------------------------------------------------------------.
     # List files to process
     # - If no data available, print error message and return None
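
The next hunk iterates over temporal partitions of the input files. Inferred from the loop body below, each event_info entry is a dict with "start_time", "end_time", and "filepaths" keys (the actual construction happens in L1ProcessingOptions.group_files_by_temporal_partitions; the filename shown is hypothetical):

    files_partitions = [
        {
            "start_time": "2024-01-01 00:00:00",
            "end_time": "2024-01-01 23:59:59",
            "filepaths": ["L0C.HYMEX.10.s20240101000000.e20240101235959.nc"],
        },
        # ... one entry per temporal partition
    ]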
@@ -275,38 +312,114 @@ def run_l1_station(
     if filepaths is None:
         return
 
-    # -----------------------------------------------------------------.
-    # Generate L1 files
-    # - Loop over the L0 netCDF files and generate L1 files.
-    # - If parallel=True, it does that in parallel using dask.delayed
-    list_tasks = [
-        _generate_l1(
-            filepath=filepath,
-            data_dir=data_dir,
-            logs_dir=logs_dir,
-            logs_filename=os.path.basename(filepath),
+    # -------------------------------------------------------------------------.
+    # Read station metadata and retrieve sensor name
+    metadata = read_station_metadata(
+        metadata_archive_dir=metadata_archive_dir,
+        data_source=data_source,
+        campaign_name=campaign_name,
+        station_name=station_name,
+    )
+    sensor_name = metadata["sensor_name"]
+
+    # -------------------------------------------------------------------------.
+    # Retrieve L1 processing options
+    l1_processing_options = L1ProcessingOptions(
+        sensor_name=sensor_name,
+        filepaths=filepaths,
+        parallel=parallel,
+    )
+
+    # -------------------------------------------------------------------------.
+    # Generate products for each temporal resolution
+    # temporal_resolution = "1MIN"
+    # temporal_resolution = "10MIN"
+    for temporal_resolution in l1_processing_options.temporal_resolutions:
+        # Print progress message
+        msg = f"Production of {product} {temporal_resolution} has started."
+        log_info(logger=logger, msg=msg, verbose=verbose)
+
+        # Retrieve event info
+        files_partitions = l1_processing_options.group_files_by_temporal_partitions(temporal_resolution)
+
+        # Retrieve folder partitioning (for files and logs)
+        folder_partitioning = l1_processing_options.get_folder_partitioning(temporal_resolution)
+
+        # Retrieve product options
+        product_options = l1_processing_options.get_product_options(temporal_resolution)
+        product_options = product_options.get("product_options")
+
+        # ------------------------------------------------------------------.
+        # Create product directory
+        data_dir = create_product_directory(
+            data_archive_dir=data_archive_dir,
+            metadata_archive_dir=metadata_archive_dir,
+            data_source=data_source,
             campaign_name=campaign_name,
             station_name=station_name,
-            # Processing options
+            product=product,
             force=force,
-            verbose=verbose,
-            parallel=parallel,
+            # Option for L1
+            temporal_resolution=temporal_resolution,
         )
-        for filepath in filepaths
-    ]
-    list_logs = execute_tasks_safely(list_tasks=list_tasks, parallel=parallel, logs_dir=logs_dir)
 
-    # -----------------------------------------------------------------.
-    # Define L1 summary logs
-    create_product_logs(
-        product=product,
-        data_source=data_source,
-        campaign_name=campaign_name,
-        station_name=station_name,
-        data_archive_dir=data_archive_dir,
-        # Logs list
-        list_logs=list_logs,
-    )
+        # Define logs directory
+        logs_dir = create_logs_directory(
+            product=product,
+            data_archive_dir=data_archive_dir,
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            # Option for L1
+            temporal_resolution=temporal_resolution,
+        )
+
+        # ------------------------------------------------------------------.
+        # Generate files
+        # - Loop over the L0C netCDF files and generate L1 files.
+        # - If parallel=True, it does that in parallel using dask.delayed
+        list_tasks = [
+            _generate_l1(
+                start_time=event_info["start_time"],
+                end_time=event_info["end_time"],
+                filepaths=event_info["filepaths"],
+                data_dir=data_dir,
+                logs_dir=logs_dir,
+                logs_filename=define_l1_logs_filename(
+                    campaign_name=campaign_name,
+                    station_name=station_name,
+                    start_time=event_info["start_time"],
+                    end_time=event_info["end_time"],
+                    temporal_resolution=temporal_resolution,
+                ),
+                folder_partitioning=folder_partitioning,
+                campaign_name=campaign_name,
+                station_name=station_name,
+                # L1 product options
+                temporal_resolution=temporal_resolution,
+                product_options=product_options,
+                # Processing options
+                force=force,
+                verbose=verbose,
+                parallel=parallel,
+            )
+            for event_info in files_partitions
+        ]
+        list_logs = execute_tasks_safely(list_tasks=list_tasks, parallel=parallel, logs_dir=logs_dir)
+
+        # -----------------------------------------------------------------.
+        # Define product summary logs
+        create_product_logs(
+            product=product,
+            data_source=data_source,
+            campaign_name=campaign_name,
+            station_name=station_name,
+            data_archive_dir=data_archive_dir,
+            # Product options
+            temporal_resolution=temporal_resolution,
+            # Logs list
+            list_logs=list_logs,
+        )
 
     # ---------------------------------------------------------------------.
     # End L1 processing
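
With these changes, a single run_l1_station call produces one L1 product per configured temporal resolution (e.g. the new L1/1MIN.yaml files listed above) instead of one L1 file per L0C file. A hedged usage sketch, assuming the keyword arguments visible in the hunks above (argument values are hypothetical):

    from disdrodb.routines.l1 import run_l1_station

    run_l1_station(
        data_source="EPFL",       # hypothetical data source
        campaign_name="HYMEX",    # hypothetical campaign
        station_name="10",        # hypothetical station
        force=True,
        verbose=True,
        parallel=False,
    )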