py-ewr 2.1.9__py3-none-any.whl → 2.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- py_ewr/data_inputs.py +13 -0
- py_ewr/io.py +24 -0
- py_ewr/model_metadata/iqqm_stations.csv +11 -0
- py_ewr/parameter_metadata/parameter_sheet.csv +3 -3
- py_ewr/scenario_handling.py +107 -0
- {py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/METADATA +14 -3
- py_ewr-2.2.1.dist-info/RECORD +17 -0
- {py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/WHEEL +1 -1
- py_ewr-2.1.9.dist-info/RECORD +0 -15
- {py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/LICENSE +0 -0
- {py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/top_level.txt +0 -0
py_ewr/data_inputs.py
CHANGED
```diff
@@ -138,6 +138,19 @@ def get_NSW_codes() -> pd.DataFrame:
 
     return metadata
 
+def get_iqqm_codes() -> dict:
+    '''
+    Load metadata file for Macquarie containing model nodes
+    and gauges they correspond to
+
+    Returns:
+        dict: dict for linking model nodes to gauges
+    '''
+
+    metadf = pd.read_csv( BASE_PATH / 'model_metadata/iqqm_stations.csv', dtype=str)
+    metadata = metadf.set_index(metadf.columns[0]).to_dict()[metadf.columns[1]]
+    return metadata
+
 def get_level_gauges() -> tuple:
     '''Returning level gauges with EWRs
 
```
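For orientation, a minimal usage sketch of the new lookup (assumes py-ewr 2.2.1 is installed; the actual node-to-gauge pairs come from the bundled iqqm_stations.csv):

```python
# Minimal sketch: fetch the IQQM node -> gauge mapping added in 2.2.1.
from py_ewr import data_inputs

iqqm_codes = data_inputs.get_iqqm_codes()  # dict of model node (str) -> gauge number (str)
for node, gauge in iqqm_codes.items():
    print(f"IQQM node {node} maps to gauge {gauge}")
```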
py_ewr/io.py
ADDED
```diff
@@ -0,0 +1,24 @@
+import xarray as xr
+from pandas import DataFrame as Dataframe
+
+
+def read_netcdf_as_dataframe(netcdf_path: str) -> Dataframe:
+    dataset = xr.open_dataset(netcdf_path, engine='netcdf4')
+    df = dataset.to_dataframe()
+    dataset.close()
+
+    return df
+
+
+def save_dataframe_as_netcdf(df, output_path: str) -> None:
+    # Convert DataFrame to Xarray Dataset
+    ds = xr.Dataset.from_dataframe(df)
+
+    # Modify variable names to ensure they are valid for NetCDF
+    for var_name in ds.variables:
+        new_var_name = var_name.replace(" ", "_")  # Replace spaces with underscores
+        new_var_name = ''.join(c for c in new_var_name if c.isalnum() or c == "_")  # Remove non-alphanumeric characters
+        ds = ds.rename({var_name: new_var_name})
+
+    # Save the modified Xarray Dataset as a NetCDF file
+    ds.to_netcdf(output_path)
```
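A hedged round-trip sketch of the two new helpers (the paths are placeholders); note that save_dataframe_as_netcdf rewrites variable names, replacing spaces with underscores and dropping other non-alphanumeric characters, before writing:

```python
# Round-trip sketch for py_ewr.io (both paths are hypothetical).
from py_ewr.io import read_netcdf_as_dataframe, save_dataframe_as_netcdf

df = read_netcdf_as_dataframe("scenario_input.nc")   # netCDF -> pandas DataFrame
# ... inspect or transform df here ...
save_dataframe_as_netcdf(df, "scenario_output.nc")   # DataFrame -> sanitised netCDF
```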
py_ewr/parameter_metadata/parameter_sheet.csv
CHANGED
```diff
@@ -3306,9 +3306,9 @@ PU_0000999,Border Rivers and Moonie Long-term watering plan,Border Rivers,Kanown
 PU_0000999,Border Rivers and Moonie Long-term watering plan,Border Rivers,Goondiwindi,416201A,NE,7,6,,,,1,1,1,5000,10000,4,,,F,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-WB1-EF1-EF2-EF3-EF5 ,NF_NV_WB_EF,QLD,Border Rivers,QLD Border Rivers
 PU_0000999,Border Rivers and Moonie Long-term watering plan,Border Rivers,Goondiwindi,416201A,PP,7,6,,,,1,1,1,5000,10000,4,,,F,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-WB1-EF1-EF2-EF3-EF5 ,NF_NV_WB_EF,QLD,Border Rivers,QLD Border Rivers
 PU_0000999,Border Rivers and Moonie Long-term watering plan,Border Rivers,Goondiwindi,416201A,rANA,10,2,,,,2,1,1,5000,10000,2,,,F,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-WB1-EF1-EF2-EF3-EF5 ,NF_NV_WB_EF,QLD,Border Rivers,QLD Border Rivers
-PU_0000991,Condamine and Balonne Long-term watering plan,Condamine,Wilby Wilby,422016,BBR1_a,7,6,,,,1,90,90,100,1000000,4,,422034,V,,1.08,154000
-PU_0000991,Condamine and Balonne Long-term watering plan,Condamine,Wilby Wilby,422016,BBR1_b,7,6,,,,1,10,10,100,1000000,4,,422034,V,,1.08,20000
-PU_0000991,Condamine and Balonne Long-term watering plan,Condamine,Wilby Wilby,422016,BBR2,7,6,76,,,1,1,1,0,1000000,,,,V,,,25000
+PU_0000991,Condamine and Balonne Long-term watering plan,Condamine,Wilby Wilby,422016,BBR1_a,7,6,,,,1,90,90,100,1000000,4,,422034,V,,1.08,154000,,90,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-NV2-NV4-WB1-WB2-WB5-EF1-EF2-EF3-EF5-EF6-EF7 ,NF_NV_WB_EF,NSW,Condamine-Balonne,Intersecting Streams
+PU_0000991,Condamine and Balonne Long-term watering plan,Condamine,Wilby Wilby,422016,BBR1_b,7,6,,,,1,10,10,100,1000000,4,,422034,V,,1.08,20000,,10,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-NV2-NV4-WB1-WB2-WB5-EF1-EF2-EF3-EF5-EF6-EF7 ,NF_NV_WB_EF,NSW,Condamine-Balonne,Intersecting Streams
+PU_0000991,Condamine and Balonne Long-term watering plan,Condamine,Wilby Wilby,422016,BBR2,7,6,76,,,1,1,1,0,1000000,,,,V,,,25000,,60,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-NV2-NV4-WB1-WB2-WB5-EF1-EF2-EF3-EF5-EF6-EF7 ,NF_NV_WB_EF,NSW,Condamine-Balonne,Intersecting Streams
 PU_0000992,Warrego Paroo Bulloo and Nebine Long-term watering plan,Condamine,Nebine Creek at Roseleigh Crossing,422502A,DR1,7,6,100,,,1,1,1,2,,1,,,F,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-EF1-EF2 ,NF_EF,QLD,Condamine-Balonne,Nebine
 PU_0000992,Warrego Paroo Bulloo and Nebine Long-term watering plan,Condamine,Nebine Creek at Roseleigh Crossing,422502A,DR2,7,6,,,,1,1,1,2540,,,,,F,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,F,NF1-NF2-NF3-NF4-NF5-NF6-NF8-NF9-NV1-WB1-EF1-EF2-EF3-EF5 ,NF_NV_WB_EF,QLD,Condamine-Balonne,Nebine
 PU_0000992,Warrego Paroo Bulloo and Nebine Long-term watering plan,Condamine,Nebine Creek at Roseleigh Crossing,422502A,FD1,7,6,,,,1,8,8,8,,,,,F,,,,,,,,,,,,,,,,,,,,,,,548,,,,,,,,,,,,F,NF1-EF1-EF2 ,NF_EF,QLD,Condamine-Balonne,Nebine
```
py_ewr/scenario_handling.py
CHANGED
```diff
@@ -8,6 +8,8 @@ import logging
 
 import pandas as pd
 from tqdm import tqdm
+import xarray as xr
+import netCDF4
 
 log = logging.getLogger(__name__)
 log.addHandler(logging.NullHandler())
@@ -15,6 +17,61 @@ log.addHandler(logging.NullHandler())
 
 from . import data_inputs, evaluate_EWRs, summarise_results
 #----------------------------------- Scenario testing handling functions--------------------------#
+def is_valid_netcdf_file(file_path: str) -> bool:
+    try:
+        with netCDF4.Dataset(file_path, 'r'):
+            # If the file opens successfully, it's a valid NetCDF file
+            return True
+    except Exception as e:
+        # If an exception is raised, it's not a valid NetCDF file
+        return False
+
+
+def unpack_netcdf_as_dataframe(netcdf_file: str) -> pd.DataFrame:
+    '''Ingesting netCDF files and outputting as dataframes in memory.
+    # Example usage:
+    # df = unpack_netcdf_as_dataframe('your_file.nc')
+
+    Args:
+        netcdf_file (str): location of netCDF file
+
+    Results:
+        pd.Dataframe: netCDF file converted to dataframe
+    '''
+    try:
+        # Check if the file is a valid NetCDF file
+        if not is_valid_netcdf_file(netcdf_file):
+            raise ValueError("Not a valid NetCDF file.")
+
+        # Open the NetCDF file
+        dataset = xr.open_dataset(netcdf_file, engine='netcdf4')
+
+        # Check if the dataset is empty
+        if dataset is None:
+            raise ValueError("NetCDF dataset is empty.")
+
+        # extract the bits we actually can use
+        # Some of this needs to move/get cleaned up
+        iqqm_dict = data_inputs.get_iqqm_codes()
+        # the nodes are ints, but the above is str
+        ints_list = list(map(int, list(iqqm_dict)))
+
+        # Is there any reason to do these in one step?
+        dataset = dataset.sel(node=dataset['node'].isin(ints_list))
+        dataset = dataset[['Simulated flow']]
+
+        # Convert to DataFrame
+        df = dataset.to_dataframe()
+
+        # Close the dataset
+        dataset.close()
+
+        return df
+    except Exception as e:
+        # Handle any exceptions that may occur
+        print(f"Error: {str(e)}")
+        return None
+
 
 def unpack_model_file(csv_file: str, main_key: str, header_key: str) -> tuple:
     '''Ingesting scenario file locations of model files with all formats (excluding standard timeseries format), seperates the flow data and header data
```
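A short sketch of how the new ingestion function might be called directly ('macquarie_run.nc' is a placeholder path; the function prints the error and returns None if the file is not usable netCDF):

```python
# Sketch: validate and unpack an IQQM netCDF run (path is a placeholder).
from py_ewr import scenario_handling

if scenario_handling.is_valid_netcdf_file("macquarie_run.nc"):
    df_unpacked = scenario_handling.unpack_netcdf_as_dataframe("macquarie_run.nc")
    if df_unpacked is not None:  # None is returned on failure
        print(df_unpacked.head())  # 'Simulated flow' values indexed by node and time
```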
```diff
@@ -280,6 +337,52 @@ cleaner_standard_timeseries(input_df: pd.DataFrame, ewr_table_path: str = No
             log.info('Could not identify gauge in column name:', gauge, ', skipping analysis of data in this column.')
     return df_flow, df_level
 
+def cleaner_netcdf_werp(input_df: pd.DataFrame, stations: dict) -> pd.DataFrame:
+
+    '''Ingests dataframe, cleans up into a format matching IQQM csv
+
+    Args:
+        input_df (pd.DataFrame): raw xarray dataframe read-in
+
+        statios(dict): dict mapping IQQM stations to gauge numbers
+
+    Results:
+        tuple[pd.DataFrame, pd.DataFrame]: Cleaned flow dataframe; cleaned water level dataframe
+
+    '''
+
+    # organise like the rest of the dataframes- make this look just like we've read it in from an IQQM csv
+    cleaned_df = input_df.reset_index(level = 'node')
+    cleaned_df['node'] = cleaned_df['node'].astype(str)
+
+    cleaned_df['gauge'] = cleaned_df['node'].map(stations)
+    cleaned_df = cleaned_df.drop('node', axis = 1)
+
+    # drop the values that don't map to a gauge (lots of nodes in iqqm don't)
+    # This should be deprecated with the new way of choosing nodes on read-in, but being careful
+    cleaned_df = cleaned_df.query('gauge.notna()')
+
+    # give each gauge its own column- that's what the tool expects
+    cleaned_df = cleaned_df.pivot(columns = 'gauge', values = 'Simulated flow')
+    cleaned_df.columns.name = None
+
+    # the csvs return an 'object' type, not a datetime in the index
+    # but it gets converted to datetime in cleaner_***, so leave it.
+    cleaned_df.index.names = ['Date']
+
+    # Split gauges into flow and level, allocate to respective dataframe
+    flow_gauges = data_inputs.get_gauges('flow gauges')
+    level_gauges = data_inputs.get_gauges('level gauges')
+    df_flow = pd.DataFrame(index = cleaned_df.index)
+    df_level = pd.DataFrame(index = cleaned_df.index)
+    for gauge in cleaned_df.columns:
+        if gauge in flow_gauges:
+            df_flow[gauge] = cleaned_df[gauge].copy(deep=True)
+        if gauge in level_gauges:
+            df_level[gauge] = cleaned_df[gauge].copy(deep=True)
+
+    return df_flow, df_level
+
 
 def cleaner_ten_thousand_year(input_df: pd.DataFrame, ewr_table_path: str = None) -> pd.DataFrame:
     '''Ingests dataframe, removes junk columns, fixes date, allocates gauges to either flow/level
@@ -455,6 +558,10 @@ class ScenarioHandler:
             df_clean = cleaner_NSW(data)
             df_F, df_L = match_NSW_nodes(df_clean, data_inputs.get_NSW_codes())
 
+        elif self.model_format == 'IQQM - netcdf':
+            df_unpacked = unpack_netcdf_as_dataframe(scenarios[scenario])
+            df_F, df_L = cleaner_netcdf_werp(df_unpacked, data_inputs.get_iqqm_codes())
+
         elif self.model_format == 'ten thousand year':
             df = pd.read_csv(scenarios[scenario], index_col = 'Date')
             df_F, df_L = cleaner_ten_thousand_year(df, self.parameter_sheet)
```
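Putting the two pieces together, the sketch below mirrors what the new 'IQQM - netcdf' branch of ScenarioHandler does internally ('macquarie_run.nc' is again a placeholder path):

```python
# Sketch mirroring the new ScenarioHandler branch for model_format == 'IQQM - netcdf'.
from py_ewr import data_inputs, scenario_handling

df_unpacked = scenario_handling.unpack_netcdf_as_dataframe("macquarie_run.nc")
df_flow, df_level = scenario_handling.cleaner_netcdf_werp(
    df_unpacked, data_inputs.get_iqqm_codes()
)
print(df_flow.columns)  # gauge numbers mapped from the IQQM node IDs
```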
{py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/METADATA
CHANGED
````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: py_ewr
-Version: 2.1.9
+Version: 2.2.1
 Summary: Environmental Water Requirement calculator
 Home-page: https://github.com/MDBAuth/EWR_tool
 Author: Martin Job
@@ -23,9 +23,11 @@ Requires-Dist: ipython ==8.8.0
 Requires-Dist: ipywidgets ==7.7.0
 Requires-Dist: pandas ==2.0.3
 Requires-Dist: requests ==2.25.1
-Requires-Dist: tqdm
+Requires-Dist: tqdm >=4.66.1
 Requires-Dist: mdba-gauge-getter ==0.5.1
 Requires-Dist: cachetools ==5.2.0
+Requires-Dist: xarray ==2023.01.0
+Requires-Dist: netCDF4 ==1.6.4
 Requires-Dist: numpy <2
 
 []()
@@ -33,9 +35,10 @@ Requires-Dist: numpy <2
 [](https://pypi.org/project/py-ewr/)
 [](https://zenodo.org/badge/latestdoi/342122359)
 
-### **EWR tool version 2.1.9 README**
+### **EWR tool version 2.2.1 README**
 
 ### **Notes on recent version update**
+- Adding new model format handling - 'IQQM - netcdf'
 - Standard time-series handling added - each column needs a gauge, followed by and underscore, followed by either flow or level (e.g. 409025_flow). This handling also has missing date filling - so any missing dates will be filled with NaN values in all columns.
 - ten thousand year handling - This has been briefly taken offline for this version.
 - bug fixes: spells of length equal to the minimum required spell length were getting filtered out of the successful events table and successful interevents table, fixed misclassification of some gauges to flow, level, and lake level categories
@@ -222,3 +225,11 @@ NSW:
 
 Consult the user manual for instructions on how to run the tool. Please email the above email addresses for a copy of the user manual.
 
+To disable progress bars, as for example when running remote scripted runs, use
+
+``` python
+import os
+os.environ["TQDM_DISABLE"] = "1"
+```
+*before* importing py-ewr in your script.
+
````
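On the 'standard time-series' format described in the README notes above, a hedged sketch of the expected column naming (gauge number, an underscore, then flow or level); 409025 is the README's own example gauge, while 425010, the dates, and the values are illustrative only:

```python
# Illustrative column layout for the 'standard time-series' format.
import pandas as pd

ts = pd.DataFrame(
    {"409025_flow": [1200.0, 1350.0], "425010_level": [2.41, 2.44]},  # made-up values
    index=pd.to_datetime(["2020-07-01", "2020-07-02"]).rename("Date"),
)
ts.to_csv("standard_timeseries.csv")  # hypothetical output path
```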
py_ewr-2.2.1.dist-info/RECORD
ADDED
```diff
@@ -0,0 +1,17 @@
+py_ewr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+py_ewr/data_inputs.py,sha256=SmY92Xq7oAzkHsJgxncVzC1C9YqnLpDAbVadBF0nS7k,18440
+py_ewr/evaluate_EWRs.py,sha256=1JYWNtw5MYgf92jXhDNlgGkDx2mcfBP34OO-0jgal1w,229279
+py_ewr/io.py,sha256=Is0xPAzLx6-ylpTFyYJxMimkNVxxoTxUcknTk6bQbgs,840
+py_ewr/observed_handling.py,sha256=mpmLZGq9EEy7x_6YFhtZARhKJvFvcmLThmjan4pfMEo,17893
+py_ewr/scenario_handling.py,sha256=yrx0y_ZIXihpfb8hPhZG3ug1xN-5kKmYJzAASWVIH18,32442
+py_ewr/summarise_results.py,sha256=7w2Tbriwob21UXG0N2rSKffneg3M49hWouJPTHVjDAU,29747
+py_ewr/model_metadata/SiteID_MDBA.csv,sha256=DcwFmBBoLmv1lGik40IwTMSjSBPaDsTt8Nluh2s7wjM,183665
+py_ewr/model_metadata/SiteID_NSW.csv,sha256=UVBxN43Z5KWCvWhQ5Rh6TNEn35q4_sjPxKyHg8wPFws,6805
+py_ewr/model_metadata/iqqm_stations.csv,sha256=vl4CPtPslG5VplSzf_yLZulTrmab-mEBHOfzFtS1kf4,110
+py_ewr/parameter_metadata/ewr_calc_config.json,sha256=l1AgIRlf7UUmk3BNQ4r3kutU48pYHHVKmLELjoB-8rQ,17664
+py_ewr/parameter_metadata/parameter_sheet.csv,sha256=IiYAvf0hG9fchuwqtfDZhI8WSPB5jgaaaJ0MLwfPYAw,899556
+py_ewr-2.2.1.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
+py_ewr-2.2.1.dist-info/METADATA,sha256=coWcS6PVhx_mqwxT15xFL-gbHugA4KqWrPk-uu8DZNE,10049
+py_ewr-2.2.1.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
+py_ewr-2.2.1.dist-info/top_level.txt,sha256=n3725d-64Cjyb-YMUMV64UAuIflzUh2_UZSxiIbrur4,7
+py_ewr-2.2.1.dist-info/RECORD,,
```
py_ewr-2.1.9.dist-info/RECORD
DELETED
```diff
@@ -1,15 +0,0 @@
-py_ewr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-py_ewr/data_inputs.py,sha256=BLJrrasZYpSUkC0yVBzVqnz6WGT2Y3SJYJJhguDi-Vo,18046
-py_ewr/evaluate_EWRs.py,sha256=1JYWNtw5MYgf92jXhDNlgGkDx2mcfBP34OO-0jgal1w,229279
-py_ewr/observed_handling.py,sha256=mpmLZGq9EEy7x_6YFhtZARhKJvFvcmLThmjan4pfMEo,17893
-py_ewr/scenario_handling.py,sha256=yMAfRNIZtuGntdey2-FML5nfUaGXMIApAUvIzQbyXUs,28476
-py_ewr/summarise_results.py,sha256=7w2Tbriwob21UXG0N2rSKffneg3M49hWouJPTHVjDAU,29747
-py_ewr/model_metadata/SiteID_MDBA.csv,sha256=DcwFmBBoLmv1lGik40IwTMSjSBPaDsTt8Nluh2s7wjM,183665
-py_ewr/model_metadata/SiteID_NSW.csv,sha256=UVBxN43Z5KWCvWhQ5Rh6TNEn35q4_sjPxKyHg8wPFws,6805
-py_ewr/parameter_metadata/ewr_calc_config.json,sha256=l1AgIRlf7UUmk3BNQ4r3kutU48pYHHVKmLELjoB-8rQ,17664
-py_ewr/parameter_metadata/parameter_sheet.csv,sha256=p92qiaYiBfAVGDrMeJ-vb0qQnepeTgvDBKSNYj2TG5c,899556
-py_ewr-2.1.9.dist-info/LICENSE,sha256=ogEPNDSH0_dhiv_lT3ifVIdgIzHAqNA_SemnxUfPBJk,7048
-py_ewr-2.1.9.dist-info/METADATA,sha256=7mICFTABXvrkiEsAXUW-Gquev0Z8c9bctPhj520I-jE,9748
-py_ewr-2.1.9.dist-info/WHEEL,sha256=y4mX-SOX4fYIkonsAGA5N0Oy-8_gI4FXw5HNI1xqvWg,91
-py_ewr-2.1.9.dist-info/top_level.txt,sha256=n3725d-64Cjyb-YMUMV64UAuIflzUh2_UZSxiIbrur4,7
-py_ewr-2.1.9.dist-info/RECORD,,
```
{py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/LICENSE
File without changes

{py_ewr-2.1.9.dist-info → py_ewr-2.2.1.dist-info}/top_level.txt
File without changes