gimu-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gimu/__init__.py +0 -0
- gimu/config.py +82 -0
- gimu/easy_date.py +209 -0
- gimu/geo_common.py +222 -0
- gimu/project_cli.py +56 -0
- gimu/save2incon.py +30 -0
- gimu/t2listingh5.py +375 -0
- gimu/waiwera_copy.py +43 -0
- gimu-0.1.0.dist-info/METADATA +84 -0
- gimu-0.1.0.dist-info/RECORD +13 -0
- gimu-0.1.0.dist-info/WHEEL +4 -0
- gimu-0.1.0.dist-info/entry_points.txt +4 -0
- gimu-0.1.0.dist-info/licenses/LICENSE.txt +9 -0
gimu/__init__.py
ADDED
File without changes
gimu/config.py
ADDED
@@ -0,0 +1,82 @@
""" CONFIG
"""


class Singleton(object):
    __single = None # the one, true Singleton

    def __new__(classtype, *args, **kwargs):
        # Check to see if a __single exists already for this class
        # Compare class types instead of just looking for None so
        # that subclasses will create their own __single objects
        if classtype != type(classtype.__single):
            classtype.__single = object.__new__(classtype, *args, **kwargs)
        return classtype.__single


#class config(Singleton):
class config(object):
    def __init__(self, filename=''):
        self.empty()
        if filename: self.read_from_file(filename)
    def empty(self):
        self._config_entries = {}

    def read_from_file(self, filename=None):
        """ read config from a whole file, until [END]; if filename
        is not given, the user will be prompted for a filename. """
        if filename is None:
            filename = input(' Configuration file (.cfg) name? ')
        print(" Reading file", filename.strip(), "for configurations...")

        cfgfile = open(filename)
        self._read(cfgfile)
        cfgfile.close()

    def read_from_file_section(self, file_with_cfg):
        """ only read config entries from the current location;
        file_with_cfg should already be opened, and reading will
        be terminated once [END] is reached, so the calling program
        can continue reading the rest of the file. """
        self._read(file_with_cfg)

    def _read(self, cfgfile):
        finished = False
        while not finished:
            line = cfgfile.readline()
            if line:
                if line.strip() == '': continue
                if line.strip()[0] == '!': continue
                if line.strip()[0] == '#': continue
                if line.strip()[0] == '[':
                    ikeyend = line.find(']')
                    keyword = line[1:ikeyend]
                    if keyword == 'END': break
                    self._config_entries[keyword] = []
                else:
                    self._config_entries[keyword].append(line.rstrip('\n\r'))
            else: finished = True

    def add_value(self, keyword, value):
        """ add a value manually; value can be a single value, a list, or any
        other object, as long as you know what it is when you get it out.
        If the keyword already exists, the value will be appended. """
        if keyword in self._config_entries:
            try:
                self._config_entries[keyword].append(value)
            except:
                self._config_entries[keyword] = [self._config_entries[keyword], value]
        else:
            self._config_entries[keyword] = [value]

    def get_value(self, keyword):
        if len(self._config_entries[keyword]) == 0:
            return ''
        else:
            return self._config_entries[keyword][0]

    def get_list(self, keyword):
        return self._config_entries[keyword]

    def check_optional(self, keyword):
        return keyword in self._config_entries.keys()
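For orientation, a minimal usage sketch of the `config` class above (illustrative only, not part of the packaged files; `example.cfg` and the `WELLS`/`TITLE` keywords are hypothetical):

```python
# Illustrative sketch only; assumes gimu is installed and that example.cfg
# contains something like:
#   [WELLS]
#   WK201
#   WK202
#   [END]
from gimu.config import config

cfg = config('example.cfg')         # reads entries up to [END]
if cfg.check_optional('WELLS'):     # True if the keyword was present
    print(cfg.get_value('WELLS'))   # first value: 'WK201'
    print(cfg.get_list('WELLS'))    # all values: ['WK201', 'WK202']

cfg.add_value('TITLE', 'my model')  # add an entry programmatically
print(cfg.get_value('TITLE'))       # 'my model'
```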
gimu/easy_date.py
ADDED
@@ -0,0 +1,209 @@
""" Lazy man's easy conversion between date strings and date tuples/date objects.

It's actually very easy to code it directly, plus it's better to reduce
dependency. This module provides very little additional functionality, one
piece being the ability to detect whether a list of strings/date tuples is
given, and act accordingly. Use these few lines of code instead if you are
not too lazy:

    from datetime import datetime
    d = datetime.strptime(s, '%Y-%m-%d').date()

    from datetime import date
    s = str(date(1995,2,3))
    s = str(date(*tp))
    # tp is your existing tuple, s will be string '1995-02-03'
"""
import unittest


def date_tuple_to_string(tp):
    """ Converts date integer tuple (y,m,d) into string 'yyyy-mm-dd'. Input
    supports both a single date-tuple as well as a list of date-tuples. """
    from datetime import date
    single = False
    if len(tp) == 3:
        if all([isinstance(i, int) for i in tp]):
            single = True
    if single:
        return str(date(*tp))
    else:
        # assuming input is a list of date-tuples
        return [str(date(*d)) for d in tp]

def _s2d(s):
    """ Converts a date string of format 'yyyy-mm-dd' or 'dd/mm/yyyy' into a
    python datetime.date object. """
    from datetime import datetime
    if '/' in s:
        frmt = '%d/%m/%Y'
    elif '-' in s:
        frmt = '%Y-%m-%d'
    else:
        raise Exception("Date string format only supports 'yyyy-mm-dd' or 'dd/mm/yyyy'")
    return datetime.strptime(s, frmt).date()

def date_string_to_tuple(s):
    """ Converts a date string of format 'yyyy-mm-dd' or 'dd/mm/yyyy' into a
    tuple of integers (y,m,d). Input supports both a single string as well as a
    list of strings. """
    def d2t(d):
        return (d.year, d.month, d.day)
    if isinstance(s, list):
        return [d2t(_s2d(sd)) for sd in s]
    else:
        return d2t(_s2d(s))

def date_string_to_date(s):
    """ Converts a date string of format 'yyyy-mm-dd' or 'dd/mm/yyyy' into a
    python datetime.date object. Input supports both a single string as well as
    a list of strings. """
    from datetime import date
    if isinstance(s, list):
        return [_s2d(sd) for sd in s]
    else:
        return _s2d(s)

def year_fraction_to_date(year_fraction):
    """ Converts a decimal/float year into a python datetime.date object.
    Input supports both a single float as well as a list of floats. """
    from datetime import date, timedelta
    def yf2d(yf):
        year = int(yf)
        fraction = yf - year
        # assuming 365.25 days per year
        day_of_year = int(fraction * 365.25)
        return date(year, 1, 1) + timedelta(days=day_of_year)
    if isinstance(year_fraction, list):
        return [yf2d(yf) for yf in year_fraction]
    else:
        return yf2d(year_fraction)

def year_fraction_to_date_str(year_fraction):
    """ Converts a decimal/float year into a string of format 'dd/mm/yyyy'.
    Input supports both a single float as well as a list of floats. """
    dd = year_fraction_to_date(year_fraction)
    if isinstance(dd, list):
        return [d.strftime("%d/%m/%Y") for d in dd]
    else:
        return dd.strftime("%d/%m/%Y")

def toYearFraction(date):
    """ converts python datetime objects into decimal/float years
    https://stackoverflow.com/a/6451892/2368167
    """
    from datetime import datetime as dt
    import time
    def sinceEpoch(date): # returns seconds since epoch
        return time.mktime(date.timetuple())
    s = sinceEpoch

    year = date.year
    startOfThisYear = dt(year=year, month=1, day=1)
    startOfNextYear = dt(year=year+1, month=1, day=1)

    yearElapsed = s(date) - s(startOfThisYear)
    yearDuration = s(startOfNextYear) - s(startOfThisYear)
    fraction = yearElapsed/yearDuration

    return date.year + fraction

def toYearFraction2(date):
    """ converts python datetime objects into decimal/float years
    https://stackoverflow.com/a/6451892/2368167

    fix Epoch issue?
    """
    from datetime import datetime as dt
    from datetime import date as dd
    import time
    def sinceEpoch(date): # returns timedelta since epoch
        return date - dd(1970,1,1)
    s = sinceEpoch

    year = date.year
    startOfThisYear = dd(year=year, month=1, day=1)
    startOfNextYear = dd(year=year+1, month=1, day=1)

    yearElapsed = s(date) - s(startOfThisYear)
    yearDuration = s(startOfNextYear) - s(startOfThisYear)
    fraction = yearElapsed.total_seconds()/yearDuration.total_seconds()

    return date.year + fraction

def year_fraction(year, month, day):
    """ https://stackoverflow.com/a/36949905/2368167
    """
    import datetime
    date = datetime.date(year, month, day)
    start = datetime.date(date.year, 1, 1).toordinal()
    year_length = datetime.date(date.year+1, 1, 1).toordinal() - start
    return date.year + float(date.toordinal() - start) / year_length

class TestEasyDate(unittest.TestCase):
    """docstring for TestEasyDate"""
    def test_year_fraction(self):
        for d,f in [
            ((2023, 1, 1), 2023.0),
            ((2023, 6, 30), 2023.493),
            ((2023, 12, 31), 2023.997),
            ((2024, 1, 1), 2024.0),
            ]:
            result = year_fraction(*d)
            self.assertAlmostEqual(result, f, places=3)

    def test_year_fraction_mid_year(self):
        """Test year_fraction for middle of the year"""
        # Test July 1st (approximately mid-year)
        result = year_fraction(2023, 7, 1)
        self.assertGreater(result, 2023.4)
        self.assertLess(result, 2023.6)

    def test_year_fraction_to_date(self):
        """ test by starting with dd/mm/yyyy, converting to a fraction, then back
        to dd/mm/yyyy; the final value should be the same as the first one """
        ds = ['2011-03-05', '2011-3-6', '04/11/2011', '5/11/2011']
        for d in ds:
            dd = _s2d(d)
            f = toYearFraction2(dd)
            d2 = year_fraction_to_date(f)
            self.assertEqual(dd, d2)

        ds = ['04/11/2011', '05/01/2011']
        for d in ds:
            dd = _s2d(d)
            f = toYearFraction2(dd)
            d2 = year_fraction_to_date_str(f)
            self.assertEqual(d, d2)

    def test_tuple_to_string(self):
        s1 = (1995,10,2)
        s2 = [1995,10,2]
        ss = [s1, s2]
        for s in ss:
            self.assertEqual('1995-10-02', date_tuple_to_string(s))
        self.assertEqual(['1995-10-02']*2, date_tuple_to_string(ss))

    def test_string_to_tuple(self):
        d1 = '2011-03-05'
        d2 = '2011-3-5'
        d3 = '05/03/2011'
        d4 = '5/3/2011'
        ds = [d1,d2,d3,d4]
        for d in ds:
            self.assertEqual((2011,3,5), date_string_to_tuple(d))
        self.assertEqual([(2011,3,5)]*4, date_string_to_tuple(ds))

    def test_string_to_date(self):
        from datetime import date
        d1 = '2011-03-05'
        d2 = '2011-3-5'
        d3 = '05/03/2011'
        d4 = '5/3/2011'
        ds = [d1,d2,d3,d4]
        for d in ds:
            self.assertEqual(date(2011,3,5), date_string_to_date(d))
        self.assertEqual([date(2011,3,5)]*4, date_string_to_date(ds))


if __name__ == '__main__':
    unittest.main(verbosity=2)
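As a quick illustration of the converters above (illustrative only, not part of the packaged files; each helper accepts either a single value or a list):

```python
# Illustrative sketch only; assumes gimu is installed.
from gimu.easy_date import (date_string_to_tuple, date_tuple_to_string,
                            year_fraction, year_fraction_to_date_str)

print(date_string_to_tuple('05/03/2011'))                # (2011, 3, 5)
print(date_string_to_tuple(['2011-03-05', '5/3/2011']))  # [(2011, 3, 5), (2011, 3, 5)]
print(date_tuple_to_string((1995, 10, 2)))               # '1995-10-02'

f = year_fraction(2023, 7, 1)        # ~2023.496, ordinal-based
print(year_fraction_to_date_str(f))  # '01/07/2023' (365.25-day approximation)
```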
gimu/geo_common.py
ADDED
@@ -0,0 +1,222 @@
from mulgrids import *
import string
import sys

def quick_enthalpy(t_or_p, ph='liq'):
    """ Return enthalpy J/kg ('liq', 'vap', or 'dif') of water at a specified
    temperature (<=500.0, in degC) or pressure (>500.0, in Pa) """
    import t2thermo
    def enth(t, p, f):
        d, u = f(t, p)
        return u + p/d
    def hlhs(t, p):
        return enth(t, p, t2thermo.cowat), enth(t, p, t2thermo.supst)
    def sat_tp(t_or_p):
        if t_or_p > 500.0:
            return t2thermo.tsat(t_or_p), t_or_p
        else:
            return t_or_p, t2thermo.sat(t_or_p)
    (hl, hs) = hlhs(*sat_tp(t_or_p))
    return {'liq': hl, 'vap': hs, 'dif': hs-hl}[ph]

class RelativePermeability:
    """ Class to calculate linear relative permeability, as used in TOUGH2 for
    the linear relative permeability model. The class has a single calculation
    method, which takes the saturation and returns the relative
    permeability. """
    def __init__(self, wj_object=None):
        if wj_object is None:
            wj_object = {
                "type": "linear",
                "liquid": [0.0, 1.0],
                "vapour": [0.0, 1.0]
            }
        self.setting = wj_object
        self.func = {
            "linear": self.linear,
        }

    def calc(self, vapour_saturation):
        """ return (kr_liquid, kr_vapour) """
        return self.func[self.setting['type']](vapour_saturation)

    def linear(self, vapour_saturation):
        liq_limits = self.setting['liquid']
        vap_limits = self.setting['vapour']
        liquid_saturation = 1.0 - vapour_saturation
        kr_liq = np.interp(liquid_saturation, liq_limits, [0., 1.], left=0.0, right=1.0)
        kr_vap = np.interp(vapour_saturation, vap_limits, [0., 1.], left=0.0, right=1.0)
        return kr_liq, kr_vap

def flowing_enthalpy(lst, wj, block):
    """ calculate the flowing enthalpy of a given block if producing from Waiwera h5

    NOTE uses values from the Waiwera h5 at the current time
    """
    phases = ['liquid', 'vapour']
    rp = RelativePermeability(wj['rock']['relative_permeability'])
    vapour_saturation = lst.element[block]['fluid_vapour_saturation']
    rel_perm = rp.calc(vapour_saturation)
    fluid = lst.element[block]
    # mobility
    mobility, sum_mobility = {}, 0.0
    for iph, phase in enumerate(phases):
        density = fluid[f'fluid_{phase}_density']
        viscosity = fluid[f'fluid_{phase}_viscosity']
        if viscosity == 0.0:
            mobility[phase] = 0.0
        else:
            mobility[phase] = rel_perm[iph] * density / viscosity
        sum_mobility += mobility[phase]
    # flow fraction
    flow_frac = {}
    for phase in phases:
        flow_frac[phase] = mobility[phase] / sum_mobility
    # enthalpy
    enthalpy = 0.0
    for phase in phases:
        enthalpy += flow_frac[phase] * fluid[f'fluid_{phase}_specific_enthalpy']
    return enthalpy

def bottomhole_pressure(depth, temperature=20.0, whp=1.0e5, division=100,
                        min_depth_interval=10.0):
    """ Calculate the pressure at the bottom of a well, given the wellhead
    pressure (whp) in pascal, the temperature in degC, the number of depth
    divisions, and the minimum depth interval in m. The pressure is
    essentially: pressure = whp + rho * G * depth, where rho is the density of
    water at the given temperature and G is the gravitational acceleration
    (9.81 m/s^2).

    What this code does is divide the depth into intervals and accumulate the
    pressure interval by interval, which allows the density calculation to use
    the actual pressure at each depth. Liquid water is fairly incompressible,
    so the result won't be very different from the simple rho*G*depth method.
    """
    import t2thermo
    # divide well depth into intervals, using the larger of the minimum
    # interval and depth/division
    interval = max(min_depth_interval, depth/float(division))
    d = 0.0
    pressure = whp
    while d < depth:
        rho, u = t2thermo.cowat(temperature, pressure)
        pressure += rho * 9.81 * interval
        d += interval
    rho, u = t2thermo.cowat(temperature, pressure)
    pressure += rho * 9.81 * (depth - d)
    return pressure

def block_depth(block, geo):
    """ Return the depth of a block in a geo (mulgrid) object; this is the
    depth of the centre of the block. This also works for a Waiwera cell if
    block is an integer.
    """
    if isinstance(block, int):
        block = geo.block_name_list[block + geo.num_atmosphere_blocks]
    lay, col = geo.layer_name(block), geo.column_name(block)
    return geo.column[col].surface - geo.block_centre(lay, col)[2]

def xyz2fit(fn):
    data = np.fromfile(fn, sep=" ")
    nrow = np.size(data) // 3
    data = data.reshape(nrow, 3)
    return data

def find_all_cols_below(geo, level, surfer_file=None):
    if surfer_file is not None: outfile = open(surfer_file, 'w')
    cols = []
    for col in geo.columnlist:
        surf = col.surface
        if surf < level:
            #print(col.name, str(surf))
            cols.append([col.centre[0], col.centre[1], surf, col.name])
            if surfer_file is not None: outfile.write(
                str(col.centre[0])+' '+str(col.centre[1])+' '+str(surf)+' '+
                col.name + '\n')
    if surfer_file is not None:
        outfile.close()
        print(' -- file: ', surfer_file, ' is written.')
    return cols

def t2_strict_name(n):
    """ convert any name into the common TOUGH style 5 character name """
    import string
    def make_number(c):
        """ change a character into a single number, e.g. 'A' or 'a' will
        become '1' """
        if len(c.strip()) == 0: return ' '
        d = c.lower().strip()[:1]
        i = string.ascii_lowercase.find(d)+1
        if i == 0: return ' '
        else: return str(i%10)
    newn = list(n.strip())
    if len(newn) <= 3:
        return "".join(newn).strip()[:5].ljust(5)
    for i in range(3, len(newn)):
        if newn[i] not in '0123456789':
            newn[i] = make_number(newn[i])
    return "".join(newn).strip()[:5].ljust(5)

def is_leap_year(y):
    if int(y)%400 == 0: return True
    elif int(y)%100 == 0: return False
    elif int(y)%4 == 0: return True
    else: return False

def days_in_month(month, leap_year=False):
    if leap_year:
        d_month = [ 31 , 29 , 31 , 30 , 31 , 30 , 31 , 31 , 30 , 31 , 30 , 31 ]
    else:
        d_month = [ 31 , 28 , 31 , 30 , 31 , 30 , 31 , 31 , 30 , 31 , 30 , 31 ]
    return d_month[month - 1]

def date2str(d, m, y):
    ds, ms, ys = str(d), str(m), str(y)
    while len(ds) < 2: ds = '0'+ds
    while len(ms) < 2: ms = '0'+ms
    while len(ys) < 4: ys = '0'+ys
    return ds+'/'+ms+'/'+ys

def date2num(enddate):

    d, m, y = enddate.split('/')
    months = [ '01','02','03','04','05','06','07','08','09','10','11','12' ]
    d_month = [ 31 , 28 , 31 , 30 , 31 , 30 , 31 , 31 , 30 , 31 , 30 , 31 ]
    d_month_l = [ 31 , 29 , 31 , 30 , 31 , 30 , 31 , 31 , 30 , 31 , 30 , 31 ]
    acum_ds = [sum(d_month[:i]) for i in range(12)]
    acum_ds_l = [sum(d_month_l[:i]) for i in range(12)]
    ad_m = dict(zip(months, acum_ds)) # accumulated days before this month
    ad_m_l = dict(zip(months, acum_ds_l)) # accumulated days before this month
    ds_m = dict(zip(months, d_month)) # maximum days in this month
    ds_m_l = dict(zip(months, d_month_l)) # maximum days in this month
    # check and process d and m, this depends on data
    if m not in months:
        print(' Error, unable to convert ', enddate, ' to numeric format. check month.')
        sys.exit()
    if is_leap_year(y):
        if int(d) not in range(1, ds_m_l[m]+1):
            print(' Error, unable to convert ', enddate, ' to numeric format. check day.')
            sys.exit()
        num = float(y) + ((float(d) + float(ad_m_l[m]))/float(sum(d_month_l)))
    else:
        if int(d) not in range(1, ds_m[m]+1):
            print(' Error, unable to convert ', enddate, ' to numeric format. check day.')
            sys.exit()
        num = float(y) + ((float(d) + float(ad_m[m]))/float(sum(d_month)))
    return num


def identifier(x, chars='abcdefghijklmnopqrstuvwxyz', width=5):
    """ creates a character-based unique identifier from a given integer;
    both chars and width are customisable, and the output is space filled and
    right aligned """
    output = []
    base = len(chars)
    while x:
        output.append(chars[x % base])
        x //= base
    final = ''.join(reversed(output))
    if len(final) > width:
        raise Exception('identifier() failed, not enough width.')
    return ('%' + str(width) + 's') % final
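A minimal sketch of how a couple of the helpers above might be called (illustrative only, not part of the packaged files; it assumes PyTOUGH's t2thermo is importable, as geo_common itself requires):

```python
# Illustrative sketch only; assumes PyTOUGH (t2thermo) is available.
from gimu.geo_common import bottomhole_pressure, quick_enthalpy, is_leap_year

# Pressure at the bottom of a 1000 m column of 20 degC water with a 1 bar
# wellhead pressure; close to the simple whp + rho*g*depth estimate (~9.9 MPa).
print(bottomhole_pressure(1000.0, temperature=20.0, whp=1.0e5))

# Saturated liquid enthalpy at 200 degC (arguments <= 500.0 are taken as degC).
print(quick_enthalpy(200.0, ph='liq'))

print(is_leap_year(2024))  # True
```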
gimu/project_cli.py
ADDED
@@ -0,0 +1,56 @@
import importlib.metadata
from pathlib import Path
from pprint import pprint
import sys

try:
    import tomllib  # Python ≥3.11
except ImportError:
    import tomli as tomllib  # Python <3.11

def get_distribution():
    top_pkg = __package__.split('.')[0] if __package__ else None
    if top_pkg:
        try:
            return importlib.metadata.distribution(top_pkg)
        except importlib.metadata.PackageNotFoundError:
            pass
    return None

def get_project_meta():
    """ return pyproject.toml [project] """
    current_file = Path(__file__).resolve()
    for parent in [current_file.parent] + list(current_file.parents):
        pyproject = parent / "pyproject.toml"
        if pyproject.exists():
            with open(pyproject, "rb") as f:
                data = tomllib.load(f)
            return data.get("project", {})
    return None

def show_help():
    dist = get_distribution()
    name = dist.metadata['Name']
    version = dist.metadata['Version']
    description = dist.metadata['Summary']
    commands = [ep.name for ep in dist.entry_points]

    # meta = get_project_meta()
    # name = meta['name']
    # description = meta['description']

    msg = '\n'.join([
        f"",
        f" {name} - {description}",
        f"",
        f" Version: {version}",
        f"",
        f" Available commands:",
    ] + ["  " + cmd for cmd in sorted(commands)] + [
        f"",
        f" e.g.:",
        f"  save2incon model_1.save model_2.incon",
        f"",
    ])
    print(msg)
gimu/save2incon.py
ADDED
@@ -0,0 +1,30 @@
""" use: save2incon a.save b.incon [-reset_kcyc] [-reset_porosity] """

from sys import *
from t2incons import *

def main():
    if len(argv) < 3:
        print('use: save2incon a.save b.incon [-reset_kcyc] [-reset_porosity]')
        exit(1)

    readFrom = argv[1]
    saveTo = argv[2]

    if len(argv) > 3:
        opts = argv[3:]
    else:
        opts = []
    inc = t2incon(readFrom)

    for opt in opts:
        if opt == '-reset_kcyc':
            inc.timing['kcyc'] = 1
            inc.timing['iter'] = 1
        if opt == '-reset_porosity':
            inc.porosity = None

    inc.write(saveTo)

if __name__ == '__main__':
    main()
gimu/t2listingh5.py
ADDED
@@ -0,0 +1,375 @@
"""
Copyright 2013, 2014 University of Auckland.

This file is part of TIM (Tim Isn't Mulgraph).

TIM is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

TIM is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with TIM. If not, see <http://www.gnu.org/licenses/>.
"""

"""
Wrap AUTOUGH2's hdf5 output as t2listing
"""

import h5py
import numpy as np

from t2listing import *
# from t2listing import listingtable
from mulgrids import fix_blockname, unfix_blockname

from pprint import pprint as pp
import unittest

class h5table(listingtable):
    """ Class emulating the listingtable class in PyTOUGH.

    Class for table in listing file, with values addressable by index (0-based)
    or row name, and column name: e.g. table[i] returns the ith row (as a
    dictionary), table[rowname] returns the row with the specified name, and
    table[colname] returns the column with the specified name.

    !!! IMPORTANT !!!
    .index needs to be set whenever the listing object changes time index
    """
    def __init__(self, cols, rows, h5_table,
                 num_keys = 1, allow_reverse_keys = False,
                 index = 0):
        """ The row_format parameter is a dictionary with three keys,
        'key','index' and 'values'. These contain the positions, in each row of
        the table, of the start of the keys, index and data fields. The
        row_line parameter is a list containing, for each row of the table, the
        number of lines before it in the listing file, from the start of the
        table. This is needed for TOUGH2_MP listing files, in which the rows
        are not in index order and can also be duplicated.

        h5_table should be the table within the h5 file.
        """
        self.column_name = cols
        self.row_name = rows
        self.num_keys = num_keys
        self.allow_reverse_keys = allow_reverse_keys
        self._col = dict([(c,i) for i,c in enumerate(cols)])
        self._row = dict([(r,i) for i,r in enumerate(rows)])
        self._h5_table = h5_table
        self._index = index # time index

    def __repr__(self):
        # h5 table lst._h5['element'][time index, eleme index, field index]
        return repr(self.column_name) + '\n' + repr(self._h5_table[self._index, :, :])

    def __getitem__(self, key):
        if isinstance(key, int):
            return dict(zip(['key'] + self.column_name, [self.row_name[key]] +
                            list(self._h5_table[self._index, key, :])))
        else:
            if key in self.column_name:
                return self._h5_table[self._index, :, self._col[key]]
            elif key in self.row_name:
                rowindex = self._row[key]
                return dict(zip(['key'] + self.column_name,
                                [self.row_name[rowindex]] +
                                list(self._h5_table[self._index, rowindex, :])))
            elif len(key) > 1 and self.allow_reverse_keys:
                revkey = key[::-1] # try reversed key for multi-key tables
                if revkey in self.row_name:
                    rowindex = self._row[revkey]
                    return dict(zip(['key'] + self.column_name,
                                    [self.row_name[rowindex][::-1]] +
                                    list(-self._h5_table[self._index, rowindex, :])))
            else: return None

    def __add__(self, other):
        raise NotImplementedError
        """Adds two listing tables together."""
        if self.column_name == other.column_name and self.row_name == other.row_name:
            from copy import copy
            result = listingtable(copy(self.column_name), copy(self.row_name), num_keys = self.num_keys,
                                  allow_reverse_keys = self.allow_reverse_keys)
            result._data = self._data + other._data
            return result
        else: raise Exception("Incompatible tables: can't be added together.")

    def __sub__(self, other):
        raise NotImplementedError
        """Subtracts one listing table from another."""
        if self.column_name == other.column_name and self.row_name == other.row_name:
            from copy import copy
            result = listingtable(copy(self.column_name), copy(self.row_name), num_keys = self.num_keys,
                                  allow_reverse_keys = self.allow_reverse_keys)
            result._data = self._data - other._data
            return result
        else: raise Exception("Incompatible tables: can't be subtracted.")


class t2listingh5(object):
    def __init__(self, filename):
        """
        """
        self._table = {}
        self._h5 = h5py.File(filename, 'r')
        self.filename = filename
        self.setup()
        self.simulator = 'AUTOUGH2_H5'

    def setup(self):
        self.fulltimes = self._h5['fulltimes']['TIME']
        self.num_fulltimes = len(self.fulltimes)
        self._index = 0 # this is the internal one
        ### element table
        if 'element' in self._h5:
            cols = [str(x.decode("utf-8")) for x in self._h5['element_fields']]
            blocks = [fix_blockname(str(x.decode("utf-8"))) for x in self._h5['element_names']]
            table = h5table(cols, blocks, self._h5['element'], num_keys=1)
            self._table['element'] = table
        ### connection table
        if 'connection' in self._h5:
            cols = [str(x.decode("utf-8")) for x in self._h5['connection_fields'][:]]
            b1 = [fix_blockname(str(x.decode("utf-8"))) for x in self._h5['connection_names1'][:]]
            b2 = [fix_blockname(str(x.decode("utf-8"))) for x in self._h5['connection_names2'][:]]
            table = h5table(cols, [(b1,b2) for b1, b2 in zip(b1,b2)], self._h5['connection'], num_keys=2,
                            allow_reverse_keys=True)
            self._table['connection'] = table
        ### generation table
        if 'generation' in self._h5:
            cols = [str(x.decode("utf-8")) for x in self._h5['generation_fields'][:]]
            blocks = [fix_blockname(str(x.decode("utf-8"))) for x in self._h5['generation_eleme'][:]]
            geners = [fix_blockname(str(x.decode("utf-8"))) for x in self._h5['generation_names'][:]]
            table = h5table(cols, [(b,g) for b,g in zip(blocks,geners)], self._h5['generation'], num_keys=1)
            self._table['generation'] = table
        # makes tables in self._table accessible as attributes
        for key,table in self._table.items():
            setattr(self, key, table)
        # have to get the first table ready
        self.index = 0

    def history(self, selection, short=False, start_datetime=None):
        """
        short is not used at the moment
        """
        if isinstance(selection, tuple):
            selection = [selection]
        results = []
        for tbl,b,cname in selection:
            table_name, bi, fieldi = self.selection_index(tbl, b, cname)
            if bi < 0:
                bi = len(self.block_name_index) + bi
            ### important to convert cell index
            ys = self._h5[table_name][:,bi,fieldi]
            results.append((self.fulltimes, ys))
        if len(results) == 1: results = results[0]
        return results

    def selection_index(self, tbl, b, field):
        dname = {
            'e': 'element',
            'c': 'connection',
            'g': 'generation',
        }
        def eleme_index(b):
            if isinstance(b, str):
                bi = self.block_name_index[b]
            elif isinstance(b, int):
                bi = b
            else:
                raise Exception('.history() block must be an int or str: %s (%s)' % (str(b),str(type(b))))
            return bi
        def conne_index(b):
            if isinstance(b, tuple):
                bi = self.connection_name_index[(str(b[0]), str(b[1]))]
            elif isinstance(b, int):
                bi = b
            else:
                raise Exception('.history() conne must be an int or (str,str): %s (%s)' % (str(b),str(type(b))))
            return bi
        def gener_index(b):
            if isinstance(b, tuple):
                bi = self.generation_name_index[(str(b[0]), str(b[1]))]
            elif isinstance(b, int):
                bi = b
            else:
                raise Exception('.history() gener must be an int or (str,str): %s (%s)' % (str(b),str(type(b))))
            return bi
        iname = {
            'e': eleme_index,
            'c': conne_index,
            'g': gener_index,
        }
        if not hasattr(self, 'field_index'):
            self.field_index = {}
            for n,nn in dname.items():
                for i,ff in enumerate(self._h5[nn + '_fields']):
                    self.field_index[(n,ff.decode("utf-8"))] = i
        return dname[tbl], iname[tbl](b), self.field_index[(tbl,field)]

    @property
    def block_name_index(self):
        if not hasattr(self, '_block_name_index'):
            self._block_name_index = {}
            # self._block_name_index.update({str(e):i for i,e in enumerate(self._h5['element_names'])})
            self._block_name_index.update({fix_blockname(str(e.decode("utf-8"))):i for i,e in enumerate(self._h5['element_names'])})
        return self._block_name_index

    @property
    def connection_name_index(self):
        if not hasattr(self, '_connection_name_index'):
            a = self._h5['connection_names1']
            b = self._h5['connection_names2']
            self._connection_name_index = {}
            self._connection_name_index.update({(fix_blockname(str(x[0].decode("utf-8"))),fix_blockname(str(x[1].decode("utf-8")))):i for i,x in enumerate(zip(a,b))})
        return self._connection_name_index

    @property
    def generation_name_index(self):
        if not hasattr(self, '_generation_name_index'):
            a = self._h5['generation_eleme']
            b = self._h5['generation_names']
            self._generation_name_index = {}
            # self._generation_name_index.update({(str(x[0]),str(x[1])):i for i,x in enumerate(zip(a,b))})
            # self._generation_name_index.update({(fix_blockname(str(x[0])),(str(x[1]))):i for i,x in enumerate(zip(a,b))})
            # self._generation_name_index.update({((str(x[0])),fix_blockname(str(x[1]))):i for i,x in enumerate(zip(a,b))})
            self._generation_name_index.update({(fix_blockname(str(x[0].decode("utf-8"))),fix_blockname(str(x[1].decode("utf-8")))):i for i,x in enumerate(zip(a,b))})
        return self._generation_name_index


    def read_tables(self):
        """ copy values from h5 into listingtables, with slicing """
        if 'element' in self.table_names:
            self.element._index = self.index
            # for i,cname in enumerate(self.element.column_name):
            #     self.element._data[:,i] = self._h5['element'][self._index, :, i]
        if 'connection' in self.table_names:
            self.connection._index = self.index
            # for i,cname in enumerate(self.connection.column_name):
            #     self.connection._data[:,i] = self._h5['connection'][self._index, :, i]
        if 'generation' in self.table_names:
            self.generation._index = self.index
            # for i,cname in enumerate(self.generation.column_name):
            #     self.generation._data[:,i] = self._h5['generation'][self._index, :, i]

    def get_index(self): return self._index
    def set_index(self, i):
        self._index = i
        if self._index < 0: self._index += self.num_fulltimes
        self.read_tables()
    index = property(get_index, set_index)

    def first(self): self.index = 0
    def last(self): self.index = -1
    def next(self):
        """Find and read next set of results; returns false if at end of listing"""
        more = self.index < self.num_fulltimes - 1
        if more: self.index += 1
        return more
    def prev(self):
        """Find and read previous set of results; returns false if at start of listing"""
        more = self.index > 0
        if more: self.index -= 1
        return more

    def get_table_names(self):
        return sorted(self._table.keys())
    table_names = property(get_table_names)

    def get_time(self): return self.fulltimes[self.index]
    def set_time(self, t):
        if t < self.fulltimes[0]: self.index = 0
        elif t > self.fulltimes[-1]: self.index = -1
        else:
            dt = np.abs(self.fulltimes - t)
            self.index = np.argmin(dt)
    time = property(get_time, set_time)


class test_fivespot(unittest.TestCase):
    def setUp(self):
        self.lst_h = t2listingh5('fivespot.h5')
        self.lst_t = t2listing('expected.listing')

    def test_match_tables(self):
        # check row and column names
        def check_names(tbl):
            tbl_h = getattr(self.lst_h, tbl)
            tbl_t = getattr(self.lst_t, tbl)
            self.assertEqual(tbl_h.row_name, tbl_t.row_name)
            for i,field in enumerate(tbl_h.column_name):
                if tbl_t.column_name[i] in field:
                    match = True
                else:
                    match = False
                self.assertEqual(match, True, '%s: column name mismatch' % tbl)
        for tbl in ['element', 'connection', 'generation']:
            check_names(tbl)
        # check table values, also after changing index
        def check_tables():
            rtol = 1.0e-5 # roughly 4~5 significant digits from text listing file
            for field in ['Temperature', 'Pressure', 'Vapour saturation']:
                np.testing.assert_allclose(self.lst_h.element[field],
                                           self.lst_t.element[field], rtol=rtol)
            for field in ['Mass flow', 'Enthalpy', 'Heat flow']:
                np.testing.assert_allclose(self.lst_h.connection[field],
                                           self.lst_t.connection[field], rtol=rtol)
            for field in ['Generation rate', 'Enthalpy']:
                np.testing.assert_allclose(self.lst_h.generation[field],
                                           self.lst_t.generation[field], rtol=rtol)
        check_tables()
        self.lst_h.last(); self.lst_t.last()
        check_tables()
        self.lst_h.first(); self.lst_t.first()
        check_tables()

        # check table with element index
        def check_table_by_index(i):
            tbl_h = self.lst_h.element[i]
            tbl_t = self.lst_t.element[i]
            for k in tbl_h.keys():
                if k == 'key':
                    self.assertEqual(tbl_h[k], tbl_t[k])
                else:
                    np.testing.assert_approx_equal(tbl_h[k], tbl_t[k], significant=5)
        for i in range(len(self.lst_h.element.row_name)):
            check_table_by_index(i)

        # check table with element name
        def check_table_by_name(b):
            tbl_h = self.lst_h.element[b]
            tbl_t = self.lst_t.element[b]
            for k in tbl_h.keys():
                if k == 'key':
                    self.assertEqual(tbl_h[k], tbl_t[k])
                else:
                    np.testing.assert_approx_equal(tbl_h[k], tbl_t[k], significant=5)
        for b in self.lst_h.element.row_name:
            check_table_by_name(b)

    def test_match_history(self):
        self.assertEqual(self.lst_h.num_fulltimes, self.lst_t.num_fulltimes)
        np.testing.assert_allclose(self.lst_h.fulltimes, self.lst_t.fulltimes)
        rtol = 1.0e-5
        # it seems all field names are identical with fivespot's EOS
        for sel in [('e', 'AA106', 'Pressure'),
                    ('e', 'AA 66', 'Temperature'),
                    ('c', ('AA 66', 'AA 67'), 'Mass flow'),
                    ('g', ('AA 11', 'PRO 1'), 'Generation rate'),
                    ('g', ('AA 11', 'PRO 1'), 'Enthalpy'),
                    ]:
            xs_h, ys_h = self.lst_h.history(sel)
            xs_t, ys_t = self.lst_t.history(sel)
            np.testing.assert_allclose(xs_h, xs_t, rtol=rtol)
            np.testing.assert_allclose(ys_h, ys_t, rtol=rtol)


if __name__ == '__main__':
    unittest.main(verbosity=2)
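For orientation, a minimal sketch of driving the wrapper above (illustrative only, not part of the packaged files; 'output.h5', the field names, and the block key are hypothetical placeholders for an actual AUTOUGH2 HDF5 listing):

```python
# Illustrative sketch only; file, field, and block names are placeholders.
from gimu.t2listingh5 import t2listingh5

lst = t2listingh5('output.h5')
print(lst.table_names)    # e.g. ['connection', 'element', 'generation']
print(lst.fulltimes[:5])  # output times

lst.last()                         # move to the final set of results
print(lst.element['Temperature'])  # a column, addressed by field name
print(lst.element[0])              # the first row, as a dict keyed by field

# Time histories take (table, key, field) tuples, with 'e'/'c'/'g' selecting
# the element/connection/generation tables.
times, temps = lst.history(('e', 'AA 11', 'Temperature'))
```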
gimu/waiwera_copy.py
ADDED
@@ -0,0 +1,43 @@

import json
import sys
from pathlib import Path

def main():
    """
    Duplicates a JSON file, adding a postfix to the filename and updating
    an internal value.
    """
    if len(sys.argv) != 3:
        print("Usage: make_waiwera_copy <input.json> <postfix>")
        sys.exit(1)

    input_path = Path(sys.argv[1])
    postfix = sys.argv[2]

    if not input_path.exists() or input_path.suffix != '.json':
        print(f"Error: Input file '{input_path}' does not exist or is not a .json file.")
        sys.exit(1)

    # Construct output path
    output_path = input_path.with_name(f"{input_path.stem}{postfix}{input_path.suffix}")

    # Read and modify JSON content
    with open(input_path, 'r') as f:
        data = json.load(f)

    if "output" in data and "filename" in data["output"]:
        h5_path = Path(data["output"]["filename"])
        new_h5_filename = f"{h5_path.stem}{postfix}{h5_path.suffix}"
        data["output"]["filename"] = new_h5_filename
    else:
        print("Warning: ['output']['filename'] key not found in the JSON file. No change made to content.")

    # Write new JSON file
    with open(output_path, 'w') as f:
        json.dump(data, f, indent=2)

    print(f"Successfully created '{output_path}'")

if __name__ == "__main__":
    main()
gimu-0.1.0.dist-info/METADATA
ADDED
@@ -0,0 +1,84 @@
Metadata-Version: 2.4
Name: gimu
Version: 0.1.0
Summary: A toolkit and python library for modelling at Geothermal Institute, University of Auckland.
Project-URL: Documentation, https://github.com/cyeh015/gimu/blob/main/README.md
Project-URL: Issues, https://github.com/cyeh015/gimu/issues
Project-URL: Source, https://github.com/cyeh015/gimu
Author-email: Angus Yeh <a.yeh@auckland.ac.nz>
License-Expression: MIT
License-File: LICENSE.txt
Classifier: Development Status :: 4 - Beta
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Requires-Python: >=3.8
Requires-Dist: h5py
Requires-Dist: matplotlib
Requires-Dist: numpy
Requires-Dist: pint
Requires-Dist: pytough
Requires-Dist: scipy
Description-Content-Type: text/markdown

# gimu

A minimal toolkit and python library for modelling at Geothermal Institute, University of Auckland.

-----

## Table of Contents

- [Installation](#installation)
- [Commands](#commands)
- [Related Packages](#related-packages)
- [License](#license)
- [Developer](#developer)

## Installation

```console
pip install -U gimu
```

If you use conda, use the supplied `environment.yml` to create `py311-gimu`. This installs packages using conda as much as possible before installing packages from PyPI.

```console
conda env create -f environment.yml
```

## Commands

### Convert SAVE file to INCON file

```console
save2incon a.save b.incon
```

NOTE: this command is used during the scenario run.

## License

`gimu` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license.

## Developer

### Build and Publish

To bump the version, create a tag, e.g. `v0.1.0`.

The PyPI token is expected in `~/.pypirc`.

Publish to PyPI:

```console
hatch build
hatch publish
```

### TODO
gimu-0.1.0.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
gimu/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
gimu/config.py,sha256=W3dNfdJzsszPySx7T4ereIOR76mPQuBWq5eNp79WEQk,3089
gimu/easy_date.py,sha256=Aq7RW94cdvEcrJ4sXq7aG91KMqZyNFCQywE6scyiQzk,7162
gimu/geo_common.py,sha256=7U5hmT4cCuFBAx5cm_pxNu2owz4wNrQGW0zpINHG7fU,8678
gimu/project_cli.py,sha256=GBi6SHuRzcsuTgAfZ58oOP8RAK1jw7KN7KdypyAGqfU,1541
gimu/save2incon.py,sha256=mAVnTqkFSDXSc5Nt0U6M6_5zFSVyX21CuchXxsUsv3k,630
gimu/t2listingh5.py,sha256=sepCah3lV6etWr-aJ_Tc9d3Rubio7DitEn0SQP2oJ2E,16628
gimu/waiwera_copy.py,sha256=G84UQWbVq--cfkETCs_jpcquTL-x5SftAFNNNs3GfJs,1286
gimu-0.1.0.dist-info/METADATA,sha256=2Yrir90xi_3GIAsEjLJGp0HiWf-jK8BUtwwQ5ZZ0XFc,2065
gimu-0.1.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
gimu-0.1.0.dist-info/entry_points.txt,sha256=FbtLXv_XXn6Wh2DXRsqJpTp85lxEvJiE36TTUTa0e1o,129
gimu-0.1.0.dist-info/licenses/LICENSE.txt,sha256=pGRJUqv1E2xL4lfPHMkmmqp868-cgtiVfDzTWDtcUiM,1121
gimu-0.1.0.dist-info/RECORD,,
gimu-0.1.0.dist-info/licenses/LICENSE.txt
ADDED
@@ -0,0 +1,9 @@
MIT License

Copyright (c) 2025-present Angus Yeh, University of Auckland <a.yeh@auckland.ac.nz>

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.