goPEST 0.0.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gopest/obs_def.py ADDED
@@ -0,0 +1,2086 @@
1
+ """Definition file for observation data types"""
2
+
3
+ # each obsType should have TWO routines defined: _fielddata and _modelresult appended
4
+ # _fielddata(geo,dat,userEntry,obsDefault)
5
+ # _modelresult(geo,dat,lst,userEntry,obsDefault)
6
+
7
+ # userEntry comes from the input file (a list of observation specifications)
8
+ # each has obsType,obsInfo,fieldDataFile,customFilter,obsDefault
9
+ #
10
+ # obsInfo and fieldDataFile unique for each obs entry
11
+ # customFilter and obsDefault can be set or defaulted, mostly passed down
12
+ # between obs entries unless modified.
13
+ #
14
+ # obsInfo is a list of items, specified as first line of each obs entry
15
+ # it can be anything, eg. [string of name, number of time]
16
+ # fieldDataFile is a single string filename that the field-data obs will be read from
17
+ #
18
+ # customFilter is a string that can be evaluated by the code to eg. limit range of
19
+ # data etc
20
+ # obsDefault is a PestObservData object, the .OBSNME is the basename that should be
21
+ # used for all created obs, other properties used as default.
22
+ #
23
+ # Each Observation is specified like this:
24
+ # [Obs]
25
+ # 1, 2, 3, 4, 5
26
+ # a_file_name.dat
27
+ #
28
+ # [Obs]
29
+ # 'abc'
30
+ # a_file_name.dat
31
+ # another_file.dat
32
+ #
33
+ # the first line (obsInfo) is always parsed by python, either as a list or a single value (of any type)
34
+ # the second and following line (fieldDataFile) are processed one by one, and will create different user entries.
35
+
36
+
37
+
38
+ # AY, May 2015
39
+ #
40
+ # I have done something major to allow the more flexible ways of specifying each observation.
41
+
42
from gopest.common import config as cfg

# Pick the simulator flavour from the goPEST configuration: any executable
# name containing 'waiwera' selects the Waiwera field names below, anything
# else is treated as AUTOUGH2 ('aut2').
if 'waiwera' in cfg['simulator']['executable']:
    sim = 'waiwera'
else:
    sim = 'aut2'
# Simulator-specific names of the listing-table fields that the observation
# extractors below look up, keyed by a generic short name.  Only the table
# for the detected simulator is kept.
FIELD = {
    'aut2': {
        'temp': 'Temperature',
        'pres': 'Pressure',
        'pco2': 'CO2 partial pressure',
        'rate': 'Generation rate',
        'enth': 'Enthalpy',
    },
    'waiwera': {
        'temp': 'fluid_temperature',
        'pres': 'fluid_pressure',
        'pco2': 'fluid_CO2_partial_pressure',
        'rate': 'source_rate',
        'enth': 'source_enthalpy',
    },
}[sim]
64
+
65
+
66
# When True, batch plot entries also carry the raw (unfiltered) field data
# series alongside the filtered/interpolated observation points.
PLOT_RAW_FIELD_DATA = True

# Two-letter codes for each observation type; used as the leading part of
# generated PEST observation names (see unique_obs_name()).
shortNames = {
    'Ex' : 'external', # get obs value(s) from external source, thru json file
    'En' : 'enthalpy',
    'Ej' : 'enthalpy_json',
    'Eb' : 'boiling',
    'Bj' : 'boiling_json',
    'Pr' : 'pressure',
    'Pw' : 'pressure_by_well',
    'Pb' : 'pressure_block_average',
    'Pj' : 'pressure_block_average_json',
    'Tw' : 'temperature',
    'Th' : 'temperature_thickness',
    'Tj' : 'temperature_thickness_json',
    'Ti' : 'temp_interp_thickness_json',
    'Tb' : 'blocktemperature',
    'Hf' : 'heatflow',
    'Hm' : 'heatflowminimum',
    'Uf' : 'totalupflow',
    'Ht' : 'totalheat',
    'Se' : 'target_time',
    }

# this for unique obs name
# Module-level counter of observations generated per base name, so each
# final name 'XX_YYYYY_NNNN' gets a unique 4-digit suffix.
obsBaseNameCount = {}
92
+
93
def unique_obs_name(type_name, base):
    """ Form a unique observation name. It will usually endup like
    XX_YYYYY_0012. XX is shortNames[type_name], YYYYY is based on base and
    trimmed to 5 chars long.

    PEST max length of obs name is 20 chars. If type_name is not one of the
    values in shortNames, it will be used directly after trim to length of two.
    TODO: it's a bit ugly here, work on it!
    """
    from gopest.common import private_cleanup_name

    # reverse-lookup the two-letter code for this type name
    prefix = type_name
    for code, long_name in shortNames.items():
        if long_name == type_name:
            prefix = code
    if prefix == type_name:
        # unknown type: fall back to the first two characters
        prefix = type_name[:2]

    stem = prefix + '_' + private_cleanup_name(base)[:5]
    count = obsBaseNameCount.get(stem, 0) + 1
    obsBaseNameCount[stem] = count
    if count > 9999:
        raise Exception("Time to improve unique_obs_name()!")
    return stem + '_' + ('%04d' % count)
119
+
120
def _matchInputGeners(dat, reg_exp_list, gener_types):
    """ return a list of (name matched) actual generators (objects)

    Matches is done using a list of re objects (reg_exp_list). And the gener
    must have type in the specified type list. Fixed/unfixed names will be
    dealt with properly.
    """
    gs = []
    from mulgrids import unfix_blockname
    for g in dat.generatorlist:
        for r in reg_exp_list:
            if r.match(g.name) or r.match(unfix_blockname(g.name)):
                if g.type in gener_types:
                    gs.append(g)
                # already know this GENER is included, check next gener.
                # The break was missing: without it a gener whose name
                # matches several patterns was appended once per pattern
                # (cf. the same loop in totalupflow_modelresult, which breaks).
                break
    return gs
136
+
137
def external_fielddata(geo, dat, userEntry):
    """ This observation type is designed for observation value(s) that is not
    extractable from the .listing file that goPESTobs reads. Hence user is
    responsible for getting the values externally, and then stores it in a json
    file for goPESTobs to read back in.

    [Obs]
    filename.json  # first line of entry is the json file storing the value(s)
    'data_key'     # the key name used to access the json file
    140.0          # obs value (a single float)

    [Obs]
    abc.json
    'data_name'
    5.0            # a list of values, len() must match from json
    7.0
    9.0

    [Obs]
    abc.json
    'data_name'
    1., 5.         # lines of tuple of two values
    2., 7.
    3., 9.
    # data from json file will be interp() to match these

    The motivation for this is the predictive run for Bacman case, where
    results from a second .listing file is required.  Instead of making
    goPESTobs read multiple listing files, the extraction is done externally.

    Returns a list of observation objects cloned from userEntry.obsDefault,
    with unique OBSNME and OBSVAL filled in.
    """
    from copy import deepcopy

    obsInfo = userEntry.obsInfo
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    key = eval(obsInfo[1]) # usually use string as data key

    obses = []
    if len(obsInfo) == 3:
        ### single value
        expected = eval(obsInfo[2])
        # Bug fix: the original condition
        #   `not isinstance(expected, float) and isinstance(expected, int)`
        # raised exactly when expected was an int (a value the multi-line
        # branch accepts) and let non-numeric values through.  The intent
        # is to reject anything that is not a number.
        if not isinstance(expected, (int, float)):
            raise Exception
        obs = deepcopy(obsDefault)
        obs.OBSNME = unique_obs_name(obs.OBSNME, key)
        obs.OBSVAL = float(expected)
        obses.append(obs)
    elif len(obsInfo) > 3:
        expected = [eval(line) for line in obsInfo[2:]]
        if isinstance(expected[0], tuple):
            # list of (x, y) tuples: one obs per y value
            for x, y in expected:
                obs = deepcopy(obsDefault)
                obs.OBSNME = unique_obs_name(obs.OBSNME, key)
                obs.OBSVAL = y
                obses.append(obs)
        elif isinstance(expected[0], (float, int)):
            # plain list of values: one obs per value
            for y in expected:
                obs = deepcopy(obsDefault)
                obs.OBSNME = unique_obs_name(obs.OBSNME, key)
                obs.OBSVAL = y
                obses.append(obs)
        else:
            raise Exception
    return obses
212
+
213
def external_modelresult(geo, dat, lst, userEntry):
    """Read externally-computed model results back from the json file named
    on the first obsInfo line, under the key given on the second line, and
    return them as a flat list of floats matching the field-data entries."""
    import json
    import numpy as np

    info = userEntry.obsInfo
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault

    with open(info[0], 'r') as fh:
        data = json.load(fh)
    key = eval(info[1]) # usually use string as data key

    values = []
    if len(info) == 3:
        ### single value: must be numeric in the json store
        if not isinstance(data[key], float) and not isinstance(data[key], int):
            raise Exception
        values.append(float(data[key]))
    elif len(info) > 3:
        parsed = [eval(ln) for ln in info[2:]]
        if isinstance(parsed[0], tuple):
            ### x-y values: interpolate json (xs, ys) onto the requested xs
            xs, ys = data[key][0], data[key][1]
            if len(xs) != len(ys):
                msg = '%s [%s] does not not have matching xs,ys: ' % (info[0], key)
                msg += '%i != %i' % (len(xs), len(ys))
                raise Exception(msg)
            sample_x = [float(p[0]) for p in parsed]
            values += list(np.interp(sample_x, xs, ys))
        elif isinstance(parsed[0], (float, int)):
            ### list of values: length must agree with the spec
            if not isinstance(data[key], list) or len(data[key]) != len(parsed):
                msg = '%s [%s] does not match dimension of expected: ' % (info[0], key)
                msg += str(parsed)
                raise Exception(msg)
            values += [float(v) for v in data[key]]
        else:
            raise Exception
    return values
251
+
252
def target_time_fielddata(geo, dat, userEntry):
    """Create a single observation whose target value is the simulation end
    time given (as an expression) on the first obsInfo line."""
    from copy import deepcopy

    target = float(eval(userEntry.obsInfo[0]))
    obs = deepcopy(userEntry.obsDefault)
    obs.OBSNME = unique_obs_name(obs.OBSNME, 'Se')
    obs.OBSVAL = target
    return [obs]
266
+
267
def target_time_modelresult(geo, dat, lst, userEntry):
    """Return the final simulated time recorded in the listing file."""
    final_time = lst.fulltimes[-1]
    return [final_time]
269
+
270
def totalheat_fielddata(geo,dat,userEntry):
    """ Extracting the total heat from specified GENERs (in dat/input file) as
    observation.

    [ObservationType]
    totalheat

    [Defaults]
    OBSNME = 'Heat'
    OBGNME = 'HtTotal'

    # !!! Note here uses all upper cases, don't touch other ObservationTypes
    [Obs]
    2000.00
    'abc99'
    'bbb99'
    'ccc99'

    # here it means an observation that adds up all heat in HEAT geners 'abc99',
    # 'bbb99', 'ccc99' (from data/input file) and the target of this sum is
    # 2000.0 J/s
    """
    obsInfo = userEntry.obsInfo
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault

    # first line is the target total heat value (an evaluatable expression)
    expected_value = float(eval(obsInfo[0]))

    # check if entry matches anything
    import re
    res = [re.compile(eval(line)) for line in userEntry.obsInfo[1:]]
    gs = _matchInputGeners(dat, res, ['HEAT'])
    if len(gs) == 0:
        name = "'%s'" % ("','".join([r.pattern for r in res]))
        raise Exception("Specified gener names does not match any geners: %s" % name)

    # append '_' to the name stem when more than one gener pattern is given
    # — presumably to mark aggregated observations; TODO confirm intent
    if len(obsInfo) > 2:
        ap = '_'
    else:
        ap = ''

    # a single observation represents the sum over all matched geners
    obses = []
    from copy import deepcopy
    obs = deepcopy(obsDefault)
    obs.OBSNME = unique_obs_name(obs.OBSNME, eval(obsInfo[1]) + ap)
    obs.OBSVAL = float(expected_value)
    obses.append(obs)
    return obses
318
+
319
def totalheat_modelresult(geo, dat, lst, userEntry):
    """Sum the generation rate (gx) of every HEAT gener whose name matches
    one of the regex patterns given on the obsInfo lines after the first."""
    import re
    patterns = [re.compile(eval(ln)) for ln in userEntry.obsInfo[1:]]
    matched = _matchInputGeners(dat, patterns, ['HEAT'])
    total = float(sum(g.gx for g in matched))
    return [total]
326
+
327
def totalupflow_fielddata(geo,dat,userEntry):
    """Create a single observation whose target is the total mass upflow
    (first obsInfo line); the remaining lines are gener-name patterns used
    by totalupflow_modelresult to sum the matching MASS/COM1 geners."""
    obsInfo = userEntry.obsInfo
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault

    # first line is the target total upflow (an evaluatable expression)
    expected_value = float(eval(obsInfo[0]))

    # append '_' to the name stem when more than one gener pattern is given
    # — presumably to mark aggregated observations; TODO confirm intent
    if len(obsInfo) > 2:
        ap = '_'
    else:
        ap = ''

    obses = []
    from copy import deepcopy
    obs = deepcopy(obsDefault)
    obs.OBSNME = unique_obs_name(obs.OBSNME, eval(obsInfo[1]) + ap)
    obs.OBSVAL = float(expected_value)
    obses.append(obs)
    return obses
346
+
347
def totalupflow_modelresult(geo,dat,lst,userEntry):
    #go through all mass geners and extract their rate, sum this and return the total value..
    import re
    from mulgrids import unfix_blockname,fix_blockname
    # compile one regex per obsInfo line (lines after the first, which holds
    # the target value; each line is an evaluatable string pattern)
    res = []
    for line in userEntry.obsInfo[1:]:
        name = eval(line)
        res.append(re.compile(name))
    # these matches should use the unfixed blockname rules
    total = 0.0
    for g in dat.generatorlist:
        for r in res:
            # match either the stored (fixed) name or its unfixed form
            if r.match(g.name) or r.match(unfix_blockname(g.name)):
                if g.type in ('MASS','COM1'):
                    total += g.gx
                # already know this GENER is included, check next gener
                break
    return [total]
365
+
366
+
367
def heatflowminimum_fielddata(geo, dat, userEntry):
    """Create a single observation for the minimum surface heatflow of a
    zone; obsInfo[0] is the zone name, obsInfo[1] the minimum value."""
    from copy import deepcopy
    from gopest.common import private_cleanup_name

    target = float(eval(userEntry.obsInfo[1]))
    zone = eval(userEntry.obsInfo[0])
    default = userEntry.obsDefault

    # unique name: <basename>_<zone(5 chars)>_<4-digit counter>
    stem = default.OBSNME + '_' + private_cleanup_name(zone)[:5]
    if stem not in obsBaseNameCount:
        obsBaseNameCount[stem] = 0
    obsBaseNameCount[stem] += 1

    obs = deepcopy(default)
    obs.OBSNME = stem + '_' + ('%04d' % obsBaseNameCount[stem])
    obs.OBSVAL = target
    return [obs]
385
+
386
def heatflowminimum_modelresult(geo,dat,lst,userEntry):
    """Model-side counterpart of heatflowminimum_fielddata: compute the
    zone's natural-state surface heatflow and clip it at the specified
    minimum, so the residual is zero whenever the model meets the target.

    obsInfo[0] is the zone name, obsInfo[1] the minimum heatflow value.
    """
    expected_value = float(eval(userEntry.obsInfo[1]))
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    zoneName = eval(userEntry.obsInfo[0])

    minimum = expected_value

    # NOTE: local 'cfg' shadows the module-level goPEST config here
    import config
    cfg = config.config('get_surface_heatflowminimum.cfg')

    from get_surface_heatflow import get_surface_heatflow_proc_cfg
    (geners,colsinzones,ListingTableNames,syear,coldenthalpy,
     show_fig,save_fig,outflow_only,calc_notinany
    ) = get_surface_heatflow_proc_cfg(cfg,geo,lst)

    if zoneName not in colsinzones: raise Exception("'%s' not in colsinzones" % zoneName + str(sorted(colsinzones.keys())))
    # so to skip other zones, still inefficient, but that's it for now.
    # Bug fix: iterate over a snapshot of the keys — deleting entries while
    # iterating the live dict view raises RuntimeError in Python 3.
    for z in list(colsinzones.keys()):
        if z != zoneName: del colsinzones[z]

    from get_surface_heatflow import get_surface_heatflow
    (t_in_sec, zone_total, zone_area) = get_surface_heatflow(geo,lst,
        geners,colsinzones,ListingTableNames,syear,coldenthalpy,
        False,False,outflow_only,calc_notinany)

    # Only NS total heatflow (result index [-1]). If model result is greater or
    # equal to the expected_value, then return expected_value, so it fits
    # perfectly as long as model result is > expected_value.
    if list(zone_total[zoneName])[-1] >= minimum:
        return [minimum]
    else:
        return [list(zone_total[zoneName])[-1]]
419
+
420
+
421
+
422
def heatflow_fielddata(geo,dat,userEntry):
    """Create a single observation for the surface heatflow of a zone;
    obsInfo[0] is the zone name, obsInfo[1] the target heatflow value."""
    expected_value = float(eval(userEntry.obsInfo[1]))
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    zoneName = eval(userEntry.obsInfo[0])

    # unique name: <basename>_<zone(5 chars)>_<4-digit counter>
    from gopest.common import private_cleanup_name
    baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(zoneName)[:5]
    if baseName not in obsBaseNameCount:
        obsBaseNameCount[baseName] = 0
    obses = []
    obsBaseNameCount[baseName] += 1
    from copy import deepcopy
    obs = deepcopy(obsDefault)
    obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
    obs.OBSVAL = expected_value
    obses.append(obs)
    return obses
440
+
441
def heatflow_modelresult(geo,dat,lst,userEntry):
    """Model-side counterpart of heatflow_fielddata: compute and return the
    zone's natural-state total surface heatflow from the listing results.

    obsInfo[0] is the zone name; obsInfo[1] (the target value) is unused here.
    """
    expected_value = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    zoneName = eval(userEntry.obsInfo[0])

    # NOTE: local 'cfg' shadows the module-level goPEST config here
    import config
    cfg = config.config('get_surface_heatflow.cfg')

    from get_surface_heatflow import get_surface_heatflow_proc_cfg
    (geners,colsinzones,ListingTableNames,syear,coldenthalpy,
     show_fig,save_fig,outflow_only,calc_notinany
    ) = get_surface_heatflow_proc_cfg(cfg,geo,lst)

    if zoneName not in colsinzones: raise Exception
    # so to skip other zones, still inefficient, but that's it for now.
    # Bug fix: iterate over a snapshot of the keys — deleting entries while
    # iterating the live dict view raises RuntimeError in Python 3.
    for z in list(colsinzones.keys()):
        if z != zoneName: del colsinzones[z]

    from get_surface_heatflow import get_surface_heatflow
    (t_in_sec, zone_total, zone_area) = get_surface_heatflow(geo,lst,
        geners,colsinzones,ListingTableNames,syear,coldenthalpy,
        False,False,outflow_only,calc_notinany)

    # just get NS total heatflow for now
    return [list(zone_total[zoneName])[-1]]
467
+
468
+
469
def private_well_track_blocks(geo,wname):
    """ generate well track blocks if not already cached

    Returns (blocks, blocks_cen): the model block names intersected by the
    well track of 'wname', and the corresponding layer-centre elevations.
    Results are cached on the geo object (geo.wellblocks) for reuse.
    """
    if 'wellblocks' not in geo.__dict__:
        geo.wellblocks = {}
    if wname in geo.wellblocks:
        return geo.wellblocks[wname]
    else:
        # speed up, hopefully: cache the column quadtree for point searches
        if 'qtree' not in geo.__dict__:
            geo.qtree = geo.column_quadtree()

        # work out all blocks: one per layer whose centre lies within the
        # well's vertical extent
        w = geo.well[wname]
        blocks, blocks_cen = [], []
        for lc in [lay.centre for lay in geo.layerlist if w.bottom[2] <= lay.centre <= w.head[2]]:
            b = geo.block_name_containing_point(w.elevation_pos(lc),geo.qtree)
            if b is not None:
                blocks.append(b)
                blocks_cen.append(lc)

        # check if well head block is missed
        # NOTE(review): blocks[0] raises IndexError if no layer block was
        # found above — presumably never happens for valid wells; confirm
        wh_col = geo.column_containing_point(w.head[:2],
            guess=geo.column[geo.column_name(blocks[0])],qtree=geo.qtree)
        for lay in geo.layerlist:
            if lay.bottom < wh_col.surface:
                # partial surface layer: centre is midway between column
                # surface and layer bottom
                wh_block_elev = (wh_col.surface + lay.bottom ) / 2.0
                wh_block = geo.block_name(lay.name,wh_col.name)
                break
        # NOTE(review): wh_block is unbound if the loop above never breaks
        # (no layer bottom below the column surface) — TODO confirm safe
        if wh_block not in blocks:
            blocks.insert(0, wh_block)
            blocks_cen.insert(0, wh_block_elev)
        geo.wellblocks[wname] = (blocks, blocks_cen)
        return geo.wellblocks[wname]
502
+
503
def _loadBlockTempFile(fname, customFilter):
    """ get all block names and temp out of field data file

    Each line is '<block expr>, <temp expr>[, ...]'; both fields are
    eval()-ed.  Lines are read until the first blank line.  customFilter is
    an eval()-able expression with 'block' and 'temp' in scope; only rows
    for which it is true are kept.  Returns (block_names, temps).
    """
    from mulgrids import fix_blockname
    allblks, alltemp = [], []
    # 'with' guarantees the handle is closed even if eval() raises
    # (the original open()/close() pair leaked the handle on error)
    with open(fname, 'r') as f:
        for line in f:
            if line.strip() == '': break
            block,temp = [eval(x) for x in line.split(',')[0:2]]
            if eval(customFilter):
                allblks.append(fix_blockname(block))
                alltemp.append(float(temp))
    return allblks, alltemp
516
+
517
def blocktemperature_fielddata(geo,dat,userEntry):
    """ a user field data file is a list of blocks with observed temperature

    Returns one observation per (block, temperature) row of the file named
    by obsInfo[1], after applying customFilter.
    """
    fieldDataFile = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault

    allblks, alltemp = _loadBlockTempFile(fieldDataFile, customFilter)

    from mulgrids import fix_blockname
    from copy import deepcopy
    obses = []
    for (b,t) in zip(allblks,alltemp):
        obs = deepcopy(obsDefault)
        # name each obs after its (fixed) block name
        obs.OBSNME = unique_obs_name(obsDefault.OBSNME, fix_blockname(b))
        obs.OBSVAL = t
        obses.append(obs)
    return obses
534
+
535
def blocktemperature_modelresult(geo,dat,lst,userEntry):
    """ a user field data file is a list of blocks with observed temperature

    Returns the simulated temperature of each listed block, taken from the
    listing timestep closest to the requested time (obsInfo[0] may be a
    tuple whose second item is the time; default time is 0.0).
    """
    fieldDataFile = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter

    allblks, alltemp = _loadBlockTempFile(fieldDataFile, customFilter)

    vals = eval(userEntry.obsInfo[0])
    time = 0.0
    if isinstance(vals,tuple) and len(vals) == 2:
        time = float(vals[1])

    import numpy as np
    # position the listing at the timestep nearest to the requested time
    lst.index = np.abs(lst.fulltimes-time).argmin()

    # simulator-aware temperature column (first column matching the prefix)
    field_name = [c for c in lst.element.column_name if c.startswith(FIELD['temp'])][0]

    return [lst.element[b][field_name] for b in allblks]
553
+
554
def temperature_fielddata(geo,dat,userEntry):
    """Create temperature observations along a well track.

    obsInfo[0] is the well name (or a tuple whose first item is the well
    name); obsInfo[1] is a file of 'elevation temperature' rows.  Field data
    is interpolated onto the layer centres of the well-track blocks that lie
    inside the data's elevation range.
    """
    # ugly, need re-writing and remove repetitive actions
    fieldDataFile = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    from mulgrids import fix_blockname
    vals = eval(userEntry.obsInfo[0])
    if isinstance(vals,str):
        # only wellname is specified
        wname = fix_blockname(vals)
    elif isinstance(vals,tuple):
        wname = fix_blockname(vals[0])
    # NOTE(review): wname is unbound if vals is neither str nor tuple

    # get temp vs elev from datafile first (stop at first blank line)
    allelev, alltemp = [], []
    f = open(fieldDataFile,'r')
    for line in f.readlines():
        if line.strip() == '': break
        elev,temp = [float(x) for x in line.split()[0:2]]
        if eval(customFilter):
            allelev.append(elev)
            alltemp.append(temp)
    f.close()

    # keep only well-track blocks within the data's elevation range
    # (data is assumed ordered top-down: allelev[0] highest — TODO confirm)
    (bs, bs_c) = private_well_track_blocks(geo,wname)
    blocks, blocks_cen = [], []
    for (b,c) in zip(bs, bs_c):
        if allelev[-1] <= c <= allelev[0]:
            blocks.append(b)
            blocks_cen.append(c)

    # blocks and blocks_cen ready for use; interp needs ascending xs,
    # hence the [::-1] reversals
    from numpy import interp
    blocks_temp = interp(blocks_cen,allelev[::-1],alltemp[::-1])

    from gopest.common import private_cleanup_name
    baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(wname)[:5]
    if baseName not in obsBaseNameCount:
        obsBaseNameCount[baseName] = 0
    obses = []
    for (b,t) in zip(blocks,blocks_temp):
        obsBaseNameCount[baseName] += 1
        from copy import deepcopy
        obs = deepcopy(obsDefault)
        obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
        obs.OBSVAL = t
        obses.append(obs)
    return obses
602
+
603
def temperature_modelresult(geo,dat,lst,userEntry):
    """Extract simulated temperatures for the same well-track blocks that
    temperature_fielddata generated observations for, at the listing
    timestep closest to the requested time (obsInfo[0] may be a
    (wellname, time) tuple; default time 0.0)."""
    # ugly, need re-writing and remove repetitive actions
    # maybe communication between pre- and post- process is still needed
    fieldDataFile = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    from mulgrids import fix_blockname
    vals = eval(userEntry.obsInfo[0])
    time = 0.0
    if isinstance(vals,str):
        # only wellname is specified
        wname = fix_blockname(vals)
    elif isinstance(vals,tuple):
        wname = fix_blockname(vals[0])
        if len(vals) == 2:
            time = float(vals[1])

    # get temp vs elev from datafile first — the data file must be re-read
    # so the block selection matches the fielddata side exactly
    allelev, alltemp = [], []
    f = open(fieldDataFile,'r')
    for line in f.readlines():
        if line.strip() == '': break
        elev,temp = [float(x) for x in line.split()[0:2]]
        if eval(customFilter):
            allelev.append(elev)
            alltemp.append(temp)
    f.close()

    # same block selection as temperature_fielddata
    (bs, bs_c) = private_well_track_blocks(geo,wname)
    blocks = []
    for (b,c) in zip(bs, bs_c):
        if allelev[-1] <= c <= allelev[0]:
            blocks.append(b)

    import numpy as np
    # position the listing at the timestep nearest to the requested time
    lst.index = np.abs(lst.fulltimes-time).argmin()

    # simulator-aware temperature column (first column matching the prefix)
    field_name = [c for c in lst.element.column_name if c.startswith(FIELD['temp'])][0]
    return [lst.element[b][field_name] for b in blocks]
642
+
643
def temperature_thickness_fielddata(geo,dat,userEntry):
    """ This is very similar to the normal temperature obs type, only that the
    weight of each observation is assigned as the thickness of that layer. The
    observation actually seen by PEST is saved as .obs files. """
    # ugly, need re-writing and remove repetitive actions
    fieldDataFile = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter
    obsDefault = userEntry.obsDefault
    from mulgrids import fix_blockname
    vals = eval(userEntry.obsInfo[0])
    if isinstance(vals,str):
        # only wellname is specified
        wname = fix_blockname(vals)
    elif isinstance(vals,tuple):
        wname = fix_blockname(vals[0])

    # get temp vs elev from datafile first (stop at first blank line)
    allelev, alltemp = [], []
    f = open(fieldDataFile,'r')
    for line in f.readlines():
        if line.strip() == '': break
        elev,temp = [float(x) for x in line.split()[0:2]]
        if eval(customFilter):
            allelev.append(elev)
            alltemp.append(temp)
    f.close()

    # keep well-track blocks inside the data's elevation range, recording
    # each block's layer thickness for use as the observation weight
    (bs, bs_c) = private_well_track_blocks(geo,wname)
    blocks, blocks_cen, blocks_thickness = [], [], []
    for (b,c) in zip(bs, bs_c):
        if allelev[-1] <= c <= allelev[0]:
            blocks.append(b)
            blocks_cen.append(c)
            blocks_thickness.append(geo.layer[geo.layer_name(b)].thickness)

    # blocks and blocks_cen ready for use; interp needs ascending xs
    from numpy import interp
    blocks_temp = interp(blocks_cen,allelev[::-1],alltemp[::-1])

    # dump what PEST actually sees, for checking/plotting
    fo = open(fieldDataFile+'.obs', 'w')
    for (z,t) in zip(blocks_cen,blocks_temp):
        fo.write('%.2f %.2f\n' % (z,t))
    fo.close()

    from gopest.common import private_cleanup_name
    baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(wname)[:5]
    if baseName not in obsBaseNameCount:
        obsBaseNameCount[baseName] = 0
    obses = []


    # record which blocks this obs group covers (for reporting elsewhere)
    if obsDefault.OBGNME not in userEntry.coverage:
        userEntry.coverage[obsDefault.OBGNME] = []
    for (b,t,h) in zip(blocks,blocks_temp, blocks_thickness):
        obsBaseNameCount[baseName] += 1
        from copy import deepcopy
        obs = deepcopy(obsDefault)
        obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
        obs.OBSVAL = t
        # weight scales with layer thickness
        obs.WEIGHT = h * obsDefault.WEIGHT
        userEntry.coverage[obsDefault.OBGNME].append(b)
        obses.append(obs)
    return obses
706
+
707
def temperature_thickness_modelresult(geo, dat, lst, userEntry):
    """Model results are identical to the plain temperature obs type; only
    the field-data weighting differs, so delegate."""
    return temperature_modelresult(geo, dat, lst, userEntry)
709
+
710
def _well_interp_layers(wname, geo):
    """Collect, per layer crossed by the well track, the block indices to be
    used for horizontal interpolation of results at the well position.

    Returns (interp_block_i, well_poses, elevs): for each usable layer a
    list of block indices, the well's 3D position at that layer centre, and
    the layer-centre elevation.

    WARNING: this can be very problematic at the surface layers near wellhead.
    There would be cases where blocks involved is partial layer, then the
    interpolation would wrongly use layer centre as interpolation in 2D.
    """
    well = geo.well[wname]
    # cache qtree/kdtree for faster repeated searching
    if not hasattr(geo, 'kdtree'): geo.kdtree = geo.get_node_kdtree()
    if not hasattr(geo, 'qtree'): geo.qtree = geo.column_quadtree()

    interp_block_i, well_poses, elevs = [], [], []
    for lay in geo.layerlist[1:]:
        pos = well.elevation_pos(lay.centre, extend=False)
        if pos is None:
            # skip, well pos above or below well track
            continue

        nearest_node = geo.node_nearest_to(pos[:2], kdtree=geo.kdtree)
        well_col = geo.column_containing_point(pos[:2], qtree=geo.qtree)
        well_bname = geo.block_name_containing_point(pos, qtree=geo.qtree)

        if well_bname is None:
            # well block not in model, skip layer
            continue

        blocks_i = [] # block index for slicing listing table
        points = []   # NOTE(review): unused here; points are rebuilt by the caller
        # candidate columns: those sharing the nearest node plus the well
        # column's neighbours
        for col in nearest_node.column | well_col.neighbour:
            bname = geo.block_name(lay.name, col.name)
            try:
                blocks_i.append(geo.block_name_index[bname])
            except KeyError:
                # incomplete layer, just use well block
                interp_block_i.append([geo.block_name_index[well_bname]])
                well_poses.append(pos)
                elevs.append(lay.centre)
                # next layer
                continue

        interp_block_i.append(blocks_i)
        well_poses.append(pos)
        elevs.append(lay.centre)
    return interp_block_i, well_poses, elevs
754
+
755
def _well_interp_temp(block_i_by_layer, well_poses, geo, lst):
    """ block_i_by_layer is a list of lists, each a group of block indices to
    interpolate temperature data. well_poses is a list of pos, one for each
    layers same as block_i_by_layer.

    Returns one interpolated temperature per layer group; when a group has a
    single block its value is used directly.
    """
    import scipy.interpolate as interpolate
    # will cause error if no field name found.
    # Consistency fix: use the simulator-aware FIELD table (as in
    # temp_interp_thickness_json_modelresult) instead of the AUTOUGH2-only
    # hard-coded 'Temperatu' prefix, which never matches Waiwera listings.
    for field in lst.element.column_name:
        if field.startswith(FIELD['temp']):
            field_name = field
            break
    temps = []
    for blocks_i, pos in zip(block_i_by_layer, well_poses):
        v = lst.element[field_name][blocks_i]
        cols = [geo.column_name(geo.block_name_list[bi]) for bi in blocks_i]
        points = [geo.column[c].centre for c in cols]
        if len(points) > 1:
            # horizontal linear interpolation onto the well position
            vi = interpolate.griddata(points, v, [pos[:2]], method='linear')
            temps.append(vi[0])
        else:
            temps.append(v[0])
    return temps
777
+
778
+
779
def temp_interp_thickness_json_fielddata(geo,dat,userEntry):
    """Temperature observations from a json file of well profiles, with
    horizontal interpolation support and layer-thickness weighting.

    obsInfo[0] is the json filename (keyed by well name, each entry holding
    'time', 'elevations', 'temperatures' and optionally 'geo_well_name');
    the following obsInfo lines name the wells to process.  Each obs also
    carries _bindx_/_wpos_/_dtime_ attributes used by the matching
    *_modelresult routine, and coverage/plot entries are recorded on
    userEntry as side effects.
    """
    jfilename = userEntry.obsInfo[0]
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    tFactor = 365.25*24.*60.*60.  # years -> seconds

    import json
    # 1st line is json file name (of all wells)
    with open(jfilename, 'r') as f:
        t_bywell = json.load(f)

    obses = []

    for oline in userEntry.obsInfo[1:]:
        wname = eval(oline)
        # the geometry well name may differ from the data key
        if 'geo_well_name' in t_bywell[wname]:
            geo_wname = t_bywell[wname]['geo_well_name']
        else:
            geo_wname = wname
        time = t_bywell[wname]['time']
        # apply customFilter to the raw elevation/temperature pairs
        allelev, alltemp = [], []
        for elev,temp in zip(t_bywell[wname]['elevations'],
                             t_bywell[wname]['temperatures']):
            if eval(customFilter):
                allelev.append(elev)
                alltemp.append(temp)

        # bis, wps, es = blk indices, well positions, elevations
        bis, wps, es = _well_interp_layers(geo_wname, geo)
        blocks, blocks_cen, blocks_thickness = [], [], []
        bis2, wps2, es2, bhs2 = [], [], [], []
        for bidx, wpos, elev in zip(bis, wps, es):
            if elev > allelev[0] or elev < allelev[-1]:
                # skip if out of field data range
                continue
            if eval(customFilter):
                bis2.append(bidx)
                wps2.append(wpos)
                es2.append(elev)
                # all same layer, just use first one
                lay = geo.layer_name(geo.block_name_list[bidx[0]])
                bhs2.append(geo.layer[lay].thickness)

        # blocks and blocks_cen ready for use; interp needs ascending xs
        from numpy import interp
        blocks_temp = interp(es2, allelev[::-1],alltemp[::-1])

        from gopest.common import private_cleanup_name
        baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(geo_wname)[:5]
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0

        # record coverage (blocks used) and interpolation sources per group
        if obsDefault.OBGNME not in userEntry.coverage:
            userEntry.coverage[obsDefault.OBGNME] = []
            userEntry.coverage[obsDefault.OBGNME+'_interp_source'] = []



        for bidx,pos,t,h in zip(bis2, wps2, blocks_temp, bhs2):
            obsBaseNameCount[baseName] += 1
            from copy import deepcopy
            obs = deepcopy(obsDefault)
            obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = t
            # weight scales with layer thickness
            obs.WEIGHT = h * obsDefault.WEIGHT
            # additional for model result extraction
            obs._bindx_ = bidx
            obs._wpos_ = pos
            obs._dtime_ = time * tFactor - offsetTime # data's time tag
            blks = [geo.block_name_list[i] for i in bidx]
            userEntry.coverage[obsDefault.OBGNME] = userEntry.coverage[obsDefault.OBGNME] + blks
            userEntry.coverage[obsDefault.OBGNME+'_interp_source'].append((
                obs.OBSNME, bidx))
            obses.append(obs)

        # generate batch plot entries (temperature_plot / plot_append_raw_data
        # are defined elsewhere in this module)
        plot = temperature_plot(
            geo_wname, time, list(es2), list(blocks_temp),
            baseName, ('Well %s at time %f' % (wname,time)) )
        if PLOT_RAW_FIELD_DATA:
            y, x = t_bywell[wname]['elevations'], t_bywell[wname]['temperatures']
            plot = plot_append_raw_data(plot, 'raw_'+wname, x, y, xunit="degC", yunit="meter")
        userEntry.batch_plot_entry.append(plot)

    return obses
865
+
866
def temp_interp_thickness_json_modelresult(geo,dat,lst,userEntry):
    """Model-side counterpart of temp_interp_thickness_json_fielddata:
    re-generates the obs list to recover the _bindx_/_wpos_/_dtime_ tags,
    then horizontally interpolates listing temperatures onto each well
    position at the listing timestep nearest each obs' data time."""
    import numpy as np
    import scipy.interpolate as interpolate
    # will cause error if no field name found
    for field in lst.element.column_name:
        if field.startswith(FIELD['temp']):
            field_name = field
            break
    print('+++ use field: %s' % field_name)

    # re-run the fielddata routine only for its per-obs metadata side
    # channel (_bindx_, _wpos_, _dtime_)
    obses = temp_interp_thickness_json_fielddata(geo, dat, userEntry)
    vals = []

    t_prev = obses[0]._dtime_
    lst.index = np.abs(lst.fulltimes-t_prev).argmin()
    for obs in obses:
        bis = obs._bindx_
        wps = obs._wpos_
        t = obs._dtime_

        # TODO, this is slow, can be much faster
        # only reposition the listing when the data time changes
        if t != t_prev:
            lst.index = np.abs(lst.fulltimes-t).argmin()
            t_prev = t

        v = lst.element[field_name][bis]
        cols = [geo.column_name(geo.block_name_list[bi]) for bi in bis]
        points = [geo.column[c].centre for c in cols]
        if len(points) > 1:
            vi = interpolate.griddata(points, v, [wps[:2]], method='linear')
            vals.append(vi[0])
        else:
            vals.append(v[0])
    return vals
900
+
901
+
902
def temperature_thickness_json_fielddata(geo,dat,userEntry):
    """Field-data side of the 'temperature_thickness_json' obs type.

    Very similar to the normal temperature obs type, except that the weight
    of each observation is scaled by the thickness of its layer.  obsInfo[0]
    is a JSON file of downhole profiles keyed by well name; the following
    obsInfo lines each name one well.  Field temperatures are interpolated
    onto the well-track block centres.  Also records block coverage and a
    batch-plot entry per well.
    """
    # ugly, needs re-writing to remove repetitive actions
    jfilename = userEntry.obsInfo[0]
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    # data times are decimal years; model time is seconds
    tFactor = 365.25*24.*60.*60.

    import json
    # 1st line is json file name (of all wells)
    with open(jfilename, 'r') as f:
        t_bywell = json.load(f)

    obses = []

    for oline in userEntry.obsInfo[1:]:
        wname = eval(oline)
        # a well may map to a differently-named well in the geometry file
        if 'geo_well_name' in t_bywell[wname]:
            geo_wname = t_bywell[wname]['geo_well_name']
        else:
            geo_wname = wname
        time = t_bywell[wname]['time']
        allelev, alltemp = [], []
        # customFilter is evaluated with 'elev' and 'temp' in scope
        for elev,temp in zip(t_bywell[wname]['elevations'], t_bywell[wname]['temperatures']):
            if eval(customFilter):
                allelev.append(elev)
                alltemp.append(temp)

        if len(allelev) <= 1:
            print('temperature_thickness_json: User entry has no data, skipping: %s' % wname)
            continue

        # blocks along the well track, limited to the data's elevation range
        (bs, bs_c) = private_well_track_blocks(geo,geo_wname)
        blocks, blocks_cen, blocks_thickness = [], [], []
        for (b,c) in zip(bs, bs_c):
            if allelev[-1] <= c <= allelev[0]:
                blocks.append(b)
                blocks_cen.append(c)
                blocks_thickness.append(geo.layer[geo.layer_name(b)].thickness)

        # blocks and blocks_cen ready for use; reverse so elevations ascend
        # as numpy.interp requires
        from numpy import interp
        blocks_temp = interp(blocks_cen,allelev[::-1],alltemp[::-1])

        from gopest.common import private_cleanup_name
        baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(geo_wname)[:5]
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0

        if obsDefault.OBGNME not in userEntry.coverage:
            userEntry.coverage[obsDefault.OBGNME] = []
        for (b,t,h) in zip(blocks,blocks_temp, blocks_thickness):
            obsBaseNameCount[baseName] += 1
            from copy import deepcopy
            obs = deepcopy(obsDefault)
            obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = t
            # layer thickness scales the base weight
            obs.WEIGHT = h * obsDefault.WEIGHT
            # additional for model result extraction
            obs._block_ = b
            obs._dtime_ = time * tFactor - offsetTime # data's time tag
            userEntry.coverage[obsDefault.OBGNME].append(b)
            obses.append(obs)

        # generate batch plot entries
        plot = temperature_plot(
            geo_wname, time, blocks_cen, list(blocks_temp),
            baseName, ('Well %s at time %f' % (wname,time)) )
        if PLOT_RAW_FIELD_DATA:
            y, x = t_bywell[wname]['elevations'], t_bywell[wname]['temperatures']
            plot = plot_append_raw_data(plot, 'raw_'+wname, x, y, xunit="degC", yunit="meter")
        userEntry.batch_plot_entry.append(plot)
        # generate batch plot entries
        # userEntry.batch_plot_entry.append(temperature_plot(
        #     wname, time, allelev, alltemp,
        #     baseName, ('Well %s at time %f' % (wname,time)) ))

    return obses
def temperature_thickness_json_modelresult(geo,dat,lst,userEntry):
    """Model-result side of 'temperature_thickness_json': for each obs,
    read the simulated temperature of its block at the listing timestep
    closest to the data time."""
    import numpy as np
    observations = temperature_thickness_json_fielddata(geo, dat, userEntry)
    results = []
    # position the listing at the first obs' data time, then only move it
    # when the data time changes
    current_t = observations[0]._dtime_
    lst.index = np.abs(lst.fulltimes - current_t).argmin()
    for ob in observations:
        if ob._dtime_ != current_t:
            current_t = ob._dtime_
            lst.index = np.abs(lst.fulltimes - current_t).argmin()
        results.append(lst.element[ob._block_][FIELD['temp']])
    return results
def temperature_plot(wname, time, elevs, temps, obsname, title):
    """Build a (timgui) batch-plot dict for a downhole temperature profile:
    one modelled DownholeWellSeries plus the frozen field data."""
    model_series = {
        "type": "DownholeWellSeries",
        "variable": FIELD["temp"],
        "well": wname,
        "time": time,
        "timeunit": "year",
    }
    data_series = {
        "type": "FrozenDataSeries",
        "name": obsname,
        "original_series": "",
        "frozen_x": temps,
        "frozen_y": elevs,
        "xunit": "degC",
        "yunit": "meter",
    }
    return {
        "series": [model_series, data_series],
        "ylabel": "Elevation",
        "xlabel": "Temperature",
        "title": title,
    }
def pressure_fielddata(geo,dat,userEntry):
    """Field-data side of the 'pressure' obs type; called by goPESTobs.py.
    Returns only the observation objects (data times are discarded here)."""
    seconds_per_year = 365.25*24.*60.*60.
    pairs = private_history_data(userEntry, 100000.0, seconds_per_year)
    obses, _times = zip(*pairs)
    return obses
def pressure_modelresult(geo,dat,lst,userEntry):
    """Model-result side of 'pressure': interpolate the simulated Pressure
    history of the named block onto the field-data times."""
    from mulgrids import fix_blockname
    from numpy import interp
    # obsInfo[0] is a python expression giving the block name
    block = fix_blockname(eval(userEntry.obsInfo[0]))
    seconds_per_year = 365.25*24.*60.*60.
    pairs = private_history_data(userEntry, 100000.0, seconds_per_year)
    _obses, data_times = zip(*pairs)
    table = lst.history([('e',block,'Pressure')])
    if table is None:
        raise Exception("Observation (type pressure) '%s' does not match any block." % block)
    # table[0]: times, table[1]: pressures (assuming all times are the same)
    return list(interp(data_times, table[0], table[1]))
def pressure_by_well_fielddata(geo,dat,userEntry):
    """Field-data side of the 'pressure_by_well' obs type; called by
    goPESTobs.py."""
    seconds_per_year = 365.25*24.*60.*60.
    pairs = private_history_data(userEntry, 100000.0, seconds_per_year)
    if len(pairs) == 0:
        raise Exception("User entry yields no observation: " + str(userEntry))
    obses, _times = zip(*pairs)
    return obses
def pressure_by_well_modelresult(geo,dat,lst,userEntry):
    """Model-result side of 'pressure_by_well'.

    Expects a well name and elevation in the first obsInfo line,
    eg: 'WK 1', -100.0.  Locates the block containing the well at that
    elevation, then interpolates its simulated Pressure history onto the
    field-data times.
    """
    from mulgrids import fix_blockname
    # name,timelist
    # name = fix_blockname(eval(userEntry.obsInfo[0]))
    wname, elev = eval(userEntry.obsInfo[0])
    elev = float(elev)
    if wname not in geo.well:
        raise Exception("Obs type 'pressure_by_well' well %s does not exist in geometry file." % wname)
    pos = geo.well[wname].elevation_pos(elev, extend=True)
    # speed up repeated point lookups by caching the column quadtree on geo
    if 'qtree' not in geo.__dict__:
        geo.qtree = geo.column_quadtree()
    name = geo.block_name_containing_point(pos, geo.qtree)
    if name is None:
        raise Exception("Obs type 'pressure_by_well' well %s at %f is outside of the model." % (wname, elev))

    # print wname, elev, name
    entries = private_history_data(userEntry, 100000.0, 365.25*24.*60.*60.)
    obses, timelist = zip(*entries)
    tbl = lst.history([('e',name,'Pressure')])
    if tbl is None:
        raise Exception("Obs failed to extract Pressure for block %s." % name)
    alltimes = tbl[0] # assuming all times are the same
    allpress = tbl[1]
    from numpy import interp
    return list(interp(timelist,alltimes,allpress))
def target_times(desired_times, limit, data):
    """Return the subset of desired_times that have field data within
    +- limit.

    data is an iterable of (t, y) tuples.  It is materialized once up
    front: previously a one-shot iterator (e.g. a Python 3 zip object)
    would be exhausted while testing the first desired time, silently
    dropping all later targets.
    """
    points = list(data)

    def has_valid(time):
        # True if any data time falls within [time - limit, time + limit]
        return any((time - limit) <= t <= (time + limit) for t, _y in points)

    return [time for time in desired_times if has_valid(time)]
def calc_bz(geo, w, elev):
    """Locate the model block containing well w at elevation elev.

    Returns (block_name, block_centre_elevation).  Caches the column
    quadtree on the geometry object to speed up repeated lookups.
    Raises if the point falls outside the model.
    """
    pos = geo.well[w].elevation_pos(elev, extend=True)
    if 'qtree' not in geo.__dict__:
        geo.qtree = geo.column_quadtree()
    b = geo.block_name_containing_point(pos, geo.qtree)
    if b is None:
        # bug fix: message previously referenced undefined name 'pcp',
        # which raised NameError instead of the intended Exception
        raise Exception("well %s at %f is outside of the model." % (w, elev))
    return b, geo.block_centre(geo.layer_name(b), geo.column_name(b))[2]
def pressure_plot(bname, dataname, ts, ps, title):
    """Build a (timgui) batch-plot dict for a block pressure history:
    one modelled HistoryBlockSeries plus the frozen field data
    (pressures converted from bar to pascal)."""
    model_series = {
        "type": "HistoryBlockSeries",
        "block": bname,
        "variable": FIELD["pres"],
    }
    data_series = {
        "type": "FrozenDataSeries",
        "name": dataname,
        "original_series": "",
        "frozen_x": [t for t in ts],
        "frozen_y": [p*1.0e5 for p in ps],
        "xunit": "t2year",
        "yunit": "pascal",
    }
    return {
        "series": [model_series, data_series],
        "ylabel": "Pressure",
        "xlabel": "Time",
        "title": title,
    }
def plot_append_raw_data(plot, name, x, y, xunit="", yunit=""):
    """Append a FrozenDataSeries of raw field data to an existing batch-plot
    dict and return the (same, mutated) dict."""
    raw_series = {
        "type": "FrozenDataSeries",
        "name": name,
        "original_series": "",
        "frozen_x": x,
        "frozen_y": y,
        "xunit": xunit,
        "yunit": yunit,
    }
    plot["series"].append(raw_series)
    return plot
def pressure_block_average_fielddata(geo, dat, userEntry):
    """Field-data side of the 'pressure_block_average' obs type.

    Each obsInfo line is "(well_name, elevation, data_file)"; the data
    file holds whitespace-separated "time value" rows.  Pressures are
    corrected from the measurement elevation to the containing block's
    centre using the user-supplied gradient, interpolated onto the
    desired observation times, then averaged per block across all wells
    falling in the same block.  Returns the observation objects and
    appends one batch-plot entry per block to userEntry.batch_plot_entry.

    Required obsDefault settings: _DESIRED_DATA_TIMES, _INTERP_LIMIT,
    _P_GRADIENT.
    """
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    # field data is bar / decimal years; PEST works in Pa / seconds
    vFactor, tFactor = 100000.0, 365.25*24.*60.*60.

    # check all required settings exist:
    must_have = ['_DESIRED_DATA_TIMES', '_INTERP_LIMIT', '_P_GRADIENT']
    if any(not hasattr(obsDefault, a) for a in must_have):
        raise Exception('Obs type pressure_block_average must have these default settings: ' + ', '.join(must_have))

    desired_times = obsDefault._DESIRED_DATA_TIMES
    interp_limit = obsDefault._INTERP_LIMIT
    p_gradient = obsDefault._P_GRADIENT

    p_byblock = {}

    for oline in userEntry.obsInfo:
        # NOTE: eval() of spec lines - input is trusted goPEST configuration
        wname, elev, fwell = eval(oline)
        elev = float(elev)

        times, vals = [], []
        # 'with' guarantees the file is closed even if parsing raises
        with open(fwell, 'r') as f:
            for line in f:
                if line.strip() == '': break
                time, val = [float(x) for x in line.split()[0:2]]
                # customFilter is evaluated with 'time' and 'val' in scope
                if eval(customFilter):
                    times.append(time)
                    vals.append(val)

        if len(times) == 0:
            raise Exception("Pressure file: %s yields no observation" % fwell)

        # correct to block centre
        b, bz = calc_bz(geo, wname, elev)
        vals = [p - (bz-elev) * p_gradient for p in vals]

        # keep desired times that have field data within +- interp_limit
        # (bug fix: the zip must be materialized - in Python 3 a bare zip
        # iterator is exhausted after the first desired time is tested,
        # matching the json variant of this obs type)
        final_times = target_times(desired_times, interp_limit, list(zip(times, vals)))
        if len(final_times) == 0:
            raise Exception("User entry yields no observation: " + oline)
        from numpy import interp
        final_vals = list(interp(final_times, times, vals))

        # each block has a list of (times, vals, well), one per well in block
        if b not in p_byblock:
            p_byblock[b] = []
        p_byblock[b].append((final_times, final_vals, wname))

    # one series of obs for each block, averaging all wells in the same block
    obses = []
    for b in sorted(p_byblock.keys()):
        bp_times = {}
        ws = []
        for dtimes, dvals, wname in p_byblock[b]:
            ws.append(wname)
            for ft, fv in zip(dtimes, dvals):
                if ft not in bp_times:
                    bp_times[ft] = []
                bp_times[ft].append(fv)

        avgt, avgp = [], []
        for t in sorted(bp_times.keys()):
            avgt.append(t)
            avgp.append(sum(bp_times[t]) / len(bp_times[t]))

        from gopest.common import private_cleanup_name
        from copy import deepcopy
        baseName = obsDefault.OBSNME + '_' + private_cleanup_name(b)
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0
        for time, val in zip(avgt, avgp):
            obsBaseNameCount[baseName] += 1
            obs = deepcopy(obsDefault)
            obs.OBSNME = baseName + '_' + ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = val * vFactor
            # extra attributes used by the model-result extraction
            obs._block_ = b
            obs._mtime_ = time * tFactor - offsetTime
            obses.append(obs)

        # generate batch plot entries (times shifted back into data years)
        userEntry.batch_plot_entry.append(pressure_plot(
            b, baseName, [t - offsetTime/(60.0*60.0*24.0*365.25) for t in avgt], avgp,
            b + ': ' + ','.join(ws)))

    return obses
def pressure_block_average_modelresult(geo,dat,lst,userEntry):
    """Model-result side of 'pressure_block_average'.

    Re-runs the _fielddata routine to recover the observations (block and
    model-time stashed on each), extracts the simulated Pressure history of
    each block and interpolates it onto the obs times.  Observations are
    assumed to arrive grouped by block (as _fielddata produces them): a new
    block name opens a new time list and times are appended to the most
    recent one.
    """
    obses = pressure_block_average_fielddata(geo, dat, userEntry)
    bs, tss = [], []
    for obs in obses:
        b = obs._block_
        t = obs._mtime_
        # print b, t
        if b not in bs:
            bs.append(b)
            tss.append([])
        tss[-1].append(t)

    tbls = lst.history([('e',b,'Pressure') for b in bs])
    if tbls is None:
        raise Exception("Extraction Pressure history of listing results went wrong, blocks: %s" % ','.join(bs))
    # PyTOUGH checks result length, then just return the first set (hence a
    # tuple) if only one in the list, instead of a list
    if not isinstance(tbls, list):
        tbls = [tbls]

    from numpy import interp
    all_obs_vals = []
    for i, (b,ts) in enumerate(zip(bs,tss)):
        alltimes = tbls[i][0] # assuming all times are the same
        allpress = tbls[i][1]
        all_obs_vals += list(interp(ts,alltimes,allpress))

    return all_obs_vals
def pressure_block_average_json_fielddata(geo, dat, userEntry):
    """Field-data side of the 'pressure_block_average_json' obs type.

    Like pressure_block_average, but pressures come from a single JSON
    file (obsInfo[0]) keyed by well name, each well carrying 'elevation',
    'times' and 'pressures'.  Each following obsInfo line names one well.
    Pressures are corrected to the containing block centre, interpolated
    onto the desired times, and averaged per block over the wells in that
    block.  Required obsDefault settings: _DESIRED_DATA_TIMES,
    _INTERP_LIMIT, _P_GRADIENT.
    """
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    # field data is bar / decimal years; PEST works in Pa / seconds
    vFactor, tFactor = 100000.0, 365.25*24.*60.*60.

    # check all required settings exists:
    must_have = ['_DESIRED_DATA_TIMES', '_INTERP_LIMIT', '_P_GRADIENT']
    if any([not hasattr(obsDefault,a) for a in must_have]):
        raise Exception('Obs type pressure_block_average_json must have these default settings: ' + ', '.join(must_have))

    import json
    # 1st line is json file name (of all wells)
    with open(userEntry.obsInfo[0], 'r') as f:
        p_bywell = json.load(f)

    p_byblock = {}

    for oline in userEntry.obsInfo[1:]:
        wname = eval(oline)
        elev = p_bywell[wname]['elevation']

        ts, vs = p_bywell[wname]['times'], p_bywell[wname]['pressures']
        times, vals = [], []
        # customFilter is evaluated with 'time' and 'val' in scope
        for time, val in zip(ts, vs):
            if eval(customFilter):
                times.append(time), vals.append(val)

        if len(times) == 0:
            # NOTE(review): f here is the JSON handle already closed by the
            # 'with' block above; this close() is a harmless no-op
            f.close()
            raise Exception("Pressure from %s: %s yields no observation" % (userEntry.obsInfo[0], wname))

        desired_times = obsDefault._DESIRED_DATA_TIMES
        interp_limit = obsDefault._INTERP_LIMIT
        p_gradient = obsDefault._P_GRADIENT

        ##look for the column that contains the bottom of the well
        #for col in geo.columnlist:
        #    #position of well bottom
        #    pos = geo.well[wname].bottom
        #    if col.contains_point(pos):
        #        depth = col.surface - pos[3]
        #
        #geo.column_containing_point(geo.well[wname].bottom)

        # b, bz = calc_bz(geo, wname, pos[3])
        # vals = [p - (bz-pos[3]) * p_gradient for p in vals]

        # correct to block centre
        b, bz = calc_bz(geo, wname, elev)
        vals = [p - (bz-elev) * p_gradient for p in vals]

        # get times and vals for each line (well)
        # NOTE(review): the next assignment is immediately overwritten by
        # target_times() below
        final_times = [t for t in desired_times if times[0] <= t <= times[-1]]
        final_times = target_times(desired_times, interp_limit, list(zip(times,vals)))
        if len(final_times) == 0:
            msg1 = 'final_times = ' + str(final_times)
            msg2 = 'interp_limit = ' + str(interp_limit)
            msg3 = 'times = ' + str(times)
            msg = "User entry yields no observation: " + oline
            raise Exception('\n'.join([msg1, msg2, msg3, msg]))
        from numpy import interp
        final_vals = list(interp(final_times,times,vals))

        if b not in p_byblock:
            p_byblock[b] = []
        # each block have a list (ts, ps), one for each well in block
        p_byblock[b].append((final_times, final_vals, wname))

    # one series of obs for each block, averaging all these in the same block
    obses = []
    for b in sorted(p_byblock.keys()):
        # fo = open('p_byblock_%s.obs' % b.replace(' ','_'), 'w')
        bp_times = {}
        ws = []
        for dtimes, dvals, wname in p_byblock[b]:
            ws.append(wname)
            for ft,fv in zip(dtimes, dvals):
                if ft not in bp_times:
                    bp_times[ft] = []
                bp_times[ft].append(fv)

        # average all wells' values at each shared time
        avgt, avgp = [], []
        for t in sorted(bp_times.keys()):
            avgt.append(t)
            avgp.append(sum(bp_times[t]) / len(bp_times[t]))

        from gopest.common import private_cleanup_name
        from copy import deepcopy
        baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(b)
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0
        for time, val in zip(avgt, avgp):
            obsBaseNameCount[baseName] += 1

            obs = deepcopy(obsDefault)
            obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = val * vFactor
            # additional for model result extraction
            obs._block_ = b
            obs._mtime_ = time * tFactor - offsetTime
            obses.append(obs)
            # output data file in original unit (instead of PEST/TOUGH2)
            # fo.write('%e %e\n' % (time, val))

        # generate batch plot entries (times shifted back into data years)
        plot = pressure_plot(
            b, baseName, [t-offsetTime/(60.0*60.0*24.0*365.25) for t in avgt], avgp,
            b+': '+','.join(ws))
        if PLOT_RAW_FIELD_DATA:
            for w in ws:
                ts, vs = p_bywell[w]['times'], p_bywell[w]['pressures']
                plot = plot_append_raw_data(plot, 'raw_'+w, ts, vs, xunit="year", yunit="bar")
        userEntry.batch_plot_entry.append(plot)

        # fo.close()
    return obses
def pressure_block_average_json_modelresult(geo,dat,lst,userEntry):
    """Model-result side of 'pressure_block_average_json': extract each
    block's simulated pressure history and interpolate it onto the obs
    times recovered from the matching _fielddata routine."""
    observations = pressure_block_average_json_fielddata(geo, dat, userEntry)
    block_order, times_per_block = [], []
    for ob in observations:
        # observations arrive grouped by block: a new block name opens a
        # new time list, times always go into the most recent group
        if ob._block_ not in block_order:
            block_order.append(ob._block_)
            times_per_block.append([])
        times_per_block[-1].append(ob._mtime_)

    tbls = lst.history([('e', blk, FIELD['pres']) for blk in block_order])
    if tbls is None:
        raise Exception("Extraction Pressure history of listing results went wrong, blocks: %s" % ','.join(block_order))
    # PyTOUGH returns a bare tuple instead of a list when the selection has
    # a single entry
    if not isinstance(tbls, list):
        tbls = [tbls]

    from numpy import interp
    results = []
    for table, obs_times in zip(tbls, times_per_block):
        history_times, history_press = table[0], table[1]
        results += list(interp(obs_times, history_times, history_press))
    return results
def private_history_data_with_boiling(userEntry):
    """A special version of private_history_data() that keeps only entries
    whose enthalpy is at or above the boiling threshold, taken from the
    user's '_BOILING_ABOVE_ENTH' setting when present."""
    # default threshold in J/kg, same internal unit as TOUGH2
    threshold = 1200.0e3
    if hasattr(userEntry.obsDefault, '_BOILING_ABOVE_ENTH'):
        threshold = userEntry.obsDefault._BOILING_ABOVE_ENTH

    seconds_per_year = 365.25*24.*60.*60.
    entries = private_history_data(userEntry, 1000.0, seconds_per_year)
    return [(o, t) for o, t in entries if o.OBSVAL >= threshold]
def private_all_blocks_in_geners(name, all_gener_keys):
    """Return a sorted list of unique block names taken from the gener keys
    whose gener name matches `name` (a regular expression); both the
    unfixed and the raw gener name are tried."""
    import re
    from mulgrids import unfix_blockname,fix_blockname
    matcher = re.compile(name)
    matched = set()
    for blk, gen in all_gener_keys:
        if matcher.match(unfix_blockname(gen)) or matcher.match(gen):
            matched.add(blk)
    return sorted(matched)
def private_boiling_plot(blockname, gener, datafile, timelist, obsname):
    """Build a (timgui) batch-plot dict for a boiling obs: the modelled
    P - Psat series for the block plus a frozen zero line at the obs
    times."""
    model_series = {
        # "type": "HistoryPressureToBoil",
        "type": "HistoryPressureToBoilCO2",
        "block": blockname,
    }
    zero_line = {
        "type": "FrozenDataSeries",
        "name": obsname,
        "original_series": "",
        "frozen_x": list(timelist),
        "frozen_y": [0.0] * len(timelist),
        "xunit": "year",
        "yunit": "pascal",
    }
    return {
        "series": [model_series, zero_line],
        "ylabel": "Pressure Difference to Boil (P-Psat)",
        "xlabel": "Time",
        "title": "%s - %s - %s" % (blockname, gener, datafile),
    }
def boiling_fielddata(geo, dat, userEntry):
    """Field-data side of the 'boiling' obs type; called by goPESTobs.py.

    Creates a zero-valued observation (target P - Psat = 0) for every block
    feeding a GENER that matches the regex in obsInfo[0], at every history
    time where enthalpy indicates boiling (see
    private_history_data_with_boiling).  Appends one batch-plot entry per
    matched block.
    """
    from copy import deepcopy
    from gopest.common import private_cleanup_name
    obsDefault = userEntry.obsDefault

    entries = private_history_data_with_boiling(userEntry)
    if len(entries) == 0:
        raise Exception("User entry yields no observation: " + str(userEntry))
    # times of the boiling entries, for the zero line in the plots
    # (bug fix: zip(*entries)[1] raised TypeError in Python 3 - a zip
    # object is not subscriptable)
    boiling_times = [t for _o, t in entries]

    psat_obses = []
    name = eval(userEntry.obsInfo[0])
    bs = private_all_blocks_in_geners(name, dat.generator.keys())
    if len(bs) == 0:
        msg = 'No GENERs matches with ' + name
        raise Exception(msg + "User entry yields no observation: " + str(userEntry))
    for b in bs:
        baseName = obsDefault.OBSNME + '_' + private_cleanup_name(b)
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0
        for o, t in entries:
            obsBaseNameCount[baseName] += 1
            obs = deepcopy(obsDefault)
            obs.OBSNME = baseName + '_' + ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = 0.0
            psat_obses.append(obs)
        # generate batch plot entries
        # (bug fix: private_boiling_plot() requires an obsname argument
        # that was previously missing from this call)
        userEntry.batch_plot_entry.append(private_boiling_plot(
            b, name, userEntry.obsInfo[1], boiling_times, baseName))
    return psat_obses
def boiling_modelresult(geo, dat, lst, userEntry):
    """Model-result side of 'boiling'.

    For every block feeding a GENER matching the regex in obsInfo[0],
    extracts the simulated temperature and pressure histories, forms
    P - Psat(T) (distance to boiling), and interpolates it onto the
    boiling-filtered field-data times.  Returns the concatenated values,
    block by block, matching the order of the _fielddata observations.
    """
    import numpy as np
    from t2thermo import sat
    from numpy import interp
    obsDefault = userEntry.obsDefault

    entries = private_history_data_with_boiling(userEntry)
    obses, timelist = map(list, zip(*entries))

    name = eval(userEntry.obsInfo[0])
    bs = private_all_blocks_in_geners(name, lst.generation.row_name)
    if len(bs) == 0:
        msg = 'No GENERs matches with ' + name
        raise Exception(msg + "User entry yields no observation: " + str(userEntry))
    # two selections per block: temperature then pressure
    selection = []
    for b in bs:
        # print "Boiling Block '%s' from '%s'" % (b, name)
        selection.append(('e',b,FIELD['temp']))
        selection.append(('e',b,FIELD['pres']))
    tbl = lst.history(selection)
    alltimes = tbl[0][0] # assuming all times are the same
    allpdiffs = []
    for i in range(len(bs)):
        ts = tbl[i*2][1]     # temperatures of block i
        ps = tbl[i*2+1][1]   # pressures of block i
        pdiff_to_boil = []
        for (t,p) in zip(ts,ps):
            pdiff_to_boil.append(p - sat(t))
        pdiffs = interp(timelist,alltimes,np.array(pdiff_to_boil))
        allpdiffs += list(pdiffs)
    return allpdiffs
def private_enthalpy_plot(gname, datafile, timelist, enlist):
    """Build a (timgui) batch-plot dict for a gener enthalpy history: the
    modelled HistoryGenerEnthalpy series plus the frozen field data, with
    data times converted from seconds to years."""
    seconds_per_year = 365.25*24.*60.*60.
    model_series = {
        "type": "HistoryGenerEnthalpy",
        "gener": gname,
    }
    data_series = {
        "type": "FrozenDataSeries",
        "name": datafile,
        "original_series": "",
        "frozen_x": [t/seconds_per_year for t in timelist],
        "frozen_y": enlist,
        "xunit": "t2year",
        "yunit": "kJ/kg", #edited to kj/kg
    }
    return {
        "series": [model_series, data_series],
        "ylabel": "Average Enthalpy",
        "xlabel": "Time",
        "title": gname,
    }
def enthalpy_fielddata(geo,dat,userEntry):
    """Field-data side of the 'enthalpy' obs type; called by goPESTobs.py.
    Also appends an enthalpy batch-plot entry for the gener pattern."""
    seconds_per_year = 365.25*24.*60.*60.
    pairs = private_history_data(userEntry, 1000.0, seconds_per_year)
    obses, times = map(list, zip(*pairs))
    gener_pattern = eval(userEntry.obsInfo[0])
    userEntry.batch_plot_entry.append(private_enthalpy_plot(
        gener_pattern, userEntry.obsInfo[1], times, [o.OBSVAL for o in obses]))
    return obses
def enthalpy_modelresult(geo,dat,lst,userEntry):
    """Model-result side of 'enthalpy'.

    obsInfo[0] is a regex matched against gener names.  Computes the
    mass-flow-weighted average enthalpy over all matching geners at each
    listing time, then interpolates onto the field-data times.

    NOTE(review): when no gener matches, this returns the scalar 0.0
    while every other path returns a list - confirm downstream handles
    that shape difference.
    """
    # name,timelist
    name = eval(userEntry.obsInfo[0])
    entries = private_history_data(userEntry, 1000.0, 365.25*24.*60.*60.)
    obses, timelist = map(list, zip(*entries))
    """
    # nearest value
    import numpy as np
    lst.index = np.abs(lst.fulltimes-time).argmin()
    rows = lst.generation.rows_matching(('',name))
    total_heat, total_mass = 0.0, 0.0
    for r in rows:
        total_mass += r['Generation rate']
        total_heat += r['Generation rate'] * r['Enthalpy']
    if total_mass > 0.0:
        return total_heat / total_mass
    else:
        return 0.0
    """
    import numpy as np
    from mulgrids import unfix_blockname,fix_blockname
    allgs = lst.generation.row_name
    import re
    # match against both the unfixed and the raw gener name
    pattern = re.compile(name)
    gs = [(b,g) for (b,g) in allgs if pattern.match(unfix_blockname(g)) or pattern.match(g)]
    if len(gs) ==0:
        print('Warning, no GENERs matches with ', name)
        return 0.0
    # print "'%s' matches %i geners." % (name, len(gs))
    # two selections per gener: rate then enthalpy
    selection = []
    for gname in gs:
        selection.append(('g',gname,FIELD['rate']))
        selection.append(('g',gname,FIELD['enth']))
    tbl = lst.history(selection)
    alltimes = tbl[0][0] # assuming all times are the same
    total_heat = np.array([0.0 for i in range(len(alltimes))])
    total_mass = np.array([0.0 for i in range(len(alltimes))])
    for i in range(len(gs)):
        # heat = sum(rate * enthalpy), mass = sum(rate), per time step
        total_heat = total_heat + tbl[i*2+1][1] * tbl[i*2][1]
        total_mass = total_mass + tbl[i*2][1]
    average_enth = []
    for (mass,heat) in zip(total_mass,total_heat):
        # guard against division by (near-)zero total mass flow
        if abs(mass) <= 1.0e-7:
            average_enth.append(0.0)
        else:
            average_enth.append(heat/mass)
    allenths = np.array(average_enth)
    from numpy import interp
    es = interp(timelist,alltimes,allenths)
    return list(es)
def gradient_by_central(xs, ys):
    """Gradient of ys with respect to xs: central differences in the
    interior, one-sided differences at the two ends.  Accepts sequences or
    numpy arrays and returns a numpy array."""
    import numpy as np
    if type(xs) is not np.ndarray:
        xs = np.array(xs)
    if type(ys) is not np.ndarray:
        ys = np.array(ys)

    # neighbours of each sample, clamped at the boundaries
    prev_y = np.hstack((ys[0], ys[:-1]))
    next_y = np.hstack((ys[1:], ys[-1]))
    # spacing before/after each sample; zero padding at the ends makes the
    # boundary formulas collapse to one-sided differences
    gap_before = np.hstack((0.0, np.diff(xs)))
    gap_after = np.hstack((np.diff(xs), 0.0))

    return (next_y - prev_y) / (gap_after + gap_before)
def enthalpy_json_fielddata(geo,dat,userEntry):
    """Field-data side of the 'enthalpy_json' obs type; called by
    goPESTobs.py.

    obsInfo[0] is a JSON file keyed by well name, each well carrying
    'times' (decimal years) and 'enthalpy' (kJ/kg); the following obsInfo
    lines each name one well.  Optional obsDefault settings:
    _DESIRED_DATA_TIMES / _INTERP_LIMIT (resample onto target times),
    _ENDS_WEIGHT_FACTOR (re-weight series ends), _WELL_TO_GENERS (map well
    name to gener pattern for plotting), _GRADIENT_WEIGHT_FACTOR (emit
    additional enthalpy-gradient observations).
    """
    jfilename = userEntry.obsInfo[0]
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    vFactor = 1000.0 # assume given J/kg
    tFactor = 365.25*24.*60.*60. # assume given decimal years

    import numpy as np

    def weight_two_ends(def_weight, ends_factor, final_times):
        """ use this to increase or decrease weighting of start/end points in
        history the middle of the data point will be kept as the set 'WEIGHT',
        then linearly increases to the specified value here towards two ends.
        It can be either larger than 1.0 or smaller than 1.0.
        """
        if (final_times[-1] - final_times[0]) < 1.0e-7:
            raise Exception("_ENDS_WEIGHT_FACTOR doesn't work if time range too small")
        final_weights = np.ones(len(final_times)) * def_weight
        half_t_range = (final_times[-1] - final_times[0]) / 2.0
        mid_t = (final_times[-1] + final_times[0]) / 2.0
        for i,t in enumerate(final_times):
            # linear ramp from 1.0 at mid-time to ends_factor at either end
            add_fac = abs(t - mid_t) / half_t_range * (ends_factor - 1.0)
            final_weights[i] = final_weights[i] * (add_fac + 1.0)
        return final_weights

    import json
    # 1st line is json file name (of all wells)
    with open(jfilename, 'r') as f:
        e_bywell = json.load(f)

    obses = []

    skipped_entryline = []
    skipped_gradient = []
    for oline in userEntry.obsInfo[1:]:
        wname = eval(oline)
        times, vals = [], []
        # customFilter is evaluated with 'time' and 'val' in scope
        for time,val in zip(e_bywell[wname]['times'], e_bywell[wname]['enthalpy']):
            if eval(customFilter):
                times.append(time), vals.append(val)

        if len(times) == 0:
            raise Exception("User entry yields no observation: %s" % wname + str(userEntry))

        # optionally resample onto the user's desired observation times
        if hasattr(obsDefault, '_DESIRED_DATA_TIMES'):
            desired_times = obsDefault._DESIRED_DATA_TIMES
            final_times = [t for t in desired_times if times[0] <= t <= times[-1]]
            if hasattr(obsDefault, '_INTERP_LIMIT'):
                interp_limit = obsDefault._INTERP_LIMIT
                final_times = target_times(desired_times, interp_limit, list(zip(times,vals)))
            if len(final_times) == 0:
                raise Exception("User entry yields no observation: " + str(userEntry))
            from numpy import interp
            final_vals = list(interp(final_times,times,vals))
        else:
            final_times, final_vals = times, vals

        # set this to increase or decrease weighting of start/end points in
        # history the middle of the data point will be kept as the set 'WEIGHT',
        # then linearly increases to the specified value here towards two ends.
        # It can be either larger than 1.0 or smaller than 1.0.
        if hasattr(obsDefault, '_ENDS_WEIGHT_FACTOR'):
            ends_factor = float(obsDefault._ENDS_WEIGHT_FACTOR)
            final_weights = weight_two_ends(obsDefault.WEIGHT, ends_factor, final_times)
        else:
            final_weights = np.ones(len(final_times)) * obsDefault.WEIGHT

        from gopest.common import private_cleanup_name
        baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(wname)[:5]
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0

        for time, val, w in zip(final_times, final_vals, final_weights):
            obsBaseNameCount[baseName] += 1
            from copy import deepcopy
            obs = deepcopy(obsDefault)
            from gopest.common import private_cleanup_name
            obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = val * vFactor
            obs.WEIGHT = w
            obses.append(obs)

        # gener pattern used for the plot; optionally mapped from well name
        gpattern = wname
        if hasattr(obsDefault, '_WELL_TO_GENERS'):
            import json
            with open(obsDefault._WELL_TO_GENERS, 'r') as f:
                well_to_geners_dict = json.load(f)
            gpattern = well_to_geners_dict[wname]

        # generate batch plot entries
        plot = enthalpy_plot(
            gpattern, baseName, final_times, final_vals,
            ('Well %s' % wname) )
        if PLOT_RAW_FIELD_DATA:
            ts, vs = e_bywell[wname]['times'], e_bywell[wname]['enthalpy']
            plot = plot_append_raw_data(plot, 'raw_'+wname, ts, vs, xunit="year", yunit="kJ/kg") #edited to kJ/kg
        userEntry.batch_plot_entry.append(plot)

        # optionally add enthalpy-gradient observations (group name + '_g')
        if hasattr(obsDefault, '_GRADIENT_WEIGHT_FACTOR'):

            w = float(obsDefault._GRADIENT_WEIGHT_FACTOR) * obsDefault.WEIGHT
            if len(final_times) <= 1:
                print('enthalpy_json: User entry has too few data for gradient, skipping: %s' % wname)
                skipped_gradient.append(wname)
                continue
            final_gradients = gradient_by_central(final_times, final_vals)

            baseName = 'g' + obsDefault.OBSNME +'_'+ private_cleanup_name(wname)[:5]
            if baseName not in obsBaseNameCount:
                obsBaseNameCount[baseName] = 0

            for time, grad in zip(final_times, final_gradients):
                obsBaseNameCount[baseName] += 1
                from copy import deepcopy
                obs = deepcopy(obsDefault)
                from gopest.common import private_cleanup_name
                obs.OBGNME = obs.OBGNME + '_g'
                obs.OBSNME = baseName +'_'+ ('%04d' % obsBaseNameCount[baseName])
                obs.OBSVAL = grad
                obs.WEIGHT = w
                obses.append(obs)
    # if skipped_gradient or skipped_entryline:
    #     print('See:\n' + str(userEntry))
    return obses
def private_remove_zeros(times, values):
    """Return (times, values) as numpy arrays with all exactly-zero values
    (and their paired times) removed."""
    import numpy as np
    kept = [(t, v) for t, v in zip(times, values) if v != 0.0]
    if kept:
        kept_times, kept_vals = zip(*kept)
    else:
        kept_times, kept_vals = [], []
    return np.array(kept_times), np.array(kept_vals)
1761
+
1762
def enthalpy_json_modelresult(geo,dat,lst,userEntry):
    """Return modelled production enthalpies matching the field-data side.

    For each well named in userEntry.obsInfo[1:], find the generators in the
    listing file whose names match the well name (or the regex mapped to it
    via the optional obsDefault._WELL_TO_GENERS JSON file), compute the
    mass-weighted average enthalpy history of those generators, and
    interpolate it onto the same final observation times the field-data
    routine uses.  Values for all wells (plus optional gradient values) are
    concatenated into one flat list, in the same order as the field
    observations.
    """
    # obsInfo[0] is the JSON file of field data (used here only to rebuild
    # the same final_times as the fielddata routine).
    jfilename = userEntry.obsInfo[0]
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    vFactor = 1.0 # assume given J/kg
    tFactor = 365.25*24.*60.*60. # assume given decimal years

    import json
    # 1st line is json file name (of all wells)
    with open(jfilename, 'r') as f:
        e_bywell = json.load(f)

    # optional mapping: well name -> generator-name regex pattern
    if hasattr(obsDefault, '_WELL_TO_GENERS'):
        with open(obsDefault._WELL_TO_GENERS, 'r') as f:
            well_to_geners_dict = json.load(f)

    alles = []

    for oline in userEntry.obsInfo[1:]:
        # each obsInfo line is a python expression evaluating to a well name
        wname = eval(oline)
        times, vals = [], []
        for time,val in zip(e_bywell[wname]['times'], e_bywell[wname]['enthalpy']):
            # customFilter is a python expression with 'time'/'val' in scope
            if eval(customFilter):
                times.append(time), vals.append(val)

        if len(times) == 0:
            raise Exception("User entry yields no observation: " + str(userEntry))

        # Reconstruct the same observation times as the fielddata routine:
        # optionally snap to _DESIRED_DATA_TIMES (within data range), and
        # optionally restrict with target_times() when _INTERP_LIMIT is set.
        if hasattr(obsDefault, '_DESIRED_DATA_TIMES'):
            desired_times = obsDefault._DESIRED_DATA_TIMES
            final_times = [t for t in desired_times if times[0] <= t <= times[-1]]
            if hasattr(obsDefault, '_INTERP_LIMIT'):
                interp_limit = obsDefault._INTERP_LIMIT
                final_times = target_times(desired_times, interp_limit, list(zip(times,vals)))
            if len(final_times) == 0:
                raise Exception("User entry yields no observation: " + str(userEntry))
            from numpy import interp
            final_vals = list(interp(final_times,times,vals))
        else:
            final_times, final_vals = times, vals

        import numpy as np
        from mulgrids import unfix_blockname,fix_blockname
        # (block, gener) name pairs of all generators in the listing
        allgs = lst.generation.row_name

        gpattern = wname
        if hasattr(obsDefault, '_WELL_TO_GENERS'):
            gpattern = well_to_geners_dict[wname]

        import re
        # match against both fixed and unfixed forms of the gener name
        pattern = re.compile(gpattern)
        gs = [(b,g) for (b,g) in allgs if pattern.match(unfix_blockname(g)) or pattern.match(g)]
        if len(gs) ==0:
            print('Warning, no GENERs matches with ', gpattern)
            # NOTE(review): returns a scalar here, not a list; the caller
            # presumably expects one value per observation — confirm intended.
            return 0.0
        # print "'%s' matches %i geners." % (wname, len(gs))
        # history selection interleaves rate and enthalpy per gener, so
        # gener i occupies table rows i*2 (rate) and i*2+1 (enthalpy).
        selection = []
        for gname in gs:
            selection.append(('g',gname,FIELD['rate']))
            selection.append(('g',gname,FIELD['enth']))
        tbl = lst.history(selection)
        alltimes = tbl[0][0] # assuming all times are the same
        # mass-weighted average enthalpy over all matched geners
        total_heat = np.array([0.0 for i in range(len(alltimes))])
        total_mass = np.array([0.0 for i in range(len(alltimes))])
        for i in range(len(gs)):
            total_heat = total_heat + tbl[i*2+1][1] * tbl[i*2][1]
            total_mass = total_mass + tbl[i*2][1]
        average_enth = []
        for (mass,heat) in zip(total_mass,total_heat):
            # avoid division by (near-)zero mass; report 0.0 enthalpy instead
            if abs(mass) <= 1.0e-7:
                average_enth.append(0.0)
            else:
                average_enth.append(heat/mass)
        allenths = np.array(average_enth)
        # convert model seconds to data years, shifting by the offset
        # (offsetTime presumably in seconds — TODO confirm against caller)
        alltimes = alltimes / tFactor + offsetTime / tFactor
        if hasattr(obsDefault, '_REMOVE_ZEROS'):
            if obsDefault._REMOVE_ZEROS:
                alltimes, allenths = private_remove_zeros(alltimes, allenths)
        from numpy import interp
        # print "~~~~~", gpattern, len(final_times), len(alltimes), len(allenths)
        if len(alltimes) == 0:
            es = [0.0] * len(final_times)
            # es = [v - 300.0e3 for v in final_vals]
        else:
            # force enthalpy to be "reasonable", avoid crazy obj fn.
            allenths = [min(3.0e6,max(0.0,enth)) for enth in allenths]
            es = interp(final_times,alltimes,allenths)
        alles = alles + list(es)

        # optional gradient observations, mirroring the fielddata side
        # (which also skips wells with <= 1 point)
        if hasattr(obsDefault, '_GRADIENT_WEIGHT_FACTOR'):
            if len(final_times) <= 1:
                # print('User entry has too few data for gradient, skipping: %s\n%s' % (wname, str(userEntry)))
                continue
            final_gradients = gradient_by_central(final_times, es)
            alles = alles + list(final_gradients)

    return alles
1860
+
1861
def enthalpy_plot(gname, dataname, ts, es, title):
    """ for (timgui) batch plotting """
    # model series: enthalpy history of the matched gener(s)
    model_series = {
        "type": "HistoryGenerEnthalpy",
        "gener": gname,
    }
    # field-data series frozen into the plot definition
    data_series = {
        "type": "FrozenDataSeries",
        "name": dataname,
        "original_series": "",
        "frozen_x": list(ts),
        "frozen_y": list(es),
        "xunit": "year",
        "yunit": "kJ/kg", #edited to kJ/kg
    }
    return {
        "series": [model_series, data_series],
        "ylabel": "Average Enthalpy",
        "xlabel": "Time",
        "title": title,
    }
1883
+
1884
+
1885
def boiling_json_fielddata(geo, dat, userEntry):
    """ called by goPESTobs.py

    Create 'boiling' observations: for each well whose field enthalpy rises
    above the boiling threshold, every model block feeding that well's
    generators gets one observation per observation time with target
    OBSVAL 0.0 (zero pressure-distance to boiling).  The block name and
    model time are attached as obs._block_ / obs._mtime_ for the
    modelresult routine.

    Optional obsDefault properties:
      _BOILING_ABOVE_ENTH  enthalpy threshold (J/kg, default 1200.0e3)
      _WELL_TO_GENERS      JSON file mapping well name -> gener regex
      _DESIRED_DATA_TIMES / _INTERP_LIMIT  as in the other obs types

    Raises Exception when a well never boils, when no geners match, or when
    two wells claim the same block.
    """
    from copy import deepcopy
    from gopest.common import private_cleanup_name
    import json
    import numpy as np

    jfilename = userEntry.obsInfo[0]
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    vFactor = 1.0 # assume given J/kg
    tFactor = 365.25*24.*60.*60. # assume given decimal years

    # default of 273 degree boiling, J/kg, unit same as internal object unit
    # (same as TOUGH2 unit)
    eboil = 1200.0e3
    if hasattr(obsDefault, '_BOILING_ABOVE_ENTH'):
        eboil = obsDefault._BOILING_ABOVE_ENTH

    # 1st line is json file name (of all wells)
    with open(jfilename, 'r') as f:
        e_bywell = json.load(f)

    # load the well -> gener-pattern mapping once, not once per well
    # (consistent with enthalpy_json_modelresult)
    if hasattr(obsDefault, '_WELL_TO_GENERS'):
        with open(obsDefault._WELL_TO_GENERS, 'r') as f:
            well_to_geners_dict = json.load(f)

    obses = []
    boiling_blocks, blk_gener = [], {}

    for oline in userEntry.obsInfo[1:]:
        # each obsInfo line is a python expression evaluating to a well name
        wname = eval(oline)
        gpattern = wname
        if hasattr(obsDefault, '_WELL_TO_GENERS'):
            gpattern = well_to_geners_dict[wname]

        # keep only (filtered) points where the well is boiling
        times, vals = [], []
        for time,val in zip(e_bywell[wname]['times'], e_bywell[wname]['enthalpy']):
            if eval(customFilter):
                if val >= eboil:
                    times.append(time), vals.append(val)

        if len(times) == 0:
            print(wname, e_bywell[wname]['times'], e_bywell[wname]['enthalpy'])
            raise Exception("User entry has no boiling: " + wname + str(userEntry))
        if hasattr(obsDefault, '_DESIRED_DATA_TIMES'):
            desired_times = obsDefault._DESIRED_DATA_TIMES
            final_times = [t for t in desired_times if times[0] <= t <= times[-1]]
            if hasattr(obsDefault, '_INTERP_LIMIT'):
                interp_limit = obsDefault._INTERP_LIMIT
                final_times = target_times(desired_times, interp_limit, list(zip(times,vals)))
            final_vals = list(np.interp(final_times,times,vals))
        else:
            final_times, final_vals = times, vals
        if len(final_times) == 0:
            raise Exception("User entry yields no observation: " + wname + str(userEntry))

        if isinstance(dat, dict):
            # waiwera JSON input
            gen_keys = [(geo.block_name_list[geo.num_atmosphere_blocks:][gen['cell']], gen['name']) for gen in dat['source']]
        else:
            # aut2 dat
            gen_keys = dat.generator.keys()
        bs = private_all_blocks_in_geners(gpattern, gen_keys)
        if len(bs) == 0:
            print('gen_keys = ' + str(type(dat)) + ' ' + str(gen_keys))
            msg = "Check if dat.filename contains any matching geners:" + gpattern
            msg += "\n ^^^^^ user entry yields no observation: " + wname + str(userEntry)
            raise Exception(msg)
        for b in bs:
            # each block may belong to at most one well's boiling obs
            if b in boiling_blocks:
                msg1 = "block '%s' already used by previous gener '%s'" % (b, blk_gener[b])
                msg2 = "User entry yields additional boiling blocks: " + wname + str(userEntry)
                raise Exception('\n'.join([msg2, msg1]))
            blk_gener[b] = wname
            boiling_blocks.append(b)
            baseName = obsDefault.OBSNME +'_'+ private_cleanup_name(b)
            for time, val in zip(final_times, final_vals):
                obs = deepcopy(obsDefault)
                obs.OBSNME = unique_obs_name(obs.OBSNME, b)
                obs.OBSVAL = 0.0
                obs.WEIGHT = obsDefault.WEIGHT
                # stash block/model-time for boiling_json_modelresult
                obs._block_ = b
                obs._mtime_ = time * tFactor - offsetTime
                obses.append(obs)
            # generate batch plot entries
            userEntry.batch_plot_entry.append(private_boiling_plot(
                b, gpattern, wname, final_times, baseName))

    return obses
1980
+
1981
def boiling_json_modelresult(geo,dat,lst,userEntry):
    """Return model 'pressure distance to boiling' for each boiling obs.

    Re-runs boiling_json_fielddata() to recover the (block, model-time)
    pairs of every boiling observation, then for each block reads the
    temperature, pressure and CO2 partial-pressure histories from the
    listing and computes p - p_CO2 - p_sat(T).  Negative values
    (superheated) are clamped to 0.0, matching the field-side target
    OBSVAL of 0.0.  Values are interpolated to each observation's model
    time and concatenated in the same order as the field observations.
    """
    from t2thermo import sat
    import numpy as np

    obses = boiling_json_fielddata(geo, dat, userEntry)
    # Group observation times by block.  Times are appended to the most
    # recently started group, which is only correct because the fielddata
    # routine emits observations contiguously per block (and raises if a
    # block appears under two wells).
    bs, tss = [], []
    for obs in obses:
        b = obs._block_
        t = obs._mtime_
        # print b, t
        if b not in bs:
            bs.append(b)
            tss.append([])
        tss[-1].append(t)

    # history selection: three rows per block, in the order temp/pres/pco2,
    # so block i occupies table rows i*3, i*3+1, i*3+2 below
    selection = []
    for b, ts in zip(bs, tss):
        selection.append(('e',b,FIELD['temp']))
        selection.append(('e',b,FIELD['pres']))
        selection.append(('e',b,FIELD['pco2']))

    allpdiffs = []
    tbl = lst.history(selection)
    alltimes = tbl[0][0] # assuming all times are the same
    for i,b in enumerate(bs):
        temps = tbl[i*3][1]
        press = tbl[i*3+1][1]
        pco2 = tbl[i*3+2][1]
        pdiff_to_boil = []
        for (t,p,p2) in zip(temps,press,pco2):
            # water partial pressure above saturation pressure at this temp
            pd = p - p2 - sat(t)
            if pd < 0.0:
                pd = 0.0 # allow super heating, treated as zero (good)
            pdiff_to_boil.append(pd)
        pdiffs = np.interp(tss[i], alltimes, np.array(pdiff_to_boil))
        allpdiffs += list(pdiffs)

    return allpdiffs
2019
+
2020
+
2021
def private_history_data(userEntry,vFactor=1.0,tFactor=1.0):
    """ returns entries of history data read from files.

    Reads two columns (time, value) per line from the field data file named
    by userEntry.obsInfo[1], stopping at the first blank line.  Each kept
    point becomes a deep copy of userEntry.obsDefault with a uniquely
    numbered OBSNME and OBSVAL scaled by vFactor; each entry is returned as
    (obs, model_time) where model_time = time * tFactor - offsetTime.  The
    kept points (in original data units) are also written to
    '<fieldDataFile>.obs'.

    If the default obs has (optional) property '_DESIRED_DATA_TIMES', the data
    from field data files will be interpolated into the desired times. The
    purpose of this was to make the observations more uniform in time, use with
    care. It should be a list of time with the same unit and offset of the data
    files, NOT the rest of PEST/TOUGH2.

    Raises Exception when filtering/interpolation leaves no data points.
    """
    # hoisted out of the per-point loop (they were re-imported per entry)
    from copy import deepcopy
    from gopest.common import private_cleanup_name

    fieldDataFile = userEntry.obsInfo[1]
    customFilter = userEntry.customFilter
    offsetTime = userEntry.offsetTime
    obsDefault = userEntry.obsDefault
    # 'with' guarantees both files are closed even on the raise paths below
    # (previously they were left open if an exception occurred)
    with open(fieldDataFile, 'r') as f, open(fieldDataFile + '.obs', 'w') as fo:
        times, vals = [], []
        for line in f:
            if line.strip() == '': break
            time, val = [float(x) for x in line.split()[0:2]]
            # customFilter is a python expression with 'time'/'val' in scope
            if eval(customFilter):
                times.append(time)
                vals.append(val)

        if len(times) == 0:
            raise Exception("User entry yields no observation: " + str(userEntry))

        if hasattr(obsDefault, '_DESIRED_DATA_TIMES'):
            desired_times = obsDefault._DESIRED_DATA_TIMES
            # keep only desired times lying within the data's time range
            final_times = [t for t in desired_times if times[0] <= t <= times[-1]]
            if hasattr(obsDefault, '_INTERP_LIMIT'):
                interp_limit = obsDefault._INTERP_LIMIT
                final_times = target_times(desired_times, interp_limit, list(zip(times, vals)))
            if len(final_times) == 0:
                raise Exception("User entry yields no observation: " + str(userEntry))
            from numpy import interp
            final_vals = list(interp(final_times, times, vals))
        else:
            final_times, final_vals = times, vals

        entries = []
        # obsBaseNameCount is a module-level counter shared across obs types,
        # used to keep generated observation names unique
        baseName = obsDefault.OBSNME + '_' + private_cleanup_name(userEntry.obsInfo[0])[:5]
        if baseName not in obsBaseNameCount:
            obsBaseNameCount[baseName] = 0
        for time, val in zip(final_times, final_vals):
            obsBaseNameCount[baseName] += 1
            obs = deepcopy(obsDefault)
            obs.OBSNME = baseName + '_' + ('%04d' % obsBaseNameCount[baseName])
            obs.OBSVAL = val * vFactor
            entries.append((obs, time * tFactor - offsetTime))
            # output data file in original unit (instead of PEST/TOUGH2)
            fo.write('%e %e\n' % (time, val))
    return entries
2078
+
2079
+
2080
def private_has_re(text):
    """ simplified check of a string contains regular expression or not """
    # treat any regex metacharacter as evidence of a pattern
    return any(ch in text for ch in '.^$*+?{}[]()')