hspf-2.1.1-py3-none-any.whl → hspf-2.1.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Binary file
Binary file
Binary file
Binary file
Binary file
Binary file
@@ -0,0 +1 @@
+ WDM WDM1 C:\Program Files (x86)\HSPEXP+\WinHSPFLt\hspfmsg.wdm
hspf/build_warehouse.py CHANGED
@@ -281,12 +281,49 @@ for key,ts_names in outputs.items():
    dfs.append(df)
output_df = pd.concat(dfs).reset_index(drop=True)

+ dfs = []
+ for key,data in hbn.hbns[0].data_frames.items():
+     keys = key.split('_')
+     operation = keys[0]
+     activity = keys[1]
+     opnid = int(keys[2])
+     t_code = keys[3]
+     data.reset_index(inplace=True)
+     data.rename(columns={'index': 'datetime'}, inplace=True)
+     data = data.melt(id_vars = ['datetime'],var_name = 'ts_name', value_name = 'value')
+     data['operation'] = operation
+     data['activity'] = activity
+     data['opnid'] = opnid
+     data['t_code'] = t_code
+     data['model_name'] = model_name
+     dfs.append(data)
+ output_df = pd.concat(dfs).reset_index(drop=True)
+
+
+
+ # Write to Parquet with DuckDB, including "t_code" as a partition
+ output_path = "model_outputs"
+
+ con = duckdb.connect(database=':memory:') # Temporary in-memory database
+ con.execute(f"""
+     COPY output_df
+     TO '{output_path}'
+     (FORMAT 'parquet', PARTITION_BY ('model_name','operation', 'opnid'))
+ """)
+
+ print(f"Data written to {output_path}")

- ts_name = 'PERO'
- op_type = 'PERLND'
- t_code = 4


+ ['PERO',
+  'SURO',
+  'IFWO',
+  'AGWO']
+
+ for constituent in ['Q','TSS','TP','N','OP','BOD','TKN']:
+     t_cons = helpers.get_tcons(constituent,'RCHRES','lb')
+     df = hbn.get_rechres_data(constituent, units='lb', freq='daily').reset_index()
+

pero = hbn.get_multiple_timeseries(op_type,t_code,ts_name).reset_index().rename(columns={'index': 'datetime'})
pero = pero.melt(id_vars = ['datetime'],var_name = 'operation_id', value_name = 'value')
@@ -295,15 +332,58 @@ pero['t_code'] = t_code
pero['model_name'] = model_name


+ db_path = 'c:/Users/mfratki/Documents/ucis.duckdb'
with duckdb.connect(db_path) as con:
    warehouse.insert_model_run(con, model_name, run_id)

+ db_path = 'c:/Users/mfratki/Documents/ucis.duckdb'
+ with duckdb.connect(db_path) as conn:
+     conn.execute("CREATE SCHEMA if not exists reports")
+     conn.execute("CREATE TABLE if not exists reports.catchment_loading AS SELECT * FROM df")
+     conn.close()
+

+ # Average annual loading by catchment
+ db_path = 'c:/Users/mfratki/Documents/ucis.duckdb'
+ with duckdb.connect(db_path) as conn:
+     query = f"""
+         SELECT
+             model_name,
+             operation AS operation_type,
+             opnid AS operation_id,
+             t_code,
+             ts_name AS constituent,
+             AVG(value) * 365.25 AS annual_loading
+         FROM reports.catchment_loading
+         WHERE t_code = 'PERLND' AND constituent IN ('Q','TP','TSS','N','OP','BOD','TKN')
+         GROUP BY model_name, TVOLNO, constituent
+     """
+     annual_loadings = conn.execute(query).fetchdf()
+     conn.close()
+
+ hbn.hbns[0].data_frames.keys()
+
+
+
+ import duckdb
+ import pandas as pd


+ # Convert to DataFrame
+ df = pd.DataFrame(data)
+ df['datetime'] = pd.to_datetime(df['datetime']) # Ensure datetime column is formatted properly

+ # Write to Parquet with DuckDB, including "t_code" as a partition
+ output_path = "model_outputs"

+ con = duckdb.connect(database=':memory:') # Temporary in-memory database
+ con.execute(f"""
+     COPY df
+     TO '{output_path}'
+     (FORMAT 'parquet', PARTITION_BY ('operation_type', 'operation_id', 't_code'))
+ """)

+ print(f"Data written to {output_path}")


hspf/hbn.py CHANGED
@@ -140,8 +140,15 @@ def get_simulated_flow(hbn,time_step,reach_ids,unit = None):
    flows.attrs['unit'] = unit
    return flows

- def get_simulated_temperature(hbn,units,time_step,reach_ids):
-     raise NotImplementedError()
+ def get_simulated_temperature(hbn,time_step,reach_ids):
+     assert len(reach_ids) == 1, "Temperature can only be retreived for one reach at a time."
+
+
+     wt = hbn.get_multiple_timeseries('RCHRES',time_step,'TW', reach_ids)
+     wt = wt.sum(axis=1)
+     wt.attrs['unit'] = 'degf'
+
+     return wt


def get_simulated_reach_constituent(hbn,constituent,time_step,reach_ids,unit = None):
@@ -152,11 +159,11 @@ def get_simulated_reach_constituent(hbn,constituent,time_step,reach_ids,unit = N
    if unit is None:
        unit = UNIT_DEFAULTS[constituent]
    else:
-         assert(unit in ['mg/l','lb','cfs','degF'])
+         assert(unit in ['mg/l','lb'])

    t_cons = helpers.get_tcons(constituent,'RCHRES','lb')

-     # Correct instances when a flow needs to be subtracted (rare)
+     # Correct instances when a reach output needs to be subtracted (rare)
    df = pd.concat([hbn.get_multiple_timeseries('RCHRES',time_step,t_con,[abs(reach_id) for reach_id in reach_ids])*sign for t_con in t_cons],axis=1).sum(axis=1)

    if constituent == 'TSS':
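get_simulated_temperature now pulls the 'TW' timeseries for a single reach, collapses it to a Series, and tags the result with a degf unit attribute. A small sketch of that collapse-and-tag pattern on a stand-in frame; the real call goes through hbn.get_multiple_timeseries, which is not reproduced here.

import pandas as pd

# Stand-in for the single-reach result of hbn.get_multiple_timeseries('RCHRES', time_step, 'TW', reach_ids)
tw_frame = pd.DataFrame(
    {101: [54.3, 55.1, 56.0]},   # one reach id -> simulated water temperature (deg F)
    index=pd.date_range('1996-07-01', periods=3, freq='D'),
)

wt = tw_frame.sum(axis=1)        # with exactly one reach this just flattens the frame to a Series
wt.attrs['unit'] = 'degf'
print(wt.attrs['unit'], wt.iloc[0])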
hspf/helpers.py CHANGED
@@ -48,9 +48,10 @@ def get_tcons(nutrient_name,operation,units = 'mg/l'):
        'N' :['NO3OUTTOT','NO2OUTTOT'], # N
        'OP' :['PO4OUTDIS'], # Ortho
        'TP' :['PTOTOUT'],
-         'BOD' :['BODOUTTOT']},
+         'BOD' :['BODOUTTOT'],},
    'cfs': {'Q': ['ROVOL']},
-     'acrft' : {'Q': ['ROVOL']}}
+     'acrft' : {'Q': ['ROVOL']},
+     'degf' : {'WT': ['TW']}}

    t_cons = MAP[units]
elif operation == 'PERLND':
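The helpers change registers water temperature in the RCHRES lookup, presumably so a degf request can resolve to the 'TW' output used by get_simulated_temperature. An illustrative fragment of that nested units → constituent → timeseries-name mapping; only a slice of the real MAP is reproduced, and the variable name is hypothetical.

# Fragment of the RCHRES lookup structure that get_tcons indexes by units.
RCHRES_MAP = {
    'cfs':   {'Q':  ['ROVOL']},
    'acrft': {'Q':  ['ROVOL']},
    'degf':  {'WT': ['TW']},   # new entry: water temperature resolves to the TW timeseries
}

t_cons = RCHRES_MAP['degf']['WT']
print(t_cons)  # ['TW']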
hspf/reports.py CHANGED
@@ -125,6 +125,8 @@ def scour(hbn,uci,start_year = '1996',end_year = '2030'):
    # schematic block will have all the possible perlands while sosed only has perlands that were simulated
    # in other words information from sosed is a subset of schematic
    for tvolno in lakeflag.index: #schematic['TVOLNO'].unique():
+         implnd_load = 0
+         prlnd_load = 0
        reach_load = depscr.loc[tvolno].values[0]
        schem_sub = schematic[schematic['TVOLNO'] == tvolno]
        if len(schem_sub) == 0:
@@ -944,7 +946,8 @@ def total_phosphorous(uci,hbn,t_code,operation = 'PERLND'):
    totals = []
    for mlno in opnids['MLNO'].unique():
        total = dissolved_orthophosphate(uci,hbn,operation,mlno,t_code) + particulate_orthophosphate(uci,hbn,operation,mlno, t_code) + organic_refactory_phosphorous(uci,hbn,operation,mlno,t_code) + labile_oxygen_demand(uci,hbn,operation,mlno,t_code)*0.007326 # Conversation factor to P
-         totals.append(total[opnids['SVOLNO'].loc[opnids['MLNO'] == mlno].to_list()])
+         if not isinstance(total,float): #TODO fix for when no data is present. Don't like this workaround.
+             totals.append(total[opnids['SVOLNO'].loc[opnids['MLNO'] == mlno].to_list()])

    total = pd.concat(totals,axis=1)
    total = total.T.groupby(total.columns).sum().T
@@ -974,6 +977,7 @@ MASSLINK_SCHEME = {'dissolved_orthophosphate': {'tmemn': 'NUIF1',
                                                'tmemsb2':''}}


+
def qualprop_transform(uci,hbn,operation,mlno,tmemn,tmemsb1,tmemsb2 = '',t_code = 4):
    masslink = uci.table('MASS-LINK',f'MASS-LINK{mlno}')
    masslink = masslink.loc[(masslink['TMEMN'] == tmemn) & (masslink['TMEMSB1'] == tmemsb1) & (masslink['TMEMSB2'] == tmemsb2)]
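In total_phosphorous, the per-mass-link sum apparently comes back as a plain float when no data is present (per the TODO comment), and a float cannot be indexed by the SVOLNO list, so those entries are now skipped before the concat. A synthetic sketch of that guard-before-concat pattern; the series labels below are made up.

import pandas as pd

totals = []
# Stand-ins for per-mass-link totals: one with data, one degenerate float (no data).
candidate_totals = [
    pd.Series([0.4, 0.6], index=['P101', 'P102']),
    0.0,
]
for total in candidate_totals:
    if not isinstance(total, float):          # skip mass-links that produced no data
        totals.append(total[['P101', 'P102']])

total = pd.concat(totals, axis=1)
print(total)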
hspf/warehouse.py CHANGED
@@ -46,41 +46,48 @@ def create_hspf_model_hierarchy_tables(con: duckdb.DuckDBPyConnection):
    -- Level 1: The overall Model (e.g., for a specific basin)
    CREATE TABLE IF NOT EXISTS hspf.models (
        model_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.model_seq'),
-         model_name VARCHAR NOT NULL UNIQUE, -- e.g., 'Nemadji River Basin Model'
-         description VARCHAR
-     );
-
-     -- Level 2: A specific Version of a Model
-     CREATE TABLE IF NOT EXISTS hspf.model_versions (
-         model_version_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.model_version_seq'),
-         model_pk BIGINT NOT NULL REFERENCES hspf.models(model_pk),
+         model_name VARCHAR NOT NULL, -- e.g., 'Nemadji River Basin Model'
        version_name VARCHAR NOT NULL, -- e.g., 'v2.1', '2025_Update'
-         release_date DATE,
-         description VARCHAR,
-         UNIQUE (model_pk, version_name)
-     );
-
-     -- Level 3: A Scenario within a Model Version
-     CREATE TABLE IF NOT EXISTS hspf.scenarios (
-         scenario_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.scenario_seq'),
-         model_version_pk BIGINT NOT NULL REFERENCES hspf.model_versions(model_version_pk),
-         scenario_name VARCHAR NOT NULL, -- e.g., 'Baseline_2020', 'Future_Climate_BMPs'
-         description VARCHAR,
-         UNIQUE (model_version_pk, scenario_name)
-     );
-
-     -- Level 4: A single execution (Run) of a Scenario
-     CREATE TABLE IF NOT EXISTS hspf.model_runs (
-         model_run_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.model_run_seq'),
-         scenario_pk BIGINT NOT NULL REFERENCES hspf.scenarios(scenario_pk),
-         run_id BIGINT,
-         run_name VARCHAR, -- e.g., 'Run_1995-2015', 'Calibration_Run_A'
-         start_year INTEGER,
-         end_year INTEGER,
-         run_timestamp TIMESTAMP DEFAULT current_timestamp,
-         notes VARCHAR
-     );
-     ''')
+         --last_extension DATE,
+         --last_update DATE,
+         --last_calibration DATE,
+         description VARCHAR
+         -- release_date DATE,
+         UNIQUE (model_name, version_name)
+     );
+     ''')
+
+     # -- Level 2: A specific Version of a Model
+     # CREATE TABLE IF NOT EXISTS hspf.model_versions (
+     #     model_version_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.model_version_seq'),
+     #     model_pk BIGINT NOT NULL REFERENCES hspf.models(model_pk),
+     #     version_name VARCHAR NOT NULL, -- e.g., 'v2.1', '2025_Update'
+     #     release_date DATE,
+     #     description VARCHAR,
+     #     UNIQUE (model_pk, version_name)
+     # );
+
+     # -- Level 3: A Scenario within a Model Version
+     # CREATE TABLE IF NOT EXISTS hspf.scenarios (
+     #     scenario_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.scenario_seq'),
+     #     model_version_pk BIGINT NOT NULL REFERENCES hspf.model_versions(model_version_pk),
+     #     scenario_name VARCHAR NOT NULL, -- e.g., 'Baseline_2020', 'Future_Climate_BMPs'
+     #     description VARCHAR,
+     #     UNIQUE (model_version_pk, scenario_name)
+     # );
+
+     # -- Level 4: A single execution (Run) of a Scenario
+     # CREATE TABLE IF NOT EXISTS hspf.model_runs (
+     #     model_run_pk BIGINT PRIMARY KEY DEFAULT nextval('hspf.model_run_seq'),
+     #     scenario_pk BIGINT NOT NULL REFERENCES hspf.scenarios(scenario_pk),
+     #     run_id BIGINT,
+     #     run_name VARCHAR, -- e.g., 'Run_1995-2015', 'Calibration_Run_A'
+     #     start_year INTEGER,
+     #     end_year INTEGER,
+     #     run_timestamp TIMESTAMP DEFAULT current_timestamp,
+     #     notes VARCHAR
+     # );
+     # ''')

def create_model_run_table(con: duckdb.DuckDBPyConnection):
    """
hspf-2.1.1.dist-info/METADATA → hspf-2.1.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: hspf
- Version: 2.1.1
+ Version: 2.1.2
Summary: Python package for downloading and running HSPF models
Project-URL: Homepage, https://github.com/mfratkin1/pyHSPF
Author-email: Mulu Fratkin <michael.fratkin@state.mn.us>
hspf-2.1.1.dist-info/RECORD → hspf-2.1.2.dist-info/RECORD CHANGED
@@ -1,20 +1,30 @@
hspf/Masslink_Timeseries.csv,sha256=TOV6PpR0SBI0FaAU1T-qyD2DyGsBFjUWZenvWXiS3wA,4985
hspf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- hspf/build_warehouse.py,sha256=J3fgycY9xkZdY3C9u0wDaEX1n6satr1t0mQmfg4Fh6E,20205
- hspf/hbn.py,sha256=xUvovcAXXLLLE_ID9kejjiyaAqnh-vwmDLdPLjsGQ8A,19401
+ hspf/build_warehouse.py,sha256=y91vNmdt918H5_jx9nxMUVmRG9ORdChYWEqQbbi3YKs,22861
+ hspf/hbn.py,sha256=g09tEQE196JA_f-4txISr3TA7aq98bv2SxEwNw0gkDA,19606
hspf/hbn2.py,sha256=OmuTVDxd0Boyd3GvBgzEfqvP7CTeYIJYPC7EXPgYu30,13190
hspf/hbn_cy.c,sha256=ZIJwWxyGx8fE5nM1HBd8-zNotmStGZscqXijl3KSRdI,593464
hspf/hbn_cy.html,sha256=o8wMdvEE547DRXcUHFPgAkkyJ665b6rloGL-qKClaTo,137735
hspf/hbn_cy.pyx,sha256=T-itpkvHlxHzQHKtJBS-M8_ToLBa1U_ajpV53hh_oI8,4323
- hspf/helpers.py,sha256=cd8J3XfCFmpER475Mk5aFL29612rgop75GRGxlofXQo,3242
+ hspf/helpers.py,sha256=Jv5lSUKVqML8iJOFfBPcA3bgHn9MbFZC4OO-9YHHP_w,3277
hspf/hspfModel.py,sha256=K_xF7HtuMpDMod56Z3IXDCeGsnUi8KGhly_9tm-mxoY,9070
- hspf/reports.py,sha256=bU9rU9qaffXosxyA3H5OWi4SqQVPiQh6709tTCMYeeU,46286
+ hspf/reports.py,sha256=UzEDkgrmUNncPHFzOkbYoGFUFXVlFuEu_BE0Nh_qhSw,46449
hspf/uci.py,sha256=rsi_KJqdfBFp0rlKCHyhmQGdB_rgNE8k6abTjH26UqE,33982
hspf/validations.py,sha256=BcNT0h5QDZW9lHpXk8KuHQvycl8a_4jQ91srwWFodRo,6666
- hspf/warehouse.py,sha256=1zm1uu_QvevIuAMNPOkzspnFhNpLQrvkq3x3HXSypGg,11898
+ hspf/warehouse.py,sha256=12Scro1a_hK5IvE9FENAMQv9jKWgtMYtAftJZ4p3nTQ,12239
hspf/wdm.py,sha256=q0hNqsMNrTkxHtKEX0q0wWlIZabXv6UX2HjNCF9WEW4,12734
hspf/wdmReader.py,sha256=-akKWB9SpUzXvXoQMeHLZNi_u584KaeEOyHB-YolTWM,22848
+ hspf/bin/WinHSPFLt/ATCoRend.dbf,sha256=3bHSDSpL__z1syz6nXlhPrPLnmcsaKDSG1xaiESHwpQ,17743
+ hspf/bin/WinHSPFLt/ATCoUnits.mdb,sha256=vJMovhTfyY_2hzzeqLdh-UjId1GV_pVPfesp29J2N18,114688
+ hspf/bin/WinHSPFLt/ERROR.FIL,sha256=XKxOp21R0aCEd-ofy3UEQ91XH0V8cxgiURp4Z7J8E6M,1523002
+ hspf/bin/WinHSPFLt/LF90.EER,sha256=VpQ92SHdRvIm-OxGMKawKn56ij3-6owFXsYHoZN9mVM,41040
+ hspf/bin/WinHSPFLt/LF90WIOD.DLL,sha256=loIi_u234J-fbXq_s_ajMEjalyX3cdnFFtsH6Hm48C4,90112
+ hspf/bin/WinHSPFLt/MapWinUtility.dll,sha256=oEcJfvQS7zIGHg5HcRHFv105pySy_V1g2LHwC3zy29o,83456
+ hspf/bin/WinHSPFLt/StatusMonitor.exe,sha256=euLR4tKiMYoc7by6Rmem4CAk41Jjecsy4sgUOrRZHFA,39424
hspf/bin/WinHSPFLt/WinHspfLt.exe,sha256=Afs_nJ62r1VnTL2P4XfiRJ1sH2If5DeGTbcCzoqlanE,74752
+ hspf/bin/WinHSPFLt/hass_ent.dll,sha256=jvm_xLGSci4Sm1VSIVy8dS2-wgNn1dqkcth3vkm9IG0,4904448
+ hspf/bin/WinHSPFLt/hspfmsg.wdm,sha256=cfUDyfYKX7Lsrlnu9kDylQNhD1X5lFWVqmLyXMLD5m0,1392640
+ hspf/bin/WinHSPFLt/hspfmsg.wdu,sha256=Zl04zGkVN_1ifjsHmDyHFYUC3q6bojWm-vzXoXgJOv0,63
hspf/data/HSPFParameterRanges.csv,sha256=PKz1DRIgpsgTEDrVaSHB9SAGMa5yUBRpyZDc9-CKJJo,28357
hspf/data/LandUseNames_Mappings.csv,sha256=Bb2toZn6FkPfZ6_8SnzIQvXJ03ycwCuc8uyv4cUowNY,75899
hspf/data/ParseTable.csv,sha256=ExqUaZg_uUPF5XHGLJEk5_jadnDenKjbwqC4d-iNX_M,193609
@@ -40,6 +50,6 @@ hspf/data/Timeseries Catalog/RCHRES/SEDTRN.txt,sha256=SiTgD4_YWctTgEfhoMymZfv8ay
hspf/parser/__init__.py,sha256=2HvprGVCaJ9L-egvTj1MI-bekq5CNjtSBZfrCtQi3fs,92
hspf/parser/graph.py,sha256=jvkjz9eNtBFEmxUeQosuQE7XgsIRlrNH-rSny5KBDoE,33046
hspf/parser/parsers.py,sha256=x3othxQogUmGNe_ctCU20atDrRM_B4lEbVJb3EMbwto,20850
- hspf-2.1.1.dist-info/METADATA,sha256=KtAPnc8v-bT8ow30iHdN1lCm2asH22rvOyKgDGS_kL0,605
- hspf-2.1.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- hspf-2.1.1.dist-info/RECORD,,
+ hspf-2.1.2.dist-info/METADATA,sha256=vjAn3jjlUlIIHBwJ43d-ut130cS-tR4QmmUwaAfFIa8,605
+ hspf-2.1.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ hspf-2.1.2.dist-info/RECORD,,
File without changes