ecopipeline 0.11.1__py3-none-any.whl → 0.11.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -295,6 +295,9 @@ def csv_to_df(csv_filenames: List[str], mb_prefix : bool = False, round_time_ind
295
295
  continue
296
296
 
297
297
  temp_dfs.append(data)
298
+ if len(temp_dfs) <= 0:
299
+ print("no data for timeframe.")
300
+ return pd.DataFrame()
298
301
  df = pd.concat(temp_dfs, ignore_index=False)
299
302
 
300
303
  if create_time_pt_idx:
@@ -1,7 +1,8 @@
1
1
  from .transform import rename_sensors, avg_duplicate_times, remove_outliers, ffill_missing, nullify_erroneous, sensor_adjustment, round_time, \
2
2
  aggregate_df, join_to_hourly, concat_last_row, join_to_daily, cop_method_1, cop_method_2, create_summary_tables, remove_partial_days, \
3
3
  convert_c_to_f,convert_l_to_g, convert_on_off_col_to_bool, flag_dhw_outage,generate_event_log_df,convert_time_zone, shift_accumulative_columns, \
4
- heat_output_calc, add_relative_humidity, apply_equipment_cop_derate, create_data_statistics_df, delete_erroneous_from_time_pt,column_name_change
4
+ heat_output_calc, add_relative_humidity, apply_equipment_cop_derate, create_data_statistics_df, delete_erroneous_from_time_pt,column_name_change, \
5
+ process_ls_signal
5
6
  from .lbnl import nclarity_filter_new, site_specific, condensate_calculations, gas_valve_diff, gather_outdoor_conditions, aqsuite_prep_time, \
6
7
  nclarity_csv_to_df, _add_date, add_local_time, aqsuite_filter_new, get_refrig_charge, elev_correction, change_ID_to_HVAC, get_hvac_state, \
7
8
  get_cop_values, get_cfm_values, replace_humidity, create_fan_curves, lbnl_temperature_conversions, lbnl_pressure_conversions, \
@@ -13,4 +14,4 @@ __all__ = ["rename_sensors", "avg_duplicate_times", "remove_outliers", "ffill_mi
13
14
  "create_fan_curves", "lbnl_temperature_conversions", "lbnl_pressure_conversions", "lbnl_sat_calculations", "get_site_cfm_info", "get_site_info", "merge_indexlike_rows", "calculate_cop_values", "aggregate_values",
14
15
  "get_energy_by_min", "verify_power_energy", "get_temp_zones120", "get_storage_gals120","convert_c_to_f","convert_l_to_g", "convert_on_off_col_to_bool", "flag_dhw_outage","generate_event_log_df","convert_time_zone",
15
16
  "shift_accumulative_columns","heat_output_calc", "add_relative_humidity","apply_equipment_cop_derate","create_data_statistics_df",
16
- "delete_erroneous_from_time_pt","column_name_change"]
17
+ "delete_erroneous_from_time_pt","column_name_change","process_ls_signal"]
@@ -245,7 +245,6 @@ def _ffill(col, ffill_df, previous_fill: pd.DataFrame = None): # Helper functio
245
245
  elif (cp == 0): # ffill only up to length
246
246
  col.fillna(method='ffill', inplace=True, limit=length)
247
247
 
248
-
249
248
  def ffill_missing(original_df: pd.DataFrame, config : ConfigManager, previous_fill: pd.DataFrame = None) -> pd.DataFrame:
250
249
  """
251
250
  Function will take a pandas dataframe and forward fill select variables with no entry.
@@ -306,6 +305,86 @@ def ffill_missing(original_df: pd.DataFrame, config : ConfigManager, previous_fi
306
305
  df.apply(_ffill, args=(ffill_df,previous_fill))
307
306
  return df
308
307
 
308
def process_ls_signal(df: pd.DataFrame, hourly_df: pd.DataFrame, daily_df: pd.DataFrame, load_dict: dict = None, ls_column: str = 'ls',
                      drop_ls_from_df: bool = False):
    """
    Function takes aggregated dfs and adds loadshift signals to hourly df and loadshift days to daily_df

    Parameters
    ----------
    df: pd.DataFrame
        Timestamp indexed Pandas dataframe of minute by minute values
    hourly_df: pd.DataFrame
        Timestamp indexed Pandas dataframe of hourly average values
    daily_df: pd.DataFrame
        Timestamp indexed Pandas dataframe of daily average values
    load_dict: dict
        dictionary of what loadshift signal is indicated by a value of the ls_column column in df.
        Defaults to {1: "normal", 2: "loadUp", 3: "shed"} when not provided.
    ls_column: str
        the name of the loadshift column in df
    drop_ls_from_df: bool
        Set to True to drop ls_column from df after processing

    Returns
    -------
    df: pd.DataFrame
        Timestamp indexed Pandas dataframe of minute by minute values with ls_column removed if drop_ls_from_df = True
    hourly_df: pd.DataFrame
        Timestamp indexed Pandas dataframe of hourly average values with added column 'system_state' which contains the
        loadshift command value from load_dict from the average (rounded to the nearest integer) key for all indexes in
        df within that load_dict key. If the integer is not a key in load_dict, the loadshift command value will be null
    daily_df: pd.DataFrame
        Timestamp indexed Pandas dataframe of daily average values with added boolean column 'load_shift_day' which holds
        the value True on days which contain hours in hourly_df in which there are loadshift commands other than "normal",
        and False on days where the only commands are "normal" or unknown
    """
    # Avoid a shared mutable default argument; build the default mapping per call.
    if load_dict is None:
        load_dict = {1: "normal", 2: "loadUp", 3: "shed"}

    # Work on a copy so the caller's minute-level dataframe is not mutated.
    df_copy = df.copy()

    if ls_column in df_copy.columns:
        # Keep only rows with a real, finite loadshift signal value
        # (drops NaN as well as +/-inf entries).
        df_copy = df_copy[df_copy[ls_column].notna() & np.isfinite(df_copy[ls_column])]

        # Hourly: mean signal per hour, rounded to the nearest integer command key.
        hourly_ls = df_copy[ls_column].resample('H').mean().round()

        # Convert to int only for non-NaN values (NaN cannot be cast to int).
        hourly_ls = hourly_ls.apply(lambda x: int(x) if pd.notna(x) else x)

        # Map rounded integers through load_dict; unmapped values become null.
        hourly_df['system_state'] = hourly_ls.map(load_dict)

        # Hours not present in the minute data stay null.
        hourly_df['system_state'] = hourly_df['system_state'].where(
            hourly_df.index.isin(hourly_ls.index)
        )
    else:
        # No loadshift column available: mark every hour's state as unknown.
        hourly_df['system_state'] = None

    # Daily: a load-shift day is any day with a non-"normal" hourly command.
    # ('system_state' is always assigned above, so no existence guard is needed;
    # .dropna() already removes null/None states.)
    daily_ls = hourly_df.groupby(hourly_df.index.date)['system_state'].apply(
        lambda states: any(state != "normal" for state in states.dropna())
    )

    # Map the per-date booleans onto the daily index; days with no hourly
    # data default to False.
    daily_df['load_shift_day'] = daily_df.index.date
    daily_df['load_shift_day'] = daily_df['load_shift_day'].map(daily_ls).fillna(False)

    # Drop ls_column from df if requested (drop returns a new frame, so the
    # caller's original object is untouched either way).
    if drop_ls_from_df and ls_column in df.columns:
        df = df.drop(columns=[ls_column])

    return df, hourly_df, daily_df
387
+
309
388
  def delete_erroneous_from_time_pt(df: pd.DataFrame, time_point : pd.Timestamp, column_names : list, new_value = None) -> pd.DataFrame:
310
389
  """
311
390
  Function will take a pandas dataframe and delete specified erroneous values at a specified time point.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ecopipeline
3
- Version: 0.11.1
3
+ Version: 0.11.4
4
4
  Summary: Contains functions for use in Ecotope Datapipelines
5
5
  Classifier: Programming Language :: Python :: 3
6
6
  Classifier: License :: OSI Approved :: GNU General Public License (GPL)
@@ -2,19 +2,19 @@ ecopipeline/__init__.py,sha256=pjC00JWsjVAhS0jUKHD-wyi4UIpTsWbIg9JaxLS1mlc,275
2
2
  ecopipeline/event_tracking/__init__.py,sha256=SV2kkvJgptjeyLQlqHWcDRpQO6-JC433_dRZ3H9-ZNU,131
3
3
  ecopipeline/event_tracking/event_tracking.py,sha256=HffWAIAkNJ8INdG3_86RnDgw2bpHwv9hhkZ5oiiugZY,29653
4
4
  ecopipeline/extract/__init__.py,sha256=gQ3sak6NJ63Gpo-hZXrtZfeKOTHLRyAVXfTgxxRpqPo,675
5
- ecopipeline/extract/extract.py,sha256=y32feIIzgABwrwfduNQM1hICmkVOU4PYu6-M07zCLpU,51422
5
+ ecopipeline/extract/extract.py,sha256=5C6KrfMAGQhTxHaWc1Lgm8yV5g994Fiinwk-IEVSHbM,51519
6
6
  ecopipeline/load/__init__.py,sha256=NLa_efQJZ8aP-J0Y5xx9DP7mtfRH9jY6Jz1ZMZN_BAA,292
7
7
  ecopipeline/load/load.py,sha256=PaSGWOZI0Xg44_SWN7htn2DPIAU_s8mOtCGibXq25tM,24614
8
- ecopipeline/transform/__init__.py,sha256=YveBLBsNhfI4qZP04doa0NrTbEKvjDAUDEKtEPdFPfU,2545
8
+ ecopipeline/transform/__init__.py,sha256=9au1Rjw7SMtbIxpoq_5XWi6VWTxMU2CBjPksSh4LM1o,2590
9
9
  ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
10
10
  ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
11
- ecopipeline/transform/transform.py,sha256=wL4B00XBwLWVlf7goOLSHKgLFmIsXprQNepGLLO_wTk,50028
11
+ ecopipeline/transform/transform.py,sha256=UF-sNw4zoxyXv0zsJZk6AqfQxXmAVQ_fsVZtjKiu1sk,54012
12
12
  ecopipeline/utils/ConfigManager.py,sha256=-g1wtExdvhYO5Y6Q3cRbywa__DxRMFruLrB4YanwaPY,12168
13
13
  ecopipeline/utils/NOAADataDownloader.py,sha256=iC2nl_O4PS1KFrchcPXRZxshwZwUMSqXy6BQBUwnOUU,20927
14
14
  ecopipeline/utils/__init__.py,sha256=7dT3tP6SMK4uBW6NBmQ8i6LaNTTuV6fpAZToBBlJ904,62
15
15
  ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
16
- ecopipeline-0.11.1.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
- ecopipeline-0.11.1.dist-info/METADATA,sha256=_-HP7vfIrz6JBltdDkX4obF-AUJGrbxZfnFtrUBQ49k,2330
18
- ecopipeline-0.11.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
19
- ecopipeline-0.11.1.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
20
- ecopipeline-0.11.1.dist-info/RECORD,,
16
+ ecopipeline-0.11.4.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
17
+ ecopipeline-0.11.4.dist-info/METADATA,sha256=hpyr7M9aTCQo1MjbxuOeYwX9eiHnyR8XFVQnY5yWjzI,2330
18
+ ecopipeline-0.11.4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
19
+ ecopipeline-0.11.4.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
20
+ ecopipeline-0.11.4.dist-info/RECORD,,