ecopipeline 0.8.11__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.
ecopipeline/event_tracking/__init__.py
@@ -1,2 +1,2 @@
  from .event_tracking import *
- __all__ = ['flag_boundary_alarms']
+ __all__ = ['flag_boundary_alarms','power_ratio_alarm']
ecopipeline/event_tracking/event_tracking.py
@@ -77,6 +77,10 @@ def flag_boundary_alarms(df: pd.DataFrame, config : ConfigManager, default_fault
  print(f"Could not process alarm for {bound_var}. Fault time must be greater than or equal to 1 minute.")
  _check_and_add_alarm(df, lower_mask, alarms, day, bounds["fault_time"], bound_var, bounds['pretty_name'], 'Lower')
  _check_and_add_alarm(df, upper_mask, alarms, day, bounds["fault_time"], bound_var, bounds['pretty_name'], 'Upper')
+
+ return _convert_silent_alarm_dict_to_df(alarms)
+
+ def _convert_silent_alarm_dict_to_df(alarm_dict : dict) -> pd.DataFrame:
  events = {
  'start_time_pt' : [],
  'end_time_pt' : [],
@@ -84,7 +88,7 @@ def flag_boundary_alarms(df: pd.DataFrame, config : ConfigManager, default_fault
  'event_detail' : [],
  'variable_name' : []
  }
- for key, value_list in alarms.items():
+ for key, value_list in alarm_dict.items():
  for value in value_list:
  events['start_time_pt'].append(key)
  events['end_time_pt'].append(key)
@@ -124,6 +128,68 @@ def _check_and_add_alarm(df : pd.DataFrame, mask : pd.Series, alarms_dict, day,
  else:
  alarms_dict[day] = [[var_name, alarm_string]]

+ def power_ratio_alarm(daily_df: pd.DataFrame, config : ConfigManager, system: str = "") -> pd.DataFrame:
+ daily_df_copy = daily_df.copy()
+ variable_names_path = config.get_var_names_path()
+ try:
+ ratios_df = pd.read_csv(variable_names_path)
+ except FileNotFoundError:
+ print("File Not Found: ", variable_names_path)
+ return pd.DataFrame()
+ if (system != ""):
+ if not 'system' in ratios_df.columns:
+ raise Exception("system parameter is non null, however, system is not present in Variable_Names.csv")
+ ratios_df = ratios_df.loc[ratios_df['system'] == system]
+ required_columns = ["variable_name", "alarm_codes"]
+ for required_column in required_columns:
+ if not required_column in ratios_df.columns:
+ raise Exception(f"{required_column} is not present in Variable_Names.csv")
+ if not 'pretty_name' in ratios_df.columns:
+ ratios_df['pretty_name'] = ratios_df['variable_name']
+ ratios_df = ratios_df.loc[:, ["variable_name", "alarm_codes", "pretty_name"]]
+ ratios_df = ratios_df[ratios_df['alarm_codes'].str.contains('PR', na=False)]
+ ratios_df.dropna(axis=0, thresh=2, inplace=True)
+ ratios_df.set_index(['variable_name'], inplace=True)
+
+ ratio_dict = {}
+ for ratios_var, ratios in ratios_df.iterrows():
+ if not ratios_var in daily_df_copy.columns:
+ daily_df_copy[ratios_var] = 0
+ alarm_codes = str(ratios['alarm_codes']).split(";")
+ for alarm_code in alarm_codes:
+ if alarm_code[:2] == "PR":
+ split_out_alarm = alarm_code.split(":")
+ low_high = split_out_alarm[1].split("-")
+ pr_id = split_out_alarm[0].split("_")[1]
+ if len(low_high) != 2:
+ raise Exception(f"Error processing alarm code {alarm_code}")
+ if pr_id in ratio_dict:
+ ratio_dict[pr_id][0].append(ratios_var)
+ ratio_dict[pr_id][1].append(float(low_high[0]))
+ ratio_dict[pr_id][2].append(float(low_high[1]))
+ ratio_dict[pr_id][3].append(ratios['pretty_name'])
+ else:
+ ratio_dict[pr_id] = [[ratios_var],[float(low_high[0])],[float(low_high[1])],[ratios['pretty_name']]]
+
+ alarms = {}
+ for key, value_list in ratio_dict.items():
+ daily_df_copy[key] = daily_df_copy[value_list[0]].sum(axis=1)
+ for i in range(len(value_list[0])):
+ column_name = value_list[0][i]
+ daily_df_copy[f'{column_name}_{key}'] = (daily_df_copy[column_name]/daily_df_copy[key]) * 100
+ _check_and_add_ratio_alarm(daily_df_copy, key, column_name, value_list[3][i], alarms, value_list[2][i], value_list[1][i])
+ return _convert_silent_alarm_dict_to_df(alarms)
+
+ def _check_and_add_ratio_alarm(daily_df: pd.DataFrame, alarm_key : str, column_name : str, pretty_name : str, alarms_dict : dict, high_bound : float, low_bound : float):
+ alarm_daily_df = daily_df.loc[(daily_df[f"{column_name}_{alarm_key}"] < low_bound) | (daily_df[f"{column_name}_{alarm_key}"] > high_bound)]
+ if not alarm_daily_df.empty:
+ for day, values in alarm_daily_df.iterrows():
+ alarm_str = f"Power ratio alarm: {pretty_name} accounted for {round(values[f'{column_name}_{alarm_key}'], 2)}% of {alarm_key} energy use."
+ if day in alarms_dict:
+ alarms_dict[day].append([column_name, alarm_str])
+ else:
+ alarms_dict[day] = [[column_name, alarm_str]]
+
  # def flag_dhw_outage(df: pd.DataFrame, daily_df : pd.DataFrame, dhw_outlet_column : str, supply_temp : int = 110, consecutive_minutes : int = 15) -> pd.DataFrame:
  # """
  # Parameters
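
How the new power_ratio_alarm is driven: each variable row in Variable_Names.csv can carry semicolon-separated alarm codes, and the parsing above implies codes of the form PR_<id>:<low>-<high>, where every variable sharing an <id> is summed into one group column and each variable's daily percent share of that sum is checked against the low/high bounds. A minimal standalone sketch of that parsing, with a hypothetical sensor row:

    # Hypothetical row; only the "PR_<id>:<low>-<high>" code format is
    # inferred from the split() calls in power_ratio_alarm above.
    row = {"variable_name": "PowerIn_HPWH1", "alarm_codes": "PR_HP:20-60", "pretty_name": "Heat Pump 1"}
    for alarm_code in str(row["alarm_codes"]).split(";"):
        if alarm_code[:2] == "PR":
            pr_group, bounds = alarm_code.split(":")           # "PR_HP", "20-60"
            pr_id = pr_group.split("_")[1]                     # "HP" -> shared group column
            low, high = (float(x) for x in bounds.split("-"))
            # A day is flagged when this variable's percent share of the
            # summed "HP" column falls below low or rises above high.
            print(pr_id, low, high)                            # HP 20.0 60.0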
ecopipeline/extract/extract.py
@@ -767,7 +767,8 @@ def pull_egauge_data(config: ConfigManager, eGauge_ids: list, eGauge_usr : str,

  os.chdir(original_directory)

- def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True, query_hours : int = 1):
+ def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: datetime = None, create_csv : bool = True, query_hours : float = 1,
+ sensor_keys : list = [], seperate_keys : bool = False):
  """
  Function connects to the things board manager api to pull data and returns a dataframe.

@@ -785,7 +786,7 @@ def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: dat
  is local time from the data's index.
  create_csv : bool
  create csv files as you process such that API need not be relied upon for reprocessing
- query_hours : int
+ query_hours : float
  number of hours to query at a time from ThingsBoard API

  Returns
@@ -794,80 +795,102 @@ def tb_api_to_df(config: ConfigManager, startTime: datetime = None, endTime: dat
  Pandas Dataframe containing data from the API pull with column headers the same as the variable names in the data from the pull.
  Will return with index in UTC so needs to be converted after to appropriate timezone
  """
- if endTime is None:
- endTime = datetime.now()
- if startTime is None:
- # 28 hours to ensure encapsulation of last day
- startTime = endTime - timedelta(hours=28)
-
- if endTime - timedelta(hours=query_hours) > startTime:
- time_diff = endTime - startTime
- midpointTime = startTime + time_diff / 2
- # recursively construct the df
- df_1 = tb_api_to_df(config, startTime, midpointTime, create_csv=False, query_hours=query_hours)
- df_2 = tb_api_to_df(config, midpointTime, endTime, create_csv=False, query_hours=query_hours)
- df = pd.concat([df_1, df_2])
- df = df.sort_index()
- df = df.groupby(df.index).mean()
- if create_csv:
- filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
- original_directory = os.getcwd()
- os.chdir(config.data_directory)
- df.to_csv(filename, index_label='time_pt')
- os.chdir(original_directory)
-
- return df
- url = f'https://thingsboard.cloud/api/plugins/telemetry/DEVICE/{config.api_device_id}/values/timeseries'
- token = config.get_thingsboard_token()
- keys = _get_tb_keys(config, token)
- if len(keys) <= 0:
- raise Exception(f"No sensors available at ThingsBoard site with id {config.api_device_id}")
- key_string = ','.join(keys)
- params = {
- 'keys': key_string,
- 'startTs': f'{int(startTime.timestamp())*1000}',
- 'endTs': f'{int(endTime.timestamp())*1000}',
- 'orderBy': 'ASC',
- 'useStrictDataTypes': 'false'
- }
-
- # Headers
- headers = {
- 'accept': 'application/json',
- 'X-Authorization': f'Bearer {token}'
- }
-
- try:
- response = requests.get(url, headers=headers, params=params)
- if response.status_code == 200:
- response_json = response.json()
- data = {}
- for key, records in response_json.items():
- try:
- series = pd.Series(
- data={record['ts']: float(record['value']) for record in records}
- )
- data[key] = series
- except:
- print_statement = f"Could not convert {key} values to floats."
- # print(print_statement)
- df = pd.DataFrame(data)
- df.index = pd.to_datetime(df.index, unit='ms')
+ df = pd.DataFrame()
+ if len(sensor_keys) <= 0:
+ token = config.get_thingsboard_token()
+ key_list = _get_tb_keys(config, token)
+ if len(key_list) <= 0:
+ raise Exception(f"No sensors available at ThingsBoard site with id {config.api_device_id}")
+ return tb_api_to_df(config, startTime, endTime, create_csv, query_hours, key_list, seperate_keys)
+ if seperate_keys:
+ df_list = []
+ for sensor_key in sensor_keys:
+ df_list.append(tb_api_to_df(config, startTime, endTime, False, query_hours, [sensor_key], False))
+ df = pd.concat(df_list)
+ else:
+ # not seperate_keys:
+ if endTime is None:
+ endTime = datetime.now()
+ if startTime is None:
+ # 28 hours to ensure encapsulation of last day
+ startTime = endTime - timedelta(hours=28)
+
+ if endTime - timedelta(hours=query_hours) > startTime:
+ time_diff = endTime - startTime
+ midpointTime = startTime + time_diff / 2
+ df_1 = tb_api_to_df(config, startTime, midpointTime, query_hours=query_hours, sensor_keys=sensor_keys, create_csv=False)#True if startTime >= datetime(2025,7,13,9) and startTime <= datetime(2025,7,13,10) else csv_pass_down)
+ df_2 = tb_api_to_df(config, midpointTime, endTime, query_hours=query_hours, sensor_keys=sensor_keys,create_csv=False)#True if endTime >= datetime(2025,7,13,9) and endTime <= datetime(2025,7,13,10) else csv_pass_down)
+ df = pd.concat([df_1, df_2])
  df = df.sort_index()
- # save to file
- if create_csv:
- filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
- original_directory = os.getcwd()
- os.chdir(config.data_directory)
- df.to_csv(filename, index_label='time_pt')
- os.chdir(original_directory)
- return df
- print(f"Failed to make GET request. Status code: {response.status_code} {response.json()}")
- return pd.DataFrame()
- except Exception as e:
- traceback.print_exc()
- print(f"An error occurred: {e}")
- return pd.DataFrame()
+ df = df.groupby(df.index).mean()
+ else:
+ url = f'https://thingsboard.cloud/api/plugins/telemetry/DEVICE/{config.api_device_id}/values/timeseries'
+ token = config.get_thingsboard_token()
+ key_string = ','.join(sensor_keys)
+ params = {
+ 'keys': key_string,
+ 'startTs': f'{int(startTime.timestamp())*1000}',
+ 'endTs': f'{int(endTime.timestamp())*1000}',
+ 'orderBy': 'ASC',
+ 'useStrictDataTypes': 'false',
+ 'interval' : '0',
+ 'agg' : 'NONE'
+ }
+
+ # Headers
+ headers = {
+ 'accept': 'application/json',
+ 'X-Authorization': f'Bearer {token}'
+ }
+
+ try:
+ response = requests.get(url, headers=headers, params=params)
+ if response.status_code == 200:
+ response_json = response.json()
+ # if create_csv:
+ # json_filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.json"
+ # print(f"filename: {json_filename}, url: {url}, params: {params}")
+ # original_directory = os.getcwd()
+ # os.chdir(config.data_directory)
+ # with open(json_filename, 'w') as f:
+ # json.dump(response_json, f, indent=4) # indent=4 makes it human-readable
+ # os.chdir(original_directory)
+
+ data = {}
+ for key, records in response_json.items():
+ try:
+ series = pd.Series(
+ data={record['ts']: _get_float_value(record['value']) for record in records}
+ )
+ data[key] = series
+ except:
+ print_statement = f"Could not convert {key} values to floats."
+ print(print_statement)
+ df = pd.DataFrame(data)
+ df.index = pd.to_datetime(df.index, unit='ms')
+ df = df.sort_index()
+ else:
+ print(f"Failed to make GET request. Status code: {response.status_code} {response.json()}")
+ df = pd.DataFrame()
+ except Exception as e:
+ traceback.print_exc()
+ print(f"An error occurred: {e}")
+ df = pd.DataFrame()
+ # save to file
+ if create_csv:
+ filename = f"{startTime.strftime('%Y%m%d%H%M%S')}.csv"
+ original_directory = os.getcwd()
+ os.chdir(config.data_directory)
+ df.to_csv(filename, index_label='time_pt')
+ os.chdir(original_directory)
+ return df
+
+ def _get_float_value(value):
+ try:
+ ret_val = float(value)
+ return ret_val
+ except (ValueError, TypeError):
+ return None

  def _get_tb_keys(config: ConfigManager, token : str) -> List[str]:
  url = f'https://thingsboard.cloud/api/plugins/telemetry/DEVICE/{config.api_device_id}/keys/timeseries'
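
A hedged usage sketch of the extended tb_api_to_df signature (the keyword names match the diff above, but config is an assumed, pre-built ConfigManager and the sensor keys are hypothetical). seperate_keys=True fetches each key in its own request and concatenates the per-key frames; query_hours now accepts fractional hours for finer window splitting:

    from datetime import datetime, timedelta
    from ecopipeline.extract import tb_api_to_df

    # `config` is assumed to be a ConfigManager pointed at a valid
    # ThingsBoard device; building one is outside the scope of this sketch.
    end = datetime.now()
    start = end - timedelta(hours=28)   # same default window the function uses
    df = tb_api_to_df(config, startTime=start, endTime=end, create_csv=False,
                      query_hours=0.5,  # split API pulls into 30-minute windows
                      sensor_keys=['PowerIn_HPWH1', 'Temp_CityWater'],
                      seperate_keys=True)
    # df comes back indexed in UTC and still needs timezone conversion.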
ecopipeline/transform/__init__.py
@@ -1,7 +1,7 @@
  from .transform import rename_sensors, avg_duplicate_times, remove_outliers, ffill_missing, nullify_erroneous, sensor_adjustment, round_time, \
  aggregate_df, join_to_hourly, concat_last_row, join_to_daily, cop_method_1, cop_method_2, create_summary_tables, remove_partial_days, \
  convert_c_to_f,convert_l_to_g, convert_on_off_col_to_bool, flag_dhw_outage,generate_event_log_df,convert_time_zone, shift_accumulative_columns, \
- heat_output_calc, add_relative_humidity, apply_equipment_cop_derate, create_data_statistics_df, delete_erroneous_from_time_pt
+ heat_output_calc, add_relative_humidity, apply_equipment_cop_derate, create_data_statistics_df, delete_erroneous_from_time_pt,column_name_change
  from .lbnl import nclarity_filter_new, site_specific, condensate_calculations, gas_valve_diff, gather_outdoor_conditions, aqsuite_prep_time, \
  nclarity_csv_to_df, _add_date, add_local_time, aqsuite_filter_new, get_refrig_charge, elev_correction, change_ID_to_HVAC, get_hvac_state, \
  get_cop_values, get_cfm_values, replace_humidity, create_fan_curves, lbnl_temperature_conversions, lbnl_pressure_conversions, \
@@ -13,4 +13,4 @@ __all__ = ["rename_sensors", "avg_duplicate_times", "remove_outliers", "ffill_mi
  "create_fan_curves", "lbnl_temperature_conversions", "lbnl_pressure_conversions", "lbnl_sat_calculations", "get_site_cfm_info", "get_site_info", "merge_indexlike_rows", "calculate_cop_values", "aggregate_values",
  "get_energy_by_min", "verify_power_energy", "get_temp_zones120", "get_storage_gals120","convert_c_to_f","convert_l_to_g", "convert_on_off_col_to_bool", "flag_dhw_outage","generate_event_log_df","convert_time_zone",
  "shift_accumulative_columns","heat_output_calc", "add_relative_humidity","apply_equipment_cop_derate","create_data_statistics_df",
- "delete_erroneous_from_time_pt"]
+ "delete_erroneous_from_time_pt","column_name_change"]
ecopipeline/transform/transform.py
@@ -378,6 +378,31 @@ def nullify_erroneous(original_df: pd.DataFrame, config : ConfigManager) -> pd.D

  return df

+ def column_name_change(df : pd.DataFrame, dt : pd.Timestamp, new_column : str, old_column : str, remove_old_column : bool = True) -> pd.DataFrame:
+ """
+ Overwrites values in `new_column` with values from `old_column`
+ for all rows before `dt`, if `dt` is within the index range.
+
+ Parameters
+ ----------
+ df: pd.DataFrame
+ Pandas dataframe with minute-to-minute data
+ dt: pd.Timestamp
+ timestamp of the varriable name change
+ new_column: str
+ column to be overwritten
+ old_column: str
+ column to copy from
+ remove_old_column : bool
+ remove old column when done
+ """
+ if df.index.min() < dt:
+ mask = df.index < dt
+ df.loc[mask, new_column] = df.loc[mask, old_column]
+ if remove_old_column:
+ df = df.drop(columns=[old_column])
+ return df
+
  def heat_output_calc(df: pd.DataFrame, flow_var : str, hot_temp : str, cold_temp : str, heat_out_col_name : str, return_as_kw : bool = True) -> pd.DataFrame:
  """
  Function will take a flow varriable and two temperature inputs to calculate heat output
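
A minimal runnable sketch of the new column_name_change helper, which the __init__ diff above exports from ecopipeline.transform (the sensor names and cutover timestamp here are hypothetical): rows before the rename timestamp are copied from the old column into the new one, and the old column is dropped by default:

    import pandas as pd
    from ecopipeline.transform import column_name_change

    idx = pd.date_range('2024-05-31 23:58', periods=4, freq='min')
    df = pd.DataFrame({'Temp_New': [None, None, 70.0, 71.0],
                       'Temp_Old': [68.0, 69.0, None, None]}, index=idx)
    df = column_name_change(df, pd.Timestamp('2024-06-01 00:00'),
                            new_column='Temp_New', old_column='Temp_Old')
    # The two rows before the cutover now hold 68.0 and 69.0 in Temp_New,
    # and Temp_Old is gone (remove_old_column defaults to True).
    print(df)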
@@ -903,7 +928,8 @@ def shift_accumulative_columns(df : pd.DataFrame, column_names : list = []):
  if len(column_names) == 0:
  return df_diff
  for column_name in column_names:
- df[column_name] = df_diff[column_name]
+ if column_name in df.columns:
+ df[column_name] = df_diff[column_name]
  return df

  def create_summary_tables(df: pd.DataFrame):
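
The shift_accumulative_columns change above is a guard: a requested column that is absent from the frame is now skipped rather than assigned. A short hedged sketch (column names hypothetical; df_diff is whatever the function computes internally from df, so a name missing from df previously triggered a KeyError on the lookup):

    import pandas as pd
    from ecopipeline.transform import shift_accumulative_columns

    df = pd.DataFrame({'EnergyTotal': [10.0, 12.5, 16.0]},
                      index=pd.date_range('2024-06-01', periods=3, freq='min'))
    # 'EnergyMissing' is not a column; with the new guard it is ignored
    # and only 'EnergyTotal' is overwritten with its shifted values.
    df = shift_accumulative_columns(df, ['EnergyTotal', 'EnergyMissing'])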
ecopipeline-0.9.0.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ecopipeline
- Version: 0.8.11
+ Version: 0.9.0
  Summary: Contains functions for use in Ecotope Datapipelines
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: GNU General Public License (GPL)
ecopipeline-0.9.0.dist-info/RECORD (added)
@@ -0,0 +1,19 @@
+ ecopipeline/__init__.py,sha256=d48mO5La6OrQDkRe_qqoY6lUx7x-e8krOH388jmWjwU,218
+ ecopipeline/event_tracking/__init__.py,sha256=hVV2IHyt3pLSIOESpINr8-sQBmK98t7CiVb1IlvE5xQ,84
+ ecopipeline/event_tracking/event_tracking.py,sha256=WAHyCZtr0UToB3Ll3F9MBXjewy7bIsvS5m7nZ0ljBkw,16901
+ ecopipeline/extract/__init__.py,sha256=gQ3sak6NJ63Gpo-hZXrtZfeKOTHLRyAVXfTgxxRpqPo,675
+ ecopipeline/extract/extract.py,sha256=i5e-mL4KiJfmWMFnKwvCW_BdPELGJiYKRk6FbvcEi1U,51106
+ ecopipeline/load/__init__.py,sha256=NLa_efQJZ8aP-J0Y5xx9DP7mtfRH9jY6Jz1ZMZN_BAA,292
+ ecopipeline/load/load.py,sha256=Ptxr0MOjns_HeVSmZsLLApHJGB-z6XOB2m8LNiVaD7E,23860
+ ecopipeline/transform/__init__.py,sha256=YveBLBsNhfI4qZP04doa0NrTbEKvjDAUDEKtEPdFPfU,2545
+ ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
+ ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
+ ecopipeline/transform/transform.py,sha256=1s4gtaiP8H6WLasOCntQZrx8Av_Y9pp1YvMnWZm_ya8,49975
+ ecopipeline/utils/ConfigManager.py,sha256=-g1wtExdvhYO5Y6Q3cRbywa__DxRMFruLrB4YanwaPY,12168
+ ecopipeline/utils/__init__.py,sha256=ccWUR0m7gD9DfcgsxBCLOfi4lho6RdYuB2Ugy_g6ZdQ,28
+ ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
+ ecopipeline-0.9.0.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ ecopipeline-0.9.0.dist-info/METADATA,sha256=t6AjFnEnFEhp1RV6TiN8N1ROF7H9R_97QlKlO5cl8Oo,2329
+ ecopipeline-0.9.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ecopipeline-0.9.0.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
+ ecopipeline-0.9.0.dist-info/RECORD,,
ecopipeline-0.8.11.dist-info/RECORD (removed)
@@ -1,19 +0,0 @@
- ecopipeline/__init__.py,sha256=d48mO5La6OrQDkRe_qqoY6lUx7x-e8krOH388jmWjwU,218
- ecopipeline/event_tracking/__init__.py,sha256=q49j46fXMUjNUPzL4FvXEppB93i3lUni-QUZpp61tt0,64
- ecopipeline/event_tracking/event_tracking.py,sha256=LOCLE7ju320O7CrwnWRIqHRa2uAqoq-KvXZ3zWQ2S74,13224
- ecopipeline/extract/__init__.py,sha256=gQ3sak6NJ63Gpo-hZXrtZfeKOTHLRyAVXfTgxxRpqPo,675
- ecopipeline/extract/extract.py,sha256=feqmSEUYAGn1gl_gLqTsY8qOYVyDuWVwfUdxRlEqX24,49495
- ecopipeline/load/__init__.py,sha256=NLa_efQJZ8aP-J0Y5xx9DP7mtfRH9jY6Jz1ZMZN_BAA,292
- ecopipeline/load/load.py,sha256=Ptxr0MOjns_HeVSmZsLLApHJGB-z6XOB2m8LNiVaD7E,23860
- ecopipeline/transform/__init__.py,sha256=hYb4F64fXdXtjBSYCqv6gLFBwKZjjnl0z7s291pFE98,2505
- ecopipeline/transform/bayview.py,sha256=TP24dnTsUD95X-f6732egPZKjepFLJgDm9ImGr-fppY,17899
- ecopipeline/transform/lbnl.py,sha256=EQ54G4rJXaZ7pwVusKcdK2KBehSdCsNo2ybphtMGs7o,33400
- ecopipeline/transform/transform.py,sha256=S8fpAb45XBcYzeGNkxELiHM8-1jlNQqADV7_m-2oeWI,49097
- ecopipeline/utils/ConfigManager.py,sha256=-g1wtExdvhYO5Y6Q3cRbywa__DxRMFruLrB4YanwaPY,12168
- ecopipeline/utils/__init__.py,sha256=ccWUR0m7gD9DfcgsxBCLOfi4lho6RdYuB2Ugy_g6ZdQ,28
- ecopipeline/utils/unit_convert.py,sha256=VFh1we2Y8KV3u21BeWb-U3TlZJXo83q5vdxxkpgcuME,3064
- ecopipeline-0.8.11.dist-info/licenses/LICENSE,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- ecopipeline-0.8.11.dist-info/METADATA,sha256=HFo9uw0zynyzql9HleX9KtUsZGZ7hkuyETw4mFhP8f0,2330
- ecopipeline-0.8.11.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ecopipeline-0.8.11.dist-info/top_level.txt,sha256=WOPFJH2LIgKqm4lk2OnFF5cgVkYibkaBxIxgvLgO7y0,12
- ecopipeline-0.8.11.dist-info/RECORD,,