emhass 0.9.0__py3-none-any.whl → 0.9.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- emhass/command_line.py +189 -284
- emhass/forecast.py +10 -16
- emhass/machine_learning_regressor.py +14 -53
- emhass/retrieve_hass.py +22 -95
- emhass/utils.py +11 -26
- {emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/METADATA +15 -12
- {emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/RECORD +11 -11
- {emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/LICENSE +0 -0
- {emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/WHEEL +0 -0
- {emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/entry_points.txt +0 -0
- {emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/top_level.txt +0 -0
emhass/forecast.py
CHANGED
@@ -417,9 +417,9 @@ class Forecast(object):
        # Setting the main parameters of the PV plant
        location = Location(latitude=self.lat, longitude=self.lon)
        temp_params = TEMPERATURE_MODEL_PARAMETERS['sapm']['close_mount_glass_glass']
-        cec_modules =
+        cec_modules = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_modules.pbz2', "rb")
        cec_modules = cPickle.load(cec_modules)
-        cec_inverters = bz2.BZ2File(
+        cec_inverters = bz2.BZ2File(pathlib.Path(__file__).parent / 'data/cec_inverters.pbz2', "rb")
        cec_inverters = cPickle.load(cec_inverters)
        if type(self.plant_conf['module_model']) == list:
            P_PV_forecast = pd.Series(0, index=df_weather.index)
@@ -639,10 +639,10 @@ class Forecast(object):
        days_list = get_days_list(days_min_load_forecast)
        if not rh.get_data(days_list, var_list):
            return False
-        if not rh.prepare_data(
-
-
-
+        if not rh.prepare_data(
+                self.retrieve_hass_conf['var_load'], load_negative = self.retrieve_hass_conf['load_negative'],
+                set_zero_min = self.retrieve_hass_conf['set_zero_min'],
+                var_replace_zero = var_replace_zero, var_interp = var_interp):
            return False
        df = rh.df_final.copy()[[self.var_load_new]]
        if method == 'naive': # using a naive approach
@@ -747,7 +747,6 @@ class Forecast(object):

        """
        csv_path = self.emhass_conf['data_path'] / csv_path
-
        if method == 'hp_hc_periods':
            df_final[self.var_load_cost] = self.optim_conf['load_cost_hc']
            list_df_hp = []
@@ -780,12 +779,11 @@ class Forecast(object):
        else:
            self.logger.error("Passed method is not valid")
            return False
-
        return df_final

    def get_prod_price_forecast(self, df_final: pd.DataFrame, method: Optional[str] = 'constant',
-
-
+                                csv_path: Optional[str] = "data_prod_price_forecast.csv",
+                                list_and_perfect: Optional[bool] = False) -> pd.DataFrame:

        r"""
        Get the unit power production price for the energy injected to the grid.\
@@ -807,16 +805,13 @@ class Forecast(object):
        :rtype: pd.DataFrame

        """
-
        csv_path = self.emhass_conf['data_path'] / csv_path
-
        if method == 'constant':
            df_final[self.var_prod_price] = self.optim_conf['prod_sell_price']
        elif method == 'csv':
            forecast_dates_csv = self.get_forecast_days_csv(timedelta_days=0)
-            forecast_out = self.get_forecast_out_from_csv_or_list(
-
-                csv_path)
+            forecast_out = self.get_forecast_out_from_csv_or_list(
+                df_final, forecast_dates_csv, csv_path)
            df_final[self.var_prod_price] = forecast_out
        elif method == 'list': # reading a list of values
            # Loading data from passed list
@@ -837,6 +832,5 @@ class Forecast(object):
        else:
            self.logger.error("Passed method is not valid")
            return False
-
        return df_final
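The only functional lines in these forecast.py hunks are the two added calls that load the bundled CEC module and inverter databases from paths built with pathlib.Path(__file__).parent; most of the remaining changes reflow wrapped calls and signatures and drop blank lines. A minimal sketch of that loading pattern, with a hypothetical load_cec_database helper standing in for the inline calls (the module itself refers to its pickle import as cPickle):

```python
import bz2
import pathlib
import pickle  # forecast.py uses this under the name cPickle

def load_cec_database(filename: str):
    # Hypothetical helper: resolve the compressed database relative to this module,
    # mirroring the pathlib.Path(__file__).parent handling shown in the diff above
    data_path = pathlib.Path(__file__).parent / "data" / filename
    with bz2.BZ2File(data_path, "rb") as fh:
        return pickle.load(fh)

# cec_modules = load_cec_database("cec_modules.pbz2")
# cec_inverters = load_cec_database("cec_inverters.pbz2")
```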
emhass/machine_learning_regressor.py
CHANGED
@@ -76,16 +76,8 @@ class MLRegressor:

    """

-    def __init__(
-
-        data: pd.DataFrame,
-        model_type: str,
-        regression_model: str,
-        features: list,
-        target: str,
-        timestamp: str,
-        logger: logging.Logger,
-    ) -> None:
+    def __init__(self: MLRegressor, data: pd.DataFrame, model_type: str, regression_model: str,
+                 features: list, target: str, timestamp: str, logger: logging.Logger) -> None:
        r"""Define constructor for the forecast class.

        :param data: The data that will be used for train/test
@@ -124,11 +116,7 @@ class MLRegressor:
        self.grid_search = None

    @staticmethod
-    def add_date_features(
-        data: pd.DataFrame,
-        date_features: list,
-        timestamp: str,
-    ) -> pd.DataFrame:
+    def add_date_features(data: pd.DataFrame, date_features: list, timestamp: str) -> pd.DataFrame:
        """Add date features from the input DataFrame timestamp.

        :param data: The input DataFrame
@@ -152,23 +140,18 @@ class MLRegressor:
        df["day"] = [i.day for i in df["timestamp"]]
        if "hour" in date_features:
            df["hour"] = [i.day for i in df["timestamp"]]
-
        return df

    def get_regression_model(self: MLRegressor) -> tuple[str, str]:
-        """
-
+        r"""
+        Get the base model and parameter grid for the specified regression model.
        Returns a tuple containing the base model and parameter grid corresponding to \
        the specified regression model.

-
-
-
-
-        Returns:
-        -------
-        A tuple containing the base model and parameter grid.
-
+        :param self: The instance of the MLRegressor class.
+        :type self: MLRegressor
+        :return: A tuple containing the base model and parameter grid.
+        :rtype: tuple[str, str]
        """
        if self.regression_model == "LinearRegression":
            base_model = REGRESSION_METHODS["LinearRegression"]["model"]
@@ -197,7 +180,7 @@ class MLRegressor:
        return base_model, param_grid

    def fit(self: MLRegressor, date_features: list | None = None) -> None:
-        """Fit the model using the provided data.
+        r"""Fit the model using the provided data.

        :param date_features: A list of 'date_features' to take into account when \
            fitting the model.
@@ -226,45 +209,24 @@ class MLRegressor:
                "If no timestamp provided, you can't use date_features, going \
                    further without date_features.",
            )
-
        y = self.data_exo[self.target]
        self.data_exo = self.data_exo.drop(self.target, axis=1)
        if self.timestamp is not None:
            self.data_exo = self.data_exo.drop(self.timestamp, axis=1)
-        X = self.data_exo
-
-        X_train, X_test, y_train, y_test = train_test_split(  # noqa: N806
-            X,
-            y,
-            test_size=0.2,
-            random_state=42,
-        )
-
+        X = self.data_exo
+        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
        self.steps = len(X_test)
-
        base_model, param_grid = self.get_regression_model()
-
        self.model = make_pipeline(StandardScaler(), base_model)
-
        # Create a grid search object
-        self.grid_search = GridSearchCV(
-
-            param_grid,
-            cv=5,
-            scoring="neg_mean_squared_error",
-            refit=True,
-            verbose=0,
-            n_jobs=-1,
-        )
-
+        self.grid_search = GridSearchCV(self.model, param_grid, cv=5, scoring="neg_mean_squared_error",
+                                        refit=True, verbose=0, n_jobs=-1)
        # Fit the grid search object to the data
        self.logger.info("Training a %s model", self.regression_model)
        start_time = time.time()
        self.grid_search.fit(X_train.values, y_train.values)
        self.logger.info("Elapsed time for model fit: %s", time.time() - start_time)
-
        self.model = self.grid_search.best_estimator_
-
        # Make predictions
        predictions = self.model.predict(X_test.values)
        predictions = pd.Series(predictions, index=X_test.index)
@@ -286,5 +248,4 @@ class MLRegressor:
        """
        self.logger.info("Performing a prediction for %s", self.model_type)
        new_values = np.array([new_values])
-
        return self.model.predict(new_values)
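The machine_learning_regressor.py changes are cosmetic: multi-line signatures and calls are collapsed onto fewer lines, stray blank lines are removed, and the get_regression_model docstring is rewritten in Sphinx field style. The training flow itself appears unchanged. A self-contained sketch of that flow, using illustrative data and a plain LinearRegression with a hypothetical parameter grid in place of the model/grid lookup done by get_regression_model:

```python
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import GridSearchCV, train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

# Illustrative stand-ins for self.data_exo and self.target
X = pd.DataFrame({"temperature": np.random.rand(200), "hour": np.arange(200) % 24})
y = 2.0 * X["temperature"] + 0.1 * X["hour"]

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
model = make_pipeline(StandardScaler(), LinearRegression())
# With a pipeline, grid keys are prefixed by the step name, e.g. linearregression__
param_grid = {"linearregression__fit_intercept": [True, False]}
grid_search = GridSearchCV(model, param_grid, cv=5, scoring="neg_mean_squared_error",
                           refit=True, verbose=0, n_jobs=-1)
grid_search.fit(X_train.values, y_train.values)
best_model = grid_search.best_estimator_
predictions = pd.Series(best_model.predict(X_test.values), index=X_test.index)
```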
emhass/retrieve_hass.py
CHANGED
@@ -65,14 +65,9 @@ class RetrieveHass:
        self.logger = logger
        self.get_data_from_file = get_data_from_file

-    def get_data(
-
-
-        var_list: list,
-        minimal_response: Optional[bool] = False,
-        significant_changes_only: Optional[bool] = False,
-        test_url: Optional[str] = "empty",
-    ) -> None:
+    def get_data(self, days_list: pd.date_range, var_list: list,
+                 minimal_response: Optional[bool] = False, significant_changes_only: Optional[bool] = False,
+                 test_url: Optional[str] = "empty") -> None:
        r"""
        Retrieve the actual data from hass.

@@ -100,9 +95,7 @@ class RetrieveHass:
        x = 0  # iterate based on days
        # Looping on each day from days list
        for day in days_list:
-
            for i, var in enumerate(var_list):
-
                if test_url == "empty":
                    if (
                        self.hass_url == "http://supervisor/core/api"
@@ -289,15 +282,8 @@ class RetrieveHass:
        return True

    @staticmethod
-    def get_attr_data_dict(
-
-        idx: int,
-        entity_id: str,
-        unit_of_measurement: str,
-        friendly_name: str,
-        list_name: str,
-        state: float,
-    ) -> dict:
+    def get_attr_data_dict(data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_measurement: str,
+                           friendly_name: str, list_name: str, state: float) -> dict:
        list_df = copy.deepcopy(data_df).loc[data_df.index[idx] :].reset_index()
        list_df.columns = ["timestamps", entity_id]
        ts_list = [str(i) for i in list_df["timestamps"].tolist()]
@@ -318,17 +304,9 @@ class RetrieveHass:
        }
        return data

-    def post_data(
-
-
-        idx: int,
-        entity_id: str,
-        unit_of_measurement: str,
-        friendly_name: str,
-        type_var: str,
-        from_mlforecaster: Optional[bool] = False,
-        publish_prefix: Optional[str] = "",
-    ) -> None:
+    def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str, unit_of_measurement: str,
+                  friendly_name: str, type_var: str, from_mlforecaster: Optional[bool] = False,
+                  publish_prefix: Optional[str] = "") -> None:
        r"""
        Post passed data to hass.

@@ -375,75 +353,26 @@ class RetrieveHass:
        else:
            state = np.round(data_df.loc[data_df.index[idx]], 2)
        if type_var == "power":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "forecasts",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "forecasts", state)
        elif type_var == "deferrable":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "deferrables_schedule",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "deferrables_schedule", state)
        elif type_var == "batt":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "battery_scheduled_power",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "battery_scheduled_power", state)
        elif type_var == "SOC":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "battery_scheduled_soc",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "battery_scheduled_soc", state)
        elif type_var == "unit_load_cost":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "unit_load_cost_forecasts",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "unit_load_cost_forecasts", state)
        elif type_var == "unit_prod_price":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "unit_prod_price_forecasts",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "unit_prod_price_forecasts", state)
        elif type_var == "mlforecaster":
-            data = RetrieveHass.get_attr_data_dict(
-
-                idx,
-                entity_id,
-                unit_of_measurement,
-                friendly_name,
-                "scheduled_forecast",
-                state,
-            )
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+                                                   friendly_name, "scheduled_forecast", state)
        elif type_var == "optim_status":
            data = {
                "state": state,
@@ -470,10 +399,8 @@ class RetrieveHass:
            }
        # Actually post the data
        if self.get_data_from_file:
-
            class response:
                pass
-
            response.status_code = 200
            response.ok = True
        else:
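The retrieve_hass.py hunks likewise only compact signatures and the repeated get_attr_data_dict calls and drop blank lines. The elif chain in post_data varies only in the attribute list name passed per type_var; those pairs are restated below as a plain lookup table (an illustrative summary, not code from the package):

```python
# type_var -> attribute list name passed to RetrieveHass.get_attr_data_dict,
# restated from the branches shown in the post_data hunk above
ATTR_LIST_NAMES = {
    "power": "forecasts",
    "deferrable": "deferrables_schedule",
    "batt": "battery_scheduled_power",
    "SOC": "battery_scheduled_soc",
    "unit_load_cost": "unit_load_cost_forecasts",
    "unit_prod_price": "unit_prod_price_forecasts",
    "mlforecaster": "scheduled_forecast",
}
```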
emhass/utils.py
CHANGED
@@ -41,6 +41,7 @@ def get_root(file: str, num_parent: Optional[int] = 3) -> str:
        raise ValueError("num_parent value not valid, must be between 1 and 3")
    return root

+
def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] = True,
               logging_level: Optional[str] = "DEBUG") -> Tuple[logging.Logger, logging.StreamHandler]:
    """
@@ -88,9 +89,8 @@ def get_logger(fun_name: str, emhass_conf: dict, save_to_file: Optional[bool] =
    return logger, ch


-def get_forecast_dates(
-
-) -> pd.core.indexes.datetimes.DatetimeIndex:
+def get_forecast_dates(freq: int, delta_forecast: int, timedelta_days: Optional[int] = 0
+                       ) -> pd.core.indexes.datetimes.DatetimeIndex:
    """
    Get the date_range list of the needed future dates using the delta_forecast parameter.

@@ -113,15 +113,9 @@ def get_forecast_dates(
    return forecast_dates


-def treat_runtimeparams(
-
-
-    retrieve_hass_conf: dict,
-    optim_conf: dict,
-    plant_conf: dict,
-    set_type: str,
-    logger: logging.Logger,
-) -> Tuple[str, dict]:
+def treat_runtimeparams(runtimeparams: str, params: str, retrieve_hass_conf: dict, optim_conf: dict,
+                        plant_conf: dict, set_type: str, logger: logging.Logger
+                        ) -> Tuple[str, dict]:
    """
    Treat the passed optimization runtime parameters.

@@ -503,6 +497,7 @@ def treat_runtimeparams(
    params = json.dumps(params)
    return params, retrieve_hass_conf, optim_conf, plant_conf

+
def get_yaml_parse(emhass_conf: dict, use_secrets: Optional[bool] = True,
                   params: Optional[str] = None) -> Tuple[dict, dict, dict]:
    """
@@ -655,9 +650,7 @@ def get_injection_dict(df: pd.DataFrame, plot_size: Optional[int] = 1366) -> dic
    return injection_dict


-def get_injection_dict_forecast_model_fit(
-    df_fit_pred: pd.DataFrame, mlf: MLForecaster
-) -> dict:
+def get_injection_dict_forecast_model_fit(df_fit_pred: pd.DataFrame, mlf: MLForecaster) -> dict:
    """
    Build a dictionary with graphs and tables for the webui for special MLF fit case.

@@ -686,9 +679,7 @@ def get_injection_dict_forecast_model_fit(
    return injection_dict


-def get_injection_dict_forecast_model_tune(
-    df_pred_optim: pd.DataFrame, mlf: MLForecaster
-) -> dict:
+def get_injection_dict_forecast_model_tune(df_pred_optim: pd.DataFrame, mlf: MLForecaster) -> dict:
    """
    Build a dictionary with graphs and tables for the webui for special MLF tune case.

@@ -719,13 +710,8 @@ def get_injection_dict_forecast_model_tune(
    return injection_dict


-def build_params(
-
-    params_secrets: dict,
-    options: dict,
-    addon: int,
-    logger: logging.Logger,
-) -> dict:
+def build_params(params: dict, params_secrets: dict, options: dict, addon: int,
+                 logger: logging.Logger) -> dict:
    """
    Build the main params dictionary from the loaded options.json when using the add-on.

@@ -967,7 +953,6 @@ def get_days_list(days_to_retrieve: int) -> pd.date_range:
    today = datetime.now(timezone.utc).replace(minute=0, second=0, microsecond=0)
    d = (today - timedelta(days=days_to_retrieve)).isoformat()
    days_list = pd.date_range(start=d, end=today.isoformat(), freq="D")
-
    return days_list
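The utils.py hunks are again signature reflows and whitespace adjustments; the get_days_list helper shown in the last hunk keeps the same body. A self-contained sketch of that logic, annotated with the DatetimeIndex that pd.date_range actually returns:

```python
from datetime import datetime, timedelta, timezone
import pandas as pd

def get_days_list(days_to_retrieve: int) -> pd.DatetimeIndex:
    # Build the list of past days to query, ending at the current UTC hour,
    # as in the emhass.utils helper shown above
    today = datetime.now(timezone.utc).replace(minute=0, second=0, microsecond=0)
    d = (today - timedelta(days=days_to_retrieve)).isoformat()
    return pd.date_range(start=d, end=today.isoformat(), freq="D")

# Example: get_days_list(2) yields three daily timestamps ending today
```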
{emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: emhass
-Version: 0.9.0
+Version: 0.9.1
Summary: An Energy Management System for Home Assistant
Home-page: https://github.com/davidusb-geek/emhass
Author: David HERNANDEZ
@@ -40,43 +40,46 @@ Requires-Dist: plotly >=5.6.0
<strong></strong>
</div>
<br>
+
<p align="center">
-  <a href="https://github.com/davidusb-geek/emhass/releases">
+  <a style="text-decoration:none" href="https://github.com/davidusb-geek/emhass/releases">
    <img alt="GitHub release (latest by date)" src="https://img.shields.io/github/v/release/davidusb-geek/emhass">
  </a>
-  <a href="https://github.com/davidusb-geek/emhass/actions">
+  <a style="text-decoration:none" href="https://github.com/davidusb-geek/emhass/actions">
    <img alt="GitHub Workflow Status" src="https://img.shields.io/github/actions/workflow/status/davidusb-geek/emhass/python-test.yml?branch=master">
  </a>
-  <a
+  <a hstyle="text-decoration:none" ref="https://codecov.io/github/davidusb-geek/emhass" >
    <img src="https://codecov.io/github/davidusb-geek/emhass/branch/master/graph/badge.svg?token=BW7KSCHN90"/>
  </a>
-  <a href="https://github.com/davidusb-geek/emhass/blob/master/LICENSE">
+  <a style="text-decoration:none" href="https://github.com/davidusb-geek/emhass/blob/master/LICENSE">
    <img alt="GitHub" src="https://img.shields.io/github/license/davidusb-geek/emhass">
  </a>
-  <a href="https://pypi.org/project/emhass/">
+  <a style="text-decoration:none" href="https://pypi.org/project/emhass/">
    <img alt="PyPI - Python Version" src="https://img.shields.io/pypi/pyversions/emhass">
  </a>
-  <a href="https://pypi.org/project/emhass/">
+  <a style="text-decoration:none" href="https://pypi.org/project/emhass/">
    <img alt="PyPI - Status" src="https://img.shields.io/pypi/status/emhass">
  </a>
-  <a href="https://emhass.readthedocs.io/en/latest/">
+  <a style="text-decoration:none" href="https://emhass.readthedocs.io/en/latest/">
    <img alt="Read the Docs" src="https://img.shields.io/readthedocs/emhass">
  </a>
</p>
+
<div align="center">
-  <a href="https://emhass.readthedocs.io/en/latest/">
+  <a style="text-decoration:none" href="https://emhass.readthedocs.io/en/latest/">
    <img src="https://raw.githubusercontent.com/davidusb-geek/emhass/master/docs/images/Documentation_button.svg" alt="Documentation">
  </a>
-  <a href="https://community.home-assistant.io/t/emhass-an-energy-management-for-home-assistant/338126">
+  <a style="text-decoration:none" href="https://community.home-assistant.io/t/emhass-an-energy-management-for-home-assistant/338126">
    <img src="https://raw.githubusercontent.com/davidusb-geek/emhass/master/docs/images/Community_button.svg" alt="Community">
  </a>
-  <a href="https://github.com/davidusb-geek/emhass/issues">
+  <a style="text-decoration:none" href="https://github.com/davidusb-geek/emhass/issues">
    <img src="https://raw.githubusercontent.com/davidusb-geek/emhass/master/docs/images/Issues_button.svg" alt="Issues">
  </a>
-  <a href="https://github.com/davidusb-geek/emhass-add-on">
+  <a style="text-decoration:none" href="https://github.com/davidusb-geek/emhass-add-on">
    <img src="https://raw.githubusercontent.com/davidusb-geek/emhass/master/docs/images/EMHASS_Add_on_button.svg" alt="EMHASS Add-on">
  </a>
</div>
+
<br>
<p align="center">
If you like this work please consider buying a coffee ;-)
{emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
emhass/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-emhass/command_line.py,sha256=
-emhass/forecast.py,sha256=
+emhass/command_line.py,sha256=mxMvxqN5rp30AZ_QR4uWWxWu5NxLMNesgf44pJ-sVWk,47908
+emhass/forecast.py,sha256=lZZ7B8CUj-r9aXk1YDVr3bF7vgH-KUM1MoA99F0IBmA,47076
emhass/machine_learning_forecaster.py,sha256=az8cYRCckmR-WEdXyigbe8udtbj82yfahPmow4gue4s,15655
-emhass/machine_learning_regressor.py,sha256=
+emhass/machine_learning_regressor.py,sha256=WmR9ODWkY64RAniqLowwf5tZWzPTVp5ftCTKNtzcd6I,10407
emhass/optimization.py,sha256=ijiSBKdU0fS6TBpeoBo-CoPz6lBMU4nnsi6aiZi1J0I,37252
-emhass/retrieve_hass.py,sha256=
-emhass/utils.py,sha256=
+emhass/retrieve_hass.py,sha256=Xz3dYfQri-6irltbPr4QDDI7GGLJPwW3WEzRyHeC62Q,20391
+emhass/utils.py,sha256=4sm8QMp2rU1DZVM7XYT4FK5O7z_GEZTXbZcfn8nyBgc,47820
emhass/web_server.py,sha256=UfPUBA-ct1Su8cQFyufnW0Bb4BBlpGHF3yXN47sXkig,23055
emhass/data/cec_inverters.pbz2,sha256=tK8FvAUDW0uYez8EPttdCJwHhpPofclYV6GhhNZL0Pk,168272
emhass/data/cec_modules.pbz2,sha256=8vEaysgYffXg3KUl8XSF36Mdywzi3LpEtUN_qenjO9s,1655747
@@ -18,9 +18,9 @@ emhass/static/img/emhass_logo_short.svg,sha256=yzMcqtBRCV8rH84-MwnigZh45_f9Eoqwh
emhass/static/img/feather-sprite.svg,sha256=VHjMJQg88wXa9CaeYrKGhNtyK0xdd47zCqwSIa-hxo8,60319
emhass/templates/index.html,sha256=_BsvUJ981uSQkx5H9tq_3es__x4WdPiOy7FjNoNYU9w,2744
emhass/templates/template.html,sha256=TkGgMecQEbFUZA4ymPwMUzNjKHsENvCgroUWbPt7G4Y,158
-emhass-0.9.
-emhass-0.9.
-emhass-0.9.
-emhass-0.9.
-emhass-0.9.
-emhass-0.9.
+emhass-0.9.1.dist-info/LICENSE,sha256=1X3-S1yvOCBDBeox1aK3dq00m7dA8NDtcPrpKPISzbE,1077
+emhass-0.9.1.dist-info/METADATA,sha256=jc55aJnUsaxY7_yKqCux0BSmVdySw2ftms7fqnJfMCc,35940
+emhass-0.9.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+emhass-0.9.1.dist-info/entry_points.txt,sha256=6Bp1NFOGNv_fSTxYl1ke3K3h3aqAcBxI-bgq5yq-i1M,52
+emhass-0.9.1.dist-info/top_level.txt,sha256=L7fIX4awfmxQbAePtSdVg2e6x_HhghfReHfsKSpKr9I,7
+emhass-0.9.1.dist-info/RECORD,,
{emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/LICENSE: file without changes
{emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/WHEEL: file without changes
{emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/entry_points.txt: file without changes
{emhass-0.9.0.dist-info → emhass-0.9.1.dist-info}/top_level.txt: file without changes
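For reference, the RECORD entries above use the standard wheel format: file path, sha256= followed by the URL-safe base64 digest with padding stripped, and the file size in bytes. A short sketch for recomputing one entry from an extracted file (the path is illustrative):

```python
import base64
import hashlib
from pathlib import Path

def record_hash(path: Path) -> str:
    # Wheel RECORD hash: URL-safe base64 of the SHA-256 digest, '=' padding removed
    digest = hashlib.sha256(path.read_bytes()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

# e.g. record_hash(Path("emhass/optimization.py")) should match the entry above
```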