Skip to content

Commit

Permalink
Fix - Fixed conflict between the `set_def_constant` and `treat_def_as_semi_cont` optimization constraints
Browse files Browse the repository at this point in the history
  • Loading branch information
davidusb-geek committed Jan 3, 2024
1 parent a4255f5 commit 0887a19
Show file tree
Hide file tree
Showing 6 changed files with 161 additions and 33 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
# Changelog

## [0.6.2] - Unreleased
### Fix
- Updated optimization constraints to solve conflict for `set_def_constant` and `treat_def_as_semi_cont` cases

## [0.6.1] - 2023-12-18
### Fix
- Patching EMHASS for Python 3.11. New explicit dependency h5py==3.10.0
Expand Down
20 changes: 12 additions & 8 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
FROM python:3.11-slim-buster
#FROM ghcr.io/home-assistant/amd64-base-debian:bookworm # Uncomment to test add-on
# FROM ghcr.io/home-assistant/amd64-base-debian:bookworm # Uncomment to test add-on
# FROM ghcr.io/home-assistant/armhf-base-debian:bookworm

# switch working directory
WORKDIR /app
Expand All @@ -13,22 +14,25 @@ COPY README.md README.md
# Setup
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
# libc-bin \ # Uncomment to test add-on
# libffi-dev \ # Uncomment to test add-on
# python3 \ # Uncomment to test add-on
# python3-pip \ # Uncomment to test add-on
# python3-dev \ # Uncomment to test add-on
# git \ # Uncomment to test add-on
# build-essential \ # Uncomment to test add-on
# libffi-dev \
# python3 \
# python3-pip \
# python3-dev \
# git \
# build-essential \
gcc \
coinor-cbc \
coinor-libcbc-dev \
libhdf5-dev \
libhdf5-serial-dev \
netcdf-bin \
libnetcdf-dev \
# pkg-config \
# gfortran \
# libatlas-base-dev \
&& ln -s /usr/include/hdf5/serial /usr/include/hdf5/include \
&& export HDF5_DIR=/usr/include/hdf5 \
# && pip3 install --extra-index-url=https://www.piwheels.org/simple --no-cache-dir --break-system-packages -U setuptools wheel \
&& pip3 install --no-cache-dir --break-system-packages -r requirements_webserver.txt \
&& apt-get purge -y --auto-remove \
gcc \
Expand Down
8 changes: 4 additions & 4 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
wheel
numpy==1.26.0
pandas==2.0.3
scipy==1.11.3
numpy<=1.26.0
pandas<=2.0.3
scipy<=1.11.3
pvlib>=0.10.2
protobuf>=3.0.0
pytz>=2021.1
Expand All @@ -10,5 +10,5 @@ beautifulsoup4>=4.9.3
h5py==3.10.0
pulp>=2.4
pyyaml>=5.4.1
tables==3.9.1
tables<=3.9.1
skforecast==0.11.0
113 changes: 113 additions & 0 deletions scripts/optim_results_analysis.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
# -*- coding: utf-8 -*-
import pickle
import numpy as np
import pandas as pd
import pathlib
import plotly.express as px
import plotly.subplots as sp
import plotly.io as pio
pio.renderers.default = 'browser'
pd.options.plotting.backend = "plotly"

from emhass.retrieve_hass import retrieve_hass
from emhass.optimization import optimization
from emhass.forecast import forecast
from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger

# The project root folder (two parent levels up from this script's location)
root = str(get_root(__file__, num_parent=2))
# Module-level logger and its console handler; save_to_file=False keeps
# output on the console only
logger, ch = get_logger(__name__, root, save_to_file=False)

def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
                               params, get_data_from_file):
    """Build the forecast and optimization objects used by this analysis.

    Returns a 5-tuple ``(fcst, P_PV_forecast, P_load_forecast,
    df_input_data_dayahead, opt)``: the forecast object, the PV-power and
    load forecast series, a DataFrame holding both forecasts side by side
    (columns ``P_PV_forecast`` / ``P_load_forecast``), and an optimization
    object configured for the 'profit' cost function.
    """
    forecaster = forecast(retrieve_hass_conf, optim_conf, plant_conf,
                          params, root, logger,
                          get_data_from_file=get_data_from_file)
    # Weather comes from the CSV method; PV power is derived from it
    weather_df = forecaster.get_weather_forecast(method='csv')
    pv_forecast = forecaster.get_power_from_weather(weather_df)
    load_forecast = forecaster.get_load_forecast(
        method=optim_conf['load_forecast_method'])
    # Gather both forecast series into a single day-ahead input DataFrame
    dayahead_df = pd.concat([pv_forecast, load_forecast], axis=1)
    dayahead_df.columns = ['P_PV_forecast', 'P_load_forecast']
    optimizer = optimization(retrieve_hass_conf, optim_conf, plant_conf,
                             forecaster.var_load_cost, forecaster.var_prod_price,
                             'profit', root, logger)
    return forecaster, pv_forecast, load_forecast, dayahead_df, optimizer

if __name__ == '__main__':
    # Toggles for interactive display and SVG export of the generated figures
    show_figures = False
    save_figures = False
    # When True, load the cached test dataset instead of querying Home Assistant
    get_data_from_file = True
    params = None
    retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(
        pathlib.Path(root+'/config_emhass.yaml'), use_secrets=False)
    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
                       params, root, logger)
    if get_data_from_file:
        with open(pathlib.Path(root+'/data/test_df_final.pkl'), 'rb') as inp:
            rh.df_final, days_list, var_list = pickle.load(inp)
    else:
        days_list = get_days_list(retrieve_hass_conf['days_to_retrieve'])
        var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
        rh.get_data(days_list, var_list,
                    minimal_response=False, significant_changes_only=False)
    rh.prepare_data(retrieve_hass_conf['var_load'], load_negative = retrieve_hass_conf['load_negative'],
                    set_zero_min = retrieve_hass_conf['set_zero_min'],
                    var_replace_zero = retrieve_hass_conf['var_replace_zero'],
                    var_interp = retrieve_hass_conf['var_interp'])
    df_input_data = rh.df_final.copy()

    fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt = \
        get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
                                   params, get_data_from_file)
    # Attach load-cost and production-price forecasts to the historical data
    df_input_data = fcst.get_load_cost_forecast(df_input_data)
    df_input_data = fcst.get_prod_price_forecast(df_input_data)

    template = 'presentation'

    # Plot the historical input data (PV production and positive load)
    fig_inputs1 = df_input_data[['sensor.power_photovoltaics',
                                 'sensor.power_load_no_var_loads_positive']].plot()
    fig_inputs1.layout.template = template
    fig_inputs1.update_yaxes(title_text = "Powers (W)")
    fig_inputs1.update_xaxes(title_text = "Time")
    if show_figures:
        fig_inputs1.show()
    if save_figures:
        fig_inputs1.write_image(root + "/docs/images/inputs_power.svg",
                                width=1080, height=0.8*1080)

    # Plot the day-ahead forecasted inputs
    fig_inputs_dah = df_input_data_dayahead.plot()
    fig_inputs_dah.layout.template = template
    fig_inputs_dah.update_yaxes(title_text = "Powers (W)")
    fig_inputs_dah.update_xaxes(title_text = "Time")
    if show_figures:
        fig_inputs_dah.show()
    if save_figures:
        fig_inputs_dah.write_image(root + "/docs/images/inputs_dayahead.svg",
                                   width=1080, height=0.8*1080)

    # Perform a day-ahead optimization with both deferrable loads treated as
    # semi-continuous AND constrained with set_def_constant — the combination
    # whose constraint conflict this script exercises
    df_input_data_dayahead = fcst.get_load_cost_forecast(df_input_data_dayahead)
    df_input_data_dayahead = fcst.get_prod_price_forecast(df_input_data_dayahead)
    optim_conf['treat_def_as_semi_cont'] = [True, True]
    optim_conf['set_def_constant'] = [True, True]
    opt_res_dah = opt.perform_dayahead_forecast_optim(df_input_data_dayahead, P_PV_forecast, P_load_forecast)
    opt_res_dah['P_PV'] = df_input_data_dayahead[['P_PV_forecast']]
    # Include the debug variables (P_def_start_*, P_def_bin2_*) exposed by
    # the optimization module in the results plot
    fig_res_dah = opt_res_dah[['P_deferrable0', 'P_deferrable1', 'P_grid', 'P_PV',
                               'P_def_start_0', 'P_def_start_1', 'P_def_bin2_0', 'P_def_bin2_1']].plot()
    fig_res_dah.layout.template = template
    fig_res_dah.update_yaxes(title_text = "Powers (W)")
    fig_res_dah.update_xaxes(title_text = "Time")
    if show_figures:
        fig_res_dah.show()
    if save_figures:
        fig_res_dah.write_image(root + "/docs/images/optim_results_PV_defLoads_dayaheadOptim.svg",
                                width=1080, height=0.8*1080)

    print("System with: PV, two deferrable loads, dayahead optimization, profit >> total cost function sum: "+\
        str(opt_res_dah['cost_profit'].sum()))

    print(opt_res_dah)
    opt_res_dah.to_html('opt_res_dah.html')
8 changes: 4 additions & 4 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,17 +40,17 @@
python_requires='>=3.9, <3.12',
install_requires=[
'wheel',
'numpy==1.26',
'scipy==1.11.3',
'pandas==2.0.3',
'numpy<=1.26',
'scipy<=1.11.3',
'pandas<=2.0.3',
'pvlib>=0.10.1',
'protobuf>=3.0.0',
'pytz>=2021.1',
'requests>=2.25.1',
'beautifulsoup4>=4.9.3',
'pulp>=2.4',
'pyyaml>=5.4.1',
'tables==3.9.1',
'tables<=3.9.1',
'skforecast==0.11.0',
], # Optional
entry_points={ # Optional
Expand Down
41 changes: 24 additions & 17 deletions src/emhass/optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,30 +287,31 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n
for i in set_I})
# Treat the number of starts for a deferrable load
if self.optim_conf['set_def_constant'][k]:
constraints.update({"constraint_pdef{}_start1".format(k) :
plp.LpConstraint(
e=P_def_start[k][0],
sense=plp.LpConstraintEQ,
rhs=0)
})
constraints.update({"constraint_pdef{}_start2_{}".format(k, i) :
plp.LpConstraint(
e=P_def_start[k][i] - P_def_bin2[k][i] + P_def_bin2[k][i-1],
sense=plp.LpConstraintEQ,
rhs=0)
for i in set_I[1:]})
constraints.update({"constraint_pdef{}_start4_{}".format(k, i) :

constraints.update({"constraint_pdef{}_start1_{}".format(k, i) :
plp.LpConstraint(
e=P_deferrable[k][i] - P_def_bin2[k][i]*M,
sense=plp.LpConstraintLE,
rhs=0)
for i in set_I})
constraints.update({"constraint_pdef{}_start5_{}".format(k, i) :
constraints.update({"constraint_pdef{}_start2_{}".format(k, i) :
plp.LpConstraint(
e=-P_deferrable[k][i] + M*(P_def_bin2[k][i]-1) + 1,
sense=plp.LpConstraintLE,
e=P_def_start[k][i] - P_def_bin2[k][i] + P_def_bin2[k][i-1],
sense=plp.LpConstraintGE,
rhs=0)
for i in set_I})
for i in set_I[1:]})
constraints.update({"constraint_pdef{}_start3".format(k) :
plp.LpConstraint(
e = plp.lpSum(P_def_start[k][i] for i in set_I),
sense = plp.LpConstraintEQ,
rhs = 1)
})
constraints.update({"constraint_pdef{}_start4".format(k) :
plp.LpConstraint(
e = plp.lpSum(P_def_bin2[k][i] for i in set_I),
sense = plp.LpConstraintEQ,
rhs = self.optim_conf['def_total_hours'][k]/self.timeStep)
})

# The battery constraints
if self.optim_conf['set_use_battery']:
Expand Down Expand Up @@ -465,6 +466,12 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n
# Add the optimization status
opt_tp["optim_status"] = self.optim_status

# Debug variables
opt_tp["P_def_start_0"] = [P_def_start[0][i].varValue for i in set_I]
opt_tp["P_def_start_1"] = [P_def_start[1][i].varValue for i in set_I]
opt_tp["P_def_bin2_0"] = [P_def_bin2[0][i].varValue for i in set_I]
opt_tp["P_def_bin2_1"] = [P_def_bin2[1][i].varValue for i in set_I]

return opt_tp

def perform_perfect_forecast_optim(self, df_input_data: pd.DataFrame, days_list: pd.date_range) -> pd.DataFrame:
Expand Down

0 comments on commit 0887a19

Please sign in to comment.