Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Dynamic paths with emhass config dict, some mlforecaster error suppression #247

Merged
merged 17 commits into from
Apr 18, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
75 changes: 43 additions & 32 deletions src/emhass/command_line.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,21 +23,19 @@
from emhass import utils


def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
def set_input_data_dict(emhass_conf: dict, costfun: str,
params: str, runtimeparams: str, set_type: str, logger: logging.Logger,
get_data_from_file: Optional[bool] = False) -> dict:
"""
Set up some of the data needed for the different actions.

:param config_path: The complete absolute path where the config.yaml file is located
:type config_path: pathlib.Path
:param base_path: The parent folder of the config_path
:type base_path: str
:param emhass_conf: Dictionary containing the needed emhass paths
:type emhass_conf: dict
:param costfun: The type of cost function to use for optimization problem
:type costfun: str
:param params: Configuration parameters passed from data/options.json
:type params: str
:param runtimeparams: Runtime optimization parameters passed as a dictionnary
:param runtimeparams: Runtime optimization parameters passed as a dictionary
:type runtimeparams: str
:param set_type: Set the type of setup based on following type of optimization
:type set_type: str
Expand All @@ -52,25 +50,25 @@
logger.info("Setting up needed data")
# Parsing yaml
retrieve_hass_conf, optim_conf, plant_conf = utils.get_yaml_parse(
config_path, use_secrets=not(get_data_from_file), params=params)
emhass_conf['config_path'], use_secrets=not(get_data_from_file), params=params)
# Treat runtimeparams
params, retrieve_hass_conf, optim_conf, plant_conf = utils.treat_runtimeparams(
runtimeparams, params, retrieve_hass_conf,
optim_conf, plant_conf, set_type, logger)
# Define main objects
rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
params, base_path, logger, get_data_from_file=get_data_from_file)
params, emhass_conf, logger, get_data_from_file=get_data_from_file)
fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
params, base_path, logger, get_data_from_file=get_data_from_file)
params, emhass_conf, logger, get_data_from_file=get_data_from_file)
opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
fcst.var_load_cost, fcst.var_prod_price,
costfun, base_path, logger)
costfun, emhass_conf, logger)
# Perform setup based on type of action
if set_type == "perfect-optim":
# Retrieve data from hass
if get_data_from_file:
with open(pathlib.Path(base_path) / 'data' / 'test_df_final.pkl', 'rb') as inp:
with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
rh.df_final, days_list, var_list = pickle.load(inp)
else:
days_list = utils.get_days_list(retrieve_hass_conf['days_to_retrieve'])
Expand Down Expand Up @@ -107,7 +105,7 @@
elif set_type == "naive-mpc-optim":
# Retrieve data from hass
if get_data_from_file:
with open(pathlib.Path(base_path) / 'data' / 'test_df_final.pkl', 'rb') as inp:
with open(emhass_conf['data_path'] / 'test_df_final.pkl', 'rb') as inp:
rh.df_final, days_list, var_list = pickle.load(inp)
else:
days_list = utils.get_days_list(1)
Expand All @@ -125,6 +123,9 @@
df_weather = fcst.get_weather_forecast(method=optim_conf['weather_forecast_method'])
P_PV_forecast = fcst.get_power_from_weather(df_weather, set_mix_forecast=True, df_now=df_input_data)
P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method'], set_mix_forecast=True, df_now=df_input_data)
if isinstance(P_load_forecast,bool) and not P_load_forecast:
logger.error("Unable to get sensor power photovoltaics, or sensor power load no var loads. Check HA sensors and their daily data")
return False
df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1)
df_input_data_dayahead = utils.set_df_index_freq(df_input_data_dayahead)
df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
Expand All @@ -143,7 +144,7 @@
if get_data_from_file:
days_list = None
filename = 'data_train_'+model_type+'.pkl'
data_path = pathlib.Path(base_path) / 'data' / filename
data_path = emhass_conf['data_path'] / filename
with open(data_path, 'rb') as inp:
df_input_data, _ = pickle.load(inp)
df_input_data = df_input_data[df_input_data.index[-1] - pd.offsets.Day(days_to_retrieve):]
Expand All @@ -163,9 +164,9 @@
P_PV_forecast, P_load_forecast = None, None
days_list = None

# The input data dictionnary to return
# The input data dictionary to return
input_data_dict = {
'root': base_path,
'emhass_conf': emhass_conf,
'retrieve_hass_conf': retrieve_hass_conf,
'rh': rh,
'opt': opt,
Expand Down Expand Up @@ -211,7 +212,7 @@
else: # Just save the latest optimization results
filename = 'opt_res_latest.csv'
if not debug:
opt_res.to_csv(pathlib.Path(input_data_dict['root']) / filename, index_label='timestamp')
opt_res.to_csv(pathlib.Path(input_data_dict['emhass_conf']['data_path']) / filename, index_label='timestamp')
return opt_res

def dayahead_forecast_optim(input_data_dict: dict, logger: logging.Logger,
Expand Down Expand Up @@ -248,7 +249,7 @@
else: # Just save the latest optimization results
filename = 'opt_res_latest.csv'
if not debug:
opt_res_dayahead.to_csv(pathlib.Path(input_data_dict['root']) / filename, index_label='timestamp')
opt_res_dayahead.to_csv(pathlib.Path(input_data_dict['emhass_conf']['data_path']) / filename, index_label='timestamp')
return opt_res_dayahead

def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger,
Expand Down Expand Up @@ -292,7 +293,7 @@
else: # Just save the latest optimization results
filename = 'opt_res_latest.csv'
if not debug:
opt_res_naive_mpc.to_csv(pathlib.Path(input_data_dict['root']) / filename, index_label='timestamp')
opt_res_naive_mpc.to_csv(pathlib.Path(input_data_dict['emhass_conf']['data_path']) / filename, index_label='timestamp')
return opt_res_naive_mpc

def forecast_model_fit(input_data_dict: dict, logger: logging.Logger,
Expand All @@ -315,16 +316,16 @@
num_lags = input_data_dict['params']['passed_data']['num_lags']
split_date_delta = input_data_dict['params']['passed_data']['split_date_delta']
perform_backtest = input_data_dict['params']['passed_data']['perform_backtest']
root = input_data_dict['root']
data_path = input_data_dict['emhass_conf']['data_path']
# The ML forecaster object
mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, root, logger)
mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, input_data_dict['emhass_conf'], logger)
# Fit the ML model
df_pred, df_pred_backtest = mlf.fit(split_date_delta=split_date_delta,
perform_backtest=perform_backtest)
# Save model
if not debug:
filename = model_type+'_mlf.pkl'
with open(pathlib.Path(root) / filename, 'wb') as outp:
with open(pathlib.Path(data_path) / filename, 'wb') as outp:
Fixed Show fixed Hide fixed
pickle.dump(mlf, outp, pickle.HIGHEST_PROTOCOL)
return df_pred, df_pred_backtest, mlf

Expand Down Expand Up @@ -353,9 +354,9 @@
"""
# Load model
model_type = input_data_dict['params']['passed_data']['model_type']
root = input_data_dict['root']
data_path = input_data_dict['emhass_conf']['data_path']
filename = model_type+'_mlf.pkl'
filename_path = pathlib.Path(root) / filename
filename_path = pathlib.Path(data_path) / filename
if not debug:
if filename_path.is_file():
with open(filename_path, 'rb') as inp:
Expand Down Expand Up @@ -414,9 +415,9 @@
"""
# Load model
model_type = input_data_dict['params']['passed_data']['model_type']
root = input_data_dict['root']
data_path = input_data_dict['emhass_conf']['data_path']
filename = model_type+'_mlf.pkl'
filename_path = pathlib.Path(root) / filename
filename_path = pathlib.Path(data_path) / filename
if not debug:
if filename_path.is_file():
with open(filename_path, 'rb') as inp:
Expand All @@ -429,7 +430,7 @@
# Save model
if not debug:
filename = model_type+'_mlf.pkl'
with open(pathlib.Path(root) / filename, 'wb') as outp:
with open(pathlib.Path(data_path) / filename, 'wb') as outp:
Fixed Show fixed Hide fixed
pickle.dump(mlf, outp, pickle.HIGHEST_PROTOCOL)
return df_pred_optim, mlf

Expand Down Expand Up @@ -457,11 +458,11 @@
else:
filename = 'opt_res_latest.csv'
if opt_res_latest is None:
if not os.path.isfile(pathlib.Path(input_data_dict['root']) / filename):
if not os.path.isfile(pathlib.Path(input_data_dict['emhass_conf']['data_path']) / filename):
Fixed Show fixed Hide fixed
logger.error("File not found error, run an optimization task first.")
return
else:
opt_res_latest = pd.read_csv(pathlib.Path(input_data_dict['root']) / filename, index_col='timestamp')
opt_res_latest = pd.read_csv(pathlib.Path(input_data_dict['emhass_conf']['data_path']) / filename, index_col='timestamp')
opt_res_latest.index = pd.to_datetime(opt_res_latest.index)
opt_res_latest.index.freq = input_data_dict['retrieve_hass_conf']['freq']
# Estimate the current index
Expand Down Expand Up @@ -614,18 +615,26 @@
parser.add_argument('--debug', type=strtobool, default='False', help='Use True for testing purposes')
args = parser.parse_args()
# The path to the configuration files
config_path = pathlib.Path(args.config)
base_path = str(config_path.parent)
if args.config is not None:
config_path = pathlib.Path(args.config)
else:
config_path = pathlib.Path(str(utils.get_root(__file__, num_parent=2) / 'config_emhass.yaml' ))
root_path = config_path.parent
data_path = (config_path.parent / 'data/')
emhass_conf = {}
emhass_conf['config_path'] = config_path
emhass_conf['data_path'] = data_path
emhass_conf['root_path'] = root_path
# create logger
logger, ch = utils.get_logger(__name__, base_path, save_to_file=bool(args.log2file))
logger, ch = utils.get_logger(__name__, emhass_conf['data_path'], save_to_file=bool(args.log2file))
# Additionnal argument
try:
parser.add_argument('--version', action='version', version='%(prog)s '+version('emhass'))
args = parser.parse_args()
except Exception:
logger.info("Version not found for emhass package. Or importlib exited with PackageNotFoundError.")
# Setup parameters
input_data_dict = set_input_data_dict(config_path, base_path,
input_data_dict = set_input_data_dict(emhass_conf,
args.costfun, args.params, args.runtimeparams, args.action,
logger, args.debug)
# Perform selected action
Expand Down Expand Up @@ -670,6 +679,8 @@
return df_pred
elif args.action == 'forecast-model-tune':
return df_pred_optim, mlf
else:
return opt_res

if __name__ == '__main__':
main()
Loading
Loading