Skip to content

Commit

Permalink
Merge pull request #219 from bknueven/startup_shutdown_curve
Browse files Browse the repository at this point in the history
Startup/shutdown curve
  • Loading branch information
bknueven authored Apr 22, 2021
2 parents 650481c + 4158112 commit 45e96fe
Show file tree
Hide file tree
Showing 10 changed files with 345 additions and 40 deletions.
125 changes: 125 additions & 0 deletions egret/model_library/transmission/tests/test_baseMVA_scaling.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,125 @@
# ___________________________________________________________________________
#
# EGRET: Electrical Grid Research and Engineering Tools
# Copyright 2019 National Technology & Engineering Solutions of Sandia, LLC
# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S.
# Government retains certain rights in this software.
# This software is distributed under the Revised BSD License.
# ___________________________________________________________________________

import os
import math
import pytest

from egret.data.model_data import ModelData
from egret.model_library.transmission.tx_utils import \
scale_ModelData_to_pu, unscale_ModelData_to_pu
from egret.models.unit_commitment import solve_unit_commitment
from egret.models.tests.test_unit_commitment import test_solver

current_dir = os.path.dirname(os.path.abspath(__file__))
uc_test_dir = os.path.join(current_dir, '..','..','..','models',
'tests', 'uc_test_instances')
scuc_fn = os.path.join(uc_test_dir, 'test_scuc_full_enforce_relaxed_sol.json')
tiny_uc_7_fn = os.path.join(uc_test_dir, 'tiny_uc_7.json')

def test_scale_unscale():
    """Round-trip test: scaling to per-unit and back must reproduce the data."""
    md = ModelData.read(scuc_fn)

    # Force numeric fields through a scaling pass at baseMVA == 1 (a numeric
    # no-op) to normalize types, then restore the true baseMVA.
    true_base_mva = md.data['system']['baseMVA']
    md.data['system']['baseMVA'] = 1.
    scale_ModelData_to_pu(md, inplace=True)
    md.data['system']['baseMVA'] = true_base_mva

    md_roundtrip = scale_ModelData_to_pu(md, inplace=False)

    # inplace=False must hand back a distinct data dictionary
    assert md.data is not md_roundtrip.data

    unscale_ModelData_to_pu(md_roundtrip, inplace=True)

    assert md.data['system'] == md_roundtrip.data['system']

    # compare in both directions so missing and extra elements are both caught
    for first, second in ((md, md_roundtrip), (md_roundtrip, md)):
        for element_type, elements in first.data['elements'].items():
            for name, element in elements.items():
                assert element == second.data['elements'][element_type][name]

def test_scaling_spot_check():
    """Spot-check representative quantities after scaling to p.u. and back.

    Three copies are compared: the original, the scaled (per-unit) version,
    and the scaled-then-unscaled round trip. Dimensionless and cost
    quantities must be unchanged, power quantities scale down by baseMVA,
    and prices scale up by baseMVA (inverse scaling).
    """
    md = ModelData.read(scuc_fn)
    baseMVA = md.data['system']['baseMVA']

    md_scaled = scale_ModelData_to_pu(md, inplace=False)
    md_scaled_unscaled = unscale_ModelData_to_pu(md_scaled, inplace=False)

    def values_from(getter):
        # pull the same quantity out of all three ModelData copies
        return getter(md), getter(md_scaled), getter(md_scaled_unscaled)

    # commitment, production cost, and voltage angle are not MVA-scaled
    unchanged_getters = (
        lambda m: m.data['elements']['generator']['101_STEAM_3_t']['commitment']['values'][10],
        lambda m: m.data['elements']['generator']['101_STEAM_3_t']['production_cost']['values'][10],
        lambda m: m.data['elements']['bus']['Alber']['va']['values'][10],
    )
    for getter in unchanged_getters:
        original, scaled, round_trip = values_from(getter)
        assert original == scaled == round_trip

    # power quantities (generation, load, flows, contingency flows)
    # are divided by baseMVA when expressed in per-unit
    power_getters = (
        lambda m: m.data['elements']['generator']['101_STEAM_3_t']['pg']['values'][10],
        lambda m: m.data['elements']['bus']['Alber']['pl']['values'][10],
        lambda m: m.data['elements']['load']['Alber']['p_load']['values'][10],
        lambda m: m.data['elements']['branch']['A22']['pf']['values'][20],
        lambda m: m.data['elements']['contingency']['A1']['monitored_branches']['values'][10]['A11']['pf'],
    )
    for getter in power_getters:
        original, scaled, round_trip = values_from(getter)
        assert original == scaled / baseMVA == round_trip

    # prices (lmp, reserve price, shortfall cost) are inversely scaled
    price_getters = (
        lambda m: m.data['elements']['bus']['Alber']['lmp']['values'][10],
        lambda m: m.data['system']['reserve_price']['values'][18],
        lambda m: m.data['system']['reserve_shortfall_cost'],
    )
    for getter in price_getters:
        original, scaled, round_trip = values_from(getter)
        assert original == scaled * baseMVA == round_trip

def test_scaling_solve():
    """Solving the same instance at baseMVA 1 and 100 must give the same cost."""
    md = ModelData.read(tiny_uc_7_fn)
    assert md.data['system']['baseMVA'] == 1.

    unscaled_result = solve_unit_commitment(md, test_solver, relaxed=True)

    md.data['system']['baseMVA'] = 100.
    scaled_result = solve_unit_commitment(md, test_solver, relaxed=True)

    assert math.isclose(scaled_result.data['system']['total_cost'],
                        unscaled_result.data['system']['total_cost'])
107 changes: 81 additions & 26 deletions egret/model_library/transmission/tx_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,6 +247,8 @@ def load_shed_limit(load, gens, gen_mins):
'q_max',
'startup_capacity',
'shutdown_capacity',
'startup_curve',
'shutdown_curve',
'ramp_up_60min',
'ramp_down_60min',
'initial_p_output',
Expand All @@ -267,9 +269,9 @@ def load_shed_limit(load, gens, gen_mins):
'headroom',
'reg_up_supplied',
'reg_down_supplied',
'spin_supplied',
'flex_up_supplied',
'flex_down_supplied',
'spinning_supplied',
'non_spinning_supplied',
'supplemental_supplied',
'p_cost',
Expand Down Expand Up @@ -403,40 +405,93 @@ def _get_op(normal_op, inverse_op, attr_name):
return inverse_op
return normal_op

def _no_op(a,b):
return a

def _recurse_deeper_dict(normal_op, inverse_op, element, attr_name, value, baseMVA, attributes):
if 'data_type' in value:
_scale_by_baseMVA(normal_op, inverse_op, element, attr_name, value, baseMVA, attributes)
else: # recurse deeper
for k,v in value.items():
_scale_by_baseMVA(normal_op, inverse_op, value, k, v, baseMVA, attributes)

def _scale_by_baseMVA(normal_op, inverse_op, element, attr_name, attr, baseMVA, attributes):
if attr is None:
return
if isinstance(attr, dict):
if 'data_type' in attr and attr['data_type'] == 'time_series':
op = _get_op(normal_op, inverse_op, attr_name)
values_list = attr['values']
for time, value in enumerate(values_list):
if isinstance(value, dict):
_scale_by_baseMVA(normal_op, inverse_op, element, attr_name, value, baseMVA, attributes)
if 'data_type' in attr:
if attr['data_type'] == 'time_series':
if attr_name in attributes:
op = _get_op(normal_op, inverse_op, attr_name)
else:
op = _no_op
values_list = attr['values']
for time, value in enumerate(values_list):
if isinstance(value, dict):
_recurse_deeper_dict(normal_op, inverse_op, element, attr_name, value, baseMVA, attributes)
elif isinstance(value, list):
values_list[time] = [ op(v, baseMVA) for v in value ]
elif isinstance(value, tuple):
values_list[time] = tuple( op(v, baseMVA) for v in value )
else:
values_list[time] = op( value , baseMVA )
elif attr['data_type'] == 'cost_curve':
if attr['cost_curve_type'] == 'polynomial':
values_dict = attr['values']
if 'data_type' in values_dict:
_recurse_deeper_dict(normal_op, inverse_op, element, attr_name, values_dict, baseMVA, attributes)
else:
attr['values'] = { int(power): coeff*(inverse_op(1.,baseMVA)**int(power)) \
for (power, coeff) in values_dict.items() }
elif attr['cost_curve_type'] == 'piecewise':
values = attr['values']
if isinstance(values, list):
attr['values'] = [ (normal_op(point,baseMVA), cost) \
for (point, cost) in values ]
elif isinstance(values, tuple):
attr['values'] = tuple( (normal_op(point,baseMVA), cost) \
for (point, cost) in values )
elif isinstance(values, dict):
_recurse_deeper_dict(normal_op, inverse_op, element, attr_name, values, baseMVA, attributes)
else:
raise RuntimeError("Unexpected case converting piecewise cost curve")
elif attr['data_type'] == 'fuel_curve':
values = attr['values']
if isinstance(values, list):
attr['values'] = [ (normal_op(point,baseMVA), fuel) \
for (point, fuel) in values ]
elif isinstance(values, tuple):
attr['values'] = tuple( (normal_op(point,baseMVA), fuel) \
for (point, fuel) in values )
elif isinstance(values, dict):
_recurse_deeper_dict(normal_op, inverse_op, element, attr_name, values, baseMVA, attributes)
else:
raise RuntimeError("Unexpected case converting piecewise fuel curve")
else: # potentially recurse deeper on the "values"
if attr_name in attributes:
op = _get_op(normal_op, inverse_op, attr_name)
else:
op = _no_op
values = attr['values']
if isinstance(values, dict):
_recurse_deeper_dict(normal_op, inverse_op, element, attr_name, values, baseMVA, attributes)
elif isinstance(values, list):
attr['values'] = [ op(v, baseMVA) for v in values ]
elif isinstance(value, tuple):
attr['values'] = tuple( op(v, baseMVA) for v in values )
else:
values_list[time] = op( value , baseMVA )
elif 'data_type' in attr and attr['data_type'] == 'cost_curve':
if attr['cost_curve_type'] == 'polynomial':
values_dict = attr['values']
new_values = { int(power): coeff*(inverse_op(1.,baseMVA)**int(power)) \
for (power, coeff) in values_dict.items() }
attr['values'] = new_values
elif attr['cost_curve_type'] == 'piecewise':
values_list_of_tuples = attr['values']
new_values = [ ( normal_op(point,baseMVA), cost) \
for (point, cost) in values_list_of_tuples ]
attr['values'] = new_values
elif 'data_type' in attr and attr['data_type'] == 'fuel_curve':
values_list_of_tuples = attr['values']
new_values = [ ( normal_op(point,baseMVA), fuel) \
for (point, fuel) in values_list_of_tuples ]
attr['values'] = new_values
else: # recurse deeper
attr['values'] = op( values , baseMVA )
else: # recurse deeper AND we've already checked for data_type
for k,v in attr.items():
_scale_by_baseMVA(normal_op, inverse_op, attr, k, v, baseMVA, attributes)
elif attr_name in attributes:
op = _get_op(normal_op, inverse_op, attr_name)
element[attr_name] = op( attr , baseMVA )
if isinstance(attr, list):
element[attr_name] = [ op(a, baseMVA) for a in attr ]
elif isinstance(attr, tuple):
element[attr_name] = tuple( op(a, baseMVA) for a in attr )
else:
element[attr_name] = op( attr , baseMVA )
else:
return

Expand Down
77 changes: 68 additions & 9 deletions egret/model_library/unit_commitment/params.py
Original file line number Diff line number Diff line change
Expand Up @@ -387,12 +387,12 @@ def warn_about_negative_demand_rule(m, b, t):
## Assert that MUT and MDT are at least 1 in the time units of the model.
## Otherwise, turn on/offs may not be enforced correctly.
def scale_min_uptime(m, g):
    """Minimum up time of generator g in whole model time periods.

    Uses ceil so a fractional-period up time is never rounded down (which
    could under-enforce the turn-on constraint); clamped to at least one
    period and at most the horizon length.
    """
    scaled_up_time = int(math.ceil(m.MinimumUpTime[g] / m.TimePeriodLengthHours))
    return min(max(scaled_up_time, 1), value(m.NumTimePeriods))
model.ScaledMinimumUpTime = Param(model.ThermalGenerators, within=NonNegativeIntegers, initialize=scale_min_uptime)

def scale_min_downtime(m, g):
    """Minimum down time of generator g in whole model time periods.

    Uses ceil so a fractional-period down time is never rounded down (which
    could under-enforce the turn-off constraint); clamped to at least one
    period and at most the horizon length.
    """
    scaled_down_time = int(math.ceil(m.MinimumDownTime[g] / m.TimePeriodLengthHours))
    return min(max(scaled_down_time, 1), value(m.NumTimePeriods))
model.ScaledMinimumDownTime = Param(model.ThermalGenerators, within=NonNegativeIntegers, initialize=scale_min_downtime)

Expand Down Expand Up @@ -524,12 +524,68 @@ def t0_unit_on_rule(m, g):

_add_initial_time_periods_on_off_line(model)
_verify_must_run_t0_state_consistency(model)

# For future shutdowns/startups beyond the time-horizon.
# Like UnitOnT0State, a positive quantity means the generator
# *will start* in 'future_status' hours, and a negative quantity
# means the generator *will stop* in -('future_status') hours.
# The default of 0 means we have no information.
model.FutureStatus = Param(model.ThermalGenerators,
                           within=Reals,
                           mutable=True,
                           default=0.,
                           initialize=thermal_gen_attrs.get('future_status', dict()))

def time_periods_since_last_shutdown_rule(m, g):
    """Whole time periods since g last shut down; huge sentinel if it is on."""
    if value(m.UnitOnT0[g]):
        # unit is currently on: return a value longer than any
        # time-horizon we'd consider
        return 10000
    hours_off = -value(m.UnitOnT0State[g])
    return int(math.ceil(hours_off / value(m.TimePeriodLengthHours)))
model.TimePeriodsSinceShutdown = Param(model.ThermalGenerators, within=PositiveIntegers, mutable=True,
                                       initialize=time_periods_since_last_shutdown_rule)

def time_periods_before_startup_rule(m, g):
    """Whole time periods until g's scheduled start; huge sentinel if none."""
    future = value(m.FutureStatus[g])
    if future <= 0:
        # no scheduled future start: return a value longer than any
        # time-horizon we'd consider
        return 10000
    return int(math.ceil(future / value(m.TimePeriodLengthHours)))
model.TimePeriodsBeforeStartup = Param(model.ThermalGenerators, within=PositiveIntegers, mutable=True,
                                       initialize=time_periods_before_startup_rule)

###############################################
# Startup/shutdown curves for each generator. #
# These are specified on the same time scale  #
# as 'time_period_length_minutes' and other   #
# time-varying quantities.                    #
###############################################

def startup_curve_init_rule(m, g):
    """Startup power trajectory for g, truncated to the scaled minimum down time.

    Returns an empty tuple when no curve is supplied; the slice below is a
    no-op for curves already within the limit.
    """
    startup_curve = thermal_gens[g].get('startup_curve')
    if startup_curve is None:
        return ()
    min_down_time = int(math.ceil(m.MinimumDownTime[g] / m.TimePeriodLengthHours))
    if len(startup_curve) > min_down_time:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning(f"Truncating startup_curve longer than scaled minimum down time {min_down_time} for generator {g}")
    return startup_curve[0:min_down_time]
model.StartupCurve = Set(model.ThermalGenerators, within=NonNegativeReals, ordered=True, initialize=startup_curve_init_rule)

def shutdown_curve_init_rule(m, g):
    """Shutdown power trajectory for g, truncated to the scaled minimum down time.

    Returns an empty tuple when no curve is supplied; the slice below is a
    no-op for curves already within the limit.
    """
    shutdown_curve = thermal_gens[g].get('shutdown_curve')
    if shutdown_curve is None:
        return ()
    min_down_time = int(math.ceil(m.MinimumDownTime[g] / m.TimePeriodLengthHours))
    if len(shutdown_curve) > min_down_time:
        # logger.warn is a deprecated alias of logger.warning
        logger.warning(f"Truncating shutdown_curve longer than scaled minimum down time {min_down_time} for generator {g}")
    return shutdown_curve[0:min_down_time]
model.ShutdownCurve = Set(model.ThermalGenerators, within=NonNegativeReals, ordered=True, initialize=shutdown_curve_init_rule)

####################################################################
# generator power output at t=0 (initial condition). units are MW. #
####################################################################

def between_limits_validator(m, v, g):
def power_generated_t0_validator(m, v, g):
t = m.TimePeriods.first()

if value(m.UnitOnT0[g]):
Expand All @@ -544,13 +600,16 @@ def between_limits_validator(m, v, g):
return True

else:
return v == 0.
# Generator was off, but could have residual power due to
# start-up/shut-down curve. Therefore, do not be too picky
# as the value doesn't affect any constraints directly
return True

model.PowerGeneratedT0 = Param(model.ThermalGenerators,
within=NonNegativeReals,
validate=between_limits_validator,
mutable=True,
initialize=thermal_gen_attrs['initial_p_output'])
within=NonNegativeReals,
validate=power_generated_t0_validator,
mutable=True,
initialize=thermal_gen_attrs['initial_p_output'])

# limits for time periods in which generators are brought on or off-line.
# must be no less than the generator minimum output.
Expand Down Expand Up @@ -663,7 +722,7 @@ def scale_shutdown_limit_t0(m, g):
else:
return temp + m.MinimumPowerOutputT0[g]
model.ScaledShutdownRampLimitT0 = Param(model.ThermalGenerators, within=NonNegativeReals, initialize=scale_shutdown_limit_t0, mutable=True)

###############################################
# startup cost parameters for each generator. #
###############################################
Expand Down
Loading

0 comments on commit 45e96fe

Please sign in to comment.