PHTracking extension #334

Merged · 14 commits · Sep 10, 2023
1 change: 0 additions & 1 deletion .github/workflows/nompi4py.yml
@@ -35,4 +35,3 @@ jobs:
run: |
cd mpisppy/tests
python test_ef_ph.py

4 changes: 2 additions & 2 deletions .github/workflows/pull_push_regression.yml
@@ -39,9 +39,9 @@ jobs:
- name: Test afew
run: |
cd examples
python afew.py xpress_persistent
python afew.py xpress_persistent

- name: Test run_all nouc
- name: Test run_all nouc
run: |
cd examples
python run_all.py xpress_persistent "" nouc
4 changes: 2 additions & 2 deletions .github/workflows/pyotracker.yml
@@ -4,7 +4,7 @@ name: pyomo tracker

on:
schedule:
# min hr dayofmonth month dayofweek
# min hr dayofmonth month dayofweek
- cron: "14 3 * * *"

defaults:
@@ -47,7 +47,7 @@ jobs:
- name: Test afew
run: |
cd examples
python afew.py xpress_persistent
python afew.py xpress_persistent

- name: Test docs
run: |
2 changes: 1 addition & 1 deletion .github/workflows/schur_complement.yml
@@ -27,7 +27,7 @@ jobs:
python -m pip install --upgrade pip
pip install numpy scipy nose pybind11
conda install openmpi pymumps --no-update-deps
pip install mpi4py pandas
pip install mpi4py pandas matplotlib
pip install git+https://github.com/pyutilib/pyutilib.git
git clone https://github.com/pyomo/pyomo.git
cd pyomo/
3 changes: 1 addition & 2 deletions .github/workflows/straight.yml
@@ -27,7 +27,7 @@ jobs:
- name: Install dependencies
run: |
conda install mpi4py pandas setuptools
pip install pyomo xpress cplex dill
pip install pyomo xpress cplex dill matplotlib

- name: setup the program
run: |
@@ -37,4 +37,3 @@
run: |
cd mpisppy/tests
python straight_tests.py

2 changes: 1 addition & 1 deletion .github/workflows/testaph.yml
@@ -39,4 +39,4 @@ jobs:
run: |
cd mpisppy/tests
# envall does nothing
python test_aph.py
python test_aph.py
7 changes: 3 additions & 4 deletions .github/workflows/testbunpick.yml
@@ -26,8 +26,8 @@ jobs:
auto-activate-base: false
- name: Install dependencies
run: |
conda install mpi4py numpy setuptools
pip install pyomo pandas xpress cplex scipy sympy dill PyYAML Pympler networkx pandas
conda install mpi4py numpy setuptools
pip install pyomo pandas xpress cplex scipy sympy dill PyYAML Pympler networkx pandas matplotlib

- name: setup the program
run: |
@@ -37,5 +37,4 @@
timeout-minutes: 10
run: |
cd mpisppy/tests
python test_pickle_bundle.py

python test_pickle_bundle.py
8 changes: 4 additions & 4 deletions .github/workflows/testconfint.yml
@@ -26,8 +26,8 @@ jobs:
auto-activate-base: false
- name: Install dependencies
run: |
conda install mpi4py numpy setuptools
pip install pyomo pandas xpress cplex scipy sympy dill
conda install mpi4py numpy setuptools
pip install pyomo pandas xpress cplex scipy sympy dill matplotlib

- name: setup the program
run: |
@@ -37,10 +37,10 @@
timeout-minutes: 10
run: |
cd mpisppy/tests
python test_conf_int_farmer.py
python test_conf_int_farmer.py

- name: run aircond tests
timeout-minutes: 10
run: |
cd mpisppy/tests
python test_conf_int_aircond.py
python test_conf_int_aircond.py
5 changes: 2 additions & 3 deletions .github/workflows/testgradient.yml
@@ -25,7 +25,7 @@ jobs:
- name: Install dependencies
run: |
conda install mpi4py numpy setuptools cmake
pip install pyomo pandas xpress cplex scipy sympy dill
pip install pyomo pandas xpress cplex scipy sympy dill matplotlib

- name: setup the program
run: |
@@ -40,6 +40,5 @@
timeout-minutes: 10
run: |
cd mpisppy/tests
python test_gradient_rho.py
python test_gradient_rho.py
python test_w_writer.py

2 changes: 1 addition & 1 deletion .github/workflows/testpysp.yml
@@ -40,7 +40,7 @@ jobs:
run: |
cd mpisppy/tests
# envall does nothing
python test_pysp_model.py
python test_pysp_model.py

- name: run pysp unit tests
timeout-minutes: 100
8 changes: 6 additions & 2 deletions examples/farmer/farmer_cylinders.py
@@ -15,6 +15,7 @@
from mpisppy.extensions.norm_rho_updater import NormRhoUpdater
from mpisppy.convergers.norm_rho_converger import NormRhoConverger
from mpisppy.convergers.primal_dual_converger import PrimalDualConverger
from mpisppy.utils.cfg_vanilla import extension_adder

write_solution = True

@@ -33,6 +34,8 @@ def _parse_args():
cfg.lagranger_args()
cfg.xhatshuffle_args()
cfg.converger_args()
cfg.wxbar_read_write_args()
cfg.tracking_args()
cfg.add_to_config("crops_mult",
description="There will be 3x this many crops (default 1)",
domain=int,
@@ -101,11 +104,12 @@ def main():
hub_dict['opt_kwargs']['options']\
['primal_dual_converger_options'] = {
'verbose': True,
'tol': cfg.primal_dual_converger_tol}
'tol': cfg.primal_dual_converger_tol,
'tracking': True}

## hack in adaptive rho
if cfg.use_norm_rho_updater:
hub_dict['opt_kwargs']['extensions'] = NormRhoUpdater
extension_adder(hub_dict, NormRhoUpdater)
hub_dict['opt_kwargs']['options']['norm_rho_options'] = {'verbose': True}

# FWPH spoke
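
The farmer_cylinders change above swaps the direct assignment to hub_dict['opt_kwargs']['extensions'] for extension_adder(hub_dict, NormRhoUpdater), so the adaptive-rho extension no longer overwrites whatever extension the new tracking options may already have registered on the hub. A minimal sketch of the new pattern follows; only the imports and the call shape come from the diff, and the bare-bones hub_dict is a stand-in for the one the example builds with vanilla.ph_hub(...).

from mpisppy.utils.cfg_vanilla import extension_adder
from mpisppy.extensions.norm_rho_updater import NormRhoUpdater

# Stand-in for the hub_dict built by vanilla.ph_hub(...) in farmer_cylinders.py;
# the real dict also carries solver options, converger settings, etc.
hub_dict = {'opt_kwargs': {'options': {}}}

# Old pattern (the removed line) overwrote any previously registered extension:
#   hub_dict['opt_kwargs']['extensions'] = NormRhoUpdater
# New pattern: register NormRhoUpdater alongside whatever is already attached.
extension_adder(hub_dict, NormRhoUpdater)
hub_dict['opt_kwargs']['options']['norm_rho_options'] = {'verbose': True}
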
16 changes: 11 additions & 5 deletions examples/run_all.py
@@ -158,10 +158,16 @@ def do_one_mmw(dirname, runefstring, npyfile, mmwargstring):
"--num-scens 3 --bundles-per-rank=0 --max-iterations=50 --default-rho=1 --solver-name={} "
"--primal-dual-converger --primal-dual-converger-tol=0.5 --lagrangian --xhatshuffle "
"--intra-hub-conv-thresh -0.1 --rel-gap=1e-6".format(solver_name))
do_one("farmer", "farmer_cylinders.py", 4,
do_one("farmer", "farmer_cylinders.py", 5,
"--num-scens 3 --bundles-per-rank=0 --max-iterations=50 --default-rho=1 --solver-name={} "
"--use-norm-rho-converger --use-norm-rho-updater --lagrangian --xhatshuffle --fwph "
"--display-convergence-detail".format(solver_name))
"--use-norm-rho-converger --use-norm-rho-updater --rel-gap=1e-6 --lagrangian --lagranger "
"--xhatshuffle --fwph --W-fname=out_ws.txt --Xbar-fname=out_xbars.txt "
"--ph-track-progress --track-convergence=4 --track-xbar=4 --track-nonants=4 "
"--track-duals=4".format(solver_name))
do_one("farmer", "farmer_cylinders.py", 5,
"--num-scens 3 --bundles-per-rank=0 --max-iterations=50 --default-rho=1 --solver-name={} "
"--use-norm-rho-converger --use-norm-rho-updater --lagrangian --lagranger --xhatshuffle --fwph "
"--init-W-fname=out_ws.txt --init-Xbar-fname=out_xbars.txt --ph-track-progress --track-convergence=4 " "--track-xbar=4 --track-nonants=4 --track-duals=4 ".format(solver_name))
do_one("farmer", "farmer_lshapedhub.py", 2,
"--num-scens 3 --bundles-per-rank=0 --max-iterations=50 "
"--solver-name={} --rel-gap=0.0 "
@@ -361,14 +367,14 @@ def do_one_mmw(dirname, runefstring, npyfile, mmwargstring):
"--default-rho=1 --num-scens=3 --max-solver-threads=2 "
"--lagrangian-iter0-mipgap=1e-7 --lagrangian --xhatshuffle "
"--ph-mipgaps-json=phmipgaps.json "
"--solver-name={}".format(solver_name))
"--solver-name={}".format(solver_name))
# as of May 2022, this one works well, but outputs some crazy messages
do_one("uc", "uc_ama.py", 3,
"--bundles-per-rank=0 --max-iterations=2 "
"--default-rho=1 --num-scens=3 "
"--fixer-tol=1e-2 --lagranger --xhatshuffle "
"--solver-name={}".format(solver_name))

# 10-scenario UC
time_one("UC_cylinder10scen", "uc", "uc_cylinders.py", 3,
"--bundles-per-rank=5 --max-iterations=2 "
8 changes: 7 additions & 1 deletion mpisppy/convergers/converger.py
@@ -27,9 +27,15 @@ def __init__(self, opt):
@abc.abstractmethod
def is_converged(self):
''' Indicates whether the algorithm has converged.

Must return a boolean. If True, the algorithm will terminate at the
current iteration--no more solves will be performed by SPBase.
Otherwise, the iterations will continue.
'''
pass

def post_loops(self):
'''Method called after the termination of the algorithm.
This method is called after the post_loops of any extensions
'''
pass
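
The expanded docstrings pin down the converger contract: is_converged() must return a bool, True stops the hub at the current iteration, and post_loops() runs once after termination, after any extension's post_loops. A toy converger honoring that contract might look like the sketch below; the class name, the iteration-cap rule, and the 'iter_cap' option are invented for illustration.

import mpisppy.convergers.converger

class IterCapConverger(mpisppy.convergers.converger.Converger):
    """Toy converger: declare convergence after a fixed number of PH iterations."""

    def __init__(self, opt):
        super().__init__(opt)
        self._opt = opt
        # 'iter_cap' is an assumed option name for this sketch
        self._cap = opt.options.get('iter_cap', 10)

    def is_converged(self):
        # Must return a bool; True halts the algorithm at the current iteration.
        return self._opt._PHIter >= self._cap

    def post_loops(self):
        # Runs once after termination, after the extensions' post_loops.
        if self._opt.cylinder_rank == 0:
            print(f"stopped at PH iteration {self._opt._PHIter}")
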
77 changes: 60 additions & 17 deletions mpisppy/convergers/primal_dual_converger.py
@@ -1,35 +1,51 @@
import math
import numpy as np
import os
import pandas as pd
import mpisppy.convergers.converger
import matplotlib.pyplot as plt
from mpisppy import MPI
from mpisppy.extensions.phtracker import TrackedData

class PrimalDualConverger(mpisppy.convergers.converger.Converger):
""" Convergence checker for the primal-dual metrics.
Primal convergence is measured as weighted sum over all scenarios s
p_{s} * ||x_{s} - \bar{x}||_1.
Dual convergence is measured as
Dual convergence is measured as
rho * ||\bar{x}_{t} - \bar{x}_{t-1}||_1
"""
def __init__(self, ph):
""" Initialization method for the PrimalDualConverger class."""
super().__init__(ph)
if 'primal_dual_converger_options' in ph.options and \
'verbose' in ph.options['primal_dual_converger_options'] and \
ph.options['primal_dual_converger_options']['verbose']:
self._verbose = True
else:
self._verbose = False
self._ph = ph
self.convergence_threshold = ph.options['primal_dual_converger_options']\
['tol']

self.options = ph.options.get('primal_dual_converger_options', {})
self._verbose = self.options.get('verbose', False)
self._ph = ph
self.convergence_threshold = self.options.get('tol', 1)
self.tracking = self.options.get('tracking', False)
self.prev_xbars = self._get_xbars()
self._rank = self._ph.cylinder_rank

if self.tracking and self._rank == 0:
# if phtracker is set up, save the results in the phtracker/hub folder
if 'phtracker_options' in self._ph.options:
tracker_options = self._ph.options["phtracker_options"]
cylinder_name = tracker_options.get(
"cylinder_name", type(self._ph.spcomm).__name__)
results_folder = tracker_options.get(
"results_folder", "results")
results_folder = os.path.join(results_folder, cylinder_name)
else:
results_folder = self.options.get('results_folder', 'results')
self.tracker = TrackedData('pd', results_folder, plot=True, verbose=self._verbose)
os.makedirs(results_folder, exist_ok=True)
self.tracker.initialize_fnames(name=self.options.get('pd_fname', None))
self.tracker.initialize_df(['iteration', 'primal_gap', 'dual_gap'])

def _get_xbars(self):
"""
Get the current xbar values from the local scenarios
Returns:
xbars (dict): dictionary of xbar values indexed by
xbars (dict): dictionary of xbar values indexed by
(decision node name, index)
"""
xbars = {}
@@ -38,7 +54,7 @@ def _get_xbars(self):
xbars[ndn_i] = xbar.value
break
return xbars

def _compute_primal_convergence(self):
"""
Compute the primal convergence metric
@@ -55,12 +71,12 @@ def _compute_primal_convergence(self):
nlen = s._mpisppy_data.nlens[ndn]
x_bars = np.fromiter((s._mpisppy_model.xbars[ndn,i]._value
for i in range(nlen)), dtype='d')

nonants_array = np.fromiter(
(v._value for v in node.nonant_vardata_list),
dtype='d', count=nlen)
_l1 = np.abs(x_bars - nonants_array)

# invariant to prob_coeff being a scalar or array
prob = s._mpisppy_data.prob_coeff[ndn] * np.ones(nlen)
local_sum_diff[0] += np.dot(prob, _l1)
@@ -107,13 +123,40 @@ def is_converged(self):
self.prev_xbars = self._get_xbars()
ret_val = max(primal_gap, dual_gap) <= self.convergence_threshold

if self._verbose and self._ph.cylinder_rank == 0:
if self._verbose and self._rank == 0:
print(f"primal gap = {round(primal_gap, 5)}, dual gap = {round(dual_gap, 5)}")

if ret_val:
print("Dual convergence check passed")
else:
print("Dual convergence check failed "
f"(requires primal + dual gaps) <= {self.convergence_threshold}")

if self.tracking and self._rank == 0:
self.tracker.add_row([self._ph._PHIter, primal_gap, dual_gap])
self.tracker.write_out_data()
return ret_val

def plot_results(self):
"""
Plot the results of the convergence checks
by reading in csv file and plotting
"""
plot_fname = self.tracker.plot_fname
conv_data = pd.read_csv(self.tracker.fname)

# Create a log-scale plot
plt.semilogy(conv_data['iteration'], conv_data['primal_gap'], label='Primal Gap')
plt.semilogy(conv_data['iteration'], conv_data['dual_gap'], label='Dual Gap')

plt.xlabel('Iteration')
plt.ylabel('Convergence Metric')
plt.legend()
plt.savefig(plot_fname)
plt.close()

def post_everything(self):
'''
Reading the convergence data and plotting the results
'''
if self.tracking and self._rank == 0:
self.plot_results()
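
Written out from the class docstring, with p_s the scenario probability, x_{s,t} the nonanticipative variables of scenario s at iteration t, and \bar{x}_t their probability-weighted average, the two gaps this converger tracks are

\text{primal gap}_t \;=\; \sum_{s} p_s \,\lVert x_{s,t} - \bar{x}_t \rVert_1 ,
\qquad
\text{dual gap}_t \;=\; \rho \,\lVert \bar{x}_t - \bar{x}_{t-1} \rVert_1 .

is_converged() reports convergence once max(primal gap, dual gap) falls to the configured tol; with tracking enabled, each iteration appends an (iteration, primal_gap, dual_gap) row through TrackedData, and post_everything() replays that CSV in plot_results() to produce the semilog plot.
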
5 changes: 4 additions & 1 deletion mpisppy/cylinders/lagranger_bounder.py
@@ -93,13 +93,13 @@ def main(self):

self.lagrangian_prep()

self.A_iter = 1
self.trivial_bound = self._lagrangian(0)

self.bound = self.trivial_bound

self.opt.current_solver_options = self.opt.iterk_solver_options

self.A_iter = 1
while not self.got_kill_signal():
# because of aph, do not check for new data, just go for it
self.bound = self._update_weights_and_solve(self.A_iter)
@@ -113,4 +113,7 @@ def finalize(self):
'''
self.final_bound = self._update_weights_and_solve(self.A_iter)
self.bound = self.final_bound
if self.opt.extensions is not None and \
hasattr(self.opt.extobject, 'post_everything'):
self.opt.extobject.post_everything()
return self.final_bound
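
The finalize() change gives the spoke's extension object one last callback: if extensions are attached and the extension object defines post_everything, that hook now runs after the final bound is computed (the PrimalDualConverger above uses the same hook name for its plotting). A stand-in object showing just the hook this hasattr check looks for is sketched below; the class and its attribute layout are hypothetical.

class FinalReportHook:
    """Hypothetical extension-like object exposing only the hook used in finalize()."""

    def __init__(self, opt):
        self.opt = opt  # the spoke's optimization object (assumed)

    def post_everything(self):
        # e.g., flush tracker output or print a closing line on one rank
        if self.opt.cylinder_rank == 0:
            print("lagranger spoke done; final bound recorded")
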