diff --git a/doc/optimizers/ParOpt.rst b/doc/optimizers/ParOpt.rst
index 048f7acd..c9ba5fab 100644
--- a/doc/optimizers/ParOpt.rst
+++ b/doc/optimizers/ParOpt.rst
@@ -2,16 +2,27 @@
 ParOpt
 ======
 
-ParOpt is a nonlinear interior point optimizer that is designed for large parallel design optimization problems with structured sparse constraints.
-ParOpt is open source and can be downloaded at `https://github.com/smdogroup/paropt <https://github.com/smdogroup/paropt>`_.
-Documentation and examples for ParOpt can be found at `https://smdogroup.github.io/paropt/ <https://smdogroup.github.io/paropt/>`_.
-The version of ParOpt supported is v2.0.2.
+ParOpt is an open source package that implements trust-region, interior-point, and MMA optimization algorithms.
+The ParOpt optimizers are themselves MPI parallel, which allows them to scale to large problems.
+Unlike other optimizers supported by pyOptSparse, as of ParOpt version 2.1.5, the pyOptSparse interface to ParOpt is part of ParOpt itself.
+Maintaining the wrapper, and controlling which versions of pyOptSparse are compatible with which versions of ParOpt, is therefore the responsibility of the ParOpt developers.
+ParOpt can be downloaded at `<https://github.com/smdogroup/paropt>`_.
+Documentation and examples can be found at `<https://smdogroup.github.io/paropt/>`_.
+The wrapper code in pyOptSparse is minimal; it simply allows ParOpt to be used in the same way as the other optimizers in pyOptSparse, through the ``OPT`` method.
+
+The ParOpt wrapper takes a ``sparse`` argument, which controls whether ParOpt uses sparse or dense storage for the constraint Jacobian.
+The default is ``True``, which uses sparse storage but is incompatible with ParOpt's trust-region algorithm.
+If you want to use the trust-region algorithm, you must set ``sparse=False``, e.g.:
+
+.. code-block:: python
+
+    from pyoptsparse import OPT
+    opt = OPT("ParOpt", sparse=False)
 
 Installation
 ------------
 Please follow the instructions `here <https://smdogroup.github.io/paropt/>`_ to install ParOpt as a separate Python package.
 Make sure that the package is named ``paropt`` and the installation location can be found by Python, so that ``from paropt import ParOpt`` works within the pyOptSparse folder.
-This typically requires installing it in a location which is already present under ``$PYTHONPATH`` environment variable, or you can modify the ``.bashrc`` file and manually append the path.
 
 Options
 -------
diff --git a/pyoptsparse/__init__.py b/pyoptsparse/__init__.py
index 74c88eaa..41e5487a 100644
--- a/pyoptsparse/__init__.py
+++ b/pyoptsparse/__init__.py
@@ -8,6 +8,7 @@
 from .pyOpt_optimization import Optimization
 from .pyOpt_optimizer import Optimizer, OPT, Optimizers, list_optimizers
 from .pyOpt_solution import Solution
+from . import testing
 
 # Now import all the individual optimizers
 from .pySNOPT.pySNOPT import SNOPT
diff --git a/pyoptsparse/pyParOpt/ParOpt.py b/pyoptsparse/pyParOpt/ParOpt.py
index e0e15be4..ddb72bb3 100644
--- a/pyoptsparse/pyParOpt/ParOpt.py
+++ b/pyoptsparse/pyParOpt/ParOpt.py
@@ -1,263 +1,23 @@
-# Standard Python modules
-import datetime
-import os
-import time
-
-# External modules
-import numpy as np
-
-# Local modules
-from ..pyOpt_optimizer import Optimizer
-from ..pyOpt_utils import INFINITY, try_import_compiled_module_from_path
-
-# Attempt to import ParOpt/mpi4py
-# If PYOPTSPARSE_REQUIRE_MPI is set to a recognized positive value, attempt import
-# and raise exception on failure. If set to anything else, no import is attempted.
-if "PYOPTSPARSE_REQUIRE_MPI" in os.environ and os.environ["PYOPTSPARSE_REQUIRE_MPI"].lower() not in [ - "always", - "1", - "true", - "yes", -]: - _ParOpt = "ParOpt was not imported, as requested by the environment variable 'PYOPTSPARSE_REQUIRE_MPI'" - MPI = "mpi4py was not imported, as requested by the environment variable 'PYOPTSPARSE_REQUIRE_MPI'" -# If PYOPTSPARSE_REQUIRE_MPI is unset, attempt to import mpi4py. -# Since ParOpt requires mpi4py, if either _ParOpt or mpi4py is unavailable -# we disable the optimizer. -else: - _ParOpt = try_import_compiled_module_from_path("paropt.ParOpt") - MPI = try_import_compiled_module_from_path("mpi4py.MPI") - - -class ParOpt(Optimizer): - """ - ParOpt optimizer class - - ParOpt has the capability to handle distributed design vectors. - This is not replicated here since pyOptSparse does not have the - capability to handle this type of design problem. - """ - - def __init__(self, raiseError=True, options={}): - name = "ParOpt" - category = "Local Optimizer" - for mod in [_ParOpt, MPI]: - if isinstance(mod, str) and raiseError: - raise ImportError(mod) - - # Create and fill-in the dictionary of default option values - self.defOpts = {} - paropt_default_options = _ParOpt.getOptionsInfo() - # Manually override the options with missing default values - paropt_default_options["ip_checkpoint_file"].default = "default.out" - paropt_default_options["problem_name"].default = "problem" - for option_name in paropt_default_options: - # Get the type and default value of the named argument - _type = None - if paropt_default_options[option_name].option_type == "bool": - _type = bool - elif paropt_default_options[option_name].option_type == "int": - _type = int - elif paropt_default_options[option_name].option_type == "float": - _type = float - else: - _type = str - default_value = paropt_default_options[option_name].default - - # Set the entry into the dictionary - self.defOpts[option_name] = [_type, default_value] - - self.set_options = {} - self.informs = {} - super().__init__(name, category, defaultOptions=self.defOpts, informs=self.informs, options=options) - - # ParOpt requires a dense Jacobian format - self.jacType = "dense2d" - - return - - def __call__( - self, optProb, sens=None, sensStep=None, sensMode=None, storeHistory=None, hotStart=None, storeSens=True - ): - """ - This is the main routine used to solve the optimization - problem. - - Parameters - ---------- - optProb : Optimization or Solution class instance - This is the complete description of the optimization problem - to be solved by the optimizer - - sens : str or python Function. - Specifiy method to compute sensitivities. To - explictly use pyOptSparse gradient class to do the - derivatives with finite differenes use \'FD\'. \'sens\' - may also be \'CS\' which will cause pyOptSpare to compute - the derivatives using the complex step method. Finally, - \'sens\' may be a python function handle which is expected - to compute the sensitivities directly. For expensive - function evaluations and/or problems with large numbers of - design variables this is the preferred method. - - sensStep : float - Set the step size to use for design variables. Defaults to - 1e-6 when sens is \'FD\' and 1e-40j when sens is \'CS\'. - - sensMode : str - Use \'pgc\' for parallel gradient computations. 
-            available with mpi4py and each objective evaluation is
-            otherwise serial
-
-        storeHistory : str
-            File name of the history file into which the history of
-            this optimization will be stored
-
-        hotStart : str
-            File name of the history file to "replay" for the
-            optimziation. The optimization problem used to generate
-            the history file specified in \'hotStart\' must be
-            **IDENTICAL** to the currently supplied \'optProb\'. By
-            identical we mean, **EVERY SINGLE PARAMETER MUST BE
-            IDENTICAL**. As soon as he requested evaluation point
-            from ParOpt does not match the history, function and
-            gradient evaluations revert back to normal evaluations.
-
-        storeSens : bool
-            Flag sepcifying if sensitivities are to be stored in hist.
-            This is necessay for hot-starting only.
-        """
-        self.startTime = time.time()
-        self.callCounter = 0
-        self.storeSens = storeSens
-
-        if len(optProb.constraints) == 0:
-            # If the problem is unconstrained, add a dummy constraint.
-            self.unconstrained = True
-            optProb.dummyConstraint = True
-
-        # Save the optimization problem and finalize constraint
-        # Jacobian, in general can only do on root proc
-        self.optProb = optProb
-        self.optProb.finalize()
-        # Set history/hotstart
-        self._setHistory(storeHistory, hotStart)
-        self._setInitialCacheValues()
-        self._setSens(sens, sensStep, sensMode)
-        blx, bux, xs = self._assembleContinuousVariables()
-        xs = np.maximum(xs, blx)
-        xs = np.minimum(xs, bux)
-
-        # The number of design variables
-        n = len(xs)
-
-        oneSided = True
-
-        if self.unconstrained:
-            m = 0
-        else:
-            indices, blc, buc, fact = self.optProb.getOrdering(["ne", "le", "ni", "li"], oneSided=oneSided)
-            m = len(indices)
-            self.optProb.jacIndices = indices
-            self.optProb.fact = fact
-            self.optProb.offset = buc
-
-        if self.optProb.comm.rank == 0:
-
-            class Problem(_ParOpt.Problem):
-                def __init__(self, ptr, n, m, xs, blx, bux):
-                    super().__init__(MPI.COMM_SELF, nvars=n, ncon=m)
-                    self.ptr = ptr
-                    self.n = n
-                    self.m = m
-                    self.xs = xs
-                    self.blx = blx
-                    self.bux = bux
-                    self.fobj = 0.0
-                    return
-
-                def getVarsAndBounds(self, x, lb, ub):
-                    """Get the variable values and bounds"""
-                    # Find the average distance between lower and upper bound
-                    bound_sum = 0.0
-                    for i in range(len(x)):
-                        if self.blx[i] <= -INFINITY or self.bux[i] >= INFINITY:
-                            bound_sum += 1.0
-                        else:
-                            bound_sum += self.bux[i] - self.blx[i]
-                    bound_sum = bound_sum / len(x)
-
-                    for i in range(len(x)):
-                        x[i] = self.xs[i]
-                        lb[i] = self.blx[i]
-                        ub[i] = self.bux[i]
-                        if self.xs[i] <= self.blx[i]:
-                            x[i] = self.blx[i] + 0.5 * np.min((bound_sum, self.bux[i] - self.blx[i]))
-                        elif self.xs[i] >= self.bux[i]:
-                            x[i] = self.bux[i] - 0.5 * np.min((bound_sum, self.bux[i] - self.blx[i]))
-
-                    return
-
-                def evalObjCon(self, x):
-                    """Evaluate the objective and constraint values"""
-                    fobj, fcon, fail = self.ptr._masterFunc(x[:], ["fobj", "fcon"])
-                    self.fobj = fobj
-                    return fail, fobj, -fcon
-
-                def evalObjConGradient(self, x, g, A):
-                    """Evaluate the objective and constraint gradients"""
-                    gobj, gcon, fail = self.ptr._masterFunc(x[:], ["gobj", "gcon"])
-                    g[:] = gobj[:]
-                    for i in range(self.m):
-                        A[i][:] = -gcon[i][:]
-                    return fail
-
-            optTime = MPI.Wtime()
-
-            # Optimize the problem
-            problem = Problem(self, n, m, xs, blx, bux)
-            optimizer = _ParOpt.Optimizer(problem, self.set_options)
-            optimizer.optimize()
-            x, z, zw, zl, zu = optimizer.getOptimizedPoint()
-
-            # Set the total opt time
-            optTime = MPI.Wtime() - optTime
-
-            # Get the obective function value
-            fobj = problem.fobj
-
-            if self.storeHistory:
-                self.metadata["endTime"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-                self.metadata["optTime"] = optTime
-                self.hist.writeData("metadata", self.metadata)
-                self.hist.close()
-
-            # Create the optimization solution. Note that the signs on the multipliers
-            # are switch since ParOpt uses a formulation with c(x) >= 0, while pyOpt
-            # uses g(x) = -c(x) <= 0. Therefore the multipliers are reversed.
-            sol_inform = {"value": "", "text": ""}
-
-            # If number of constraints is zero, ParOpt returns z as None.
-            # Thus if there is no constraints, should pass an empty list
-            # to multipliers instead of z.
-            if z is not None:
-                sol = self._createSolution(optTime, sol_inform, fobj, x[:], multipliers=-z)
-            else:
-                sol = self._createSolution(optTime, sol_inform, fobj, x[:], multipliers=[])
-
-            # Indicate solution finished
-            self.optProb.comm.bcast(-1, root=0)
-        else:  # We are not on the root process so go into waiting loop:
-            self._waitLoop()
-            sol = None
-
-        # Communication solution and return
-        sol = self._communicateSolution(sol)
-
-        return sol
-
-    def _on_setOption(self, name, value):
-        """
-        Add the value to the set_options dictionary.
-        """
-        self.set_options[name] = value
+# First party modules
+from pyoptsparse.pyOpt_optimizer import Optimizer
+
+try:
+    # External modules
+    from paropt.paropt_pyoptsparse import ParOptSparse as ParOpt
+except ImportError:
+
+    class ParOpt(Optimizer):
+        def __init__(self, raiseError=True, options={}):
+            name = "ParOpt"
+            category = "Local Optimizer"
+            self.defOpts = {}
+            self.informs = {}
+            super().__init__(
+                name,
+                category,
+                defaultOptions=self.defOpts,
+                informs=self.informs,
+                options=options,
+            )
+            if raiseError:
+                raise ImportError("There was an error importing ParOpt")
diff --git a/pyoptsparse/testing/__init__.py b/pyoptsparse/testing/__init__.py
new file mode 100644
index 00000000..bb525cca
--- /dev/null
+++ b/pyoptsparse/testing/__init__.py
@@ -0,0 +1 @@
+from .pyOpt_testing import *
diff --git a/tests/testing_utils.py b/pyoptsparse/testing/pyOpt_testing.py
similarity index 98%
rename from tests/testing_utils.py
rename to pyoptsparse/testing/pyOpt_testing.py
index 7fb041b5..c9535e39 100644
--- a/tests/testing_utils.py
+++ b/pyoptsparse/testing/pyOpt_testing.py
@@ -53,7 +53,7 @@ def get_dict_distance(d, d2):
     "PSQP": {"IFILE": ".out"},
     "CONMIN": {"IFILE": ".out"},
     "NLPQLP": {"iFile": ".out"},
-    "ParOpt": {"output_file": ".out"},
+    "ParOpt": {"output_file": ".out", "tr_output_file": ".tr", "mma_output_file": ".mma"},
     "ALPSO": {"filename": ".out"},
     "NSGA2": {},
 }
@@ -236,7 +236,10 @@ def optimize(self, sens=None, setDV=None, optOptions=None, storeHistory=False, h
         optOptions = self.update_OptOptions_output(optOptions)
         # Optimizer
         try:
-            opt = OPT(self.optName, options=optOptions)
+            if hasattr(self, "setup_optimizer"):
+                opt = self.setup_optimizer(optOptions=optOptions)
+            else:
+                opt = OPT(self.optName, options=optOptions)
             self.optVersion = opt.version
         except ImportError as e:
             if self.optName in DEFAULT_OPTIMIZERS:
diff --git a/tests/test_hs015.py b/tests/test_hs015.py
index ffc15e7c..7f312a77 100644
--- a/tests/test_hs015.py
+++ b/tests/test_hs015.py
@@ -11,9 +11,7 @@
 
 # First party modules
 from pyoptsparse import OPT, History, Optimization
-
-# Local modules
-from testing_utils import OptTest
+from pyoptsparse.testing import OptTest
 
 
 class TestHS15(OptTest):
@@ -47,7 +45,6 @@ class TestHS15(OptTest):
         "SLSQP": 1e-5,
         "NLPQLP": 1e-12,
         "IPOPT": 1e-4,
-        "ParOpt": 1e-6,
         "CONMIN": 1e-10,
         "PSQP": 5e-12,
     }
@@ -119,7 +116,7 @@ def test_snopt(self):
         # sol_xvars = [sol.variables["xvars"][i].value for i in range(2)]
         # assert_allclose(sol_xvars, dv["xvars"], atol=tol, rtol=tol)
 
-    @parameterized.expand(["SLSQP", "PSQP", "CONMIN", "NLPQLP", "ParOpt"])
+    @parameterized.expand(["SLSQP", "PSQP", "CONMIN", "NLPQLP"])
     def test_optimization(self, optName):
         self.optName = optName
         self.setup_optProb()
diff --git a/tests/test_hs071.py b/tests/test_hs071.py
index f6ccb5b4..8ea94d1f 100644
--- a/tests/test_hs071.py
+++ b/tests/test_hs071.py
@@ -10,9 +10,7 @@
 
 # First party modules
 from pyoptsparse import History, Optimization
-
-# Local modules
-from testing_utils import OptTest
+from pyoptsparse.testing import OptTest
 
 
 class TestHS71(OptTest):
@@ -33,7 +31,6 @@ class TestHS71(OptTest):
         "SLSQP": 1e-6,
         "CONMIN": 1e-3,
         "PSQP": 1e-6,
-        "ParOpt": 1e-6,
     }
     optOptions = {
         "CONMIN": {
@@ -203,7 +200,7 @@ def test_psqp_informs(self):
         sol = self.optimize(optOptions={"MIT": 1})
         self.assert_inform_equal(sol, 11)
 
-    @parameterized.expand(["SNOPT", "IPOPT", "SLSQP", "PSQP", "CONMIN", "NLPQLP", "ParOpt"])
+    @parameterized.expand(["SNOPT", "IPOPT", "SLSQP", "PSQP", "CONMIN", "NLPQLP"])
     def test_optimization(self, optName):
         self.optName = optName
         self.setup_optProb()
@@ -221,8 +218,8 @@ def test_optimization(self, optName):
             con2_line_num = constraint_header_line_num + 3
             lambda_con1 = float(lines[con1_line_num].split()[-1])
             lambda_con2 = float(lines[con2_line_num].split()[-1])
-            if optName in ("IPOPT", "SNOPT", "ParOpt"):
-                # IPOPT returns Lagrange multipliers with opposite sign than SNOPT and ParOpt
+            if optName in ("IPOPT", "SNOPT"):
+                # IPOPT returns Lagrange multipliers with opposite sign than SNOPT
                 lambda_sign = -1.0 if optName == "IPOPT" else 1.0
                 assert_allclose(
                     [lambda_con1, lambda_con2],
diff --git a/tests/test_large_sparse.py b/tests/test_large_sparse.py
index 5f88088c..04053050 100644
--- a/tests/test_large_sparse.py
+++ b/tests/test_large_sparse.py
@@ -15,9 +15,7 @@
 
 # First party modules
 from pyoptsparse import Optimization
-
-# Local modules
-from testing_utils import OptTest
+from pyoptsparse.testing import OptTest
 
 
 class TestLarge(OptTest):
diff --git a/tests/test_nsga2_multi_objective.py b/tests/test_nsga2_multi_objective.py
index 7cf0981e..1d2b3442 100644
--- a/tests/test_nsga2_multi_objective.py
+++ b/tests/test_nsga2_multi_objective.py
@@ -9,9 +9,7 @@
 
 # First party modules
 from pyoptsparse import Optimization
-
-# Local modules
-from testing_utils import OptTest
+from pyoptsparse.testing import OptTest
 
 
 class TestNSGA2(OptTest):
diff --git a/tests/test_optProb.py b/tests/test_optProb.py
index a0a3c7ec..1e0d401c 100644
--- a/tests/test_optProb.py
+++ b/tests/test_optProb.py
@@ -10,9 +10,7 @@
 
 # First party modules
 from pyoptsparse import OPT, Optimization
-
-# Local modules
-from testing_utils import assert_optProb_size
+from pyoptsparse.testing.pyOpt_testing import assert_optProb_size
 
 
 class TestOptProb(unittest.TestCase):
diff --git a/tests/test_require_mpi_env_var.py b/tests/test_require_mpi_env_var.py
index ca95c0ed..b2a0d6a2 100644
--- a/tests/test_require_mpi_env_var.py
+++ b/tests/test_require_mpi_env_var.py
@@ -42,36 +42,5 @@ def test_do_not_use_mpi(self):
         self.assertFalse(inspect.ismodule(pyoptsparse.pyOpt_MPI.MPI))
 
 
-class TestRequireMPIEnvVarOnParOpt(unittest.TestCase):
-    # Check how the environment variable affects using ParOpt
-    def setUp(self):
-        # Just check to see if ParOpt is installed before doing any testing
-        try:
-            from paropt import ParOpt as _ParOpt  # noqa: F401
-        except ImportError:
-            raise unittest.SkipTest("Optimizer not available: paropt")
paropt") - - def test_require_mpi_check_paropt(self): - os.environ["PYOPTSPARSE_REQUIRE_MPI"] = "1" - import pyoptsparse.pyParOpt.ParOpt - - importlib.reload(pyoptsparse.pyParOpt.ParOpt) - self.assertIsNotNone(pyoptsparse.pyParOpt.ParOpt._ParOpt) - - def test_no_mpi_requirement_given_check_paropt(self): - os.environ.pop("PYOPTSPARSE_REQUIRE_MPI", None) - import pyoptsparse.pyParOpt.ParOpt - - importlib.reload(pyoptsparse.pyParOpt.ParOpt) - self.assertIsNotNone(pyoptsparse.pyParOpt.ParOpt._ParOpt) - - def test_do_not_use_mpi_check_paropt(self): - os.environ["PYOPTSPARSE_REQUIRE_MPI"] = "0" - import pyoptsparse.pyParOpt.ParOpt - - importlib.reload(pyoptsparse.pyParOpt.ParOpt) - self.assertTrue(isinstance(pyoptsparse.pyParOpt.ParOpt._ParOpt, str)) - - if __name__ == "__main__": unittest.main() diff --git a/tests/test_rosenbrock.py b/tests/test_rosenbrock.py index b590d605..f58c22e8 100644 --- a/tests/test_rosenbrock.py +++ b/tests/test_rosenbrock.py @@ -11,9 +11,7 @@ # First party modules from pyoptsparse import History, Optimization - -# Local modules -from testing_utils import OptTest +from pyoptsparse.testing import OptTest class TestRosenbrock(OptTest): @@ -49,7 +47,6 @@ class TestRosenbrock(OptTest): "SLSQP": 1e-6, "CONMIN": 1e-9, "PSQP": 1e-8, - "ParOpt": 1e-8, } optOptions = { "SLSQP": {"ACC": 1e-10}, @@ -146,7 +143,7 @@ def test_snopt_hotstart_starting_from_grad(self): # The first is from a call we deleted and the second is the call after 'last' self.assertEqual(self.ng, 2) - @parameterized.expand(["IPOPT", "SLSQP", "PSQP", "CONMIN", "NLPQLP", "ParOpt"]) + @parameterized.expand(["IPOPT", "SLSQP", "PSQP", "CONMIN", "NLPQLP"]) def test_optimization(self, optName): self.optName = optName if optName == "IPOPT" and sys.platform == "win32": diff --git a/tests/test_sphere.py b/tests/test_sphere.py index 4901341e..34c8f0e2 100644 --- a/tests/test_sphere.py +++ b/tests/test_sphere.py @@ -9,9 +9,7 @@ # First party modules from pyoptsparse import Optimization - -# Local modules -from testing_utils import OptTest +from pyoptsparse.testing import OptTest class TestSphere(OptTest): diff --git a/tests/test_tp109.py b/tests/test_tp109.py index 48a6b5d2..8d7195c0 100644 --- a/tests/test_tp109.py +++ b/tests/test_tp109.py @@ -27,17 +27,17 @@ f*1 = 0.536206927538e+04 x*1 = [0.674888100445e+03, 0.113417039470e+04, 0.133569060261e+00, -0.371152592466e+00, 0.252e+03, 0.252e+03, 0.201464535316e+03, 0.426660777226e+03, 0.368494083867e+03] """ + # Standard Python modules import unittest # External modules import numpy as np +from parameterized import parameterized # First party modules from pyoptsparse import History, Optimization - -# Local modules -from testing_utils import OptTest +from pyoptsparse.testing import OptTest USE_LINEAR = True @@ -179,8 +179,9 @@ def test_snopt_informs(self): sol = self.optimize(optOptions={"Time Limit": 1e-15}) self.assert_inform_equal(sol, 34) - def test_slsqp(self): - self.optName = "SLSQP" + @parameterized.expand(["SLSQP", "PSQP", "NLPQLP"]) + def test_optimization(self, optName): + self.optName = optName self.setup_optProb() sol = self.optimize(sens="CS") self.assert_solution_allclose(sol, 1e-7)