Source code for desdeo.optimization.OptimizationMethod

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2016  Vesa Ojalehto
"""
This module contains methods for solving single-objective optimization problems.
"""
from abc import ABC, abstractmethod
from typing import List, Tuple

import numpy as np
from scipy.optimize import differential_evolution, minimize


class OptimizationMethod(ABC):
    """
    Abstract class for optimization methods

    Attributes
    ----------
    _max : bool (default: False)
        True if the objective function is to be maximized

    _coeff : float
        Coefficient for the objective function
    """

    def __init__(self, optimization_problem):
        self.optimization_problem = optimization_problem

    def search(self, max=False, **params) -> Tuple[np.ndarray, List[float]]:
        """
        Search for the optimal solution

        This sets up the search for the optimization and calls the
        _search method.

        Parameters
        ----------
        max : bool (default: False)
            If True, find the maximum of the objective function instead
            of the minimum

        **params : dict [optional]
            Parameters for the single-objective optimization method
        """
        self._max = max
        if max:
            self._coeff = -1.0
        else:
            self._coeff = 1.0

        return self._search(**params)
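

# Illustrative sketch (not part of the original module): ``search`` above is a
# template method.  It records the maximization flag, flips the sign of the
# objective through ``_coeff`` and then delegates to ``_search``, so a concrete
# method only needs to implement ``_search``.  The hypothetical class below
# shows that contract in its simplest form; its name, the ``point`` argument
# and the assumption that ``evaluate`` returns an indexable sequence of
# objective values (as in ``SciPyDE._objective`` further down) are made up for
# this example only.
class _SketchFixedPointSearch(OptimizationMethod):
    """Evaluate a single, fixed decision vector (illustrative sketch only)."""

    def __init__(self, optimization_problem, point):
        super().__init__(optimization_problem)
        self.point = point

    def _search(self, **params):
        # Reuse the evaluation pattern of the concrete classes below and
        # report the raw objective value for the fixed point.
        objective, _ = self.optimization_problem.evaluate(
            self.optimization_problem.problem.evaluate([self.point])
        )
        return np.array(self.point), [objective[0]]

# Hypothetical usage, assuming ``problem`` is a compatible optimization problem:
#     solution, objectives = _SketchFixedPointSearch(problem, [0.5, 0.5]).search()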


class OptimalSearch(OptimizationMethod):
    """
    Abstract class for optimal search
    """

    @abstractmethod
    def _objective(self, x):
        """
        Return the objective function value

        Parameters
        ----------
        x : list of values
            Decision variable vector to be evaluated
        """


class SciPy(OptimalSearch):
    """
    Optimal search using :py:func:`scipy.optimize.minimize`.
    """

    def _objective(self, x):
        # Evaluate the underlying problem at x and store the latest
        # objective and constraint values.
        self.last_objective, self.last_const = self.optimization_problem.evaluate(
            self.optimization_problem.problem.evaluate([x])
        )
        return self._coeff * self.last_objective

    # objective, new_constraints = self.scalarproblem(objectives)
    # for ci, const in enumerate(new_constraints):
    #     constraints[ci].extend(const)
    # return objective[0], constraints[0]

    def _const(self, x, *ncon):
        self.last_objective, self.last_const = self.optimization_problem.evaluate(
            self.optimization_problem.problem.evaluate([x])
        )
        return self.last_const[ncon[0]]
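

# Illustrative sketch (an assumption, not shown in this excerpt): the
# ``_objective`` and ``_const`` callbacks above have exactly the shape expected
# by :py:func:`scipy.optimize.minimize`.  The helper below shows one way they
# could be wired together; the helper itself, the ``x0``/``bounds``/``n_const``
# parameters, the SLSQP choice and the sign flip on the constraints are all
# assumptions made for illustration.
def _sketch_scipy_minimize(method, x0, bounds, n_const):
    """Hypothetical driver for a ``SciPy`` instance (illustrative sketch only)."""
    constraints = [
        {
            "type": "ineq",
            # scipy treats 'ineq' constraints as fun(x) >= 0.  The penalty
            # logic in ``SciPyDE._objective`` below suggests that positive
            # constraint values mean violation in this module, hence the
            # sign flip here (an assumption).
            "fun": lambda x, i=i: -method._const(x, i),
        }
        for i in range(n_const)
    ]
    res = minimize(
        method._objective, x0, method="SLSQP", bounds=bounds, constraints=constraints
    )
    # ``res.fun`` still carries the sign flip applied by ``_objective``.
    return res.x, [res.fun]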


class SciPyDE(OptimalSearch):
    """
    Optimal search using :py:func:`scipy.optimize.differential_evolution`.
    """

    def __init__(self, optimization_problem):
        super().__init__(optimization_problem)
        self.penalty = 0.0

    def _objective(self, x):
        self.penalty = 0.0
        obj, const = self.optimization_problem.evaluate(
            self.optimization_problem.problem.evaluate([x])
        )
        if const is not None and len(const):
            self.v = 0.0
            for c in const[0]:
                if c > 0.00001:
                    # Use a death penalty for constraint violations
                    self.v += c
                    self.penalty = 50000000
        return self._coeff * obj[0] + self.penalty
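

# Illustrative sketch (an assumption, not shown in this excerpt): the penalized
# ``_objective`` above is a plain ``f(x) -> float`` callable, so it can be
# passed directly to :py:func:`scipy.optimize.differential_evolution`.  The
# helper below, its ``bounds`` parameter (one ``(low, high)`` pair per decision
# variable) and the solver settings are assumptions made for illustration.
def _sketch_differential_evolution(method, bounds):
    """Hypothetical driver for a ``SciPyDE`` instance (illustrative sketch only)."""
    res = differential_evolution(method._objective, bounds, polish=True, tol=0.0001)
    # ``res.fun`` includes the sign flip and any death penalty added in
    # ``_objective``; callers may prefer to re-evaluate the raw objective.
    return res.x, [res.fun]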


class PointSearch(OptimizationMethod):