SimplexOptimizer.java

  1. /*
  2.  * Licensed to the Apache Software Foundation (ASF) under one or more
  3.  * contributor license agreements.  See the NOTICE file distributed with
  4.  * this work for additional information regarding copyright ownership.
  5.  * The ASF licenses this file to You under the Apache License, Version 2.0
  6.  * (the "License"); you may not use this file except in compliance with
  7.  * the License.  You may obtain a copy of the License at
  8.  *
  9.  *      https://www.apache.org/licenses/LICENSE-2.0
  10.  *
  11.  * Unless required by applicable law or agreed to in writing, software
  12.  * distributed under the License is distributed on an "AS IS" BASIS,
  13.  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14.  * See the License for the specific language governing permissions and
  15.  * limitations under the License.
  16.  */

  17. /*
  18.  * This is not the original file distributed by the Apache Software Foundation
  19.  * It has been modified by the Hipparchus project
  20.  */
  21. package org.hipparchus.optim.nonlinear.scalar.noderiv;

  22. import java.util.Comparator;

  23. import org.hipparchus.analysis.MultivariateFunction;
  24. import org.hipparchus.exception.LocalizedCoreFormats;
  25. import org.hipparchus.exception.MathRuntimeException;
  26. import org.hipparchus.exception.NullArgumentException;
  27. import org.hipparchus.optim.ConvergenceChecker;
  28. import org.hipparchus.optim.OptimizationData;
  29. import org.hipparchus.optim.PointValuePair;
  30. import org.hipparchus.optim.SimpleValueChecker;
  31. import org.hipparchus.optim.nonlinear.scalar.GoalType;
  32. import org.hipparchus.optim.nonlinear.scalar.MultivariateOptimizer;

  33. /**
  34.  * This class implements simplex-based direct search optimization.
  35.  *
  36.  * <p>
  37.  *  Direct search methods only use objective function values, they do
  38.  *  not need derivatives and don't either try to compute approximation
  39.  *  of the derivatives. According to a 1996 paper by Margaret H. Wright
  40.  *  (<a href="http://cm.bell-labs.com/cm/cs/doc/96/4-02.ps.gz">Direct
  41.  *  Search Methods: Once Scorned, Now Respectable</a>), they are used
  42.  *  when either the computation of the derivative is impossible (noisy
  43.  *  functions, unpredictable discontinuities) or difficult (complexity,
  44.  *  computation cost). In the first cases, rather than an optimum, a
  45.  *  <em>not too bad</em> point is desired. In the latter cases, an
  46.  *  optimum is desired but cannot be reasonably found. In all cases
  47.  *  direct search methods can be useful.
  48.  * </p>
  49.  * <p>
  50.  *  Simplex-based direct search methods are based on comparison of
  51.  *  the objective function values at the vertices of a simplex (which is a
  52.  *  set of n+1 points in dimension n) that is updated by the
  53.  *  algorithm's steps.
  54.  * </p>
  55.  * <p>
  56.  *  The simplex update procedure ({@link NelderMeadSimplex} or
  57.  * {@link MultiDirectionalSimplex})  must be passed to the
  58.  * {@code optimize} method.
  59.  * </p>
  60.  * <p>
  61.  *  Each call to {@code optimize} will re-use the start configuration of
  62.  *  the current simplex and move it such that its first vertex is at the
  63.  *  provided start point of the optimization.
  64.  *  If the {@code optimize} method is called to solve a different problem
  65.  *  and the number of parameters change, the simplex must be re-initialized
  66.  *  to one with the appropriate dimensions.
  67.  * </p>
  68.  * <p>
  69.  *  Convergence is checked by providing the <em>worst</em> points of
  70.  *  previous and current simplex to the convergence checker, not the best
  71.  *  ones.
  72.  * </p>
  73.  * <p>
  74.  *  This simplex optimizer implementation does not directly support constrained
  75.  *  optimization with simple bounds; so, for such optimizations, either a more
  76.  *  dedicated algorithm must be used like
  77.  *  {@link CMAESOptimizer} or {@link BOBYQAOptimizer}, or the objective
  78.  *  function must be wrapped in an adapter like
  79.  *  {@link org.hipparchus.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter
  80.  *  MultivariateFunctionMappingAdapter} or
  81.  *  {@link org.hipparchus.optim.nonlinear.scalar.MultivariateFunctionPenaltyAdapter
  82.  *  MultivariateFunctionPenaltyAdapter}.
  83.  *  <br>
  84.  *  The call to {@link #optimize(OptimizationData[]) optimize} will throw
  85.  *  {@link MathRuntimeException} if bounds are passed to it.
  86.  * </p>
  87.  *
  88.  */
  89. public class SimplexOptimizer extends MultivariateOptimizer {
  90.     /** Simplex update rule. */
  91.     private AbstractSimplex simplex;

  92.     /** Simple constructor.
  93.      * @param checker Convergence checker.
  94.      */
  95.     public SimplexOptimizer(ConvergenceChecker<PointValuePair> checker) {
  96.         super(checker);
  97.     }

  98.     /** Simple constructor.
  99.      * @param rel Relative threshold.
  100.      * @param abs Absolute threshold.
  101.      */
  102.     public SimplexOptimizer(double rel, double abs) {
  103.         this(new SimpleValueChecker(rel, abs));
  104.     }

  105.     /**
  106.      * {@inheritDoc}
  107.      *
  108.      * @param optData Optimization data. In addition to those documented in
  109.      * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
  110.      * MultivariateOptimizer}, this method will register the following data:
  111.      * <ul>
  112.      *  <li>{@link AbstractSimplex}</li>
  113.      * </ul>
  114.      * @return {@inheritDoc}
  115.      */
  116.     @Override
  117.     public PointValuePair optimize(OptimizationData... optData) {
  118.         // Set up base class and perform computation.
  119.         return super.optimize(optData);
  120.     }

  121.     /** {@inheritDoc} */
  122.     @Override
  123.     protected PointValuePair doOptimize() {
  124.         checkParameters();

  125.         // Indirect call to "computeObjectiveValue" in order to update the
  126.         // evaluations counter.
  127.         final MultivariateFunction evalFunc
  128.             = new MultivariateFunction() {
  129.                 /** {@inheritDoc} */
  130.                 @Override
  131.                 public double value(double[] point) {
  132.                     return computeObjectiveValue(point);
  133.                 }
  134.             };

  135.         final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
  136.         final Comparator<PointValuePair> comparator
  137.             = new Comparator<PointValuePair>() {
  138.             /** {@inheritDoc} */
  139.             @Override
  140.             public int compare(final PointValuePair o1,
  141.                                final PointValuePair o2) {
  142.                 final double v1 = o1.getValue();
  143.                 final double v2 = o2.getValue();
  144.                 return isMinim ? Double.compare(v1, v2) : Double.compare(v2, v1);
  145.             }
  146.         };

  147.         // Initialize search.
  148.         simplex.build(getStartPoint());
  149.         simplex.evaluate(evalFunc, comparator);

  150.         PointValuePair[] previous = null;
  151.         int iteration = 0;
  152.         final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
  153.         while (true) {
  154.             if (getIterations() > 0) {
  155.                 boolean converged = true;
  156.                 for (int i = 0; i < simplex.getSize(); i++) {
  157.                     PointValuePair prev = previous[i];
  158.                     converged = converged &&
  159.                         checker.converged(iteration, prev, simplex.getPoint(i));
  160.                 }
  161.                 if (converged) {
  162.                     // We have found an optimum.
  163.                     return simplex.getPoint(0);
  164.                 }
  165.             }

  166.             // We still need to search.
  167.             previous = simplex.getPoints();
  168.             simplex.iterate(evalFunc, comparator);

  169.             incrementIterationCount();
  170.         }
  171.     }

  172.     /**
  173.      * Scans the list of (required and optional) optimization data that
  174.      * characterize the problem.
  175.      *
  176.      * @param optData Optimization data.
  177.      * The following data will be looked for:
  178.      * <ul>
  179.      *  <li>{@link AbstractSimplex}</li>
  180.      * </ul>
  181.      */
  182.     @Override
  183.     protected void parseOptimizationData(OptimizationData... optData) {
  184.         // Allow base class to register its own data.
  185.         super.parseOptimizationData(optData);

  186.         // The existing values (as set by the previous call) are reused if
  187.         // not provided in the argument list.
  188.         for (OptimizationData data : optData) {
  189.             if (data instanceof AbstractSimplex) {
  190.                 simplex = (AbstractSimplex) data;
  191.                 // If more data must be parsed, this statement _must_ be
  192.                 // changed to "continue".
  193.                 break;
  194.             }
  195.         }
  196.     }

  197.     /**
  198.      * @throws MathRuntimeException if bounds were passed to the
  199.      * {@link #optimize(OptimizationData[]) optimize} method.
  200.      * @throws NullArgumentException if no initial simplex was passed to the
  201.      * {@link #optimize(OptimizationData[]) optimize} method.
  202.      */
  203.     private void checkParameters() {
  204.         if (simplex == null) {
  205.             throw new NullArgumentException();
  206.         }
  207.         if (getLowerBound() != null ||
  208.             getUpperBound() != null) {
  209.             throw new MathRuntimeException(LocalizedCoreFormats.CONSTRAINT);
  210.         }
  211.     }
  212. }