View Javadoc
1   /*
2    * Licensed to the Apache Software Foundation (ASF) under one or more
3    * contributor license agreements.  See the NOTICE file distributed with
4    * this work for additional information regarding copyright ownership.
5    * The ASF licenses this file to You under the Apache License, Version 2.0
6    * (the "License"); you may not use this file except in compliance with
7    * the License.  You may obtain a copy of the License at
8    *
9    *      https://www.apache.org/licenses/LICENSE-2.0
10   *
11   * Unless required by applicable law or agreed to in writing, software
12   * distributed under the License is distributed on an "AS IS" BASIS,
13   * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14   * See the License for the specific language governing permissions and
15   * limitations under the License.
16   */
17  
18  /*
19   * This is not the original file distributed by the Apache Software Foundation
20   * It has been modified by the Hipparchus project
21   */
22  package org.hipparchus.optim.nonlinear.scalar.noderiv;
23  
24  import java.util.Comparator;
25  
26  import org.hipparchus.analysis.MultivariateFunction;
27  import org.hipparchus.exception.LocalizedCoreFormats;
28  import org.hipparchus.exception.MathRuntimeException;
29  import org.hipparchus.exception.NullArgumentException;
30  import org.hipparchus.optim.ConvergenceChecker;
31  import org.hipparchus.optim.OptimizationData;
32  import org.hipparchus.optim.PointValuePair;
33  import org.hipparchus.optim.SimpleValueChecker;
34  import org.hipparchus.optim.nonlinear.scalar.GoalType;
35  import org.hipparchus.optim.nonlinear.scalar.MultivariateOptimizer;
36  
37  /**
38   * This class implements simplex-based direct search optimization.
39   *
40   * <p>
 41   *  Direct search methods only use objective function values; they do
 42   *  not need derivatives and do not attempt to compute an approximation
 43   *  of the derivatives. According to a 1996 paper by Margaret H. Wright
44   *  (<a href="http://cm.bell-labs.com/cm/cs/doc/96/4-02.ps.gz">Direct
45   *  Search Methods: Once Scorned, Now Respectable</a>), they are used
46   *  when either the computation of the derivative is impossible (noisy
47   *  functions, unpredictable discontinuities) or difficult (complexity,
48   *  computation cost). In the first cases, rather than an optimum, a
49   *  <em>not too bad</em> point is desired. In the latter cases, an
50   *  optimum is desired but cannot be reasonably found. In all cases
51   *  direct search methods can be useful.
52   * </p>
53   * <p>
54   *  Simplex-based direct search methods are based on comparison of
55   *  the objective function values at the vertices of a simplex (which is a
56   *  set of n+1 points in dimension n) that is updated by the algorithms
57   *  steps.
58   * </p>
59   * <p>
60   *  The simplex update procedure ({@link NelderMeadSimplex} or
61   * {@link MultiDirectionalSimplex})  must be passed to the
62   * {@code optimize} method.
63   * </p>
64   * <p>
65   *  Each call to {@code optimize} will re-use the start configuration of
66   *  the current simplex and move it such that its first vertex is at the
67   *  provided start point of the optimization.
68   *  If the {@code optimize} method is called to solve a different problem
69   *  and the number of parameters change, the simplex must be re-initialized
70   *  to one with the appropriate dimensions.
71   * </p>
72   * <p>
73   *  Convergence is checked by providing the <em>worst</em> points of
74   *  previous and current simplex to the convergence checker, not the best
75   *  ones.
76   * </p>
77   * <p>
78   *  This simplex optimizer implementation does not directly support constrained
79   *  optimization with simple bounds; so, for such optimizations, either a more
80   *  dedicated algorithm must be used like
81   *  {@link CMAESOptimizer} or {@link BOBYQAOptimizer}, or the objective
82   *  function must be wrapped in an adapter like
83   *  {@link org.hipparchus.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter
84   *  MultivariateFunctionMappingAdapter} or
85   *  {@link org.hipparchus.optim.nonlinear.scalar.MultivariateFunctionPenaltyAdapter
86   *  MultivariateFunctionPenaltyAdapter}.
87   *  <br>
88   *  The call to {@link #optimize(OptimizationData[]) optimize} will throw
89   *  {@link MathRuntimeException} if bounds are passed to it.
90   * </p>
91   *
92   */
93  public class SimplexOptimizer extends MultivariateOptimizer {
94      /** Simplex update rule. */
95      private AbstractSimplex simplex;
96  
97      /** Simple constructor.
98       * @param checker Convergence checker.
99       */
100     public SimplexOptimizer(ConvergenceChecker<PointValuePair> checker) {
101         super(checker);
102     }
103 
104     /** Simple constructor.
105      * @param rel Relative threshold.
106      * @param abs Absolute threshold.
107      */
108     public SimplexOptimizer(double rel, double abs) {
109         this(new SimpleValueChecker(rel, abs));
110     }
111 
112     /**
113      * {@inheritDoc}
114      *
115      * @param optData Optimization data. In addition to those documented in
116      * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
117      * MultivariateOptimizer}, this method will register the following data:
118      * <ul>
119      *  <li>{@link AbstractSimplex}</li>
120      * </ul>
121      * @return {@inheritDoc}
122      */
123     @Override
124     public PointValuePair optimize(OptimizationData... optData) {
125         // Set up base class and perform computation.
126         return super.optimize(optData);
127     }
128 
129     /** {@inheritDoc} */
130     @Override
131     protected PointValuePair doOptimize() {
132         checkParameters();
133 
134         // Indirect call to "computeObjectiveValue" in order to update the
135         // evaluations counter.
136         final MultivariateFunction evalFunc
137             = new MultivariateFunction() {
138                 /** {@inheritDoc} */
139                 @Override
140                 public double value(double[] point) {
141                     return computeObjectiveValue(point);
142                 }
143             };
144 
145         final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
146         final Comparator<PointValuePair> comparator
147             = new Comparator<PointValuePair>() {
148             /** {@inheritDoc} */
149             @Override
150             public int compare(final PointValuePair o1,
151                                final PointValuePair o2) {
152                 final double v1 = o1.getValue();
153                 final double v2 = o2.getValue();
154                 return isMinim ? Double.compare(v1, v2) : Double.compare(v2, v1);
155             }
156         };
157 
158         // Initialize search.
159         simplex.build(getStartPoint());
160         simplex.evaluate(evalFunc, comparator);
161 
162         PointValuePair[] previous = null;
163         int iteration = 0;
164         final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
165         while (true) {
166             if (getIterations() > 0) {
167                 boolean converged = true;
168                 for (int i = 0; i < simplex.getSize(); i++) {
169                     PointValuePair prev = previous[i];
170                     converged = converged &&
171                         checker.converged(iteration, prev, simplex.getPoint(i));
172                 }
173                 if (converged) {
174                     // We have found an optimum.
175                     return simplex.getPoint(0);
176                 }
177             }
178 
179             // We still need to search.
180             previous = simplex.getPoints();
181             simplex.iterate(evalFunc, comparator);
182 
183             incrementIterationCount();
184         }
185     }
186 
187     /**
188      * Scans the list of (required and optional) optimization data that
189      * characterize the problem.
190      *
191      * @param optData Optimization data.
192      * The following data will be looked for:
193      * <ul>
194      *  <li>{@link AbstractSimplex}</li>
195      * </ul>
196      */
197     @Override
198     protected void parseOptimizationData(OptimizationData... optData) {
199         // Allow base class to register its own data.
200         super.parseOptimizationData(optData);
201 
202         // The existing values (as set by the previous call) are reused if
203         // not provided in the argument list.
204         for (OptimizationData data : optData) {
205             if (data instanceof AbstractSimplex) {
206                 simplex = (AbstractSimplex) data;
207                 // If more data must be parsed, this statement _must_ be
208                 // changed to "continue".
209                 break;
210             }
211         }
212     }
213 
214     /**
215      * @throws MathRuntimeException if bounds were passed to the
216      * {@link #optimize(OptimizationData[]) optimize} method.
217      * @throws NullArgumentException if no initial simplex was passed to the
218      * {@link #optimize(OptimizationData[]) optimize} method.
219      */
220     private void checkParameters() {
221         if (simplex == null) {
222             throw new NullArgumentException();
223         }
224         if (getLowerBound() != null ||
225             getUpperBound() != null) {
226             throw new MathRuntimeException(LocalizedCoreFormats.CONSTRAINT);
227         }
228     }
229 }