/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is not the original file distributed by the Apache Software Foundation
 * It has been modified by the Hipparchus project
 */
package org.hipparchus.optim.nonlinear.scalar;

import org.hipparchus.analysis.MultivariateVectorFunction;
import org.hipparchus.exception.MathIllegalStateException;
import org.hipparchus.optim.ConvergenceChecker;
import org.hipparchus.optim.OptimizationData;
import org.hipparchus.optim.PointValuePair;
/**
 * Base class for implementing optimizers for multivariate scalar
 * differentiable functions.
 * It contains boiler-plate code for dealing with gradient evaluation.
 *
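 * <p>
 * For illustration only, a typical use through a concrete subclass
 * (here assumed to be {@code NonLinearConjugateGradientOptimizer}), with a
 * user-supplied objective function {@code f} and gradient {@code g}, could
 * look like the following sketch:
 * <pre>{@code
 * GradientMultivariateOptimizer optimizer =
 *     new NonLinearConjugateGradientOptimizer(
 *         NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
 *         new SimpleValueChecker(1e-10, 1e-10));
 * PointValuePair optimum =
 *     optimizer.optimize(new MaxEval(1000),
 *                        new ObjectiveFunction(f),          // f: MultivariateFunction
 *                        new ObjectiveFunctionGradient(g),  // g: MultivariateVectorFunction
 *                        GoalType.MINIMIZE,
 *                        new InitialGuess(new double[] { 0, 0 }));
 * }</pre>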
 */
public abstract class GradientMultivariateOptimizer
    extends MultivariateOptimizer {
    /**
     * Gradient of the objective function.
     */
    private MultivariateVectorFunction gradient;

    /** Simple constructor.
     * @param checker Convergence checker.
     */
    protected GradientMultivariateOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * Compute the gradient vector.
     *
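     * <p>
     * Illustrative sketch only (not part of this class's contract): a
     * subclass's {@code doOptimize()} loop would typically evaluate the
     * gradient at the current iterate, for instance
     * <pre>{@code
     * double[] point = getStartPoint();                 // initial iterate
     * double[] grad  = computeObjectiveGradient(point);
     * // ... use grad to build a descent direction ...
     * }</pre>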
     * @param params Point at which the gradient must be evaluated.
     * @return the gradient at the specified point.
     */
    protected double[] computeObjectiveGradient(final double[] params) {
        return gradient.value(params);
    }

    /**
     * {@inheritDoc}
     *
     * @param optData Optimization data. In addition to those documented in
     * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
     * MultivariateOptimizer}, this method will register the following data:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     * @return {@inheritDoc}
     * @throws MathIllegalStateException if the maximal number of
     * evaluations (of the objective function) is exceeded.
     */
    @Override
    public PointValuePair optimize(OptimizationData... optData)
        throws MathIllegalStateException {
        // Set up base class and perform computation.
        return super.optimize(optData);
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link ObjectiveFunctionGradient}</li>
     * </ul>
     */
    @Override
    protected void parseOptimizationData(OptimizationData... optData) {
        // Allow base class to register its own data.
        super.parseOptimizationData(optData);

        // The existing values (as set by the previous call) are reused if
        // not provided in the argument list.
        for (OptimizationData data : optData) {
            if (data instanceof ObjectiveFunctionGradient) {
                gradient = ((ObjectiveFunctionGradient) data).getObjectiveFunctionGradient();
                // If more data must be parsed, this statement _must_ be
                // changed to "continue".
                break;
            }
        }
    }
}