SimplexOptimizer.java

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.math3.optim.nonlinear.scalar.noderiv;

import java.util.Comparator;
import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.exception.NullArgumentException;
import org.apache.commons.math3.exception.MathUnsupportedOperationException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.ConvergenceChecker;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.SimpleValueChecker;
import org.apache.commons.math3.optim.OptimizationData;
import org.apache.commons.math3.optim.nonlinear.scalar.MultivariateOptimizer;

/**
 * This class implements simplex-based direct search optimization.
 *
 * <p>
 *  Direct search methods use only objective function values; they neither
 *  need derivatives nor attempt to compute approximations of the
 *  derivatives. According to a 1996 paper by Margaret H. Wright
 *  (<a href="http://cm.bell-labs.com/cm/cs/doc/96/4-02.ps.gz">Direct
 *  Search Methods: Once Scorned, Now Respectable</a>), they are used
 *  when the computation of the derivative is either impossible (noisy
 *  functions, unpredictable discontinuities) or difficult (complexity,
 *  computation cost). In the first case, rather than an optimum, a
 *  <em>not too bad</em> point is desired. In the latter case, an
 *  optimum is desired but cannot reasonably be found. In both cases
 *  direct search methods can be useful.
 * </p>
 * <p>
 *  Simplex-based direct search methods compare the objective function
 *  values at the vertices of a simplex (a set of n+1 points in dimension n)
 *  that is updated by the algorithm's steps.
 * </p>
 * <p>
 *  The simplex update procedure ({@link NelderMeadSimplex} or
 *  {@link MultiDirectionalSimplex}) must be passed to the
 *  {@code optimize} method, as in the usage sketch below.
 * </p>
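 * <p>
 *  For illustration only, a minimal usage sketch; the objective function and
 *  the tolerance, evaluation-limit and start-point values are placeholders
 *  chosen for this example, not recommended settings. It relies on
 *  {@link org.apache.commons.math3.optim.MaxEval MaxEval},
 *  {@link org.apache.commons.math3.optim.InitialGuess InitialGuess} and
 *  {@link org.apache.commons.math3.optim.nonlinear.scalar.ObjectiveFunction
 *  ObjectiveFunction}:
 * </p>
 * <pre>{@code
 * // Minimize the two-dimensional Rosenbrock function.
 * MultivariateFunction rosenbrock = new MultivariateFunction() {
 *     public double value(double[] x) {
 *         double a = x[1] - x[0] * x[0];
 *         double b = 1 - x[0];
 *         return 100 * a * a + b * b;
 *     }
 * };
 *
 * SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-30);
 * PointValuePair optimum =
 *     optimizer.optimize(new MaxEval(1000),
 *                        new ObjectiveFunction(rosenbrock),
 *                        GoalType.MINIMIZE,
 *                        new InitialGuess(new double[] { -1, 1 }),
 *                        new NelderMeadSimplex(2));
 * }</pre>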
 * <p>
 *  Each call to {@code optimize} will re-use the start configuration of
 *  the current simplex and move it such that its first vertex is at the
 *  provided start point of the optimization.
 *  If the {@code optimize} method is called to solve a different problem
 *  and the number of parameters changes, the simplex must be re-initialized
 *  to one with the appropriate dimensions.
 * </p>
 * <p>
 *  Convergence is checked by providing the <em>worst</em> points of the
 *  previous and current simplex to the convergence checker, not the best
 *  ones; a custom checker can be supplied as shown in the sketch below.
 * </p>
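 * <p>
 *  For instance, a sketch of supplying a checker explicitly (the thresholds
 *  are placeholder values); the two-argument constructor is shorthand for
 *  exactly this:
 * </p>
 * <pre>{@code
 * // Equivalent to new SimplexOptimizer(1e-6, 1e-10): declare convergence
 * // when the change of the objective value between iterations is small
 * // enough, relatively and absolutely.
 * SimplexOptimizer optimizer =
 *     new SimplexOptimizer(new SimpleValueChecker(1e-6, 1e-10));
 * }</pre>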
 * <p>
 *  This simplex optimizer implementation does not directly support constrained
 *  optimization with simple bounds. For such optimizations, either a dedicated
 *  algorithm such as {@link CMAESOptimizer} or {@link BOBYQAOptimizer} must be
 *  used, or the objective function must be wrapped in an adapter such as
 *  {@link org.apache.commons.math3.optim.nonlinear.scalar.MultivariateFunctionMappingAdapter
 *  MultivariateFunctionMappingAdapter} or
 *  {@link org.apache.commons.math3.optim.nonlinear.scalar.MultivariateFunctionPenaltyAdapter
 *  MultivariateFunctionPenaltyAdapter} (see the sketch below).
 *  <br/>
 *  The call to {@link #optimize(OptimizationData[]) optimize} will throw
 *  {@link MathUnsupportedOperationException} if bounds are passed to it.
 * </p>
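 * <p>
 *  For example, a minimal sketch of simple bounds handled through the mapping
 *  adapter; the bounds, tolerances and evaluation limit are placeholder
 *  values, and {@code rosenbrock} is the function from the sketch above:
 * </p>
 * <pre>{@code
 * // The adapter maps the bounded domain [0, 5] x [0, 5] onto an unbounded
 * // one; the optimizer works in the unbounded space, so the start point and
 * // the result must be converted with boundedToUnbounded/unboundedToBounded.
 * MultivariateFunctionMappingAdapter wrapped =
 *     new MultivariateFunctionMappingAdapter(rosenbrock,
 *                                            new double[] { 0, 0 },
 *                                            new double[] { 5, 5 });
 * SimplexOptimizer optimizer = new SimplexOptimizer(1e-10, 1e-30);
 * PointValuePair unboundedOptimum =
 *     optimizer.optimize(new MaxEval(1000),
 *                        new ObjectiveFunction(wrapped),
 *                        GoalType.MINIMIZE,
 *                        new InitialGuess(wrapped.boundedToUnbounded(new double[] { 2, 2 })),
 *                        new NelderMeadSimplex(2));
 * double[] boundedOptimum = wrapped.unboundedToBounded(unboundedOptimum.getPoint());
 * }</pre>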
 *
 * @since 3.0
 */
public class SimplexOptimizer extends MultivariateOptimizer {
    /** Simplex update rule. */
    private AbstractSimplex simplex;

    /**
     * @param checker Convergence checker.
     */
    public SimplexOptimizer(ConvergenceChecker<PointValuePair> checker) {
        super(checker);
    }

    /**
     * @param rel Relative threshold.
     * @param abs Absolute threshold.
     */
    public SimplexOptimizer(double rel, double abs) {
        this(new SimpleValueChecker(rel, abs));
    }

    /**
     * {@inheritDoc}
     *
     * @param optData Optimization data. In addition to those documented in
     * {@link MultivariateOptimizer#parseOptimizationData(OptimizationData[])
     * MultivariateOptimizer}, this method will register the following data:
     * <ul>
     *  <li>{@link AbstractSimplex}</li>
     * </ul>
     * @return {@inheritDoc}
     */
    @Override
    public PointValuePair optimize(OptimizationData... optData) {
        // Set up base class and perform computation.
        return super.optimize(optData);
    }

    /** {@inheritDoc} */
    @Override
    protected PointValuePair doOptimize() {
        checkParameters();

        // Indirect call to "computeObjectiveValue" in order to update the
        // evaluations counter.
        final MultivariateFunction evalFunc
            = new MultivariateFunction() {
                public double value(double[] point) {
                    return computeObjectiveValue(point);
                }
            };

        final boolean isMinim = getGoalType() == GoalType.MINIMIZE;
        final Comparator<PointValuePair> comparator
            = new Comparator<PointValuePair>() {
            public int compare(final PointValuePair o1,
                               final PointValuePair o2) {
                final double v1 = o1.getValue();
                final double v2 = o2.getValue();
                return isMinim ? Double.compare(v1, v2) : Double.compare(v2, v1);
            }
        };

        // Initialize search.
        simplex.build(getStartPoint());
        simplex.evaluate(evalFunc, comparator);

        PointValuePair[] previous = null;
        int iteration = 0;
        final ConvergenceChecker<PointValuePair> checker = getConvergenceChecker();
        while (true) {
            if (getIterations() > 0) {
                boolean converged = true;
                for (int i = 0; i < simplex.getSize(); i++) {
                    PointValuePair prev = previous[i];
                    converged = converged &&
                        checker.converged(iteration, prev, simplex.getPoint(i));
                }
                if (converged) {
                    // We have found an optimum.
                    return simplex.getPoint(0);
                }
            }

            // We still need to search.
            previous = simplex.getPoints();
            simplex.iterate(evalFunc, comparator);

            incrementIterationCount();
        }
    }

    /**
     * Scans the list of (required and optional) optimization data that
     * characterize the problem.
     *
     * @param optData Optimization data.
     * The following data will be looked for:
     * <ul>
     *  <li>{@link AbstractSimplex}</li>
     * </ul>
     */
    @Override
    protected void parseOptimizationData(OptimizationData... optData) {
        // Allow base class to register its own data.
        super.parseOptimizationData(optData);

        // The existing values (as set by the previous call) are reused if
        // not provided in the argument list.
        for (OptimizationData data : optData) {
            if (data instanceof AbstractSimplex) {
                simplex = (AbstractSimplex) data;
                // If more data must be parsed, this statement _must_ be
                // changed to "continue".
                break;
            }
        }
    }

    /**
     * @throws MathUnsupportedOperationException if bounds were passed to the
     * {@link #optimize(OptimizationData[]) optimize} method.
     * @throws NullArgumentException if no initial simplex was passed to the
     * {@link #optimize(OptimizationData[]) optimize} method.
     */
    private void checkParameters() {
        if (simplex == null) {
            throw new NullArgumentException();
        }
        if (getLowerBound() != null ||
            getUpperBound() != null) {
            throw new MathUnsupportedOperationException(LocalizedFormats.CONSTRAINT);
        }
    }
}