AbstractLeastSquaresOptimizer.java

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

  17. package org.apache.commons.math3.optimization.general;

  18. import org.apache.commons.math3.analysis.DifferentiableMultivariateVectorFunction;
  19. import org.apache.commons.math3.analysis.FunctionUtils;
  20. import org.apache.commons.math3.analysis.differentiation.DerivativeStructure;
  21. import org.apache.commons.math3.analysis.differentiation.MultivariateDifferentiableVectorFunction;
  22. import org.apache.commons.math3.exception.DimensionMismatchException;
  23. import org.apache.commons.math3.exception.NumberIsTooSmallException;
  24. import org.apache.commons.math3.exception.util.LocalizedFormats;
  25. import org.apache.commons.math3.linear.ArrayRealVector;
  26. import org.apache.commons.math3.linear.RealMatrix;
  27. import org.apache.commons.math3.linear.DiagonalMatrix;
  28. import org.apache.commons.math3.linear.DecompositionSolver;
  29. import org.apache.commons.math3.linear.MatrixUtils;
  30. import org.apache.commons.math3.linear.QRDecomposition;
  31. import org.apache.commons.math3.linear.EigenDecomposition;
  32. import org.apache.commons.math3.optimization.OptimizationData;
  33. import org.apache.commons.math3.optimization.InitialGuess;
  34. import org.apache.commons.math3.optimization.Target;
  35. import org.apache.commons.math3.optimization.Weight;
  36. import org.apache.commons.math3.optimization.ConvergenceChecker;
  37. import org.apache.commons.math3.optimization.DifferentiableMultivariateVectorOptimizer;
  38. import org.apache.commons.math3.optimization.PointVectorValuePair;
  39. import org.apache.commons.math3.optimization.direct.BaseAbstractMultivariateVectorOptimizer;
  40. import org.apache.commons.math3.util.FastMath;

  41. /**
  42.  * Base class for implementing least squares optimizers.
  43.  * It handles the boilerplate methods associated to thresholds settings,
  44.  * Jacobian and error estimation.
  45.  * <br/>
  46.  * This class constructs the Jacobian matrix of the function argument in method
  47.  * {@link BaseAbstractMultivariateVectorOptimizer#optimize(int,
  48.  * org.apache.commons.math3.analysis.MultivariateVectorFunction,OptimizationData[])
  49.  * optimize} and assumes that the rows of that matrix iterate on the model
  50.  * functions while the columns iterate on the parameters; thus, the numbers
  51.  * of rows is equal to the dimension of the
  52.  * {@link org.apache.commons.math3.optimization.Target Target} while
  53.  * the number of columns is equal to the dimension of the
  54.  * {@link org.apache.commons.math3.optimization.InitialGuess InitialGuess}.
  55.  *
  56.  * @deprecated As of 3.1 (to be removed in 4.0).
  57.  * @since 1.2
  58.  */
  59. @Deprecated
  60. public abstract class AbstractLeastSquaresOptimizer
  61.     extends BaseAbstractMultivariateVectorOptimizer<DifferentiableMultivariateVectorFunction>
  62.     implements DifferentiableMultivariateVectorOptimizer {
  63.     /**
  64.      * Singularity threshold (cf. {@link #getCovariances(double)}).
  65.      * @deprecated As of 3.1.
  66.      */
  67.     @Deprecated
  68.     private static final double DEFAULT_SINGULARITY_THRESHOLD = 1e-14;
  69.     /**
  70.      * Jacobian matrix of the weighted residuals.
  71.      * This matrix is in canonical form just after the calls to
  72.      * {@link #updateJacobian()}, but may be modified by the solver
  73.      * in the derived class (the {@link LevenbergMarquardtOptimizer
  74.      * Levenberg-Marquardt optimizer} does this).
  75.      * @deprecated As of 3.1. To be removed in 4.0. Please use
  76.      * {@link #computeWeightedJacobian(double[])} instead.
  77.      */
  78.     @Deprecated
  79.     protected double[][] weightedResidualJacobian;
  80.     /** Number of columns of the jacobian matrix.
  81.      * @deprecated As of 3.1.
  82.      */
  83.     @Deprecated
  84.     protected int cols;
  85.     /** Number of rows of the jacobian matrix.
  86.      * @deprecated As of 3.1.
  87.      */
  88.     @Deprecated
  89.     protected int rows;
  90.     /** Current point.
  91.      * @deprecated As of 3.1.
  92.      */
  93.     @Deprecated
  94.     protected double[] point;
  95.     /** Current objective function value.
  96.      * @deprecated As of 3.1.
  97.      */
  98.     @Deprecated
  99.     protected double[] objective;
  100.     /** Weighted residuals
  101.      * @deprecated As of 3.1.
  102.      */
  103.     @Deprecated
  104.     protected double[] weightedResiduals;
  105.     /** Cost value (square root of the sum of the residuals).
  106.      * @deprecated As of 3.1. Field to become "private" in 4.0.
  107.      * Please use {@link #setCost(double)}.
  108.      */
  109.     @Deprecated
  110.     protected double cost;
  111.     /** Objective function derivatives. */
  112.     private MultivariateDifferentiableVectorFunction jF;
  113.     /** Number of evaluations of the Jacobian. */
  114.     private int jacobianEvaluations;
  115.     /** Square-root of the weight matrix. */
  116.     private RealMatrix weightMatrixSqrt;

  117.     /**
  118.      * Simple constructor with default settings.
  119.      * The convergence check is set to a {@link
  120.      * org.apache.commons.math3.optimization.SimpleVectorValueChecker}.
  121.      * @deprecated See {@link org.apache.commons.math3.optimization.SimpleValueChecker#SimpleValueChecker()}
  122.      */
  123.     @Deprecated
  124.     protected AbstractLeastSquaresOptimizer() {}

  125.     /**
  126.      * @param checker Convergence checker.
  127.      */
  128.     protected AbstractLeastSquaresOptimizer(ConvergenceChecker<PointVectorValuePair> checker) {
  129.         super(checker);
  130.     }

  131.     /**
  132.      * @return the number of evaluations of the Jacobian function.
  133.      */
  134.     public int getJacobianEvaluations() {
  135.         return jacobianEvaluations;
  136.     }

  137.     /**
  138.      * Update the jacobian matrix.
  139.      *
  140.      * @throws DimensionMismatchException if the Jacobian dimension does not
  141.      * match problem dimension.
  142.      * @deprecated As of 3.1. Please use {@link #computeWeightedJacobian(double[])}
  143.      * instead.
  144.      */
  145.     @Deprecated
  146.     protected void updateJacobian() {
  147.         final RealMatrix weightedJacobian = computeWeightedJacobian(point);
  148.         weightedResidualJacobian = weightedJacobian.scalarMultiply(-1).getData();
  149.     }

  150.     /**
  151.      * Computes the Jacobian matrix.
  152.      *
  153.      * @param params Model parameters at which to compute the Jacobian.
  154.      * @return the weighted Jacobian: W<sup>1/2</sup> J.
  155.      * @throws DimensionMismatchException if the Jacobian dimension does not
  156.      * match problem dimension.
  157.      * @since 3.1
  158.      */
  159.     protected RealMatrix computeWeightedJacobian(double[] params) {
  160.         ++jacobianEvaluations;

  161.         final DerivativeStructure[] dsPoint = new DerivativeStructure[params.length];
  162.         final int nC = params.length;
  163.         for (int i = 0; i < nC; ++i) {
  164.             dsPoint[i] = new DerivativeStructure(nC, 1, i, params[i]);
  165.         }
  166.         final DerivativeStructure[] dsValue = jF.value(dsPoint);
  167.         final int nR = getTarget().length;
  168.         if (dsValue.length != nR) {
  169.             throw new DimensionMismatchException(dsValue.length, nR);
  170.         }
  171.         final double[][] jacobianData = new double[nR][nC];
  172.         for (int i = 0; i < nR; ++i) {
  173.             int[] orders = new int[nC];
  174.             for (int j = 0; j < nC; ++j) {
  175.                 orders[j] = 1;
  176.                 jacobianData[i][j] = dsValue[i].getPartialDerivative(orders);
  177.                 orders[j] = 0;
  178.             }
  179.         }

  180.         return weightMatrixSqrt.multiply(MatrixUtils.createRealMatrix(jacobianData));
  181.     }

  182.     /**
  183.      * Update the residuals array and cost function value.
  184.      * @throws DimensionMismatchException if the dimension does not match the
  185.      * problem dimension.
  186.      * @throws org.apache.commons.math3.exception.TooManyEvaluationsException
  187.      * if the maximal number of evaluations is exceeded.
  188.      * @deprecated As of 3.1. Please use {@link #computeResiduals(double[])},
  189.      * {@link #computeObjectiveValue(double[])}, {@link #computeCost(double[])}
  190.      * and {@link #setCost(double)} instead.
  191.      */
  192.     @Deprecated
  193.     protected void updateResidualsAndCost() {
  194.         objective = computeObjectiveValue(point);
  195.         final double[] res = computeResiduals(objective);

  196.         // Compute cost.
  197.         cost = computeCost(res);

  198.         // Compute weighted residuals.
  199.         final ArrayRealVector residuals = new ArrayRealVector(res);
  200.         weightedResiduals = weightMatrixSqrt.operate(residuals).toArray();
  201.     }

  202.     /**
  203.      * Computes the cost.
  204.      *
  205.      * @param residuals Residuals.
  206.      * @return the cost.
  207.      * @see #computeResiduals(double[])
  208.      * @since 3.1
  209.      */
  210.     protected double computeCost(double[] residuals) {
  211.         final ArrayRealVector r = new ArrayRealVector(residuals);
  212.         return FastMath.sqrt(r.dotProduct(getWeight().operate(r)));
  213.     }

  214.     /**
  215.      * Get the Root Mean Square value.
  216.      * Get the Root Mean Square value, i.e. the root of the arithmetic
  217.      * mean of the square of all weighted residuals. This is related to the
  218.      * criterion that is minimized by the optimizer as follows: if
  219.      * <em>c</em> if the criterion, and <em>n</em> is the number of
  220.      * measurements, then the RMS is <em>sqrt (c/n)</em>.
  221.      *
  222.      * @return RMS value
  223.      */
  224.     public double getRMS() {
  225.         return FastMath.sqrt(getChiSquare() / rows);
  226.     }

  227.     /**
  228.      * Get a Chi-Square-like value assuming the N residuals follow N
  229.      * distinct normal distributions centered on 0 and whose variances are
  230.      * the reciprocal of the weights.
  231.      * @return chi-square value
  232.      */
  233.     public double getChiSquare() {
  234.         return cost * cost;
  235.     }

  236.     /**
  237.      * Gets the square-root of the weight matrix.
  238.      *
  239.      * @return the square-root of the weight matrix.
  240.      * @since 3.1
  241.      */
  242.     public RealMatrix getWeightSquareRoot() {
  243.         return weightMatrixSqrt.copy();
  244.     }

  245.     /**
  246.      * Sets the cost.
  247.      *
  248.      * @param cost Cost value.
  249.      * @since 3.1
  250.      */
  251.     protected void setCost(double cost) {
  252.         this.cost = cost;
  253.     }

  254.     /**
  255.      * Get the covariance matrix of the optimized parameters.
  256.      *
  257.      * @return the covariance matrix.
  258.      * @throws org.apache.commons.math3.linear.SingularMatrixException
  259.      * if the covariance matrix cannot be computed (singular problem).
  260.      * @see #getCovariances(double)
  261.      * @deprecated As of 3.1. Please use {@link #computeCovariances(double[],double)}
  262.      * instead.
  263.      */
  264.     @Deprecated
  265.     public double[][] getCovariances() {
  266.         return getCovariances(DEFAULT_SINGULARITY_THRESHOLD);
  267.     }

  268.     /**
  269.      * Get the covariance matrix of the optimized parameters.
  270.      * <br/>
  271.      * Note that this operation involves the inversion of the
  272.      * <code>J<sup>T</sup>J</code> matrix, where {@code J} is the
  273.      * Jacobian matrix.
  274.      * The {@code threshold} parameter is a way for the caller to specify
  275.      * that the result of this computation should be considered meaningless,
  276.      * and thus trigger an exception.
  277.      *
  278.      * @param threshold Singularity threshold.
  279.      * @return the covariance matrix.
  280.      * @throws org.apache.commons.math3.linear.SingularMatrixException
  281.      * if the covariance matrix cannot be computed (singular problem).
  282.      * @deprecated As of 3.1. Please use {@link #computeCovariances(double[],double)}
  283.      * instead.
  284.      */
  285.     @Deprecated
  286.     public double[][] getCovariances(double threshold) {
  287.         return computeCovariances(point, threshold);
  288.     }

  289.     /**
  290.      * Get the covariance matrix of the optimized parameters.
  291.      * <br/>
  292.      * Note that this operation involves the inversion of the
  293.      * <code>J<sup>T</sup>J</code> matrix, where {@code J} is the
  294.      * Jacobian matrix.
  295.      * The {@code threshold} parameter is a way for the caller to specify
  296.      * that the result of this computation should be considered meaningless,
  297.      * and thus trigger an exception.
  298.      *
  299.      * @param params Model parameters.
  300.      * @param threshold Singularity threshold.
  301.      * @return the covariance matrix.
  302.      * @throws org.apache.commons.math3.linear.SingularMatrixException
  303.      * if the covariance matrix cannot be computed (singular problem).
  304.      * @since 3.1
  305.      */
  306.     public double[][] computeCovariances(double[] params,
  307.                                          double threshold) {
  308.         // Set up the Jacobian.
  309.         final RealMatrix j = computeWeightedJacobian(params);

  310.         // Compute transpose(J)J.
  311.         final RealMatrix jTj = j.transpose().multiply(j);

  312.         // Compute the covariances matrix.
  313.         final DecompositionSolver solver
  314.             = new QRDecomposition(jTj, threshold).getSolver();
  315.         return solver.getInverse().getData();
  316.     }

  317.     /**
  318.      * <p>
  319.      * Returns an estimate of the standard deviation of each parameter. The
  320.      * returned values are the so-called (asymptotic) standard errors on the
  321.      * parameters, defined as {@code sd(a[i]) = sqrt(S / (n - m) * C[i][i])},
  322.      * where {@code a[i]} is the optimized value of the {@code i}-th parameter,
  323.      * {@code S} is the minimized value of the sum of squares objective function
  324.      * (as returned by {@link #getChiSquare()}), {@code n} is the number of
  325.      * observations, {@code m} is the number of parameters and {@code C} is the
  326.      * covariance matrix.
  327.      * </p>
  328.      * <p>
  329.      * See also
  330.      * <a href="http://en.wikipedia.org/wiki/Least_squares">Wikipedia</a>,
  331.      * or
  332.      * <a href="http://mathworld.wolfram.com/LeastSquaresFitting.html">MathWorld</a>,
  333.      * equations (34) and (35) for a particular case.
  334.      * </p>
  335.      *
  336.      * @return an estimate of the standard deviation of the optimized parameters
  337.      * @throws org.apache.commons.math3.linear.SingularMatrixException
  338.      * if the covariance matrix cannot be computed.
  339.      * @throws NumberIsTooSmallException if the number of degrees of freedom is not
  340.      * positive, i.e. the number of measurements is less or equal to the number of
  341.      * parameters.
  342.      * @deprecated as of version 3.1, {@link #computeSigma(double[],double)} should be used
  343.      * instead. It should be emphasized that {@code guessParametersErrors} and
  344.      * {@code computeSigma} are <em>not</em> strictly equivalent.
  345.      */
  346.     @Deprecated
  347.     public double[] guessParametersErrors() {
  348.         if (rows <= cols) {
  349.             throw new NumberIsTooSmallException(LocalizedFormats.NO_DEGREES_OF_FREEDOM,
  350.                                                 rows, cols, false);
  351.         }
  352.         double[] errors = new double[cols];
  353.         final double c = FastMath.sqrt(getChiSquare() / (rows - cols));
  354.         double[][] covar = computeCovariances(point, 1e-14);
  355.         for (int i = 0; i < errors.length; ++i) {
  356.             errors[i] = FastMath.sqrt(covar[i][i]) * c;
  357.         }
  358.         return errors;
  359.     }

  360.     /**
  361.      * Computes an estimate of the standard deviation of the parameters. The
  362.      * returned values are the square root of the diagonal coefficients of the
  363.      * covariance matrix, {@code sd(a[i]) ~= sqrt(C[i][i])}, where {@code a[i]}
  364.      * is the optimized value of the {@code i}-th parameter, and {@code C} is
  365.      * the covariance matrix.
  366.      *
  367.      * @param params Model parameters.
  368.      * @param covarianceSingularityThreshold Singularity threshold (see
  369.      * {@link #computeCovariances(double[],double) computeCovariances}).
  370.      * @return an estimate of the standard deviation of the optimized parameters
  371.      * @throws org.apache.commons.math3.linear.SingularMatrixException
  372.      * if the covariance matrix cannot be computed.
  373.      * @since 3.1
  374.      */
  375.     public double[] computeSigma(double[] params,
  376.                                  double covarianceSingularityThreshold) {
  377.         final int nC = params.length;
  378.         final double[] sig = new double[nC];
  379.         final double[][] cov = computeCovariances(params, covarianceSingularityThreshold);
  380.         for (int i = 0; i < nC; ++i) {
  381.             sig[i] = FastMath.sqrt(cov[i][i]);
  382.         }
  383.         return sig;
  384.     }

  385.     /** {@inheritDoc}
  386.      * @deprecated As of 3.1. Please use
  387.      * {@link BaseAbstractMultivariateVectorOptimizer#optimize(int,
  388.      * org.apache.commons.math3.analysis.MultivariateVectorFunction,OptimizationData[])
  389.      * optimize(int,MultivariateDifferentiableVectorFunction,OptimizationData...)}
  390.      * instead.
  391.      */
  392.     @Override
  393.     @Deprecated
  394.     public PointVectorValuePair optimize(int maxEval,
  395.                                          final DifferentiableMultivariateVectorFunction f,
  396.                                          final double[] target, final double[] weights,
  397.                                          final double[] startPoint) {
  398.         return optimizeInternal(maxEval,
  399.                                 FunctionUtils.toMultivariateDifferentiableVectorFunction(f),
  400.                                 new Target(target),
  401.                                 new Weight(weights),
  402.                                 new InitialGuess(startPoint));
  403.     }

  404.     /**
  405.      * Optimize an objective function.
  406.      * Optimization is considered to be a weighted least-squares minimization.
  407.      * The cost function to be minimized is
  408.      * <code>&sum;weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
  409.      *
  410.      * @param f Objective function.
  411.      * @param target Target value for the objective functions at optimum.
  412.      * @param weights Weights for the least squares cost computation.
  413.      * @param startPoint Start point for optimization.
  414.      * @return the point/value pair giving the optimal value for objective
  415.      * function.
  416.      * @param maxEval Maximum number of function evaluations.
  417.      * @throws org.apache.commons.math3.exception.DimensionMismatchException
  418.      * if the start point dimension is wrong.
  419.      * @throws org.apache.commons.math3.exception.TooManyEvaluationsException
  420.      * if the maximal number of evaluations is exceeded.
  421.      * @throws org.apache.commons.math3.exception.NullArgumentException if
  422.      * any argument is {@code null}.
  423.      * @deprecated As of 3.1. Please use
  424.      * {@link BaseAbstractMultivariateVectorOptimizer#optimize(int,
  425.      * org.apache.commons.math3.analysis.MultivariateVectorFunction,OptimizationData[])
  426.      * optimize(int,MultivariateDifferentiableVectorFunction,OptimizationData...)}
  427.      * instead.
  428.      */
  429.     @Deprecated
  430.     public PointVectorValuePair optimize(final int maxEval,
  431.                                          final MultivariateDifferentiableVectorFunction f,
  432.                                          final double[] target, final double[] weights,
  433.                                          final double[] startPoint) {
  434.         return optimizeInternal(maxEval, f,
  435.                                 new Target(target),
  436.                                 new Weight(weights),
  437.                                 new InitialGuess(startPoint));
  438.     }

  439.     /**
  440.      * Optimize an objective function.
  441.      * Optimization is considered to be a weighted least-squares minimization.
  442.      * The cost function to be minimized is
  443.      * <code>&sum;weight<sub>i</sub>(objective<sub>i</sub> - target<sub>i</sub>)<sup>2</sup></code>
  444.      *
  445.      * @param maxEval Allowed number of evaluations of the objective function.
  446.      * @param f Objective function.
  447.      * @param optData Optimization data. The following data will be looked for:
  448.      * <ul>
  449.      *  <li>{@link Target}</li>
  450.      *  <li>{@link Weight}</li>
  451.      *  <li>{@link InitialGuess}</li>
  452.      * </ul>
  453.      * @return the point/value pair giving the optimal value of the objective
  454.      * function.
  455.      * @throws org.apache.commons.math3.exception.TooManyEvaluationsException if
  456.      * the maximal number of evaluations is exceeded.
  457.      * @throws DimensionMismatchException if the target, and weight arguments
  458.      * have inconsistent dimensions.
  459.      * @see BaseAbstractMultivariateVectorOptimizer#optimizeInternal(int,
  460.      * org.apache.commons.math3.analysis.MultivariateVectorFunction,OptimizationData[])
  461.      * @since 3.1
  462.      * @deprecated As of 3.1. Override is necessary only until this class's generic
  463.      * argument is changed to {@code MultivariateDifferentiableVectorFunction}.
  464.      */
  465.     @Deprecated
  466.     protected PointVectorValuePair optimizeInternal(final int maxEval,
  467.                                                     final MultivariateDifferentiableVectorFunction f,
  468.                                                     OptimizationData... optData) {
  469.         // XXX Conversion will be removed when the generic argument of the
  470.         // base class becomes "MultivariateDifferentiableVectorFunction".
  471.         return super.optimizeInternal(maxEval, FunctionUtils.toDifferentiableMultivariateVectorFunction(f), optData);
  472.     }

  473.     /** {@inheritDoc} */
  474.     @Override
  475.     protected void setUp() {
  476.         super.setUp();

  477.         // Reset counter.
  478.         jacobianEvaluations = 0;

  479.         // Square-root of the weight matrix.
  480.         weightMatrixSqrt = squareRoot(getWeight());

  481.         // Store least squares problem characteristics.
  482.         // XXX The conversion won't be necessary when the generic argument of
  483.         // the base class becomes "MultivariateDifferentiableVectorFunction".
  484.         // XXX "jF" is not strictly necessary anymore but is currently more
  485.         // efficient than converting the value returned from "getObjectiveFunction()"
  486.         // every time it is used.
  487.         jF = FunctionUtils.toMultivariateDifferentiableVectorFunction((DifferentiableMultivariateVectorFunction) getObjectiveFunction());

  488.         // Arrays shared with "private" and "protected" methods.
  489.         point = getStartPoint();
  490.         rows = getTarget().length;
  491.         cols = point.length;
  492.     }

  493.     /**
  494.      * Computes the residuals.
  495.      * The residual is the difference between the observed (target)
  496.      * values and the model (objective function) value.
  497.      * There is one residual for each element of the vector-valued
  498.      * function.
  499.      *
  500.      * @param objectiveValue Value of the the objective function. This is
  501.      * the value returned from a call to
  502.      * {@link #computeObjectiveValue(double[]) computeObjectiveValue}
  503.      * (whose array argument contains the model parameters).
  504.      * @return the residuals.
  505.      * @throws DimensionMismatchException if {@code params} has a wrong
  506.      * length.
  507.      * @since 3.1
  508.      */
  509.     protected double[] computeResiduals(double[] objectiveValue) {
  510.         final double[] target = getTarget();
  511.         if (objectiveValue.length != target.length) {
  512.             throw new DimensionMismatchException(target.length,
  513.                                                  objectiveValue.length);
  514.         }

  515.         final double[] residuals = new double[target.length];
  516.         for (int i = 0; i < target.length; i++) {
  517.             residuals[i] = target[i] - objectiveValue[i];
  518.         }

  519.         return residuals;
  520.     }

  521.     /**
  522.      * Computes the square-root of the weight matrix.
  523.      *
  524.      * @param m Symmetric, positive-definite (weight) matrix.
  525.      * @return the square-root of the weight matrix.
  526.      */
  527.     private RealMatrix squareRoot(RealMatrix m) {
  528.         if (m instanceof DiagonalMatrix) {
  529.             final int dim = m.getRowDimension();
  530.             final RealMatrix sqrtM = new DiagonalMatrix(dim);
  531.             for (int i = 0; i < dim; i++) {
  532.                sqrtM.setEntry(i, i, FastMath.sqrt(m.getEntry(i, i)));
  533.             }
  534.             return sqrtM;
  535.         } else {
  536.             final EigenDecomposition dec = new EigenDecomposition(m);
  537.             return dec.getSquareRoot();
  538.         }
  539.     }
  540. }