GLSMultipleLinearRegression.java

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * This is not the original file distributed by the Apache Software Foundation
 * It has been modified by the Hipparchus project
 */
package org.hipparchus.stat.regression;

import org.hipparchus.linear.Array2DRowRealMatrix;
import org.hipparchus.linear.LUDecomposition;
import org.hipparchus.linear.RealMatrix;
import org.hipparchus.linear.RealVector;

/**
 * The GLS implementation of multiple linear regression.
 *
 * GLS assumes a general covariance matrix Omega of the error term u:
 * <pre>
 * u ~ N(0, Omega)
 * </pre>
 *
 * The GLS estimator of the regression coefficients is
 * <pre>
 * b = (X' Omega^-1 X)^-1 X' Omega^-1 y
 * </pre>
 * whose variance is
 * <pre>
 * Var(b) = (X' Omega^-1 X)^-1
 * </pre>
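 * <p>
 * A minimal usage sketch (the data values are illustrative only; with the
 * identity covariance matrix built here via org.hipparchus.linear.MatrixUtils,
 * GLS coincides with OLS):
 * </p>
 * <pre>{@code
 * double[]   y = {11.0, 12.0, 13.0, 14.0, 15.0};
 * double[][] x = {
 *     {1.0, 2.0},
 *     {2.0, 1.0},
 *     {3.0, 4.0},
 *     {4.0, 3.0},
 *     {5.0, 5.0}
 * };
 * double[][] omega = MatrixUtils.createRealIdentityMatrix(5).getData();
 *
 * GLSMultipleLinearRegression regression = new GLSMultipleLinearRegression();
 * regression.newSampleData(y, x, omega);
 * double[]   beta    = regression.estimateRegressionParameters();
 * double[][] betaVar = regression.estimateRegressionParametersVariance();
 * double     sigma2  = regression.estimateErrorVariance();
 * }</pre>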
 */
public class GLSMultipleLinearRegression extends AbstractMultipleLinearRegression {

    /** Covariance matrix. */
    private RealMatrix Omega;

    /** Inverse of covariance matrix. */
    private RealMatrix OmegaInverse;

    /** Empty constructor.
     * <p>
     * This constructor is not strictly necessary, but it prevents spurious
     * javadoc warnings with JDK 18 and later.
     * </p>
     * @since 3.0
     */
    public GLSMultipleLinearRegression() { // NOPMD - unnecessary constructor added intentionally to make javadoc happy
        // nothing to do
    }

    /** Replace sample data, overriding any previous sample.
     * @param y y values of the sample
     * @param x x values of the sample
     * @param covariance [n,n] array representing the covariance matrix
     */
    public void newSampleData(double[] y, double[][] x, double[][] covariance) {
        validateSampleData(x, y);
        newYSampleData(y);
        newXSampleData(x);
        validateCovarianceData(x, covariance);
        newCovarianceData(covariance);
    }

    /**
     * Add the covariance data.
     *
     * @param omega the [n,n] array representing the covariance
     */
    protected void newCovarianceData(double[][] omega) {
        this.Omega = new Array2DRowRealMatrix(omega);
        this.OmegaInverse = null;
    }

    /**
     * Get the inverse of the covariance.
     * <p>The inverse of the covariance matrix is lazily evaluated and cached.</p>
     * @return inverse of the covariance
     */
    protected RealMatrix getOmegaInverse() {
        if (OmegaInverse == null) {
            OmegaInverse = new LUDecomposition(Omega).getSolver().getInverse();
        }
        return OmegaInverse;
    }

    /**
     * Calculates beta by GLS.
     * <pre>
     *  b = (X' Omega^-1 X)^-1 X' Omega^-1 y
     * </pre>
     * @return beta
     */
    @Override
    protected RealVector calculateBeta() {
        RealMatrix OI = getOmegaInverse();
        RealMatrix XT = getX().transpose();
        // X' Omega^-1 X
        RealMatrix XTOIX = XT.multiply(OI).multiply(getX());
        // (X' Omega^-1 X)^-1
        RealMatrix inverse = new LUDecomposition(XTOIX).getSolver().getInverse();
        // (X' Omega^-1 X)^-1 X' Omega^-1 y
        return inverse.multiply(XT).multiply(OI).operate(getY());
    }

    /**
     * Calculates the variance on the beta.
     * <pre>
     *  Var(b) = (X' Omega^-1 X)^-1
     * </pre>
     * @return The beta variance matrix
     */
    @Override
    protected RealMatrix calculateBetaVariance() {
        RealMatrix OI = getOmegaInverse();
        RealMatrix XTOIX = getX().transposeMultiply(OI).multiply(getX());
        return new LUDecomposition(XTOIX).getSolver().getInverse();
    }


    /**
     * Calculates the estimated variance of the error term using the formula
     * <pre>
     *  Var(u) = Tr(u' Omega^-1 u)/(n-k)
     * </pre>
     * where n and k are the row and column dimensions of the design
     * matrix X.
     *
     * @return error variance
     */
    @Override
    protected double calculateErrorVariance() {
        RealVector residuals = calculateResiduals();
        // u' Omega^-1 u is a 1x1 matrix, so its trace is computed directly as a dot product
        double t = residuals.dotProduct(getOmegaInverse().operate(residuals));
        return t / (getX().getRowDimension() - getX().getColumnDimension());
    }

}