Merge branch 'feature_MATH-1459'

Closes #84
Gilles 2018-05-16 16:16:50 +02:00
commit 78ee07c627
5 changed files with 426 additions and 64 deletions

View File

@@ -0,0 +1,103 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.fitting.leastsquares;
import org.apache.commons.math4.analysis.MultivariateFunction;
import org.apache.commons.math4.analysis.UnivariateFunction;
import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
import org.apache.commons.math4.analysis.differentiation.UnivariateFunctionDifferentiator;
import org.apache.commons.math4.linear.Array2DRowRealMatrix;
import org.apache.commons.math4.linear.ArrayRealVector;
import org.apache.commons.math4.linear.RealMatrix;
import org.apache.commons.math4.linear.RealVector;
import org.apache.commons.math4.util.Pair;
/**
* A MultivariateJacobianFunction (a model that must provide its own derivatives)
* built by combining a plain function with a differentiator that computes those derivatives.
*
* Can be used with a LeastSquaresProblem, a LeastSquaresFactory, or a LeastSquaresBuilder.
*
* This version works with a MultivariateFunction.
* @see DifferentiatorVectorMultivariateJacobianFunction for the version that works with MultivariateVectorFunction
*/
public class DifferentiatorMultivariateJacobianFunction implements MultivariateJacobianFunction {
/**
* The input function to find a jacobian for.
*/
private final MultivariateFunction function;
/**
* The differentiator to use to find the jacobian.
*/
private final UnivariateFunctionDifferentiator differentiator;
/**
* Build the Jacobian function using a differentiator.
*
* @param function the function to compute a Jacobian for
* @param differentiator the differentiator used to find the partial derivatives
* @see DifferentiatorVectorMultivariateJacobianFunction for the version that works with MultivariateVectorFunction
*/
public DifferentiatorMultivariateJacobianFunction(MultivariateFunction function, UnivariateFunctionDifferentiator differentiator) {
this.function = function;
this.differentiator = differentiator;
}
/**
* {@inheritDoc}
*/
@Override
public Pair<RealVector, RealMatrix> value(RealVector point) {
ArrayRealVector value = new ArrayRealVector(1);
value.setEntry(0, function.value(point.toArray()));
RealMatrix jacobian = new Array2DRowRealMatrix(1, point.getDimension());
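// Differentiate one parameter at a time: hold every other coordinate fixed and treat the
// model as a univariate function of the current column's parameter.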
for(int column = 0; column < point.getDimension(); column++) {
final int columnFinal = column;
double originalPoint = point.getEntry(column);
double partialDerivative = getPartialDerivative(testPoint -> {
point.setEntry(columnFinal, testPoint);
double testPointOutput = function.value(point.toArray());
point.setEntry(columnFinal, originalPoint); //set it back
return testPointOutput;
}, originalPoint);
jacobian.setEntry(0, column, partialDerivative);
}
return new Pair<>(value, jacobian);
}
/**
* Returns the first-order derivative of the given function, computed with the configured differentiator.
* @param univariateFunction the function to differentiate
* @param atParameterValue the point at which to differentiate
* @return the slope at that point
*/
private double getPartialDerivative(UnivariateFunction univariateFunction, double atParameterValue) {
return differentiator
.differentiate(univariateFunction)
.value(new DerivativeStructure(1, 1, 0, atParameterValue))
.getPartialDerivative(1);
}
}
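
For context, a minimal usage sketch (illustrative only, not part of this change set). It assumes the imports of the file above plus FiniteDifferencesDifferentiator; the builder and optimizer classes live in the same leastsquares package. The toy model, target value and start point are made up:

// Hypothetical scalar model: one residual, one parameter.
MultivariateFunction model = params -> params[0] * params[0];
UnivariateFunctionDifferentiator diff = new FiniteDifferencesDifferentiator(5, 1e-3);
MultivariateJacobianFunction jacobianModel =
        new DifferentiatorMultivariateJacobianFunction(model, diff);
LeastSquaresProblem problem = new LeastSquaresBuilder()
        .model(jacobianModel)
        .target(new double[] { 4.0 })   // single observed value
        .start(new double[] { 1.0 })    // initial guess for the single parameter
        .maxEvaluations(100)
        .maxIterations(25)
        .build();
LeastSquaresOptimizer.Optimum optimum = new LevenbergMarquardtOptimizer().optimize(problem);
// optimum.getPoint() should converge near 2.0 for this toy problem.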

View File

@@ -0,0 +1,106 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.fitting.leastsquares;
import org.apache.commons.math4.analysis.MultivariateVectorFunction;
import org.apache.commons.math4.analysis.UnivariateVectorFunction;
import org.apache.commons.math4.analysis.differentiation.DerivativeStructure;
import org.apache.commons.math4.analysis.differentiation.UnivariateVectorFunctionDifferentiator;
import org.apache.commons.math4.linear.Array2DRowRealMatrix;
import org.apache.commons.math4.linear.ArrayRealVector;
import org.apache.commons.math4.linear.RealMatrix;
import org.apache.commons.math4.linear.RealVector;
import org.apache.commons.math4.util.Pair;
/**
* A MultivariateJacobianFunction (a model that must provide its own derivatives)
* built by combining a plain vector function with a differentiator that computes those derivatives.
*
* Can be used with a LeastSquaresProblem, a LeastSquaresFactory, or a LeastSquaresBuilder.
*
* This version works with a MultivariateVectorFunction.
* @see DifferentiatorMultivariateJacobianFunction for the version that works with MultivariateFunction
*/
public class DifferentiatorVectorMultivariateJacobianFunction implements MultivariateJacobianFunction {
/**
* The input function to find a jacobian for.
*/
private final MultivariateVectorFunction function;
/**
* The differentiator to use to find the jacobian.
*/
private final UnivariateVectorFunctionDifferentiator differentiator;
/**
* Build the Jacobian function using a differentiator.
*
* @param function the function to compute a Jacobian for
* @param differentiator the differentiator used to find the partial derivatives
* @see DifferentiatorMultivariateJacobianFunction for the version that works with MultivariateFunction
*/
public DifferentiatorVectorMultivariateJacobianFunction(MultivariateVectorFunction function, UnivariateVectorFunctionDifferentiator differentiator) {
this.function = function;
this.differentiator = differentiator;
}
/**
* {@inheritDoc}
*/
@Override
public Pair<RealVector, RealMatrix> value(RealVector point) {
RealVector value = new ArrayRealVector(function.value(point.toArray()));
RealMatrix jacobian = new Array2DRowRealMatrix(value.getDimension(), point.getDimension());
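// Each column of the Jacobian holds the derivatives of every output with respect to one
// parameter, obtained by differentiating the model along that single coordinate.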
for(int column = 0; column < point.getDimension(); column++) {
final int columnFinal = column;
double originalPoint = point.getEntry(column);
double[] partialDerivatives = getPartialDerivative(testPoint -> {
point.setEntry(columnFinal, testPoint);
double[] testPointValue = function.value(point.toArray());
point.setEntry(columnFinal, originalPoint); //set it back
return testPointValue;
}, originalPoint);
jacobian.setColumn(column, partialDerivatives);
}
return new Pair<>(value, jacobian);
}
/**
* Returns the first-order derivatives of the given vector function, computed with the configured differentiator.
* @param univariateVectorFunction the function to differentiate
* @param atParameterValue the point at which to differentiate
* @return the slopes at that point
*/
private double[] getPartialDerivative(UnivariateVectorFunction univariateVectorFunction, double atParameterValue) {
DerivativeStructure[] derivatives = differentiator
.differentiate(univariateVectorFunction)
.value(new DerivativeStructure(1, 1, 0, atParameterValue));
double[] derivativesOut = new double[derivatives.length];
for(int index=0;index<derivatives.length;index++) {
derivativesOut[index] = derivatives[index].getPartialDerivative(1);
}
return derivativesOut;
}
}
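
A small hand-check sketch of what the vector variant produces (illustrative only, not part of this change set); the two-output model is made up and the only extra import needed beyond the file above is FiniteDifferencesDifferentiator:

// Hypothetical model f(x, y) = (x * y, x + y); its exact Jacobian is [[y, x], [1, 1]].
MultivariateVectorFunction model = p -> new double[] { p[0] * p[1], p[0] + p[1] };
UnivariateVectorFunctionDifferentiator diff = new FiniteDifferencesDifferentiator(5, 1e-3);
MultivariateJacobianFunction adapter =
        new DifferentiatorVectorMultivariateJacobianFunction(model, diff);
RealMatrix jacobian = adapter.value(new ArrayRealVector(new double[] { 3.0, 2.0 })).getSecond();
// Up to finite-difference error:
// jacobian.getEntry(0, 0) ~ 2.0 (y), jacobian.getEntry(0, 1) ~ 3.0 (x),
// jacobian.getEntry(1, 0) ~ 1.0,    jacobian.getEntry(1, 1) ~ 1.0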

View File

@@ -0,0 +1,63 @@
package org.apache.commons.math4.fitting.leastsquares;
import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
import org.apache.commons.math4.analysis.MultivariateVectorFunction;
import org.apache.commons.math4.util.FastMath;
import java.util.ArrayList;
import java.util.List;
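/**
* Decay-curve model from Chapter 8 of Bevington's textbook, extracted to a top-level test helper.
*/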
class BevingtonProblem {
private List<Double> time;
private List<Double> count;
public BevingtonProblem() {
time = new ArrayList<>();
count = new ArrayList<>();
}
public void addPoint(double t, double c) {
time.add(t);
count.add(c);
}
public MultivariateVectorFunction getModelFunction() {
return new MultivariateVectorFunction() {
@Override
public double[] value(double[] params) {
double[] values = new double[time.size()];
for (int i = 0; i < values.length; ++i) {
final double t = time.get(i);
values[i] = params[0] +
params[1] * FastMath.exp(-t / params[3]) +
params[2] * FastMath.exp(-t / params[4]);
}
return values;
}
};
}
public MultivariateMatrixFunction getModelFunctionJacobian() {
return new MultivariateMatrixFunction() {
@Override
public double[][] value(double[] params) {
double[][] jacobian = new double[time.size()][5];
for (int i = 0; i < jacobian.length; ++i) {
final double t = time.get(i);
jacobian[i][0] = 1;
final double p3 = params[3];
final double p4 = params[4];
final double tOp3 = t / p3;
final double tOp4 = t / p4;
jacobian[i][1] = FastMath.exp(-tOp3);
jacobian[i][2] = FastMath.exp(-tOp4);
jacobian[i][3] = params[1] * FastMath.exp(-tOp3) * tOp3 / p3;
jacobian[i][4] = params[2] * FastMath.exp(-tOp4) * tOp4 / p4;
}
return jacobian;
}
};
}
}
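
For intuition, a hedged spot-check sketch (illustrative only, not part of this change set): the analytical Jacobian above follows from y(t) = p0 + p1*exp(-t/p3) + p2*exp(-t/p4), so for example dy/dp3 = p1*exp(-t/p3)*t/p3^2, and it can be compared against the finite-difference adapter at a single data point. The values below reuse the start vector and differentiator settings of the test further down; the extra imports needed are FiniteDifferencesDifferentiator, ArrayRealVector and RealMatrix:

BevingtonProblem problem = new BevingtonProblem();
problem.addPoint(15, 775);                      // first data point of the test below
double[] start = { 10, 900, 80, 27, 225 };
double analytical = problem.getModelFunctionJacobian().value(start)[0][3];   // dy/dp3 at t = 15
RealMatrix numerical = new DifferentiatorVectorMultivariateJacobianFunction(
        problem.getModelFunction(),
        new FiniteDifferencesDifferentiator(20, 0.2))
    .value(new ArrayRealVector(start))
    .getSecond();
// analytical and numerical.getEntry(0, 3) should agree to several significant digits.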

View File

@@ -0,0 +1,154 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math4.fitting.leastsquares;
import org.apache.commons.math4.analysis.differentiation.FiniteDifferencesDifferentiator;
import org.apache.commons.math4.analysis.differentiation.UnivariateVectorFunctionDifferentiator;
import org.apache.commons.math4.linear.DiagonalMatrix;
import org.apache.commons.math4.linear.RealMatrix;
import org.apache.commons.math4.linear.RealVector;
import org.apache.commons.math4.optim.SimpleVectorValueChecker;
import org.apache.commons.math4.util.FastMath;
import org.junit.Assert;
import org.junit.Test;
public class DifferentiatorVectorMultivariateJacobianFunctionTest {
private static final int POINTS = 20;
private static final double STEP_SIZE = 0.2;
private final UnivariateVectorFunctionDifferentiator differentiator = new FiniteDifferencesDifferentiator(POINTS, STEP_SIZE);
private final LeastSquaresOptimizer optimizer = this.getOptimizer();
public LeastSquaresBuilder base() {
return new LeastSquaresBuilder()
.checkerPair(new SimpleVectorValueChecker(1e-6, 1e-6))
.maxEvaluations(100)
.maxIterations(getMaxIterations());
}
public LeastSquaresBuilder builder(BevingtonProblem problem, boolean automatic){
if(automatic) {
return base()
.model(new DifferentiatorVectorMultivariateJacobianFunction(problem.getModelFunction(), differentiator));
}
else {
return base()
.model(problem.getModelFunction(), problem.getModelFunctionJacobian());
}
}
public int getMaxIterations() {
return 25;
}
public LeastSquaresOptimizer getOptimizer() {
return new LevenbergMarquardtOptimizer();
}
/**
* Non-linear test case: fitting of a decay curve (from Chapter 8 of
* Bevington's textbook, "Data Reduction and Error Analysis for the Physical Sciences").
*/
@Test
public void testBevington() {
// the analytical optimum to compare to
final LeastSquaresOptimizer.Optimum analyticalOptimum = findBevington(false);
final RealVector analyticalSolution = analyticalOptimum.getPoint();
final RealMatrix analyticalCovarianceMatrix = analyticalOptimum.getCovariances(1e-14);
// the automatic DifferentiatorVectorMultivariateJacobianFunction optimum
final LeastSquaresOptimizer.Optimum automaticOptimum = findBevington(true);
final RealVector automaticSolution = automaticOptimum.getPoint();
final RealMatrix automaticCovarianceMatrix = automaticOptimum.getCovariances(1e-14);
final int numParams = analyticalOptimum.getPoint().getDimension();
// Check that the automatic solution is within the reference error range.
for (int i = 0; i < numParams; i++) {
final double error = FastMath.sqrt(analyticalCovarianceMatrix.getEntry(i, i));
Assert.assertEquals("Parameter " + i, analyticalSolution.getEntry(i), automaticSolution.getEntry(i), error);
}
// Check that each entry of the computed covariance matrix is within 1%
// of the reference analytical matrix entry.
for (int i = 0; i < numParams; i++) {
for (int j = 0; j < numParams; j++) {
Assert.assertEquals("Covariance matrix [" + i + "][" + j + "]",
analyticalCovarianceMatrix.getEntry(i, j),
automaticCovarianceMatrix.getEntry(i, j),
FastMath.abs(0.01 * analyticalCovarianceMatrix.getEntry(i, j)));
}
}
// Check various measures of goodness-of-fit.
final double tol = 1e-40;
Assert.assertEquals(analyticalOptimum.getChiSquare(), automaticOptimum.getChiSquare(), tol);
Assert.assertEquals(analyticalOptimum.getCost(), automaticOptimum.getCost(), tol);
Assert.assertEquals(analyticalOptimum.getRMS(), automaticOptimum.getRMS(), tol);
Assert.assertEquals(analyticalOptimum.getReducedChiSquare(automaticOptimum.getPoint().getDimension()), automaticOptimum.getReducedChiSquare(automaticOptimum.getPoint().getDimension()), tol);
}
/**
* Builds the problem and returns the optimum; does not assert anything about the results.
*
* The flag selects between the analytical derivatives and the automatic
* {@link DifferentiatorVectorMultivariateJacobianFunction}.
*
* @param automatic whether to use the automatic {@link DifferentiatorVectorMultivariateJacobianFunction} instead of the analytical Jacobian
* @return the optimum for this test
*/
private LeastSquaresOptimizer.Optimum findBevington(boolean automatic) {
final double[][] dataPoints = {
// column 1 = times
{ 15, 30, 45, 60, 75, 90, 105, 120, 135, 150,
165, 180, 195, 210, 225, 240, 255, 270, 285, 300,
315, 330, 345, 360, 375, 390, 405, 420, 435, 450,
465, 480, 495, 510, 525, 540, 555, 570, 585, 600,
615, 630, 645, 660, 675, 690, 705, 720, 735, 750,
765, 780, 795, 810, 825, 840, 855, 870, 885, },
// column 2 = measured counts
{ 775, 479, 380, 302, 185, 157, 137, 119, 110, 89,
74, 61, 66, 68, 48, 54, 51, 46, 55, 29,
28, 37, 49, 26, 35, 29, 31, 24, 25, 35,
24, 30, 26, 28, 21, 18, 20, 27, 17, 17,
14, 17, 24, 11, 22, 17, 12, 10, 13, 16,
9, 9, 14, 21, 17, 13, 12, 18, 10, },
};
final double[] start = {10, 900, 80, 27, 225};
final BevingtonProblem problem = new BevingtonProblem();
final int len = dataPoints[0].length;
final double[] weights = new double[len];
for (int i = 0; i < len; i++) {
problem.addPoint(dataPoints[0][i],
dataPoints[1][i]);
weights[i] = 1 / dataPoints[1][i];
}
return optimizer.optimize(
builder(problem, automatic)
.target(dataPoints[1])
.weight(new DiagonalMatrix(weights))
.start(start)
.build()
);
}
}

View File

@@ -17,15 +17,8 @@
package org.apache.commons.math4.fitting.leastsquares;
import org.apache.commons.math4.analysis.MultivariateMatrixFunction;
import org.apache.commons.math4.analysis.MultivariateVectorFunction;
import org.apache.commons.math4.exception.DimensionMismatchException;
import org.apache.commons.math4.exception.TooManyEvaluationsException;
import org.apache.commons.math4.fitting.leastsquares.LeastSquaresBuilder;
import org.apache.commons.math4.fitting.leastsquares.LeastSquaresOptimizer;
import org.apache.commons.math4.fitting.leastsquares.LeastSquaresProblem;
import org.apache.commons.math4.fitting.leastsquares.LevenbergMarquardtOptimizer;
import org.apache.commons.math4.fitting.leastsquares.ParameterValidator;
import org.apache.commons.math4.fitting.leastsquares.LeastSquaresOptimizer.Optimum;
import org.apache.commons.math4.fitting.leastsquares.LeastSquaresProblem.Evaluation;
import org.apache.commons.math4.geometry.euclidean.twod.Cartesian2D;
@@ -39,9 +32,6 @@ import org.apache.commons.numbers.core.Precision;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.CoreMatchers.is;
/**
@@ -357,58 +347,4 @@ public class LevenbergMarquardtOptimizerTest
Assert.assertThat(optimum.getEvaluations(), is(2));
}
private static class BevingtonProblem {
private List<Double> time;
private List<Double> count;
public BevingtonProblem() {
time = new ArrayList<>();
count = new ArrayList<>();
}
public void addPoint(double t, double c) {
time.add(t);
count.add(c);
}
public MultivariateVectorFunction getModelFunction() {
return new MultivariateVectorFunction() {
@Override
public double[] value(double[] params) {
double[] values = new double[time.size()];
for (int i = 0; i < values.length; ++i) {
final double t = time.get(i);
values[i] = params[0] +
params[1] * FastMath.exp(-t / params[3]) +
params[2] * FastMath.exp(-t / params[4]);
}
return values;
}
};
}
public MultivariateMatrixFunction getModelFunctionJacobian() {
return new MultivariateMatrixFunction() {
@Override
public double[][] value(double[] params) {
double[][] jacobian = new double[time.size()][5];
for (int i = 0; i < jacobian.length; ++i) {
final double t = time.get(i);
jacobian[i][0] = 1;
final double p3 = params[3];
final double p4 = params[4];
final double tOp3 = t / p3;
final double tOp4 = t / p4;
jacobian[i][1] = FastMath.exp(-tOp3);
jacobian[i][2] = FastMath.exp(-tOp4);
jacobian[i][3] = params[1] * FastMath.exp(-tOp3) * tOp3 / p3;
jacobian[i][4] = params[2] * FastMath.exp(-tOp4) * tOp4 / p4;
}
return jacobian;
}
};
}
}
}