In o.a.c.m3.optimization.general.AbstractLeastSquaresOptimizer:
deprecated guessParametersErrors() and created getSigma(), which should be used
instead (but is not strictly equivalent). Updated unit tests accordingly.
See MATH-784.

git-svn-id: https://svn.apache.org/repos/asf/commons/proper/math/trunk@1334315 13f79535-47bb-0310-9956-ffa450edef68
commit 833111b2e8
parent 28a2839927
@@ -248,6 +248,9 @@ public abstract class AbstractLeastSquaresOptimizer
      * @throws NumberIsTooSmallException if the number of degrees of freedom is not
      * positive, i.e. the number of measurements is less or equal to the number of
      * parameters.
+     * @deprecated as of version 3.1, {@link #getSigma()} should be used
+     * instead. It should be emphasized that {@link #guessParametersErrors()} and
+     * {@link #getSigma()} are <em>not</em> strictly equivalent.
      */
     public double[] guessParametersErrors() {
         if (rows <= cols) {
@@ -263,6 +266,28 @@ public abstract class AbstractLeastSquaresOptimizer
         return errors;
     }
 
+    /**
+     * <p>
+     * Returns an estimate of the standard deviation of the parameters. The
+     * returned values are the square root of the diagonal coefficients of the
+     * covariance matrix, {@code sd(a[i]) ~= sqrt(C[i][i])}, where {@code a[i]}
+     * is the optimized value of the {@code i}-th parameter, and {@code C} is
+     * the covariance matrix.
+     * </p>
+     *
+     * @return an estimate of the standard deviation of the optimized parameters
+     * @throws org.apache.commons.math3.linear.SingularMatrixException
+     * if the covariance matrix cannot be computed.
+     */
+    public double[] getSigma() {
+        double[] sig = new double[cols];
+        double[][] cov = getCovariances();
+        for (int i = 0; i < sig.length; ++i) {
+            sig[i] = FastMath.sqrt(cov[i][i]);
+        }
+        return sig;
+    }
+
     /** {@inheritDoc} */
     @Override
     public PointVectorValuePair optimize(int maxEval,
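Both the commit message and the @deprecated tag stress that guessParametersErrors() and getSigma() are not strictly equivalent: as the updated test further down illustrates, the old error estimates correspond to the sigmas scaled by the square root of the reduced chi-square. The sketch below spells out that migration path under stated assumptions; "chiSquare", "numMeasurements" and "numParameters" are placeholders for values obtained from a configured optimizer (e.g. via getChiSquare() and the problem dimensions), not names from this diff.

import org.apache.commons.math3.util.FastMath;

/** Sketch only: rescale getSigma() values into guessParametersErrors()-style estimates. */
public final class SigmaMigrationSketch {
    /**
     * @param sigma           values returned by AbstractLeastSquaresOptimizer.getSigma()
     * @param chiSquare       value returned by getChiSquare() after a successful optimize(...)
     * @param numMeasurements number of observations (rows)
     * @param numParameters   number of fitted parameters (cols)
     */
    public static double[] scaledErrors(double[] sigma, double chiSquare,
                                        int numMeasurements, int numParameters) {
        final int dof = numMeasurements - numParameters;      // degrees of freedom
        final double scale = FastMath.sqrt(chiSquare / dof);  // sqrt of the reduced chi-square
        final double[] errors = new double[sigma.length];
        for (int i = 0; i < sigma.length; i++) {
            errors[i] = scale * sigma[i];  // comparable to the deprecated guessParametersErrors()[i]
        }
        return errors;
    }
}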
@@ -325,4 +325,12 @@ public class Gamma {
         return trigamma(x + 1) + 1 / (x * x);
     }
 
+    public static double lanczos(final double x){
+        double sum = 0.0;
+        for (int i = LANCZOS.length - 1; i > 0; --i) {
+            sum = sum + (LANCZOS[i] / (x + i));
+        }
+        return sum + LANCZOS[0];
+    }
+
 }
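The new Gamma.lanczos(x) helper simply accumulates the Lanczos partial sum over the LANCZOS coefficient table. For context, here is a hedged sketch of how such a partial sum typically enters a log-gamma approximation; the constant g = 607/128 is the value commonly paired with a 15-term coefficient set and is an assumption here, not something asserted by this diff.

import org.apache.commons.math3.util.FastMath;

/**
 * Illustration only: combining a Lanczos partial sum (e.g. Gamma.lanczos(x))
 * into the standard Lanczos log-gamma approximation. G and HALF_LOG_2_PI are
 * assumptions chosen to match a typical 15-term coefficient set.
 */
public final class LanczosLogGammaSketch {
    private static final double G = 607.0 / 128.0;  // assumed Lanczos g
    private static final double HALF_LOG_2_PI = 0.5 * FastMath.log(2.0 * FastMath.PI);

    /** Approximates ln(Gamma(x)) for x > 0, given the precomputed partial sum. */
    public static double logGammaApprox(double x, double lanczosSum) {
        final double t = x + G + 0.5;
        return (x + 0.5) * FastMath.log(t) - t + HALF_LOG_2_PI
                + FastMath.log(lanczosSum / x);
    }
}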
@@ -75,7 +75,7 @@ public class AbstractLeastSquaresOptimizerTest {
     }
 
     @Test
-    public void testGuessParametersErrors() throws IOException {
+    public void testGetSigma() throws IOException {
         final StatisticalReferenceDataset dataset;
         dataset = StatisticalReferenceDatasetFactory.createKirby2();
         final AbstractLeastSquaresOptimizer optimizer;
@@ -85,12 +85,14 @@ public class AbstractLeastSquaresOptimizerTest
         final double[] w = new double[y.length];
         Arrays.fill(w, 1.0);
 
+        final int dof = y.length - a.length;
         optimizer.optimize(1, dataset.getLeastSquaresProblem(), y, w, a);
-        final double[] actual = optimizer.guessParametersErrors();
+        final double[] sig = optimizer.getSigma();
         final double[] expected = dataset.getParametersStandardDeviations();
-        for (int i = 0; i < actual.length; i++) {
+        for (int i = 0; i < sig.length; i++) {
+            final double actual = FastMath.sqrt(optimizer.getChiSquare() / dof) * sig[i];
             Assert.assertEquals(dataset.getName() + ", parameter #" + i,
-                                actual[i], expected[i], 1E-8 * expected[i]);
+                                actual, expected[i], 1E-8 * expected[i]);
         }
     }
 }