MATH-1092

Extracted class "LineSearch" from "PowellOptimizer".
Made method "computeObjectiveValue" public in "MultivariateOptimizer".
Modified "PowellOptimizer" to use the now public "LineSearch" class.
"NonLinearConjugateGradientOptimizer" uses the new "LineSearch".
Added constructors to set the line search tolerances and deprecated
obsolete constructors and inner classes ("BracketingStep" and
"LineSearchFunction").
Removed method "findUpperBound".



git-svn-id: https://svn.apache.org/repos/asf/commons/proper/math/trunk@1572988 13f79535-47bb-0310-9956-ffa450edef68
Gilles Sadowski 2014-02-28 16:23:26 +00:00
parent ba70cfc5c9
commit f0ae42aa93
6 changed files with 303 additions and 242 deletions
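
To illustrate the constructor change described in the commit message, here is a minimal, hypothetical migration sketch. The "Migration" class name, the Polak-Ribiere formula, the checker and the tolerance values are placeholder choices taken from the test changes further down, not part of this commit:

import org.apache.commons.math3.optim.SimpleValueChecker;
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer;
import org.apache.commons.math3.optim.nonlinear.scalar.gradient.NonLinearConjugateGradientOptimizer.Formula;

public class Migration {
    public static NonLinearConjugateGradientOptimizer create() {
        // Deprecated as of 3.3: configuring the line search through a solver, e.g.
        //   new NonLinearConjugateGradientOptimizer(Formula.POLAK_RIBIERE,
        //                                           new SimpleValueChecker(1e-6, 1e-6),
        //                                           new BrentSolver());
        // Replacement: pass the line-search tolerances directly; they are
        // forwarded to the new LineSearch helper.
        return new NonLinearConjugateGradientOptimizer(
                Formula.POLAK_RIBIERE,
                new SimpleValueChecker(1e-6, 1e-6), // outer convergence checker
                1e-3,                               // relative line-search tolerance
                1e-3);                              // absolute line-search tolerance
    }
}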

File: LineSearch.java (new file)

@ -0,0 +1,123 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.math3.optim.nonlinear.scalar;
import org.apache.commons.math3.optim.univariate.UnivariateOptimizer;
import org.apache.commons.math3.optim.univariate.BrentOptimizer;
import org.apache.commons.math3.optim.univariate.BracketFinder;
import org.apache.commons.math3.optim.univariate.UnivariatePointValuePair;
import org.apache.commons.math3.optim.univariate.SimpleUnivariateValueChecker;
import org.apache.commons.math3.optim.univariate.SearchInterval;
import org.apache.commons.math3.optim.univariate.UnivariateObjectiveFunction;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.optim.MaxEval;
/**
* Class for finding the minimum of the objective function along a given
* direction.
*
* @since 3.3
* @version $Id$
*/
public class LineSearch {
/**
* Value that will pass the precondition check for {@link BrentOptimizer}
* but will not pass the convergence check, so that the custom checker
* will always decide when to stop the line search.
*/
private static final double REL_TOL_UNUSED = 1e-15;
/**
* Value that will pass the precondition check for {@link BrentOptimizer}
* but will not pass the convergence check, so that the custom checker
* will always decide when to stop the line search.
*/
private static final double ABS_TOL_UNUSED = Double.MIN_VALUE;
/**
* Optimizer used for line search.
*/
private final UnivariateOptimizer lineOptimizer;
/**
* Automatic bracketing.
*/
private final BracketFinder bracket = new BracketFinder();
/**
* Optimizer on behalf of which the line search must be performed.
*/
private final MultivariateOptimizer mainOptimizer;
/**
* The {@code BrentOptimizer} default stopping criterion uses the
* tolerances to check the domain (point) values, not the function
* values.
* The {@code relativeTolerance} and {@code absoluteTolerance}
* arguments are thus passed to a custom checker that will use
* the function values.
*
* @param optimizer Optimizer on behalf of which the line search
* must be performed.
* Its {@link MultivariateOptimizer#computeObjectiveValue(double[])
* computeObjectiveValue} method will be called by this class's
* {@link #search(double[],double[]) search} method.
* @param relativeTolerance Relative threshold.
* @param absoluteTolerance Absolute threshold.
*/
public LineSearch(MultivariateOptimizer optimizer,
double relativeTolerance,
double absoluteTolerance) {
mainOptimizer = optimizer;
lineOptimizer = new BrentOptimizer(REL_TOL_UNUSED,
ABS_TOL_UNUSED,
new SimpleUnivariateValueChecker(relativeTolerance,
absoluteTolerance));
}
/**
* Find the minimum of the function {@code f(p + alpha * d)}.
*
* @param startPoint Starting point.
* @param direction Search direction.
* @return the optimum.
* @throws org.apache.commons.math3.exception.TooManyEvaluationsException
* if the number of evaluations is exceeded.
*/
public UnivariatePointValuePair search(final double[] startPoint,
final double[] direction) {
final int n = startPoint.length;
final UnivariateFunction f = new UnivariateFunction() {
public double value(double alpha) {
final double[] x = new double[n];
for (int i = 0; i < n; i++) {
x[i] = startPoint[i] + alpha * direction[i];
}
final double obj = mainOptimizer.computeObjectiveValue(x);
return obj;
}
};
final GoalType goal = mainOptimizer.getGoalType();
bracket.search(f, goal, 0, 1);
// Passing "MAX_VALUE" as a dummy value because it is the calling
// optimizer that counts the number of evaluations (and will eventually
// generate the exception).
return lineOptimizer.optimize(new MaxEval(Integer.MAX_VALUE),
new UnivariateObjectiveFunction(f),
goal,
new SearchInterval(bracket.getLo(),
bracket.getHi(),
bracket.getMid()));
}
}
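
For orientation, the collaboration this class enables (mirrored by the PowellOptimizer change below) looks roughly like the following sketch. "MyOptimizer", the tolerances and the fixed search direction are placeholders, not part of this commit:

import java.util.Arrays;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.nonlinear.scalar.LineSearch;
import org.apache.commons.math3.optim.nonlinear.scalar.MultivariateOptimizer;
import org.apache.commons.math3.optim.univariate.UnivariatePointValuePair;

class MyOptimizer extends MultivariateOptimizer {
    /** Line search that delegates objective evaluations back to this optimizer. */
    private final LineSearch line;

    MyOptimizer(double lineRelTol, double lineAbsTol) {
        super(null); // no outer convergence checker in this sketch
        // "LineSearch" calls the (now public) computeObjectiveValue(double[]) of
        // this instance, so the evaluation count and the eventual
        // TooManyEvaluationsException stay centralized in the main optimizer.
        line = new LineSearch(this, lineRelTol, lineAbsTol);
    }

    @Override
    protected PointValuePair doOptimize() {
        final double[] point = getStartPoint();
        final double[] direction = new double[point.length];
        Arrays.fill(direction, 1.0); // placeholder search direction

        // Optimal step length along "direction", then the updated point.
        final UnivariatePointValuePair step = line.search(point, direction);
        final double alpha = step.getPoint();
        for (int i = 0; i < point.length; i++) {
            point[i] += alpha * direction[i];
        }
        return new PointValuePair(point, step.getValue());
    }
}

A caller would still configure such an optimizer through optimize(...) with MaxEval, ObjectiveFunction, GoalType and InitialGuess, exactly as for the existing optimizers.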

File: MultivariateOptimizer.java

@ -111,7 +111,7 @@ public abstract class MultivariateOptimizer
* @throws TooManyEvaluationsException if the maximal number of
* evaluations is exceeded.
*/
protected double computeObjectiveValue(double[] params) {
public double computeObjectiveValue(double[] params) {
super.incrementEvaluationCount();
return function.value(params);
}

File: NonLinearConjugateGradientOptimizer.java

@ -17,11 +17,8 @@
package org.apache.commons.math3.optim.nonlinear.scalar.gradient;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.solvers.BrentSolver;
import org.apache.commons.math3.analysis.solvers.UnivariateSolver;
import org.apache.commons.math3.exception.MathInternalError;
import org.apache.commons.math3.exception.MathIllegalStateException;
import org.apache.commons.math3.exception.TooManyEvaluationsException;
import org.apache.commons.math3.exception.MathUnsupportedOperationException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
@ -30,7 +27,8 @@ import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.ConvergenceChecker;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.nonlinear.scalar.GradientMultivariateOptimizer;
import org.apache.commons.math3.util.FastMath;
import org.apache.commons.math3.optim.nonlinear.scalar.LineSearch;
/**
* Non-linear conjugate gradient optimizer.
@ -52,27 +50,8 @@ public class NonLinearConjugateGradientOptimizer
private final Formula updateFormula;
/** Preconditioner (may be null). */
private final Preconditioner preconditioner;
/** solver to use in the line search (may be null). */
private final UnivariateSolver solver;
/** Initial step used to bracket the optimum in line search. */
private double initialStep = 1;
/**
* Constructor with default {@link BrentSolver line search solver} and
* {@link IdentityPreconditioner preconditioner}.
*
* @param updateFormula formula to use for updating the &beta; parameter,
* must be one of {@link Formula#FLETCHER_REEVES} or
* {@link Formula#POLAK_RIBIERE}.
* @param checker Convergence checker.
*/
public NonLinearConjugateGradientOptimizer(final Formula updateFormula,
ConvergenceChecker<PointValuePair> checker) {
this(updateFormula,
checker,
new BrentSolver(),
new IdentityPreconditioner());
}
/** Line search algorithm. */
private final LineSearch line;
/**
* Available choices of update formulas for the updating the parameter
@ -107,7 +86,9 @@ public class NonLinearConjugateGradientOptimizer
* search.
*
* @since 3.1
* @deprecated As of v3.3, class is not used anymore.
*/
@Deprecated
public static class BracketingStep implements OptimizationData {
/** Initial step. */
private final double initialStep;
@ -129,6 +110,24 @@ public class NonLinearConjugateGradientOptimizer
}
}
/**
* Constructor with default tolerances for the line search (1e-8) and
* {@link IdentityPreconditioner preconditioner}.
*
* @param updateFormula formula to use for updating the &beta; parameter,
* must be one of {@link Formula#FLETCHER_REEVES} or
* {@link Formula#POLAK_RIBIERE}.
* @param checker Convergence checker.
*/
public NonLinearConjugateGradientOptimizer(final Formula updateFormula,
ConvergenceChecker<PointValuePair> checker) {
this(updateFormula,
checker,
1e-8,
1e-8,
new IdentityPreconditioner());
}
/**
* Constructor with default {@link IdentityPreconditioner preconditioner}.
*
@ -137,7 +136,10 @@ public class NonLinearConjugateGradientOptimizer
* {@link Formula#POLAK_RIBIERE}.
* @param checker Convergence checker.
* @param lineSearchSolver Solver to use during line search.
* @deprecated as of 3.3. Please use
* {@link #NonLinearConjugateGradientOptimizer(Formula,ConvergenceChecker,double,double)} instead.
*/
@Deprecated
public NonLinearConjugateGradientOptimizer(final Formula updateFormula,
ConvergenceChecker<PointValuePair> checker,
final UnivariateSolver lineSearchSolver) {
@ -147,6 +149,29 @@ public class NonLinearConjugateGradientOptimizer
new IdentityPreconditioner());
}
/**
* Constructor with default {@link IdentityPreconditioner preconditioner}.
*
* @param updateFormula formula to use for updating the &beta; parameter,
* must be one of {@link Formula#FLETCHER_REEVES} or
* {@link Formula#POLAK_RIBIERE}.
* @param checker Convergence checker.
* @param relativeTolerance Relative threshold for line search.
* @param absoluteTolerance Absolute threshold for line search.
*
* @see LineSearch#LineSearch(MultivariateOptimizer,double,double)
*/
public NonLinearConjugateGradientOptimizer(final Formula updateFormula,
ConvergenceChecker<PointValuePair> checker,
double relativeTolerance,
double absoluteTolerance) {
this(updateFormula,
checker,
relativeTolerance,
absoluteTolerance,
new IdentityPreconditioner());
}
/**
* @param updateFormula formula to use for updating the &beta; parameter,
* must be one of {@link Formula#FLETCHER_REEVES} or
@ -154,31 +179,48 @@ public class NonLinearConjugateGradientOptimizer
* @param checker Convergence checker.
* @param lineSearchSolver Solver to use during line search.
* @param preconditioner Preconditioner.
* @deprecated as of 3.3. Please use
* {@link #NonLinearConjugateGradientOptimizer(Formula,ConvergenceChecker,double,double,Preconditioner)} instead.
*/
@Deprecated
public NonLinearConjugateGradientOptimizer(final Formula updateFormula,
ConvergenceChecker<PointValuePair> checker,
final UnivariateSolver lineSearchSolver,
final Preconditioner preconditioner) {
this(updateFormula,
checker,
lineSearchSolver.getRelativeAccuracy(),
lineSearchSolver.getAbsoluteAccuracy(),
preconditioner);
}
/**
* @param updateFormula formula to use for updating the &beta; parameter,
* must be one of {@link Formula#FLETCHER_REEVES} or
* {@link Formula#POLAK_RIBIERE}.
* @param checker Convergence checker.
* @param preconditioner Preconditioner.
* @param relativeTolerance Relative threshold for line search.
* @param absoluteTolerance Absolute threshold for line search.
*
* @see LineSearch#LineSearch(MultivariateOptimizer,double,double)
*/
public NonLinearConjugateGradientOptimizer(final Formula updateFormula,
ConvergenceChecker<PointValuePair> checker,
double relativeTolerance,
double absoluteTolerance,
final Preconditioner preconditioner) {
super(checker);
this.updateFormula = updateFormula;
solver = lineSearchSolver;
this.preconditioner = preconditioner;
initialStep = 1;
line = new LineSearch(this,
relativeTolerance,
absoluteTolerance);
}
/**
* {@inheritDoc}
*
* @param optData Optimization data. In addition to those documented in
* {@link GradientMultivariateOptimizer#parseOptimizationData(OptimizationData[])
* GradientMultivariateOptimizer}, this method will register the following data:
* <ul>
* <li>{@link BracketingStep}</li>
* </ul>
* @return {@inheritDoc}
* @throws TooManyEvaluationsException if the maximal number of
* evaluations (of the objective function) is exceeded.
*/
@Override
public PointValuePair optimize(OptimizationData... optData)
@ -211,7 +253,6 @@ public class NonLinearConjugateGradientOptimizer
}
PointValuePair current = null;
int maxEval = getMaxEvaluations();
while (true) {
incrementIterationCount();
@ -223,14 +264,7 @@ public class NonLinearConjugateGradientOptimizer
return current;
}
// Find the optimal step in the search direction.
final UnivariateFunction lsf = new LineSearchFunction(point, searchDirection);
final double uB = findUpperBound(lsf, 0, initialStep);
// XXX Last parameters is set to a value close to zero in order to
// work around the divergence problem in the "testCircleFitting"
// unit test (see MATH-439).
final double step = solver.solve(maxEval, lsf, 0, uB, 1e-15);
maxEval -= solver.getEvaluations(); // Subtract used up evaluations.
final double step = line.search(point, searchDirection).getPoint();
// Validate new point.
for (int i = 0; i < point.length; ++i) {
@ -285,57 +319,16 @@ public class NonLinearConjugateGradientOptimizer
}
/**
* Scans the list of (required and optional) optimization data that
* characterize the problem.
*
* @param optData Optimization data.
* The following data will be looked for:
* <ul>
* <li>{@link BracketingStep}</li>
* </ul>
* {@inheritDoc}
*/
@Override
protected void parseOptimizationData(OptimizationData... optData) {
// Allow base class to register its own data.
super.parseOptimizationData(optData);
// The existing values (as set by the previous call) are reused if
// not provided in the argument list.
for (OptimizationData data : optData) {
if (data instanceof BracketingStep) {
initialStep = ((BracketingStep) data).getBracketingStep();
// If more data must be parsed, this statement _must_ be
// changed to "continue".
break;
}
}
checkParameters();
}
/**
* Finds the upper bound b ensuring bracketing of a root between a and b.
*
* @param f function whose root must be bracketed.
* @param a lower bound of the interval.
* @param h initial step to try.
* @return b such that f(a) and f(b) have opposite signs.
* @throws MathIllegalStateException if no bracket can be found.
*/
private double findUpperBound(final UnivariateFunction f,
final double a, final double h) {
final double yA = f.value(a);
double yB = yA;
for (double step = h; step < Double.MAX_VALUE; step *= FastMath.max(2, yA / yB)) {
final double b = a + step;
yB = f.value(b);
if (yA * yB <= 0) {
return b;
}
}
throw new MathIllegalStateException(LocalizedFormats.UNABLE_TO_BRACKET_OPTIMUM_IN_LINE_SEARCH);
}
/** Default identity preconditioner. */
public static class IdentityPreconditioner implements Preconditioner {
/** {@inheritDoc} */
@ -344,52 +337,55 @@ public class NonLinearConjugateGradientOptimizer
}
}
/**
* Internal class for line search.
* <p>
* The function represented by this class is the dot product of
* the objective function gradient and the search direction. Its
* value is zero when the gradient is orthogonal to the search
* direction, i.e. when the objective function value is a local
* extremum along the search direction.
* </p>
*/
private class LineSearchFunction implements UnivariateFunction {
/** Current point. */
private final double[] currentPoint;
/** Search direction. */
private final double[] searchDirection;
// Class is not used anymore (cf. MATH-1092). However, it might
// be interesting to create a class similar to "LineSearch", but
// that will take advantage that the model's gradient is available.
// /**
// * Internal class for line search.
// * <p>
// * The function represented by this class is the dot product of
// * the objective function gradient and the search direction. Its
// * value is zero when the gradient is orthogonal to the search
// * direction, i.e. when the objective function value is a local
// * extremum along the search direction.
// * </p>
// */
// private class LineSearchFunction implements UnivariateFunction {
// /** Current point. */
// private final double[] currentPoint;
// /** Search direction. */
// private final double[] searchDirection;
/**
* @param point Current point.
* @param direction Search direction.
*/
public LineSearchFunction(double[] point,
double[] direction) {
currentPoint = point.clone();
searchDirection = direction.clone();
}
// /**
// * @param point Current point.
// * @param direction Search direction.
// */
// public LineSearchFunction(double[] point,
// double[] direction) {
// currentPoint = point.clone();
// searchDirection = direction.clone();
// }
/** {@inheritDoc} */
public double value(double x) {
// current point in the search direction
final double[] shiftedPoint = currentPoint.clone();
for (int i = 0; i < shiftedPoint.length; ++i) {
shiftedPoint[i] += x * searchDirection[i];
}
// /** {@inheritDoc} */
// public double value(double x) {
// // current point in the search direction
// final double[] shiftedPoint = currentPoint.clone();
// for (int i = 0; i < shiftedPoint.length; ++i) {
// shiftedPoint[i] += x * searchDirection[i];
// }
// gradient of the objective function
final double[] gradient = computeObjectiveGradient(shiftedPoint);
// // gradient of the objective function
// final double[] gradient = computeObjectiveGradient(shiftedPoint);
// dot product with the search direction
double dotProduct = 0;
for (int i = 0; i < gradient.length; ++i) {
dotProduct += gradient[i] * searchDirection[i];
}
// // dot product with the search direction
// double dotProduct = 0;
// for (int i = 0; i < gradient.length; ++i) {
// dotProduct += gradient[i] * searchDirection[i];
// }
return dotProduct;
}
}
// return dotProduct;
// }
// }
/**
* @throws MathUnsupportedOperationException if bounds were passed to the
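
The commented-out "LineSearchFunction" above keeps a note that a gradient-aware line search might be worth adding later. A rough sketch of what such a helper could look like follows; it is not part of this commit, the class name and accuracies are placeholders, and the gradient is injected explicitly because computeObjectiveGradient is not public:

import org.apache.commons.math3.analysis.MultivariateVectorFunction;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.analysis.solvers.BrentSolver;

/**
 * Hypothetical gradient-aware line search (sketch only). The step is found as
 * a root of the directional derivative, i.e. the dot product of the objective
 * gradient at {@code p + alpha * d} with the direction {@code d}, the same
 * quantity the removed {@code LineSearchFunction} computed.
 */
class GradientLineSearch {
    /** Gradient of the objective function. */
    private final MultivariateVectorFunction gradient;
    /** Root solver for the directional derivative (placeholder accuracies). */
    private final BrentSolver solver = new BrentSolver(1e-10, 1e-10);

    GradientLineSearch(MultivariateVectorFunction gradient) {
        this.gradient = gradient;
    }

    /**
     * @param maxStep Upper bound of the search interval. The caller must ensure
     * that the directional derivative changes sign on {@code [0, maxStep]},
     * the role the removed {@code findUpperBound} method used to play.
     * @return the step length at which the gradient is orthogonal to the direction.
     */
    double search(final double[] point, final double[] direction, double maxStep) {
        final UnivariateFunction derivativeAlongDirection = new UnivariateFunction() {
            public double value(double alpha) {
                final double[] shifted = point.clone();
                for (int i = 0; i < shifted.length; i++) {
                    shifted[i] += alpha * direction[i];
                }
                final double[] g = gradient.value(shifted);
                double dot = 0;
                for (int i = 0; i < g.length; i++) {
                    dot += g[i] * direction[i];
                }
                return dot;
            }
        };
        return solver.solve(1000, derivativeAlongDirection, 0, maxStep); // evaluation budget is arbitrary
    }
}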

File: PowellOptimizer.java

@ -18,22 +18,16 @@ package org.apache.commons.math3.optim.nonlinear.scalar.noderiv;
import org.apache.commons.math3.util.FastMath;
import org.apache.commons.math3.util.MathArrays;
import org.apache.commons.math3.analysis.UnivariateFunction;
import org.apache.commons.math3.exception.NumberIsTooSmallException;
import org.apache.commons.math3.exception.NotStrictlyPositiveException;
import org.apache.commons.math3.exception.MathUnsupportedOperationException;
import org.apache.commons.math3.exception.util.LocalizedFormats;
import org.apache.commons.math3.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math3.optim.MaxEval;
import org.apache.commons.math3.optim.PointValuePair;
import org.apache.commons.math3.optim.ConvergenceChecker;
import org.apache.commons.math3.optim.nonlinear.scalar.MultivariateOptimizer;
import org.apache.commons.math3.optim.univariate.BracketFinder;
import org.apache.commons.math3.optim.univariate.BrentOptimizer;
import org.apache.commons.math3.optim.nonlinear.scalar.LineSearch;
import org.apache.commons.math3.optim.univariate.UnivariatePointValuePair;
import org.apache.commons.math3.optim.univariate.SimpleUnivariateValueChecker;
import org.apache.commons.math3.optim.univariate.SearchInterval;
import org.apache.commons.math3.optim.univariate.UnivariateObjectiveFunction;
/**
* Powell's algorithm.
@ -131,7 +125,8 @@ public class PowellOptimizer
absoluteThreshold = abs;
// Create the line search optimizer.
line = new LineSearch(lineRel,
line = new LineSearch(this,
lineRel,
lineAbs);
}
@ -292,79 +287,6 @@ public class PowellOptimizer
return result;
}
/**
* Class for finding the minimum of the objective function along a given
* direction.
*/
private class LineSearch extends BrentOptimizer {
/**
* Value that will pass the precondition check for {@link BrentOptimizer}
* but will not pass the convergence check, so that the custom checker
* will always decide when to stop the line search.
*/
private static final double REL_TOL_UNUSED = 1e-15;
/**
* Value that will pass the precondition check for {@link BrentOptimizer}
* but will not pass the convergence check, so that the custom checker
* will always decide when to stop the line search.
*/
private static final double ABS_TOL_UNUSED = Double.MIN_VALUE;
/**
* Automatic bracketing.
*/
private final BracketFinder bracket = new BracketFinder();
/**
* The "BrentOptimizer" default stopping criterion uses the tolerances
* to check the domain (point) values, not the function values.
* We thus create a custom checker to use function values.
*
* @param rel Relative threshold.
* @param abs Absolute threshold.
*/
LineSearch(double rel,
double abs) {
super(REL_TOL_UNUSED,
ABS_TOL_UNUSED,
new SimpleUnivariateValueChecker(rel, abs));
}
/**
* Find the minimum of the function {@code f(p + alpha * d)}.
*
* @param p Starting point.
* @param d Search direction.
* @return the optimum.
* @throws org.apache.commons.math3.exception.TooManyEvaluationsException
* if the number of evaluations is exceeded.
*/
public UnivariatePointValuePair search(final double[] p, final double[] d) {
final int n = p.length;
final UnivariateFunction f = new UnivariateFunction() {
public double value(double alpha) {
final double[] x = new double[n];
for (int i = 0; i < n; i++) {
x[i] = p[i] + alpha * d[i];
}
final double obj = PowellOptimizer.this.computeObjectiveValue(x);
return obj;
}
};
final GoalType goal = PowellOptimizer.this.getGoalType();
bracket.search(f, goal, 0, 1);
// Passing "MAX_VALUE" as a dummy value because it is the enclosing
// class that counts the number of evaluations (and will eventually
// generate the exception).
return optimize(new MaxEval(Integer.MAX_VALUE),
new UnivariateObjectiveFunction(f),
goal,
new SearchInterval(bracket.getLo(),
bracket.getHi(),
bracket.getMid()));
}
}
/**
* @throws MathUnsupportedOperationException if bounds were passed to the
* {@link #optimize(OptimizationData[]) optimize} method.

File: MultiStartMultivariateOptimizerTest.java

@ -66,12 +66,15 @@ public class MultiStartMultivariateOptimizerTest {
PointValuePair[] optima = optimizer.getOptima();
for (PointValuePair o : optima) {
Vector2D center = new Vector2D(o.getPointRef()[0], o.getPointRef()[1]);
Assert.assertEquals(69.960161753, circle.getRadius(center), 1e-8);
Assert.assertEquals(96.075902096, center.getX(), 1e-8);
Assert.assertEquals(48.135167894, center.getY(), 1e-8);
Assert.assertEquals(69.9601, circle.getRadius(center), 1e-4);
Assert.assertEquals(96.075, center.getX(), 1e-3);
Assert.assertEquals(48.13516, center.getY(), 1e-5);
}
Assert.assertTrue(optimizer.getEvaluations() > 70);
Assert.assertTrue(optimizer.getEvaluations() < 90);
Assert.assertTrue(optimizer.getEvaluations() > 590);
Assert.assertTrue(optimizer.getEvaluations() < 610);
Assert.assertEquals(3.1267527, optimum.getValue(), 1e-8);
}

File: NonLinearConjugateGradientOptimizerTest.java

@ -104,7 +104,8 @@ public class NonLinearConjugateGradientOptimizerTest {
= new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
problem.getObjectiveFunctionGradient(),
@ -120,7 +121,8 @@ public class NonLinearConjugateGradientOptimizerTest {
= new LinearProblem(new double[][] { { 2 } }, new double[] { 3 });
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -142,7 +144,8 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -167,7 +170,8 @@ public class NonLinearConjugateGradientOptimizerTest {
}, new double[] { 0.0, 1.1, 2.2, 3.3, 4.4, 5.5 });
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -188,7 +192,8 @@ public class NonLinearConjugateGradientOptimizerTest {
}, new double[] { 1, 1, 1});
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -230,7 +235,7 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-13, 1e-13),
new BrentSolver(),
1e-7, 1e-7,
preconditioner);
PointValuePair optimum
@ -239,12 +244,16 @@ public class NonLinearConjugateGradientOptimizerTest {
problem.getObjectiveFunctionGradient(),
GoalType.MINIMIZE,
new InitialGuess(new double[] { 0, 0, 0, 0, 0, 0 }));
Assert.assertEquals( 3.0, optimum.getPoint()[0], 1.0e-10);
Assert.assertEquals( 4.0, optimum.getPoint()[1], 1.0e-10);
Assert.assertEquals(-1.0, optimum.getPoint()[2], 1.0e-10);
Assert.assertEquals(-2.0, optimum.getPoint()[3], 1.0e-10);
Assert.assertEquals( 1.0 + epsilon, optimum.getPoint()[4], 1.0e-10);
Assert.assertEquals( 1.0 - epsilon, optimum.getPoint()[5], 1.0e-10);
final double[] result = optimum.getPoint();
final double[] expected = {3, 4, -1, -2, 1 + epsilon, 1 - epsilon};
Assert.assertEquals(expected[0], result[0], 1.0e-7);
Assert.assertEquals(expected[1], result[1], 1.0e-7);
Assert.assertEquals(expected[2], result[2], 1.0e-9);
Assert.assertEquals(expected[3], result[3], 1.0e-8);
Assert.assertEquals(expected[4] + epsilon, result[4], 1.0e-6);
Assert.assertEquals(expected[5] - epsilon, result[5], 1.0e-6);
}
@ -257,7 +266,8 @@ public class NonLinearConjugateGradientOptimizerTest {
}, new double[] { 1, 1, 1 });
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -278,7 +288,7 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-13, 1e-13),
new BrentSolver(1e-15, 1e-15));
1e-15, 1e-15);
PointValuePair optimum1
= optimizer.optimize(new MaxEval(200),
problem1.getObjectiveFunction(),
@ -286,7 +296,7 @@ public class NonLinearConjugateGradientOptimizerTest {
GoalType.MINIMIZE,
new InitialGuess(new double[] { 0, 1, 2, 3 }));
Assert.assertEquals(1.0, optimum1.getPoint()[0], 1.0e-4);
Assert.assertEquals(1.0, optimum1.getPoint()[1], 1.0e-4);
Assert.assertEquals(1.0, optimum1.getPoint()[1], 1.0e-3);
Assert.assertEquals(1.0, optimum1.getPoint()[2], 1.0e-4);
Assert.assertEquals(1.0, optimum1.getPoint()[3], 1.0e-4);
@ -302,11 +312,14 @@ public class NonLinearConjugateGradientOptimizerTest {
problem2.getObjectiveFunctionGradient(),
GoalType.MINIMIZE,
new InitialGuess(new double[] { 0, 1, 2, 3 }));
Assert.assertEquals(-81.0, optimum2.getPoint()[0], 1.0e-1);
Assert.assertEquals(137.0, optimum2.getPoint()[1], 1.0e-1);
Assert.assertEquals(-34.0, optimum2.getPoint()[2], 1.0e-1);
Assert.assertEquals( 22.0, optimum2.getPoint()[3], 1.0e-1);
final double[] result2 = optimum2.getPoint();
final double[] expected2 = {-81, 137, -34, 22};
Assert.assertEquals(expected2[0], result2[0], 2);
Assert.assertEquals(expected2[1], result2[1], 4);
Assert.assertEquals(expected2[2], result2[2], 1);
Assert.assertEquals(expected2[3], result2[3], 1);
}
@Test
@ -319,7 +332,8 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -341,7 +355,8 @@ public class NonLinearConjugateGradientOptimizerTest {
}, new double[] { 3.0, 12.0, -1.0, 7.0, 1.0 });
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -361,7 +376,8 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -383,7 +399,8 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-6, 1e-6));
new SimpleValueChecker(1e-6, 1e-6),
1e-3, 1e-3);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -405,7 +422,7 @@ public class NonLinearConjugateGradientOptimizerTest {
NonLinearConjugateGradientOptimizer optimizer
= new NonLinearConjugateGradientOptimizer(NonLinearConjugateGradientOptimizer.Formula.POLAK_RIBIERE,
new SimpleValueChecker(1e-30, 1e-30),
new BrentSolver(1e-15, 1e-13));
1e-15, 1e-13);
PointValuePair optimum
= optimizer.optimize(new MaxEval(100),
problem.getObjectiveFunction(),
@ -414,8 +431,8 @@ public class NonLinearConjugateGradientOptimizerTest {
new InitialGuess(new double[] { 98.680, 47.345 }));
Vector2D center = new Vector2D(optimum.getPointRef()[0], optimum.getPointRef()[1]);
Assert.assertEquals(69.960161753, problem.getRadius(center), 1.0e-8);
Assert.assertEquals(96.075902096, center.getX(), 1.0e-8);
Assert.assertEquals(48.135167894, center.getY(), 1.0e-8);
Assert.assertEquals(96.075902096, center.getX(), 1.0e-7);
Assert.assertEquals(48.135167894, center.getY(), 1.0e-6);
}
private static class LinearProblem {