optimize: remove Local implementation and replace with a call to Global (#485)

* optimize: remove Local implementation and replace with a call to Global

This PR starts the process described in #482. It removes the existing Local implementation, replacing it with a function that wraps a Method so it acts as a GlobalMethod and delegates to Global. This PR also adds a hack to fix an inconsistency in FunctionConverge behavior between Global and Local (and a TODO to remove the hack in the future).
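For orientation, the shape of the change is roughly a thin adapter: Local no longer runs its own iteration loop, but wraps the supplied Method so that Global's loop can drive it. The following is a minimal sketch only; the adapter name localGlobal, its field set, and the exact signatures are assumptions for illustration, not necessarily the committed code:

// Local minimizes the function using a local Method, starting from initX.
// Sketch: rather than owning its own optimization loop, Local adapts the
// local Method into the interface that Global drives, then calls Global.
func Local(p Problem, initX []float64, settings *Settings, method Method) (*Result, error) {
	// localGlobal (assumed name) wraps a local Method so that it
	// satisfies the GlobalMethod interface expected by Global.
	lg := &localGlobal{
		Method:   method,
		InitX:    initX,
		Settings: settings,
	}
	return Global(p, len(initX), settings, lg)
}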
Author: Brendan Tracey
Date: 2018-05-09 11:02:19 -06:00
Committed by: GitHub
Parent: fa6741ae52
Commit: f402b0ae71
5 changed files with 171 additions and 164 deletions

@@ -1155,7 +1155,7 @@ func TestNewton(t *testing.T) {
 }
 
 func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
-	for _, test := range tests {
+	for cas, test := range tests {
 		if test.long && testing.Short() {
 			continue
 		}
@@ -1182,11 +1182,11 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
 		result, err := Local(test.p, test.x, settings, method)
 		if err != nil {
-			t.Errorf("error finding minimum (%v) for:\n%v", err, test)
+			t.Errorf("Case %d: error finding minimum (%v) for:\n%v", cas, err, test)
 			continue
 		}
 		if result == nil {
-			t.Errorf("nil result without error for:\n%v", test)
+			t.Errorf("Case %d: nil result without error for:\n%v", cas, test)
 			continue
 		}
@@ -1194,8 +1194,8 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
 		// equal to result.F.
 		optF := test.p.Func(result.X)
 		if optF != result.F {
-			t.Errorf("Function value at the optimum location %v not equal to the returned value %v for:\n%v",
-				optF, result.F, test)
+			t.Errorf("Case %d: Function value at the optimum location %v not equal to the returned value %v for:\n%v",
+				cas, optF, result.F, test)
 		}
 		if result.Gradient != nil {
 			// Evaluate the norm of the gradient at the found optimum location.
@@ -1203,15 +1203,15 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
 			test.p.Grad(g, result.X)
 			if !floats.Equal(result.Gradient, g) {
-				t.Errorf("Gradient at the optimum location not equal to the returned value for:\n%v", test)
+				t.Errorf("Case %d: Gradient at the optimum location not equal to the returned value for:\n%v", cas, test)
 			}
 			optNorm := floats.Norm(g, math.Inf(1))
 			// Check that the norm of the gradient at the found optimum location is
 			// smaller than the tolerance.
 			if optNorm >= settings.GradientThreshold {
-				t.Errorf("Norm of the gradient at the optimum location %v not smaller than tolerance %v for:\n%v",
-					optNorm, settings.GradientThreshold, test)
+				t.Errorf("Case %d: Norm of the gradient at the optimum location %v not smaller than tolerance %v for:\n%v",
+					cas, optNorm, settings.GradientThreshold, test)
 			}
 		}
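
Aside from the Local-to-Global rewiring, the test changes above are the standard table-driven indexing idiom: capture the range index (cas) so each failure message names the offending table entry. A self-contained illustration of the pattern (hypothetical double function and test table, not the gonum tests):

package demo

import "testing"

func double(x int) int { return 2 * x }

func TestDouble(t *testing.T) {
	tests := []struct{ in, want int }{
		{in: 1, want: 2},
		{in: 3, want: 6},
		{in: -4, want: -8},
	}
	for cas, test := range tests {
		if got := double(test.in); got != test.want {
			// Reporting the case index makes a failure in a long
			// table immediately attributable to a single entry.
			t.Errorf("Case %d: double(%d) = %d, want %d", cas, test.in, got, test.want)
		}
	}
}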