Mirror of https://github.com/gonum/gonum.git, synced 2025-10-04 06:46:29 +08:00
optimize: Remove Local function (#538)
* optimize: Remove Local function. This change removes the Local function. To do so, it replaces the previous localGlobal wrapper with the localOptimizer helper, which allows local Methods to be driven as a Global optimizer. It adds the GlobalMethod methods (Status, InitGlobal, RunGlobal) to each of the local Methods and updates the tests accordingly. The next commit will fix all of the names.
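The change is mechanical and identical for every local method. As a sketch (the myMethod name and its status/err fields are hypothetical; the three methods mirror the BFGS, CG, LBFGS, GradientDescent, NelderMead, and Newton additions below), a local Method inside the optimize package becomes usable as a GlobalMethod by delegating its run loop to localOptimizer:

    // Sketch only: myMethod is a hypothetical in-package type that already
    // implements Method (Init, Iterate, Needs) and carries status/err fields.
    func (m *myMethod) Status() (Status, error) {
        return m.status, m.err
    }

    func (m *myMethod) InitGlobal(dim, tasks int) int {
        m.status = NotTerminated
        m.err = nil
        return 1 // Local optimizations always run in serial.
    }

    func (m *myMethod) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
        // localOptimizer drives the Init/Iterate loop; closing operation only
        // after runGlobal returns lets Global read the final status via Status.
        m.status, m.err = localOptimizer{}.runGlobal(m, operation, result, tasks)
        close(operation)
    }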
@@ -23,6 +23,9 @@ type BFGS struct {
    ls *LinesearchMethod

    status Status
    err error

    dim int
    x mat.VecDense // Location of the last major iteration.
    grad mat.VecDense // Gradient at the last major iteration.
@@ -35,6 +38,22 @@ type BFGS struct {
    first bool // Indicator of the first iteration.
}

func (b *BFGS) Status() (Status, error) {
    return b.status, b.err
}

func (b *BFGS) InitGlobal(dim, tasks int) int {
    b.status = NotTerminated
    b.err = nil
    return 1
}

func (b *BFGS) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
    b.status, b.err = localOptimizer{}.runGlobal(b, operation, result, tasks)
    close(operation)
    return
}

func (b *BFGS) Init(loc *Location) (Operation, error) {
    if b.Linesearcher == nil {
        b.Linesearcher = &Bisection{}
@@ -90,6 +90,9 @@ type CG struct {
    ls *LinesearchMethod

    status Status
    err error

    restartAfter int
    iterFromRestart int
@@ -98,6 +101,22 @@ type CG struct {
    gradPrevNorm float64
}

func (cg *CG) Status() (Status, error) {
    return cg.status, cg.err
}

func (cg *CG) InitGlobal(dim, tasks int) int {
    cg.status = NotTerminated
    cg.err = nil
    return 1
}

func (cg *CG) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
    cg.status, cg.err = localOptimizer{}.runGlobal(cg, operation, result, tasks)
    close(operation)
    return
}

func (cg *CG) Init(loc *Location) (Operation, error) {
    if cg.IterationRestartFactor < 0 {
        panic("cg: IterationRestartFactor is negative")
@@ -115,7 +115,7 @@ type GlobalMethod interface {
func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Result, error) {
    startTime := time.Now()
    if method == nil {
        method = &GuessAndCheck{}
        method = getDefaultMethod(&p)
    }
    if settings == nil {
        settings = DefaultSettingsGlobal()
@@ -161,6 +161,13 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
    }, err
}

func getDefaultMethod(p *Problem) GlobalMethod {
    if p.Grad != nil {
        return &BFGS{}
    }
    return &NelderMead{}
}

// minimizeGlobal performs a Global optimization. minimizeGlobal updates the
// settings and optLoc, and returns the final Status and error.
func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
@@ -278,6 +285,7 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
    )

    // Update optimization statistics and check convergence.
    var methodDone bool
    for task := range statsChan {
        switch task.Op {
        default:
@@ -297,14 +305,8 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
        case MajorIteration:
            status = performMajorIteration(optLoc, task.Location, stats, startTime, settings)
        case MethodDone:
            statuser, ok := method.(Statuser)
            if !ok {
                panic("optimize: global method returned MethodDone but is not a Statuser")
            }
            status, err = statuser.Status()
            if status == NotTerminated {
                panic("optimize: global method returned MethodDone but a NotTerminated status")
            }
            methodDone = true
            status = MethodConverge
        }
        if settings.Recorder != nil && status == NotTerminated && err == nil {
            stats.Runtime = time.Since(startTime)
@@ -334,5 +336,17 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
            results <- task
        }
    }
    // This code block is here rather than above to ensure Status() is not called
    // before Method.RunGlobal closes operations.
    if methodDone {
        statuser, ok := method.(Statuser)
        if !ok {
            panic("optimize: global method returned MethodDone but is not a Statuser")
        }
        finalStatus, finalError = statuser.Status()
        if finalStatus == NotTerminated {
            panic("optimize: global method returned MethodDone but a NotTerminated status")
        }
    }
    return finalStatus, finalError
}
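For callers, the practical effect of the getDefaultMethod change is that Global can be handed a nil method and still run a sensible local optimizer. A minimal usage sketch (assuming the API at this commit; DefaultSettingsGlobal, Settings.InitX, and the Result fields are taken from elsewhere in this diff and from the package, so treat the details as illustrative rather than definitive):

    package main

    import (
        "fmt"
        "log"

        "gonum.org/v1/gonum/optimize"
        "gonum.org/v1/gonum/optimize/functions"
    )

    func main() {
        p := optimize.Problem{
            Func: functions.ExtendedRosenbrock{}.Func,
            Grad: functions.ExtendedRosenbrock{}.Grad,
        }
        x := []float64{1.3, 0.7, 0.8, 1.9, 1.2}

        settings := optimize.DefaultSettingsGlobal()
        settings.InitX = x // starting point for the wrapped local method

        // method == nil: Global selects &BFGS{} because p.Grad is non-nil.
        result, err := optimize.Global(p, len(x), settings, nil)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(result.Status, result.X)
    }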
@@ -17,6 +17,25 @@ type GradientDescent struct {
    StepSizer StepSizer

    ls *LinesearchMethod

    status Status
    err error
}

func (g *GradientDescent) Status() (Status, error) {
    return g.status, g.err
}

func (g *GradientDescent) InitGlobal(dim, tasks int) int {
    g.status = NotTerminated
    g.err = nil
    return 1
}

func (g *GradientDescent) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
    g.status, g.err = localOptimizer{}.runGlobal(g, operation, result, tasks)
    close(operation)
    return
}

func (g *GradientDescent) Init(loc *Location) (Operation, error) {
@@ -27,6 +27,9 @@ type LBFGS struct {
    // If Store is 0, it will be defaulted to 15.
    Store int

    status Status
    err error

    ls *LinesearchMethod

    dim int // Dimension of the problem
@@ -41,6 +44,22 @@ type LBFGS struct {
    a []float64 // Cache of Hessian updates
}

func (l *LBFGS) Status() (Status, error) {
    return l.status, l.err
}

func (l *LBFGS) InitGlobal(dim, tasks int) int {
    l.status = NotTerminated
    l.err = nil
    return 1
}

func (l *LBFGS) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
    l.status, l.err = localOptimizer{}.runGlobal(l, operation, result, tasks)
    close(operation)
    return
}

func (l *LBFGS) Init(loc *Location) (Operation, error) {
    if l.Linesearcher == nil {
        l.Linesearcher = &Bisection{}
@@ -6,220 +6,118 @@ package optimize

import (
    "math"

    "gonum.org/v1/gonum/floats"
)

// Local finds a local minimum of a minimization problem using a sequential
// algorithm. A maximization problem can be transformed into a minimization
// problem by multiplying the function by -1.
//
// The first argument represents the problem to be minimized. Its fields are
// routines that evaluate the objective function, gradient, and other
// quantities related to the problem. The objective function, p.Func, must not
// be nil. The optimization method used may require other fields to be non-nil
// as specified by method.Needs. Local will panic if these are not met. The
// method can be determined automatically from the supplied problem which is
// described below.
//
// If p.Status is not nil, it is called before every evaluation. If the
// returned Status is not NotTerminated or the error is not nil, the
// optimization run is terminated.
//
// The second argument is the initial location at which to start the minimization.
// The initial location must be supplied, and must have a length equal to the
// problem dimension.
//
// The third argument contains the settings for the minimization. It is here that
// gradient tolerance, etc. are specified. The DefaultSettings function
// can be called for a Settings struct with the default values initialized.
// If settings == nil, the default settings are used. See the documentation
// for the Settings structure for more information. The optimization Method used
// may also contain settings, see documentation for the appropriate optimizer.
//
// The final argument is the optimization method to use. If method == nil, then
// an appropriate default is chosen based on the properties of the other arguments
// (dimension, gradient-free or gradient-based, etc.). The optimization
// methods in this package are designed such that reasonable defaults occur
// if options are not specified explicitly. For example, the code
//  method := &optimize.BFGS{}
// creates a pointer to a new BFGS struct. When Local is called, the settings
// in the method will be populated with default values. The methods are also
// designed such that they can be reused in future calls to Local.
//
// If method implements Statuser, method.Status is called before every call
// to method.Iterate. If the returned Status is not NotTerminated or the
// error is non-nil, the optimization run is terminated.
//
// Local returns a Result struct and any error that occurred. See the
// documentation of Result for more information.
//
// Be aware that the default behavior of Local is to find the minimum.
// For certain functions and optimization methods, this process can take many
// function evaluations. If you would like to put limits on this, for example
// maximum runtime or maximum function evaluations, modify the Settings
// input struct.
func Local(p Problem, initX []float64, settings *Settings, method Method) (*Result, error) {
    if method == nil {
        method = getDefaultMethod(&p)
    }
    if settings == nil {
        settings = DefaultSettings()
    }
    // Check that the initial location matches the one in settings.
    if settings.InitX != nil && !floats.Equal(settings.InitX, initX) {
        panic("local: initX does not match settings x location")
    }
    lg := &localGlobal{
        Method: method,
        InitX: initX,
        Settings: settings,
    }
    return Global(p, len(initX), settings, lg)
}
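A call site therefore migrates as shown in the example and test changes further down in this diff; a short before/after fragment (p, x, and settings as in the ExampleMinimize change below, not a standalone program):

    // Before this change:
    //  result, err := optimize.Local(p, x, settings, &optimize.BFGS{})

    // After this change the initial point travels through Settings.InitX and
    // the problem dimension is passed explicitly:
    settings.InitX = x
    result, err := optimize.Global(p, len(x), settings, &optimize.BFGS{})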
// localOptimizer is a helper type for running an optimization using a LocalMethod.
type localOptimizer struct{}

func getDefaultMethod(p *Problem) Method {
    if p.Grad != nil {
        return &BFGS{}
    }
    return &NelderMead{}
}

// localGlobal is a wrapper for Local methods to allow them to be optimized by Global.
type localGlobal struct {
    Method Method
    InitX []float64
    Settings *Settings

    dim int
    status Status
    err error
}

func (l *localGlobal) InitGlobal(dim, tasks int) int {
    if dim != len(l.InitX) {
        panic("optimize: initial length mismatch")
    }
    l.dim = dim
    l.status = NotTerminated
    l.err = nil
    return 1 // Local optimizations always run in serial.
}

func (l *localGlobal) Status() (Status, error) {
    return l.status, l.err
}

func (l *localGlobal) Needs() struct {
    Gradient bool
    Hessian bool
} {
    return l.Method.Needs()
}

func (l *localGlobal) RunGlobal(operations chan<- GlobalTask, results <-chan GlobalTask, tasks []GlobalTask) {
// RunGlobal controls the optimization run for a LocalMethod. The calling method
// must close the operation channel at the conclusion of the optimization. This
// provides a happens before relationship between the return of status and the
// closure of operation, and thus a call to method.Status (if necessary).
func (l localOptimizer) runGlobal(method Method, operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) (Status, error) {
    // Local methods start with a fully-specified initial location.
    task := tasks[0]
    op := l.getStartingLocation(operations, results, task)
    if op == PostIteration {
        l.cleanup(operations, results)
        return
    task = l.initialLocation(operation, result, task, method)
    if task.Op == PostIteration {
        l.finish(operation, result)
        return NotTerminated, nil
    }
    // Check the starting condition.
    if math.IsInf(task.F, 1) || math.IsNaN(task.F) {
        l.status = Failure
        l.err = ErrFunc(task.F)
    }
    for i, v := range task.Gradient {
        if math.IsInf(v, 0) || math.IsNaN(v) {
            l.status = Failure
            l.err = ErrGrad{Grad: v, Index: i}
            break
        }
    }
    if l.status == Failure {
        l.exitFailure(operations, results, tasks[0])
        return
    status, err := l.checkStartingLocation(task)
    if err != nil {
        l.finishMethodDone(operation, result, task)
        return status, err
    }

    // Send a major iteration with the starting location.
    task.Op = MajorIteration
    operations <- task
    task = <-results
    operation <- task
    task = <-result
    if task.Op == PostIteration {
        l.cleanup(operations, results)
        return
        l.finish(operation, result)
        return NotTerminated, nil
    }

    op, err := l.Method.Init(task.Location)
    op, err := method.Init(task.Location)
    if err != nil {
        l.status = Failure
        l.err = err
        l.exitFailure(operations, results, tasks[0])
        return
        l.finishMethodDone(operation, result, task)
        return Failure, err
    }
    task.Op = op
    operations <- task
    operation <- task
Loop:
    for {
        result := <-results
        switch result.Op {
        r := <-result
        switch r.Op {
        case PostIteration:
            break Loop
        default:
            op, err := l.Method.Iterate(result.Location)
            op, err := method.Iterate(r.Location)
            if err != nil {
                l.status = Failure
                l.err = err
                l.exitFailure(operations, results, result)
                return
                l.finishMethodDone(operation, result, r)
                return Failure, err
            }
            result.Op = op
            operations <- result
            r.Op = op
            operation <- r
        }
    }
    l.cleanup(operations, results)
    l.finish(operation, result)
    return NotTerminated, nil
}

// exitFailure cleans up from a failure of the local method.
func (l *localGlobal) exitFailure(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) {
// initialOperation returns the Operation needed to fill the initial location
// based on the needs of the method and the values already supplied.
func (localOptimizer) initialOperation(task GlobalTask, needser Needser) Operation {
    var newOp Operation
    op := task.Op
    if op&FuncEvaluation == 0 {
        newOp |= FuncEvaluation
    }
    needs := needser.Needs()
    if needs.Gradient && op&GradEvaluation == 0 {
        newOp |= GradEvaluation
    }
    if needs.Hessian && op&HessEvaluation == 0 {
        newOp |= HessEvaluation
    }
    return newOp
}
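A worked illustration of the flag composition above (standalone sketch, not part of the diff): if the first task already carries a function value and the method needs gradients but not Hessians, only GradEvaluation remains to be requested.

    package main

    import (
        "fmt"

        "gonum.org/v1/gonum/optimize"
    )

    func main() {
        have := optimize.FuncEvaluation // the initial task already has F filled in
        needsGradient := true           // e.g. BFGS
        needsHessian := false

        var missing optimize.Operation
        if have&optimize.FuncEvaluation == 0 {
            missing |= optimize.FuncEvaluation
        }
        if needsGradient && have&optimize.GradEvaluation == 0 {
            missing |= optimize.GradEvaluation
        }
        if needsHessian && have&optimize.HessEvaluation == 0 {
            missing |= optimize.HessEvaluation
        }
        fmt.Println(missing == optimize.GradEvaluation) // true: only the gradient is requested
    }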
// initialLocation fills the initial location based on the needs of the method.
// The task passed to initialLocation should be the first task sent in RunGlobal.
func (l localOptimizer) initialLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask, needser Needser) GlobalTask {
    task.Op = l.initialOperation(task, needser)
    operation <- task
    return <-result
}

func (localOptimizer) checkStartingLocation(task GlobalTask) (Status, error) {
    if math.IsInf(task.F, 1) || math.IsNaN(task.F) {
        return Failure, ErrFunc(task.F)
    }
    for i, v := range task.Gradient {
        if math.IsInf(v, 0) || math.IsNaN(v) {
            return Failure, ErrGrad{Grad: v, Index: i}
        }
    }
    return NotTerminated, nil
}

// finish completes the channel operations to finish an optimization.
func (localOptimizer) finish(operation chan<- GlobalTask, result <-chan GlobalTask) {
    // Guarantee that result is closed before operation is closed.
    for range result {
    }
}

// finishMethodDone sends a MethodDone signal on operation, reads the result,
// and completes the channel operations to finish an optimization.
func (l localOptimizer) finishMethodDone(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) {
    task.Op = MethodDone
    operation <- task
    task = <-result
    if task.Op != PostIteration {
        panic("task should have returned post iteration")
        panic("optimize: task should have returned post iteration")
    }
    l.cleanup(operation, result)
}

func (l *localGlobal) cleanup(operation chan<- GlobalTask, result <-chan GlobalTask) {
    // Guarantee that result is closed before operation is closed.
    for range result {
    }
    close(operation)
}

func (l *localGlobal) getStartingLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) Operation {
    copy(task.X, l.InitX)
    // Construct the operation by what is missing.
    needs := l.Method.Needs()
    initOp := task.Op
    op := NoOperation
    if initOp&FuncEvaluation == 0 {
        op |= FuncEvaluation
    }
    if needs.Gradient && initOp&GradEvaluation == 0 {
        op |= GradEvaluation
    }
    if needs.Hessian && initOp&HessEvaluation == 0 {
        op |= HessEvaluation
    }

    if op == NoOperation {
        return NoOperation
    }
    task.Op = op
    operation <- task
    task = <-result
    return task.Op
    l.finish(operation, result)
}
@@ -12,7 +12,7 @@ import (
    "gonum.org/v1/gonum/optimize/functions"
)

func ExampleLocal() {
func ExampleMinimize() {
    p := optimize.Problem{
        Func: functions.ExtendedRosenbrock{}.Func,
        Grad: functions.ExtendedRosenbrock{}.Grad,
@@ -23,8 +23,9 @@ func ExampleLocal() {
    settings.Recorder = nil
    settings.GradientThreshold = 1e-12
    settings.FunctionConverge = nil
    settings.InitX = x

    result, err := optimize.Local(p, x, settings, &optimize.BFGS{})
    result, err := optimize.Global(p, len(x), settings, &optimize.BFGS{})
    if err != nil {
        log.Fatal(err)
    }
@@ -67,6 +67,9 @@ type NelderMead struct {
    Shrink float64 // Shrink parameter (>0, <1)
    SimplexSize float64 // size of auto-constructed initial simplex

    status Status
    err error

    reflection float64
    expansion float64
    contraction float64
@@ -82,6 +85,22 @@ type NelderMead struct {
    reflectedValue float64 // Value at the last reflection point
}

func (n *NelderMead) Status() (Status, error) {
    return n.status, n.err
}

func (n *NelderMead) InitGlobal(dim, tasks int) int {
    n.status = NotTerminated
    n.err = nil
    return 1
}

func (n *NelderMead) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
    n.status, n.err = localOptimizer{}.runGlobal(n, operation, result, tasks)
    close(operation)
    return
}

func (n *NelderMead) Init(loc *Location) (Operation, error) {
    dim := len(loc.X)
    if cap(n.vertices) < dim+1 {
@@ -46,6 +46,9 @@ type Newton struct {
    // Increase must be greater than 1. If Increase is 0, it is defaulted to 5.
    Increase float64

    status Status
    err error

    ls *LinesearchMethod

    hess *mat.SymDense // Storage for a copy of the Hessian matrix.
@@ -53,6 +56,22 @@ type Newton struct {
    tau float64
}

func (n *Newton) Status() (Status, error) {
    return n.status, n.err
}

func (n *Newton) InitGlobal(dim, tasks int) int {
    n.status = NotTerminated
    n.err = nil
    return 1
}

func (n *Newton) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
    n.status, n.err = localOptimizer{}.runGlobal(n, operation, result, tasks)
    close(operation)
    return
}

func (n *Newton) Init(loc *Location) (Operation, error) {
    if n.Increase == 0 {
        n.Increase = 5
@@ -1154,7 +1154,7 @@ func TestNewton(t *testing.T) {
    testLocal(t, newtonTests, &Newton{})
}

func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
func testLocal(t *testing.T, tests []unconstrainedTest, method GlobalMethod) {
    for cas, test := range tests {
        if test.long && testing.Short() {
            continue
@@ -1180,7 +1180,9 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
        }
        settings.GradientThreshold = test.gradTol

        result, err := Local(test.p, test.x, settings, method)
        dim := len(test.x)
        settings.InitX = test.x
        result, err := Global(test.p, dim, settings, method)
        if err != nil {
            t.Errorf("Case %d: error finding minimum (%v) for:\n%v", cas, err, test)
            continue
@@ -1244,7 +1246,7 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {

        // Rerun the test again to make sure that it gets the same answer with
        // the same starting condition. Moreover, we are using the initial data.
        result2, err2 := Local(test.p, test.x, settings, method)
        result2, err2 := Global(test.p, dim, settings, method)
        if err2 != nil {
            t.Errorf("error finding minimum second time (%v) for:\n%v", err2, test)
            continue
@@ -1296,7 +1298,8 @@ func TestIssue76(t *testing.T) {
        Linesearcher: &Backtracking{},
    }
    // We are not interested in the error, only in the returned status.
    r, _ := Local(p, x, s, m)
    s.InitX = x
    r, _ := Global(p, len(x), s, m)
    // With the above stringent tolerance, the optimizer will never
    // successfully reach the minimum. Check if it terminated in a finite
    // number of steps.
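The advice from the removed Local doc comment about bounding a run still applies when going through Global. A hedged fragment (the MajorIterations, Runtime, and FuncEvaluations field names are assumed from the optimize package's Settings struct and do not appear in this diff; p and x as above):

    settings := optimize.DefaultSettingsGlobal()
    settings.InitX = x
    settings.MajorIterations = 100      // stop after 100 major iterations,
    settings.Runtime = 10 * time.Second // or 10 seconds of wall time,
    settings.FuncEvaluations = 10000    // or 10000 calls to p.Func.
    result, err := optimize.Global(p, len(x), settings, nil)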