mirror of
https://github.com/gonum/gonum.git
synced 2025-10-05 15:16:59 +08:00

* optimize: Refactor gradient convergence and remove DefaultSettings The current API design makes it easy to make a mistake in not using the DefaultSettings. This change makes the zero value of Settings do the 'right thing'. The remaining setting that is used by the DefaultSettings is to change the behavior of the GradientTolerance. This was necessary because gradient-based Local methods (BFGS, LBFGS, CG, etc.) typically _define_ convergence by the value of the gradient, while Global methods (CMAES, GuessAndCheck) are defined by _not_ converging when the gradient is small. The problem is to have two completely different default behaviors without knowing the Method. The solution is to treat a very small value of the gradient as a method-based convergence, in the same way that a small spread of data is a convergence of CMAES. Thus, the default behavior, from the perspective of Settings, is never to converge based on the gradient, but all of the Local methods will converge when a value close to the minimum is found. This default value is set to a very small value, such that users should not want a smaller value. A user can thus still set a (more reasonable) convergence value through settings. Fixes gonum/gonum#677.
87 lines
2.3 KiB
Go
87 lines
2.3 KiB
Go
// Copyright ©2014 The Gonum Authors. All rights reserved.
|
|
// Use of this source code is governed by a BSD-style
|
|
// license that can be found in the LICENSE file.
|
|
|
|
package optimize
|
|
|
|
import "gonum.org/v1/gonum/floats"
|
|
|
|
// GradientDescent implements the steepest descent optimization method that
// performs successive steps along the direction of the negative gradient.
type GradientDescent struct {
	// Linesearcher selects suitable steps along the descent direction.
	// If Linesearcher is nil, a reasonable default will be chosen.
	Linesearcher Linesearcher

	// StepSizer determines the initial step size along each direction.
	// If StepSizer is nil, a reasonable default will be chosen.
	StepSizer StepSizer

	// GradStopThreshold sets the threshold for stopping if the gradient norm
	// gets too small. If GradStopThreshold is 0 it is defaulted to 1e-12, and
	// if it is NaN the setting is not used.
	GradStopThreshold float64

	// ls wraps Linesearcher in the generic line-search iteration machinery.
	// It is constructed lazily on the first call to initLocal.
	ls *LinesearchMethod

	// status and err record the outcome of the most recent Run and are
	// reported to callers via Status.
	status Status
	err    error
}
|
|
|
|
func (g *GradientDescent) Status() (Status, error) {
|
|
return g.status, g.err
|
|
}
|
|
|
|
func (g *GradientDescent) Init(dim, tasks int) int {
|
|
g.status = NotTerminated
|
|
g.err = nil
|
|
return 1
|
|
}
|
|
|
|
func (g *GradientDescent) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
|
|
g.status, g.err = localOptimizer{}.run(g, g.GradStopThreshold, operation, result, tasks)
|
|
close(operation)
|
|
return
|
|
}
|
|
|
|
func (g *GradientDescent) initLocal(loc *Location) (Operation, error) {
|
|
if g.Linesearcher == nil {
|
|
g.Linesearcher = &Backtracking{}
|
|
}
|
|
if g.StepSizer == nil {
|
|
g.StepSizer = &QuadraticStepSize{}
|
|
}
|
|
|
|
if g.ls == nil {
|
|
g.ls = &LinesearchMethod{}
|
|
}
|
|
g.ls.Linesearcher = g.Linesearcher
|
|
g.ls.NextDirectioner = g
|
|
|
|
return g.ls.Init(loc)
|
|
}
|
|
|
|
func (g *GradientDescent) iterateLocal(loc *Location) (Operation, error) {
|
|
return g.ls.Iterate(loc)
|
|
}
|
|
|
|
func (g *GradientDescent) InitDirection(loc *Location, dir []float64) (stepSize float64) {
|
|
copy(dir, loc.Gradient)
|
|
floats.Scale(-1, dir)
|
|
return g.StepSizer.Init(loc, dir)
|
|
}
|
|
|
|
func (g *GradientDescent) NextDirection(loc *Location, dir []float64) (stepSize float64) {
|
|
copy(dir, loc.Gradient)
|
|
floats.Scale(-1, dir)
|
|
return g.StepSizer.StepSize(loc, dir)
|
|
}
|
|
|
|
func (*GradientDescent) Needs() struct {
|
|
Gradient bool
|
|
Hessian bool
|
|
} {
|
|
return struct {
|
|
Gradient bool
|
|
Hessian bool
|
|
}{true, false}
|
|
}
|