optimize: Change Settings to allow InitialLocation (#497)
* optimize: Change Settings to allow InitialLocation

This modifies Settings to allow specifying an initial location and initial properties of the function (value, gradient, etc.), which makes it possible to work with local optimizers that are seeded with initial data. Two fields must be specified together, InitX and InitValues. Ideally this would be a single location, but the difficulty is that the zero value of the function value is 0, so an unset value cannot be distinguished from a genuine value of 0. We must either require the user to indicate that the value is set (in this case, that InitValues is non-zero), or require the user to move the value away from the default when it is not set. The former seems much safer.
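The zero-value problem the message describes is easy to see in a short sketch. The types below are hypothetical, written only to illustrate the reasoning, not the package's API: a plain float64 field holding 0 is indistinguishable from one that was never set, so a separate marker (a pointer here, the requirement that InitValues be set in the commit) is needed to say "this value really was provided".

```go
package main

import "fmt"

// ambiguous is a hypothetical settings struct: F == 0 could mean
// "no initial value was provided" or "the objective value is exactly 0".
type ambiguous struct {
	F float64
}

// explicit is a hypothetical alternative: a nil F means "unset", so an
// explicitly provided zero remains distinguishable. The commit takes the
// analogous route of requiring InitValues to be set.
type explicit struct {
	F *float64
}

func main() {
	var a ambiguous
	fmt.Println(a.F == 0) // true, but is it unset or truly zero?

	var e explicit
	fmt.Println(e.F == nil) // true: clearly unset
	zero := 0.0
	e.F = &zero
	fmt.Println(e.F != nil, *e.F) // true 0: an explicitly provided zero
}
```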
@@ -4,7 +4,11 @@
 
 package optimize
 
-import "math"
+import (
+	"math"
+
+	"gonum.org/v1/gonum/floats"
+)
 
 // Local finds a local minimum of a minimization problem using a sequential
 // algorithm. A maximization problem can be transformed into a minimization
@@ -62,6 +66,10 @@ func Local(p Problem, initX []float64, settings *Settings, method Method) (*Resu
 	if settings == nil {
 		settings = DefaultSettings()
 	}
+	// Check that the initial location matches the one in settings.
+	if settings.InitX != nil && !floats.Equal(settings.InitX, initX) {
+		panic("local: initX does not match settings x location")
+	}
 	lg := &localGlobal{
 		Method: method,
 		InitX:  initX,
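From the caller's side, the guard above means a non-nil settings.InitX must agree elementwise with the initX argument. A minimal sketch of the same check using floats.Equal from gonum (the surrounding variable names are made up for illustration):

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/floats"
)

func main() {
	initX := []float64{1, 2, 3}         // argument passed to Local
	settingsInitX := []float64{1, 2, 3} // hypothetical Settings.InitX

	// Mirrors the guard added in the diff: a non-nil settings location
	// must match the explicit argument exactly, otherwise Local panics.
	if settingsInitX != nil && !floats.Equal(settingsInitX, initX) {
		panic("local: initX does not match settings x location")
	}
	fmt.Println("initial locations agree")
}
```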
@@ -193,38 +201,24 @@ func (l *localGlobal) cleanup(operation chan<- GlobalTask, result <-chan GlobalT
 
 func (l *localGlobal) getStartingLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) Operation {
 	copy(task.X, l.InitX)
-	if l.Settings.UseInitialData {
-		task.F = l.Settings.InitialValue
-		if task.Gradient != nil {
-			g := l.Settings.InitialGradient
-			if g == nil {
-				panic("optimize: initial gradient is nil")
-			}
-			if len(g) != l.dim {
-				panic("optimize: initial gradient size mismatch")
-			}
-			copy(task.Gradient, g)
-		}
-		if task.Hessian != nil {
-			h := l.Settings.InitialHessian
-			if h == nil {
-				panic("optimize: initial Hessian is nil")
-			}
-			if h.Symmetric() != l.dim {
-				panic("optimize: initial Hessian size mismatch")
-			}
-			task.Hessian.CopySym(h)
-		}
-	}
-	eval := FuncEvaluation
-	if task.Gradient != nil {
-		eval |= GradEvaluation
-	}
-	if task.Hessian != nil {
-		eval |= HessEvaluation
-	}
-	task.Op = eval
+	// Construct the operation by what is missing.
+	needs := l.Method.Needs()
+	initOp := task.Op
+	op := NoOperation
+	if initOp&FuncEvaluation == 0 {
+		op |= FuncEvaluation
+	}
+	if needs.Gradient && initOp&GradEvaluation == 0 {
+		op |= GradEvaluation
+	}
+	if needs.Hessian && initOp&HessEvaluation == 0 {
+		op |= HessEvaluation
+	}
+
+	if op == NoOperation {
+		return NoOperation
+	}
+	task.Op = op
 	operation <- task
 	task = <-result
 	return task.Op
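The new getStartingLocation body implements a "request only what is missing" pattern: the task may already carry some evaluated quantities (marked in task.Op), and only quantities the method needs but does not yet have are requested. A self-contained sketch of that bit-flag logic follows, with minimal stand-in types; the real Operation constants and Method.Needs live in the optimize package and differ in detail.

```go
package main

import "fmt"

// Operation is a stand-in for optimize.Operation: a bit set of
// evaluation requests.
type Operation uint64

const (
	NoOperation    Operation = 0
	FuncEvaluation Operation = 1 << iota // 1 << 1
	GradEvaluation                       // 1 << 2
	HessEvaluation                       // 1 << 3
)

// needs is a stand-in for the struct returned by Method.Needs().
type needs struct {
	Gradient, Hessian bool
}

// missingOps mirrors the logic in the diff: request an evaluation only
// for quantities the method needs and the initial data does not provide.
func missingOps(n needs, initOp Operation) Operation {
	op := NoOperation
	if initOp&FuncEvaluation == 0 {
		op |= FuncEvaluation
	}
	if n.Gradient && initOp&GradEvaluation == 0 {
		op |= GradEvaluation
	}
	if n.Hessian && initOp&HessEvaluation == 0 {
		op |= HessEvaluation
	}
	return op
}

func main() {
	// The caller seeded a function value but no gradient; a gradient-based
	// method therefore requests only a gradient evaluation.
	op := missingOps(needs{Gradient: true}, FuncEvaluation)
	fmt.Println(op == GradEvaluation) // true
}
```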