Change Minimize to take in an initial X location
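In short: Minimize now takes the initial location as its second argument instead of the problem dimension, and Settings.InitX is removed; the length of the supplied slice determines the dimension. A minimal sketch of the new call follows; the objective, the starting point, and the choice of NelderMead are illustrative and not taken from this commit.

package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	// A small unconstrained problem; only Func is needed for NelderMead.
	p := optimize.Problem{
		Func: func(x []float64) float64 {
			return (x[0]-2)*(x[0]-2) + (x[1]+1)*(x[1]+1)
		},
	}

	// The starting point is now the second argument to Minimize and its
	// length fixes the dimension; previously the call took a dim int and
	// the start point was set through Settings.InitX.
	x := []float64{10, 10}
	result, err := optimize.Minimize(p, x, nil, &optimize.NelderMead{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("minimum found at %v\n", result.X)
}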
@@ -190,21 +190,25 @@ func TestCmaEsChol(t *testing.T) {
 		src := rand.New(rand.NewSource(1))
 		method := test.method
 		method.Src = src
+		initX := method.InitMean
+		if initX == nil {
+			initX = make([]float64, test.dim)
+		}
 		// Run and check that the expected termination occurs.
-		result, err := Minimize(test.problem, test.dim, test.settings, method)
+		result, err := Minimize(test.problem, initX, test.settings, method)
 		if testErr := test.good(result, err, test.settings.Concurrent); testErr != nil {
 			t.Errorf("cas %d: %v", i, testErr)
 		}

 		// Run a second time to make sure there are no residual effects
-		result, err = Minimize(test.problem, test.dim, test.settings, method)
+		result, err = Minimize(test.problem, initX, test.settings, method)
 		if testErr := test.good(result, err, test.settings.Concurrent); testErr != nil {
 			t.Errorf("cas %d second: %v", i, testErr)
 		}

 		// Test the problem in parallel.
 		test.settings.Concurrent = 5
-		result, err = Minimize(test.problem, test.dim, test.settings, method)
+		result, err = Minimize(test.problem, initX, test.settings, method)
 		if testErr := test.good(result, err, test.settings.Concurrent); testErr != nil {
 			t.Errorf("cas %d concurrent: %v", i, testErr)
 		}
@@ -76,9 +76,9 @@ type Method interface {
 	Run(operation chan<- Task, result <-chan Task, tasks []Task)
 }

-// Minimize uses an optimizer to search for the minimum of a
-// function. A maximization problem can be transformed into a
-// minimization problem by multiplying the function by -1.
+// Minimize uses an optimizer to search for the minimum of a function. A
+// maximization problem can be transformed into a minimization problem by
+// multiplying the function by -1.
 //
 // The first argument represents the problem to be minimized. Its fields are
 // routines that evaluate the objective function, gradient, and other
@@ -92,6 +92,10 @@ type Method interface {
 // returned Status is other than NotTerminated or if the error is not nil, the
 // optimization run is terminated.
 //
+// The second argument specifies the initial location for the optimization.
+// Some Methods do not require an initial location, but initX must still be
+// specified for the dimension of the optimization problem.
+//
 // The third argument contains the settings for the minimization. The
 // DefaultLocalSettings and DefaultGlobalSettings functions can be called for
 // different default settings depending on the optimization method. If
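The added documentation deserves an example: a Method that draws its own candidate locations still needs initX so that Minimize knows the problem dimension. The sketch below mirrors the GuessAndCheck test further down in this diff; the boxRander type is a made-up stand-in for a gonum stat/distmv distribution and is not part of the library.

package main

import (
	"fmt"
	"log"
	"math/rand"

	"gonum.org/v1/gonum/optimize"
)

// boxRander draws points uniformly from [-1, 1]^dim. It satisfies the
// Rander interface that GuessAndCheck expects; a real program would more
// likely use a distribution from gonum's stat/distmv package.
type boxRander struct {
	rnd *rand.Rand
	dim int
}

func (b boxRander) Rand(x []float64) []float64 {
	if x == nil {
		x = make([]float64, b.dim)
	}
	for i := range x {
		x[i] = 2*b.rnd.Float64() - 1
	}
	return x
}

func main() {
	dim := 3
	problem := optimize.Problem{
		Func: func(x []float64) float64 {
			var sum float64
			for _, v := range x {
				sum += v * v
			}
			return sum
		},
	}

	// GuessAndCheck ignores the starting point and samples from its Rander,
	// but initX must still be passed so that the dimension is known.
	initX := make([]float64, dim)
	d := boxRander{rnd: rand.New(rand.NewSource(1)), dim: dim}

	result, err := optimize.Minimize(problem, initX, nil, &optimize.GuessAndCheck{Rander: d})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.F)
}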
@@ -116,7 +120,7 @@ type Method interface {
 // minimum. For certain functions and optimization methods, this can take many
 // function evaluations. The Settings input struct can be used to limit this,
 // for example by modifying the maximum function evaluations or gradient tolerance.
-func Minimize(p Problem, dim int, settings *Settings, method Method) (*Result, error) {
+func Minimize(p Problem, initX []float64, settings *Settings, method Method) (*Result, error) {
 	startTime := time.Now()
 	if method == nil {
 		method = getDefaultMethod(&p)
@@ -125,6 +129,7 @@ func Minimize(p Problem, dim int, settings *Settings, method Method) (*Result, e
 		settings = DefaultSettingsLocal()
 	}
 	stats := &Stats{}
+	dim := len(initX)
 	err := checkOptimization(p, dim, method, settings.Recorder)
 	if err != nil {
 		return nil, err
@@ -137,7 +142,7 @@ func Minimize(p Problem, dim int, settings *Settings, method Method) (*Result, e
 		settings.FunctionConverge.Init()
 	}

-	initOp, initLoc := getInitLocation(dim, settings.InitX, settings.InitValues, method)
+	initOp, initLoc := getInitLocation(dim, initX, settings.InitValues, method)

 	stats.Runtime = time.Since(startTime)

@@ -26,10 +26,11 @@ func TestGuessAndCheck(t *testing.T) {
 	if !ok {
 		panic("bad test")
 	}
-	Minimize(problem, dim, nil, &GuessAndCheck{Rander: d})
+	initX := make([]float64, dim)
+	Minimize(problem, initX, nil, &GuessAndCheck{Rander: d})

 	settings := DefaultSettingsGlobal()
 	settings.Concurrent = 5
 	settings.MajorIterations = 15
-	Minimize(problem, dim, settings, &GuessAndCheck{Rander: d})
+	Minimize(problem, initX, settings, &GuessAndCheck{Rander: d})
 }
@@ -49,7 +49,8 @@ func TestListSearch(t *testing.T) {
 			Locs: locs,
 		}
 		settings := &Settings{}
-		result, err := Minimize(p, c, settings, method)
+		initX := make([]float64, c)
+		result, err := Minimize(p, initX, settings, method)
 		if err != nil {
 			t.Errorf("cas %v: error optimizing: %s", cas, err)
 		}
@@ -63,7 +64,7 @@ func TestListSearch(t *testing.T) {
 		// Check that the optimization works concurrently.
 		concurrent := 6
 		settings.Concurrent = concurrent
-		result, err = Minimize(p, c, settings, method)
+		result, err = Minimize(p, initX, settings, method)
 		if err != nil {
 			t.Errorf("cas %v: error optimizing: %s", cas, err)
 		}
@@ -76,7 +77,7 @@ func TestListSearch(t *testing.T) {

 		// Check that the optimization works concurrently with more than the number of samples.
 		settings.Concurrent = test.r + concurrent
-		result, err = Minimize(p, c, settings, method)
+		result, err = Minimize(p, initX, settings, method)
 		if err != nil {
 			t.Errorf("cas %v: error optimizing: %s", cas, err)
 		}
@@ -92,7 +93,7 @@ func TestListSearch(t *testing.T) {
 		swapSamples(locs, fs, minIdx, test.r-1)
 		minIdx = test.r - 1
 		settings.Concurrent = concurrent
-		result, err = Minimize(p, c, settings, method)
+		result, err = Minimize(p, initX, settings, method)
 		if err != nil {
 			t.Errorf("cas %v: error optimizing: %s", cas, err)
 		}
@@ -111,7 +112,7 @@ func TestListSearch(t *testing.T) {
 		minIdxFirst := floats.MinIdx(fs[:evals])
 		settings.Concurrent = 0
 		settings.FuncEvaluations = evals
-		result, err = Minimize(p, c, settings, method)
+		result, err = Minimize(p, initX, settings, method)
 		if err != nil {
 			t.Errorf("cas %v: error optimizing: %s", cas, err)
 		}
@@ -134,7 +135,7 @@ func TestListSearch(t *testing.T) {

 		minIdxFirst = floats.MinIdx(fs[:evals])
 		settings.Concurrent = concurrent
-		result, err = Minimize(p, c, settings, method)
+		result, err = Minimize(p, initX, settings, method)
 		if err != nil {
 			t.Errorf("cas %v: error optimizing: %s", cas, err)
 		}
@@ -23,9 +23,8 @@ func ExampleMinimize() {
 	settings.Recorder = nil
 	settings.GradientThreshold = 1e-12
 	settings.FunctionConverge = nil
-	settings.InitX = x

-	result, err := optimize.Minimize(p, len(x), settings, &optimize.BFGS{})
+	result, err := optimize.Minimize(p, x, settings, &optimize.BFGS{})
 	if err != nil {
 		log.Fatal(err)
 	}
@@ -166,13 +166,10 @@ func (p Problem) satisfies(method Needser) error {
 //
 // If Recorder is nil, no information will be recorded.
 type Settings struct {
-	// InitX specifies an initial location to communicate to the Method. If InitX
-	// is nil, then a slice of zeros is used as a default value.
-	InitX []float64
-	// InitValues specifies properties known at InitX (function value, gradient, etc.).
-	// If InitX is nil, InitValues must be also. If InitValues is non-nil, then
+	// InitValues specifies properties (function value, gradient, etc.) known
+	// at the initial location passed to Minimize. If InitValues is non-nil, then
 	// the function value F must be provided, the location X must not be specified
-	// (use InitX instead), and other fields may be specified.
+	// and other fields may be specified.
 	InitValues *Location

 	// FunctionThreshold is the threshold for acceptably small values of the
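With InitX gone from Settings, values already known at the starting point are paired with the initX argument through InitValues, as the testLocal changes further down show. A hedged sketch of that usage; the problem, start point, and method choice are illustrative and not from the commit.

package main

import (
	"fmt"
	"log"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	p := optimize.Problem{
		Func: func(x []float64) float64 {
			return (x[0]-1)*(x[0]-1) + x[1]*x[1]
		},
	}
	x := []float64{5, 5} // starting location, passed directly to Minimize

	// Values already known at x are supplied through InitValues. F must be
	// set and X must be left unset; the location itself is the initX argument.
	settings := optimize.DefaultSettingsLocal()
	settings.InitValues = &optimize.Location{F: p.Func(x)}

	result, err := optimize.Minimize(p, x, settings, &optimize.NelderMead{})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result.X)
}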
@@ -1180,9 +1180,7 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
 			}
 			settings.GradientThreshold = test.gradTol

-			dim := len(test.x)
-			settings.InitX = test.x
-			result, err := Minimize(test.p, dim, settings, method)
+			result, err := Minimize(test.p, test.x, settings, method)
 			if err != nil {
 				t.Errorf("Case %d: error finding minimum (%v) for:\n%v", cas, err, test)
 				continue
@@ -1232,7 +1230,6 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {

 			// We are going to restart the solution using known initial data, so
 			// evaluate them.
-			settings.InitX = test.x
 			settings.InitValues = &Location{}
 			settings.InitValues.F = test.p.Func(test.x)
 			if method.Needs().Gradient {
@@ -1246,7 +1243,7 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {

 			// Rerun the test again to make sure that it gets the same answer with
 			// the same starting condition. Moreover, we are using the initial data.
-			result2, err2 := Minimize(test.p, dim, settings, method)
+			result2, err2 := Minimize(test.p, test.x, settings, method)
 			if err2 != nil {
 				t.Errorf("error finding minimum second time (%v) for:\n%v", err2, test)
 				continue
@@ -1298,8 +1295,7 @@ func TestIssue76(t *testing.T) {
 		Linesearcher: &Backtracking{},
 	}
 	// We are not interested in the error, only in the returned status.
-	s.InitX = x
-	r, _ := Minimize(p, len(x), s, m)
+	r, _ := Minimize(p, x, s, m)
 	// With the above stringent tolerance, the optimizer will never
 	// successfully reach the minimum. Check if it terminated in a finite
 	// number of steps.
@@ -1315,8 +1311,7 @@ func TestNelderMeadOneD(t *testing.T) {
 	x := []float64{10}
 	m := &NelderMead{}
 	s := DefaultSettingsLocal()
-	s.InitX = x
-	result, err := Minimize(p, len(x), s, m)
+	result, err := Minimize(p, x, s, m)
 	if err != nil {
 		t.Errorf(err.Error())
 	}