Change Minimize to take in an initial X location

This commit is contained in:
Brendan Tracey
2018-07-18 16:52:39 -06:00
parent 6f6398b9ea
commit fd90faf24c
7 changed files with 35 additions and 33 deletions

View File

@@ -190,21 +190,25 @@ func TestCmaEsChol(t *testing.T) {
src := rand.New(rand.NewSource(1))
method := test.method
method.Src = src
initX := method.InitMean
if initX == nil {
initX = make([]float64, test.dim)
}
// Run and check that the expected termination occurs.
result, err := Minimize(test.problem, test.dim, test.settings, method)
result, err := Minimize(test.problem, initX, test.settings, method)
if testErr := test.good(result, err, test.settings.Concurrent); testErr != nil {
t.Errorf("cas %d: %v", i, testErr)
}
// Run a second time to make sure there are no residual effects
result, err = Minimize(test.problem, test.dim, test.settings, method)
result, err = Minimize(test.problem, initX, test.settings, method)
if testErr := test.good(result, err, test.settings.Concurrent); testErr != nil {
t.Errorf("cas %d second: %v", i, testErr)
}
// Test the problem in parallel.
test.settings.Concurrent = 5
result, err = Minimize(test.problem, test.dim, test.settings, method)
result, err = Minimize(test.problem, initX, test.settings, method)
if testErr := test.good(result, err, test.settings.Concurrent); testErr != nil {
t.Errorf("cas %d concurrent: %v", i, testErr)
}

View File

@@ -76,9 +76,9 @@ type Method interface {
Run(operation chan<- Task, result <-chan Task, tasks []Task)
}
// Minimize uses an optimizer to search for the minimum of a
// function. A maximization problem can be transformed into a
// minimization problem by multiplying the function by -1.
// Minimize uses an optimizer to search for the minimum of a function. A
// maximization problem can be transformed into a minimization problem by
// multiplying the function by -1.
//
// The first argument represents the problem to be minimized. Its fields are
// routines that evaluate the objective function, gradient, and other
@@ -92,6 +92,10 @@ type Method interface {
// returned Status is other than NotTerminated or if the error is not nil, the
// optimization run is terminated.
//
// The second argument specifies the initial location for the optimization.
// Some Methods do not require an initial location, but initX must still be
// specified to determine the dimension of the optimization problem.
//
// The third argument contains the settings for the minimization. The
// DefaultSettingsLocal and DefaultSettingsGlobal functions can be called for
// different default settings depending on the optimization method. If
@@ -116,7 +120,7 @@ type Method interface {
// minimum. For certain functions and optimization methods, this can take many
// function evaluations. The Settings input struct can be used to limit this,
// for example by modifying the maximum function evaluations or gradient tolerance.
func Minimize(p Problem, dim int, settings *Settings, method Method) (*Result, error) {
func Minimize(p Problem, initX []float64, settings *Settings, method Method) (*Result, error) {
startTime := time.Now()
if method == nil {
method = getDefaultMethod(&p)
@@ -125,6 +129,7 @@ func Minimize(p Problem, dim int, settings *Settings, method Method) (*Result, e
settings = DefaultSettingsLocal()
}
stats := &Stats{}
dim := len(initX)
err := checkOptimization(p, dim, method, settings.Recorder)
if err != nil {
return nil, err
@@ -137,7 +142,7 @@ func Minimize(p Problem, dim int, settings *Settings, method Method) (*Result, e
settings.FunctionConverge.Init()
}
initOp, initLoc := getInitLocation(dim, settings.InitX, settings.InitValues, method)
initOp, initLoc := getInitLocation(dim, initX, settings.InitValues, method)
stats.Runtime = time.Since(startTime)

View File

@@ -26,10 +26,11 @@ func TestGuessAndCheck(t *testing.T) {
if !ok {
panic("bad test")
}
Minimize(problem, dim, nil, &GuessAndCheck{Rander: d})
initX := make([]float64, dim)
Minimize(problem, initX, nil, &GuessAndCheck{Rander: d})
settings := DefaultSettingsGlobal()
settings.Concurrent = 5
settings.MajorIterations = 15
Minimize(problem, dim, settings, &GuessAndCheck{Rander: d})
Minimize(problem, initX, settings, &GuessAndCheck{Rander: d})
}

View File

@@ -49,7 +49,8 @@ func TestListSearch(t *testing.T) {
Locs: locs,
}
settings := &Settings{}
result, err := Minimize(p, c, settings, method)
initX := make([]float64, c)
result, err := Minimize(p, initX, settings, method)
if err != nil {
t.Errorf("cas %v: error optimizing: %s", cas, err)
}
@@ -63,7 +64,7 @@ func TestListSearch(t *testing.T) {
// Check that the optimization works concurrently.
concurrent := 6
settings.Concurrent = concurrent
result, err = Minimize(p, c, settings, method)
result, err = Minimize(p, initX, settings, method)
if err != nil {
t.Errorf("cas %v: error optimizing: %s", cas, err)
}
@@ -76,7 +77,7 @@ func TestListSearch(t *testing.T) {
// Check that the optimization works concurrently with more than the number of samples.
settings.Concurrent = test.r + concurrent
result, err = Minimize(p, c, settings, method)
result, err = Minimize(p, initX, settings, method)
if err != nil {
t.Errorf("cas %v: error optimizing: %s", cas, err)
}
@@ -92,7 +93,7 @@ func TestListSearch(t *testing.T) {
swapSamples(locs, fs, minIdx, test.r-1)
minIdx = test.r - 1
settings.Concurrent = concurrent
result, err = Minimize(p, c, settings, method)
result, err = Minimize(p, initX, settings, method)
if err != nil {
t.Errorf("cas %v: error optimizing: %s", cas, err)
}
@@ -111,7 +112,7 @@ func TestListSearch(t *testing.T) {
minIdxFirst := floats.MinIdx(fs[:evals])
settings.Concurrent = 0
settings.FuncEvaluations = evals
result, err = Minimize(p, c, settings, method)
result, err = Minimize(p, initX, settings, method)
if err != nil {
t.Errorf("cas %v: error optimizing: %s", cas, err)
}
@@ -134,7 +135,7 @@ func TestListSearch(t *testing.T) {
minIdxFirst = floats.MinIdx(fs[:evals])
settings.Concurrent = concurrent
result, err = Minimize(p, c, settings, method)
result, err = Minimize(p, initX, settings, method)
if err != nil {
t.Errorf("cas %v: error optimizing: %s", cas, err)
}

View File

@@ -23,9 +23,8 @@ func ExampleMinimize() {
settings.Recorder = nil
settings.GradientThreshold = 1e-12
settings.FunctionConverge = nil
settings.InitX = x
result, err := optimize.Minimize(p, len(x), settings, &optimize.BFGS{})
result, err := optimize.Minimize(p, x, settings, &optimize.BFGS{})
if err != nil {
log.Fatal(err)
}

View File

@@ -166,13 +166,10 @@ func (p Problem) satisfies(method Needser) error {
//
// If Recorder is nil, no information will be recorded.
type Settings struct {
// InitX specifies an initial location to communicate to the Method. If InitX
// is nil, then a slice of zeros is used as a default value.
InitX []float64
// InitValues specifies properties known at InitX (function value, gradient, etc.).
// If InitX is nil, InitValues must be also. If InitValues is non-nil, then
// InitValues specifies properties (function value, gradient, etc.) known
// at the initial location passed to Minimize. If InitValues is non-nil, then
// the function value F must be provided, the location X must not be specified
// (use InitX instead), and other fields may be specified.
// and other fields may be specified.
InitValues *Location
// FunctionThreshold is the threshold for acceptably small values of the

View File

@@ -1180,9 +1180,7 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
}
settings.GradientThreshold = test.gradTol
dim := len(test.x)
settings.InitX = test.x
result, err := Minimize(test.p, dim, settings, method)
result, err := Minimize(test.p, test.x, settings, method)
if err != nil {
t.Errorf("Case %d: error finding minimum (%v) for:\n%v", cas, err, test)
continue
@@ -1232,7 +1230,6 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
// We are going to restart the solution using known initial data, so
// evaluate them.
settings.InitX = test.x
settings.InitValues = &Location{}
settings.InitValues.F = test.p.Func(test.x)
if method.Needs().Gradient {
@@ -1246,7 +1243,7 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
// Rerun the test again to make sure that it gets the same answer with
// the same starting condition. Moreover, we are using the initial data.
result2, err2 := Minimize(test.p, dim, settings, method)
result2, err2 := Minimize(test.p, test.x, settings, method)
if err2 != nil {
t.Errorf("error finding minimum second time (%v) for:\n%v", err2, test)
continue
@@ -1298,8 +1295,7 @@ func TestIssue76(t *testing.T) {
Linesearcher: &Backtracking{},
}
// We are not interested in the error, only in the returned status.
s.InitX = x
r, _ := Minimize(p, len(x), s, m)
r, _ := Minimize(p, x, s, m)
// With the above stringent tolerance, the optimizer will never
// successfully reach the minimum. Check if it terminated in a finite
// number of steps.
@@ -1315,8 +1311,7 @@ func TestNelderMeadOneD(t *testing.T) {
x := []float64{10}
m := &NelderMead{}
s := DefaultSettingsLocal()
s.InitX = x
result, err := Minimize(p, len(x), s, m)
result, err := Minimize(p, x, s, m)
if err != nil {
t.Errorf(err.Error())
}