diff --git a/optimize/global.go b/optimize/global.go
index d9266c87..cc89a92f 100644
--- a/optimize/global.go
+++ b/optimize/global.go
@@ -58,6 +58,12 @@ type GlobalMethod interface {
 	// The last parameter to RunGlobal is a slice of tasks with length equal to
 	// the return from InitGlobal. GlobalTask has an ID field which may be
 	// set and modified by GlobalMethod, and must not be modified by the caller.
+	// The first element of tasks contains information about the initial location.
+	// The Location.X field is always valid. The Operation field specifies which
+	// other values of Location are known. If Operation == NoOperation, none of
+	// the values should be used, otherwise the Evaluation operations will be
+	// composed to specify the valid fields. GlobalMethods are free to use or
+	// ignore these values.
 	//
 	// GlobalMethod may have its own specific convergence criteria, which can
 	// be communicated using a MethodDone operation. This will trigger a
@@ -120,9 +126,6 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
 		return nil, err
 	}
 
-	// TODO(btracey): These init calls don't do anything with their arguments
-	// because optLoc is meaningless at this point. Should change the function
-	// signatures.
 	optLoc := newLocation(dim, method)
 	optLoc.F = math.Inf(1)
 
@@ -130,6 +133,8 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
 		settings.FunctionConverge.Init()
 	}
 
+	initOp, initLoc := getInitLocation(dim, settings.InitX, settings.InitValues, method)
+
 	stats.Runtime = time.Since(startTime)
 
 	// Send initial location to Recorder
@@ -142,7 +147,7 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
 
 	// Run optimization
 	var status Status
-	status, err = minimizeGlobal(&p, method, settings, stats, optLoc, startTime)
+	status, err = minimizeGlobal(&p, method, settings, stats, initOp, initLoc, optLoc, startTime)
 
 	// Cleanup and collect results
 	if settings.Recorder != nil && err == nil {
@@ -158,7 +163,7 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
 
 // minimizeGlobal performs a Global optimization. minimizeGlobal updates the
 // settings and optLoc, and returns the final Status and error.
-func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, optLoc *Location, startTime time.Time) (Status, error) {
+func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
 	dim := len(optLoc.X)
 	nTasks := settings.Concurrent
 	if nTasks == 0 {
@@ -176,7 +181,9 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
 	results := make(chan GlobalTask, nTasks)
 	go func() {
 		tasks := make([]GlobalTask, nTasks)
-		for i := range tasks {
+		tasks[0].Location = initLoc
+		tasks[0].Op = initOp
+		for i := 1; i < len(tasks); i++ {
 			tasks[i].Location = newLocation(dim, method)
 		}
 		method.RunGlobal(operations, results, tasks)
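With this change, the first task handed to RunGlobal may arrive pre-populated with the caller's initial data. A minimal sketch of how a GlobalMethod implementation could honor that contract; exampleMethod and its fields are hypothetical, while GlobalTask, Location, and the Operation flags are the package's own:

```go
package main

import "gonum.org/v1/gonum/optimize"

// exampleMethod is a hypothetical GlobalMethod skeleton; only the handling
// of the seeded first task is shown, and the search loop itself is elided.
type exampleMethod struct {
	seed []float64         // starting point taken from tasks[0]
	best optimize.Location // best location seen so far
}

// consumeInit inspects tasks[0] per the documented contract: Location.X is
// always valid, and Op says which other Location fields hold known values.
func (m *exampleMethod) consumeInit(tasks []optimize.GlobalTask) {
	t := tasks[0]
	// X can seed the search even when t.Op == NoOperation.
	m.seed = append(m.seed[:0], t.Location.X...)
	if t.Op&optimize.FuncEvaluation != 0 {
		// The function value at X is already known; record it rather
		// than spending an evaluation on it.
		m.best = *t.Location
	}
}

func main() {}
```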
diff --git a/optimize/local.go b/optimize/local.go
index fbcdd11f..44d3ec2d 100644
--- a/optimize/local.go
+++ b/optimize/local.go
@@ -4,7 +4,11 @@
 
 package optimize
 
-import "math"
+import (
+	"math"
+
+	"gonum.org/v1/gonum/floats"
+)
 
 // Local finds a local minimum of a minimization problem using a sequential
 // algorithm. A maximization problem can be transformed into a minimization
@@ -62,6 +66,10 @@ func Local(p Problem, initX []float64, settings *Settings, method Method) (*Resu
 	if settings == nil {
 		settings = DefaultSettings()
 	}
+	// Check that the initial location matches the one in settings.
+	if settings.InitX != nil && !floats.Equal(settings.InitX, initX) {
+		panic("local: initX does not match settings x location")
+	}
 	lg := &localGlobal{
 		Method: method,
 		InitX:  initX,
@@ -193,38 +201,24 @@ func (l *localGlobal) cleanup(operation chan<- GlobalTask, result <-chan GlobalT
 
 func (l *localGlobal) getStartingLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) Operation {
 	copy(task.X, l.InitX)
-	if l.Settings.UseInitialData {
-		task.F = l.Settings.InitialValue
-		if task.Gradient != nil {
-			g := l.Settings.InitialGradient
-			if g == nil {
-				panic("optimize: initial gradient is nil")
-			}
-			if len(g) != l.dim {
-				panic("optimize: initial gradient size mismatch")
-			}
-			copy(task.Gradient, g)
-		}
-		if task.Hessian != nil {
-			h := l.Settings.InitialHessian
-			if h == nil {
-				panic("optimize: initial Hessian is nil")
-			}
-			if h.Symmetric() != l.dim {
-				panic("optimize: initial Hessian size mismatch")
-			}
-			task.Hessian.CopySym(h)
-		}
+	// Construct the operation from what is missing.
+	needs := l.Method.Needs()
+	initOp := task.Op
+	op := NoOperation
+	if initOp&FuncEvaluation == 0 {
+		op |= FuncEvaluation
+	}
+	if needs.Gradient && initOp&GradEvaluation == 0 {
+		op |= GradEvaluation
+	}
+	if needs.Hessian && initOp&HessEvaluation == 0 {
+		op |= HessEvaluation
+	}
+
+	if op == NoOperation {
 		return NoOperation
 	}
-	eval := FuncEvaluation
-	if task.Gradient != nil {
-		eval |= GradEvaluation
-	}
-	if task.Hessian != nil {
-		eval |= HessEvaluation
-	}
-	task.Op = eval
+	task.Op = op
 	operation <- task
 	task = <-result
 	return task.Op
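The rewritten getStartingLocation composes only the work that is still missing: for instance, when the seeded task already carries FuncEvaluation and the method needs a gradient, the composed request is GradEvaluation alone. A self-contained illustration of that bitmask logic, using stand-in flag values rather than the package's actual constants:

```go
package main

import "fmt"

// Operation mirrors optimize.Operation; the flag values below are
// illustrative stand-ins, not the package's real constants.
type Operation uint64

const (
	NoOperation    Operation = 0
	FuncEvaluation Operation = 1 << iota // 1 << 1
	GradEvaluation                       // 1 << 2
	HessEvaluation                       // 1 << 3
)

func main() {
	initOp := FuncEvaluation // the caller supplied only a function value
	needsGradient := true    // the method requires gradients

	// Compose the operation from what is missing, as getStartingLocation does.
	op := NoOperation
	if initOp&FuncEvaluation == 0 {
		op |= FuncEvaluation
	}
	if needsGradient && initOp&GradEvaluation == 0 {
		op |= GradEvaluation
	}
	fmt.Println(op == GradEvaluation) // true: only the gradient is requested
}
```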
diff --git a/optimize/minimize.go b/optimize/minimize.go
index e1c93976..88384899 100644
--- a/optimize/minimize.go
+++ b/optimize/minimize.go
@@ -26,14 +26,12 @@ func min(a, b int) int {
 }
 
 // newLocation allocates a new location structure of the appropriate size. It
-// allocates memory based on the dimension and the values in Needs. The initial
-// function value is set to math.Inf(1).
+// allocates memory based on the dimension and the values in Needs.
 func newLocation(dim int, method Needser) *Location {
 	// TODO(btracey): combine this with Local.
 	loc := &Location{
 		X: make([]float64, dim),
 	}
-	loc.F = math.Inf(1)
 	if method.Needs().Gradient {
 		loc.Gradient = make([]float64, dim)
 	}
@@ -60,6 +58,47 @@ func copyLocation(dst, src *Location) {
 	}
 }
 
+// getInitLocation checks the validity of initX and initValues, and returns
+// the initial operation and location as an (Operation, *Location) pair.
+func getInitLocation(dim int, initX []float64, initValues *Location, method Needser) (Operation, *Location) {
+	needs := method.Needs()
+	loc := newLocation(dim, method)
+	if initX == nil {
+		if initValues != nil {
+			panic("optimize: initValues is non-nil but no initial location specified")
+		}
+		return NoOperation, loc
+	}
+	copy(loc.X, initX)
+	if initValues == nil {
+		return NoOperation, loc
+	}
+	if initValues.X != nil {
+		panic("optimize: location specified in InitValues (only use InitX)")
+	}
+	loc.F = initValues.F
+	op := FuncEvaluation
+	if initValues.Gradient != nil {
+		if len(initValues.Gradient) != dim {
+			panic("optimize: initial gradient does not match problem dimension")
+		}
+		if needs.Gradient {
+			copy(loc.Gradient, initValues.Gradient)
+			op |= GradEvaluation
+		}
+	}
+	if initValues.Hessian != nil {
+		if initValues.Hessian.Symmetric() != dim {
+			panic("optimize: initial Hessian does not match problem dimension")
+		}
+		if needs.Hessian {
+			loc.Hessian.CopySym(initValues.Hessian)
+			op |= HessEvaluation
+		}
+	}
+	return op, loc
+}
+
 func checkOptimization(p Problem, dim int, method Needser, recorder Recorder) error {
 	if p.Func == nil {
 		panic(badProblem)
diff --git a/optimize/types.go b/optimize/types.go
index 10bc7f28..2feec6b4 100644
--- a/optimize/types.go
+++ b/optimize/types.go
@@ -164,15 +164,16 @@ func (p Problem) satisfies(method Needser) error {
 // settings, convergence information, and Recorder information. In general, users
 // should use DefaultSettings rather than constructing a Settings literal.
 //
-// If UseInitData is true, InitialValue, InitialGradient and InitialHessian
-// specify function information at the initial location.
-//
 // If Recorder is nil, no information will be recorded.
 type Settings struct {
-	UseInitialData  bool          // Use supplied information about the conditions at the initial x.
-	InitialValue    float64       // Function value at the initial x.
-	InitialGradient []float64     // Gradient at the initial x.
-	InitialHessian  *mat.SymDense // Hessian at the initial x.
+	// InitX specifies an initial location to communicate to the Method. If InitX
+	// is nil, then a slice of zeros is used as a default value.
+	InitX []float64
+	// InitValues specifies properties known at InitX (function value, gradient,
+	// etc.). If InitX is nil, InitValues must also be nil. If InitValues is
+	// non-nil, the function value F must be provided, the location X must not be
+	// set (use InitX instead), and other fields may be specified.
+	InitValues *Location
 
 	// FunctionThreshold is the threshold for acceptably small values of the
 	// objective function. FunctionThreshold status is returned if
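On the caller side, the new Settings fields replace the UseInitialData flag and its companions. A sketch of supplying known initial values under the new API; the quadratic objective here is a placeholder:

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	// A simple quadratic bowl stands in for a real objective.
	problem := optimize.Problem{
		Func: func(x []float64) float64 { return x[0]*x[0] + x[1]*x[1] },
		Grad: func(grad, x []float64) {
			grad[0] = 2 * x[0]
			grad[1] = 2 * x[1]
		},
	}

	x0 := []float64{1.3, 0.7}
	grad0 := make([]float64, len(x0))
	problem.Grad(grad0, x0)

	settings := optimize.DefaultSettings()
	settings.InitX = x0
	settings.InitValues = &optimize.Location{
		F:        problem.Func(x0),
		Gradient: grad0,
		// X stays nil: the location itself goes in InitX, and
		// getInitLocation panics if it is also set here.
	}

	result, err := optimize.Local(problem, x0, settings, &optimize.BFGS{})
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(result.X, result.F)
}
```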
diff --git a/optimize/unconstrained_test.go b/optimize/unconstrained_test.go
index 2a0be2e5..48081789 100644
--- a/optimize/unconstrained_test.go
+++ b/optimize/unconstrained_test.go
@@ -1230,15 +1230,16 @@ func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
 
 		// We are going to restart the solution using known initial data, so
 		// evaluate them.
-		settings.UseInitialData = true
-		settings.InitialValue = test.p.Func(test.x)
+		settings.InitX = test.x
+		settings.InitValues = &Location{}
+		settings.InitValues.F = test.p.Func(test.x)
 		if method.Needs().Gradient {
-			settings.InitialGradient = resize(settings.InitialGradient, len(test.x))
-			test.p.Grad(settings.InitialGradient, test.x)
+			settings.InitValues.Gradient = resize(settings.InitValues.Gradient, len(test.x))
+			test.p.Grad(settings.InitValues.Gradient, test.x)
 		}
 		if method.Needs().Hessian {
-			settings.InitialHessian = mat.NewSymDense(len(test.x), nil)
-			test.p.Hess(settings.InitialHessian, test.x)
+			settings.InitValues.Hessian = mat.NewSymDense(len(test.x), nil)
+			test.p.Hess(settings.InitValues.Hessian, test.x)
 		}
 
 		// Rerun the test again to make sure that it gets the same answer with
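The validation in getInitLocation is panic-based rather than error-returning. For reference, two misconfigurations the updated test steers clear of; the panics fire when Local or Global is started, not at assignment time:

```go
package main

import "gonum.org/v1/gonum/optimize"

func main() {
	// InitValues without InitX: starting an optimization panics with
	// "optimize: initValues is non-nil but no initial location specified".
	s1 := optimize.DefaultSettings()
	s1.InitValues = &optimize.Location{F: 2.5}

	// A location duplicated into InitValues: starting an optimization panics
	// with "optimize: location specified in InitValues (only use InitX)".
	s2 := optimize.DefaultSettings()
	s2.InitX = []float64{1, 2}
	s2.InitValues = &optimize.Location{F: 2.5, X: []float64{1, 2}}

	_, _ = s1, s2 // the misuse only surfaces inside Local/Global
}
```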