Rename GlobalMethod to Method and GlobalTask to Task

This commit is contained in:
Brendan Tracey
2018-07-18 14:22:43 -06:00
parent 88ef6dbe25
commit 9c5a3cae0e
12 changed files with 84 additions and 84 deletions

View File

@@ -42,14 +42,14 @@ func (b *BFGS) Status() (Status, error) {
return b.status, b.err
}
func (b *BFGS) InitGlobal(dim, tasks int) int {
func (b *BFGS) Init(dim, tasks int) int {
b.status = NotTerminated
b.err = nil
return 1
}
func (b *BFGS) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
b.status, b.err = localOptimizer{}.runGlobal(b, operation, result, tasks)
func (b *BFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
b.status, b.err = localOptimizer{}.run(b, operation, result, tasks)
close(operation)
return
}

View File

@@ -105,14 +105,14 @@ func (cg *CG) Status() (Status, error) {
return cg.status, cg.err
}
func (cg *CG) InitGlobal(dim, tasks int) int {
func (cg *CG) Init(dim, tasks int) int {
cg.status = NotTerminated
cg.err = nil
return 1
}
func (cg *CG) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
cg.status, cg.err = localOptimizer{}.runGlobal(cg, operation, result, tasks)
func (cg *CG) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
cg.status, cg.err = localOptimizer{}.run(cg, operation, result, tasks)
close(operation)
return
}

View File

@@ -109,13 +109,13 @@ type CmaEsChol struct {
// Synchronization.
sentIdx int
receivedIdx int
operation chan<- GlobalTask
operation chan<- Task
updateErr error
}
var (
_ Statuser = (*CmaEsChol)(nil)
_ GlobalMethod = (*CmaEsChol)(nil)
_ Method = (*CmaEsChol)(nil)
)
func (cma *CmaEsChol) Needs() struct{ Gradient, Hessian bool } {
@@ -144,7 +144,7 @@ func (cma *CmaEsChol) Status() (Status, error) {
return cma.methodConverged(), nil
}
func (cma *CmaEsChol) InitGlobal(dim, tasks int) int {
func (cma *CmaEsChol) Init(dim, tasks int) int {
if dim <= 0 {
panic(nonpositiveDimension)
}
@@ -241,7 +241,7 @@ func (cma *CmaEsChol) InitGlobal(dim, tasks int) int {
return t
}
func (cma *CmaEsChol) sendInitTasks(tasks []GlobalTask) {
func (cma *CmaEsChol) sendInitTasks(tasks []Task) {
for i, task := range tasks {
cma.sendTask(i, task)
}
@@ -249,7 +249,7 @@ func (cma *CmaEsChol) sendInitTasks(tasks []GlobalTask) {
}
// sendTask generates a sample and sends the task. It does not update the cma index.
func (cma *CmaEsChol) sendTask(idx int, task GlobalTask) {
func (cma *CmaEsChol) sendTask(idx int, task Task) {
task.ID = idx
task.Op = FuncEvaluation
distmv.NormalRand(cma.xs.RawRowView(idx), cma.mean, &cma.chol, cma.Src)
@@ -277,7 +277,7 @@ func (cma *CmaEsChol) bestIdx() int {
// findBestAndUpdateTask finds the best task in the current list, updates the
// new best overall, and then stores the best location into task.
func (cma *CmaEsChol) findBestAndUpdateTask(task GlobalTask) GlobalTask {
func (cma *CmaEsChol) findBestAndUpdateTask(task Task) Task {
// Find and update the best location.
// Don't use floats because there may be NaN values.
best := cma.bestIdx()
@@ -301,7 +301,7 @@ func (cma *CmaEsChol) findBestAndUpdateTask(task GlobalTask) GlobalTask {
return task
}
func (cma *CmaEsChol) RunGlobal(operations chan<- GlobalTask, results <-chan GlobalTask, tasks []GlobalTask) {
func (cma *CmaEsChol) Run(operations chan<- Task, results <-chan Task, tasks []Task) {
cma.operation = operations
// Send the initial tasks. We know there are at most as many tasks as elements
// of the population.

View File

@@ -9,7 +9,7 @@ import (
"time"
)
// DefaultSettingsGlobal returns the default settings for Global optimization.
// DefaultSettingsGlobal returns the default settings for a global optimization.
func DefaultSettingsGlobal() *Settings {
return &Settings{
FunctionThreshold: math.Inf(-1),
@@ -20,27 +20,27 @@ func DefaultSettingsGlobal() *Settings {
}
}
// GlobalTask is a type to communicate between the GlobalMethod and the outer
// Task is a type to communicate between the Method and the outer
// calling script.
type GlobalTask struct {
type Task struct {
ID int
Op Operation
*Location
}
// GlobalMethod is a type which can search for a global optimum for an objective function.
type GlobalMethod interface {
// Method is a type which can search for a global optimum for an objective function.
type Method interface {
Needser
// InitGlobal takes as input the problem dimension and number of available
// Init takes as input the problem dimension and number of available
// concurrent tasks, and returns the number of concurrent processes to be used.
// The returned value must be less than or equal to tasks.
InitGlobal(dim, tasks int) int
// RunGlobal runs a global optimization. The method sends GlobalTasks on
Init(dim, tasks int) int
// Run runs a global optimization. The method sends Tasks on
// the operation channel (for performing function evaluations, major
// iterations, etc.). The result of the tasks will be returned on Result.
// See the documentation for Operation types for the possible tasks.
//
// The caller of RunGlobal will signal the termination of the optimization
// The caller of Run will signal the termination of the optimization
// (i.e. convergence from user settings) by sending a task with a PostIteration
// Op field on result. More tasks may still be sent on operation after this
// occurs, but only MajorIteration operations will still be conducted
@@ -48,32 +48,32 @@ type GlobalMethod interface {
// on operation will be evaluated, however if an Evaluation is started,
// the results of that evaluation will be sent on results.
//
// The GlobalMethod must read from the result channel until it is closed.
// During this, the GlobalMethod may want to send new MajorIteration(s) on
// operation. GlobalMethod then must close operation, and return from RunGlobal.
// The Method must read from the result channel until it is closed.
// During this, the Method may want to send new MajorIteration(s) on
// operation. Method then must close operation, and return from Run.
// These steps must establish a "happens-before" relationship between result
// being closed (externally) and RunGlobal closing operation, for example
// being closed (externally) and Run closing operation, for example
// by using a range loop to read from result even if no results are expected.
//
// The last parameter to RunGlobal is a slice of tasks with length equal to
// the return from InitGlobal. GlobalTask has an ID field which may be
// set and modified by GlobalMethod, and must not be modified by the caller.
// The last parameter to Run is a slice of tasks with length equal to
// the return from Init. Task has an ID field which may be
// set and modified by Method, and must not be modified by the caller.
// The first element of tasks contains information about the initial location.
// The Location.X field is always valid. The Operation field specifies which
// other values of Location are known. If Operation == NoOperation, none of
// the values should be used, otherwise the Evaluation operations will be
// composed to specify the valid fields. GlobalMethods are free to use or
// composed to specify the valid fields. Methods are free to use or
// ignore these values.
//
// GlobalMethod may have its own specific convergence criteria, which can
// Method may have its own specific convergence criteria, which can
// be communicated using a MethodDone operation. This will trigger a
// PostIteration to be sent on result, and the MethodDone task will not be
// returned on result. The GlobalMethod must implement Statuser, and the
// returned on result. The Method must implement Statuser, and the
// call to Status must return a Status other than NotTerminated.
//
// The operation and result tasks are guaranteed to have a buffer length
// equal to the return from InitGlobal.
RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask)
// equal to the return from Init.
Run(operation chan<- Task, result <-chan Task, tasks []Task)
}
// Global uses a global optimizer to search for the global minimum of a
@@ -106,13 +106,13 @@ type GlobalMethod interface {
// Global returns a Result struct and any error that occurred. See the
// documentation of Result for more information.
//
// See the documentation for GlobalMethod for the details on implementing a method.
// See the documentation for Method for the details on implementing a method.
//
// Be aware that the default behavior of Global is to find the minimum.
// For certain functions and optimization methods, this process can take many
// function evaluations. The Settings input struct can be used to limit this,
// for example by modifying the maximum runtime or maximum function evaluations.
func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Result, error) {
func Global(p Problem, dim int, settings *Settings, method Method) (*Result, error) {
startTime := time.Now()
if method == nil {
method = getDefaultMethod(&p)
@@ -161,7 +161,7 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
}, err
}
func getDefaultMethod(p *Problem) GlobalMethod {
func getDefaultMethod(p *Problem) Method {
if p.Grad != nil {
return &BFGS{}
}
@@ -170,30 +170,30 @@ func getDefaultMethod(p *Problem) GlobalMethod {
// minimizeGlobal performs a Global optimization. minimizeGlobal updates the
// settings and optLoc, and returns the final Status and error.
func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
func minimizeGlobal(prob *Problem, method Method, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
dim := len(optLoc.X)
nTasks := settings.Concurrent
if nTasks == 0 {
nTasks = 1
}
newNTasks := method.InitGlobal(dim, nTasks)
newNTasks := method.Init(dim, nTasks)
if newNTasks > nTasks {
panic("global: too many tasks returned by GlobalMethod")
panic("global: too many tasks returned by Method")
}
nTasks = newNTasks
// Launch the method. The method communicates tasks using the operations
// channel, and results is used to return the evaluated results.
operations := make(chan GlobalTask, nTasks)
results := make(chan GlobalTask, nTasks)
operations := make(chan Task, nTasks)
results := make(chan Task, nTasks)
go func() {
tasks := make([]GlobalTask, nTasks)
tasks := make([]Task, nTasks)
tasks[0].Location = initLoc
tasks[0].Op = initOp
for i := 1; i < len(tasks); i++ {
tasks[i].Location = newLocation(dim, method)
}
method.RunGlobal(operations, results, tasks)
method.Run(operations, results, tasks)
}()
// Algorithmic Overview:
@@ -218,14 +218,14 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
// method that all results have been collected. At this point, the method
// may send MajorIteration(s) to update an optimum location based on these
// last returned results, and then the method will close the operations channel.
// The GlobalMethod must ensure that the closing of results happens before the
// The Method must ensure that the closing of results happens before the
// closing of operations in order to ensure proper shutdown order.
// Now that no more tasks will be commanded by the method, the distributor
// closes statsChan, and with no more statistics to update the optimization
// concludes.
workerChan := make(chan GlobalTask) // Delegate tasks to the workers.
statsChan := make(chan GlobalTask) // Send evaluation updates.
workerChan := make(chan Task) // Delegate tasks to the workers.
statsChan := make(chan Task) // Send evaluation updates.
done := make(chan struct{}) // Communicate the optimization is done.
// Read tasks from the method and distribute as appropriate.
@@ -235,9 +235,9 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
case task := <-operations:
switch task.Op {
case InitIteration:
panic("optimize: GlobalMethod returned InitIteration")
panic("optimize: Method returned InitIteration")
case PostIteration:
panic("optimize: GlobalMethod returned PostIteration")
panic("optimize: Method returned PostIteration")
case NoOperation, MajorIteration, MethodDone:
statsChan <- task
default:
@@ -270,7 +270,7 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
statsChan <- task
}
// Signal successful worker completion.
statsChan <- GlobalTask{Op: signalDone}
statsChan <- Task{Op: signalDone}
}
for i := 0; i < nTasks; i++ {
go worker()
@@ -324,7 +324,7 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
default:
finalStatus = status
finalError = err
results <- GlobalTask{
results <- Task{
Op: PostIteration,
}
close(done)
@@ -337,15 +337,15 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
}
}
// This code block is here rather than above to ensure Status() is not called
// before Method.RunGlobal closes operations.
// before Method.Run closes operations.
if methodDone {
statuser, ok := method.(Statuser)
if !ok {
panic("optimize: global method returned MethodDone but is not a Statuser")
panic("optimize: method returned MethodDone but is not a Statuser")
}
finalStatus, finalError = statuser.Status()
if finalStatus == NotTerminated {
panic("optimize: global method returned MethodDone but a NotTerminated status")
panic("optimize: method returned MethodDone but a NotTerminated status")
}
}
return finalStatus, finalError

View File

@@ -26,14 +26,14 @@ func (g *GradientDescent) Status() (Status, error) {
return g.status, g.err
}
func (g *GradientDescent) InitGlobal(dim, tasks int) int {
func (g *GradientDescent) Init(dim, tasks int) int {
g.status = NotTerminated
g.err = nil
return 1
}
func (g *GradientDescent) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
g.status, g.err = localOptimizer{}.runGlobal(g, operation, result, tasks)
func (g *GradientDescent) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
g.status, g.err = localOptimizer{}.run(g, operation, result, tasks)
close(operation)
return
}

View File

@@ -23,7 +23,7 @@ func (g *GuessAndCheck) Needs() struct{ Gradient, Hessian bool } {
return struct{ Gradient, Hessian bool }{false, false}
}
func (g *GuessAndCheck) InitGlobal(dim, tasks int) int {
func (g *GuessAndCheck) Init(dim, tasks int) int {
if dim <= 0 {
panic(nonpositiveDimension)
}
@@ -35,13 +35,13 @@ func (g *GuessAndCheck) InitGlobal(dim, tasks int) int {
return tasks
}
func (g *GuessAndCheck) sendNewLoc(operation chan<- GlobalTask, task GlobalTask) {
func (g *GuessAndCheck) sendNewLoc(operation chan<- Task, task Task) {
g.Rander.Rand(task.X)
task.Op = FuncEvaluation
operation <- task
}
func (g *GuessAndCheck) updateMajor(operation chan<- GlobalTask, task GlobalTask) {
func (g *GuessAndCheck) updateMajor(operation chan<- Task, task Task) {
// Update the best value seen so far, and send a MajorIteration.
if task.F < g.bestF {
g.bestF = task.F
@@ -54,7 +54,7 @@ func (g *GuessAndCheck) updateMajor(operation chan<- GlobalTask, task GlobalTask
operation <- task
}
func (g *GuessAndCheck) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
func (g *GuessAndCheck) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
// Send initial tasks to evaluate
for _, task := range tasks {
g.sendNewLoc(operation, task)

View File

@@ -48,14 +48,14 @@ func (l *LBFGS) Status() (Status, error) {
return l.status, l.err
}
func (l *LBFGS) InitGlobal(dim, tasks int) int {
func (l *LBFGS) Init(dim, tasks int) int {
l.status = NotTerminated
l.err = nil
return 1
}
func (l *LBFGS) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
l.status, l.err = localOptimizer{}.runGlobal(l, operation, result, tasks)
func (l *LBFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
l.status, l.err = localOptimizer{}.run(l, operation, result, tasks)
close(operation)
return
}

View File

@@ -30,7 +30,7 @@ func (*ListSearch) Needs() struct{ Gradient, Hessian bool } {
// Init initializes the method for optimization. The input dimension
// must match the number of columns of Locs.
func (l *ListSearch) InitGlobal(dim, tasks int) int {
func (l *ListSearch) Init(dim, tasks int) int {
if dim <= 0 {
panic(nonpositiveDimension)
}
@@ -51,7 +51,7 @@ func (l *ListSearch) InitGlobal(dim, tasks int) int {
return min(r, tasks)
}
func (l *ListSearch) sendNewLoc(operation chan<- GlobalTask, task GlobalTask) {
func (l *ListSearch) sendNewLoc(operation chan<- Task, task Task) {
task.Op = FuncEvaluation
task.ID = l.eval
mat.Row(task.X, l.eval, l.Locs)
@@ -59,7 +59,7 @@ func (l *ListSearch) sendNewLoc(operation chan<- GlobalTask, task GlobalTask) {
operation <- task
}
func (l *ListSearch) updateMajor(operation chan<- GlobalTask, task GlobalTask) {
func (l *ListSearch) updateMajor(operation chan<- Task, task Task) {
// Update the best value seen so far, and send a MajorIteration.
if task.F < l.bestF {
l.bestF = task.F
@@ -79,7 +79,7 @@ func (l *ListSearch) Status() (Status, error) {
return MethodConverge, nil
}
func (l *ListSearch) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
func (l *ListSearch) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
// Send initial tasks to evaluate
for _, task := range tasks {
l.sendNewLoc(operation, task)

View File

@@ -11,11 +11,11 @@ import (
// localOptimizer is a helper type for running an optimization using a localMethod.
type localOptimizer struct{}
// RunGlobal controls the optimization run for a localMethod. The calling method
// run controls the optimization run for a localMethod. The calling method
// must close the operation channel at the conclusion of the optimization. This
// provides a happens before relationship between the return of status and the
// closure of operation, and thus a call to method.Status (if necessary).
func (l localOptimizer) runGlobal(method localMethod, operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) (Status, error) {
func (l localOptimizer) run(method localMethod, operation chan<- Task, result <-chan Task, tasks []Task) (Status, error) {
// Local methods start with a fully-specified initial location.
task := tasks[0]
task = l.initialLocation(operation, result, task, method)
@@ -67,7 +67,7 @@ Loop:
// initialOperation returns the Operation needed to fill the initial location
// based on the needs of the method and the values already supplied.
func (localOptimizer) initialOperation(task GlobalTask, needser Needser) Operation {
func (localOptimizer) initialOperation(task Task, needser Needser) Operation {
var newOp Operation
op := task.Op
if op&FuncEvaluation == 0 {
@@ -85,13 +85,13 @@ func (localOptimizer) initialOperation(task GlobalTask, needser Needser) Operati
// initialLocation fills the initial location based on the needs of the method.
// The task passed to initialLocation should be the first task sent in Run.
func (l localOptimizer) initialLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask, needser Needser) GlobalTask {
func (l localOptimizer) initialLocation(operation chan<- Task, result <-chan Task, task Task, needser Needser) Task {
task.Op = l.initialOperation(task, needser)
operation <- task
return <-result
}
func (localOptimizer) checkStartingLocation(task GlobalTask) (Status, error) {
func (localOptimizer) checkStartingLocation(task Task) (Status, error) {
if math.IsInf(task.F, 1) || math.IsNaN(task.F) {
return Failure, ErrFunc(task.F)
}
@@ -104,7 +104,7 @@ func (localOptimizer) checkStartingLocation(task GlobalTask) (Status, error) {
}
// finish completes the channel operations to finish an optimization.
func (localOptimizer) finish(operation chan<- GlobalTask, result <-chan GlobalTask) {
func (localOptimizer) finish(operation chan<- Task, result <-chan Task) {
// Guarantee that result is closed before operation is closed.
for range result {
}
@@ -112,7 +112,7 @@ func (localOptimizer) finish(operation chan<- GlobalTask, result <-chan GlobalTa
// finishMethodDone sends a MethodDone signal on operation, reads the result,
// and completes the channel operations to finish an optimization.
func (l localOptimizer) finishMethodDone(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) {
func (l localOptimizer) finishMethodDone(operation chan<- Task, result <-chan Task, task Task) {
task.Op = MethodDone
operation <- task
task = <-result

View File

@@ -89,14 +89,14 @@ func (n *NelderMead) Status() (Status, error) {
return n.status, n.err
}
func (n *NelderMead) InitGlobal(dim, tasks int) int {
func (n *NelderMead) Init(dim, tasks int) int {
n.status = NotTerminated
n.err = nil
return 1
}
func (n *NelderMead) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
n.status, n.err = localOptimizer{}.runGlobal(n, operation, result, tasks)
func (n *NelderMead) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
n.status, n.err = localOptimizer{}.run(n, operation, result, tasks)
close(operation)
return
}

View File

@@ -60,14 +60,14 @@ func (n *Newton) Status() (Status, error) {
return n.status, n.err
}
func (n *Newton) InitGlobal(dim, tasks int) int {
func (n *Newton) Init(dim, tasks int) int {
n.status = NotTerminated
n.err = nil
return 1
}
func (n *Newton) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
n.status, n.err = localOptimizer{}.runGlobal(n, operation, result, tasks)
func (n *Newton) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
n.status, n.err = localOptimizer{}.run(n, operation, result, tasks)
close(operation)
return
}

View File

@@ -1154,7 +1154,7 @@ func TestNewton(t *testing.T) {
testLocal(t, newtonTests, &Newton{})
}
func testLocal(t *testing.T, tests []unconstrainedTest, method GlobalMethod) {
func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
for cas, test := range tests {
if test.long && testing.Short() {
continue