https://github.com/gonum/gonum.git
Rename GlobalMethod to Method and GlobalTask to Task
@@ -42,14 +42,14 @@ func (b *BFGS) Status() (Status, error) {
 return b.status, b.err
 }

-func (b *BFGS) InitGlobal(dim, tasks int) int {
+func (b *BFGS) Init(dim, tasks int) int {
 b.status = NotTerminated
 b.err = nil
 return 1
 }

-func (b *BFGS) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
-b.status, b.err = localOptimizer{}.runGlobal(b, operation, result, tasks)
+func (b *BFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
+b.status, b.err = localOptimizer{}.run(b, operation, result, tasks)
 close(operation)
 return
 }
@@ -105,14 +105,14 @@ func (cg *CG) Status() (Status, error) {
 return cg.status, cg.err
 }

-func (cg *CG) InitGlobal(dim, tasks int) int {
+func (cg *CG) Init(dim, tasks int) int {
 cg.status = NotTerminated
 cg.err = nil
 return 1
 }

-func (cg *CG) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
-cg.status, cg.err = localOptimizer{}.runGlobal(cg, operation, result, tasks)
+func (cg *CG) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
+cg.status, cg.err = localOptimizer{}.run(cg, operation, result, tasks)
 close(operation)
 return
 }
@@ -109,13 +109,13 @@ type CmaEsChol struct {
 // Synchronization.
 sentIdx int
 receivedIdx int
-operation chan<- GlobalTask
+operation chan<- Task
 updateErr error
 }

 var (
 _ Statuser = (*CmaEsChol)(nil)
-_ GlobalMethod = (*CmaEsChol)(nil)
+_ Method = (*CmaEsChol)(nil)
 )

 func (cma *CmaEsChol) Needs() struct{ Gradient, Hessian bool } {
@@ -144,7 +144,7 @@ func (cma *CmaEsChol) Status() (Status, error) {
 return cma.methodConverged(), nil
 }

-func (cma *CmaEsChol) InitGlobal(dim, tasks int) int {
+func (cma *CmaEsChol) Init(dim, tasks int) int {
 if dim <= 0 {
 panic(nonpositiveDimension)
 }
@@ -241,7 +241,7 @@ func (cma *CmaEsChol) InitGlobal(dim, tasks int) int {
 return t
 }

-func (cma *CmaEsChol) sendInitTasks(tasks []GlobalTask) {
+func (cma *CmaEsChol) sendInitTasks(tasks []Task) {
 for i, task := range tasks {
 cma.sendTask(i, task)
 }
@@ -249,7 +249,7 @@ func (cma *CmaEsChol) sendInitTasks(tasks []GlobalTask) {
 }

 // sendTask generates a sample and sends the task. It does not update the cma index.
-func (cma *CmaEsChol) sendTask(idx int, task GlobalTask) {
+func (cma *CmaEsChol) sendTask(idx int, task Task) {
 task.ID = idx
 task.Op = FuncEvaluation
 distmv.NormalRand(cma.xs.RawRowView(idx), cma.mean, &cma.chol, cma.Src)
@@ -277,7 +277,7 @@ func (cma *CmaEsChol) bestIdx() int {

 // findBestAndUpdateTask finds the best task in the current list, updates the
 // new best overall, and then stores the best location into task.
-func (cma *CmaEsChol) findBestAndUpdateTask(task GlobalTask) GlobalTask {
+func (cma *CmaEsChol) findBestAndUpdateTask(task Task) Task {
 // Find and update the best location.
 // Don't use floats because there may be NaN values.
 best := cma.bestIdx()
@@ -301,7 +301,7 @@ func (cma *CmaEsChol) findBestAndUpdateTask(task GlobalTask) GlobalTask {
 return task
 }

-func (cma *CmaEsChol) RunGlobal(operations chan<- GlobalTask, results <-chan GlobalTask, tasks []GlobalTask) {
+func (cma *CmaEsChol) Run(operations chan<- Task, results <-chan Task, tasks []Task) {
 cma.operation = operations
 // Send the initial tasks. We know there are at most as many tasks as elements
 // of the population.
@@ -9,7 +9,7 @@ import (
 "time"
 )

-// DefaultSettingsGlobal returns the default settings for Global optimization.
+// DefaultSettingsGlobal returns the default settings for a global optimization.
 func DefaultSettingsGlobal() *Settings {
 return &Settings{
 FunctionThreshold: math.Inf(-1),
@@ -20,27 +20,27 @@ func DefaultSettingsGlobal() *Settings {
 }
 }

-// GlobalTask is a type to communicate between the GlobalMethod and the outer
+// Task is a type to communicate between the Method and the outer
 // calling script.
-type GlobalTask struct {
+type Task struct {
 ID int
 Op Operation
 *Location
 }

-// GlobalMethod is a type which can search for a global optimum for an objective function.
-type GlobalMethod interface {
+// Method is a type which can search for a global optimum for an objective function.
+type Method interface {
 Needser
-// InitGlobal takes as input the problem dimension and number of available
+// Init takes as input the problem dimension and number of available
 // concurrent tasks, and returns the number of concurrent processes to be used.
 // The returned value must be less than or equal to tasks.
-InitGlobal(dim, tasks int) int
-// RunGlobal runs a global optimization. The method sends GlobalTasks on
+Init(dim, tasks int) int
+// Run runs a global optimization. The method sends Tasks on
 // the operation channel (for performing function evaluations, major
 // iterations, etc.). The result of the tasks will be returned on Result.
 // See the documentation for Operation types for the possible tasks.
 //
-// The caller of RunGlobal will signal the termination of the optimization
+// The caller of Run will signal the termination of the optimization
 // (i.e. convergence from user settings) by sending a task with a PostIteration
 // Op field on result. More tasks may still be sent on operation after this
 // occurs, but only MajorIteration operations will still be conducted
@@ -48,32 +48,32 @@ type GlobalMethod interface {
 // on operation will be evaluated, however if an Evaluation is started,
 // the results of that evaluation will be sent on results.
 //
-// The GlobalMethod must read from the result channel until it is closed.
-// During this, the GlobalMethod may want to send new MajorIteration(s) on
-// operation. GlobalMethod then must close operation, and return from RunGlobal.
+// The Method must read from the result channel until it is closed.
+// During this, the Method may want to send new MajorIteration(s) on
+// operation. Method then must close operation, and return from Run.
 // These steps must establish a "happens-before" relationship between result
-// being closed (externally) and RunGlobal closing operation, for example
+// being closed (externally) and Run closing operation, for example
 // by using a range loop to read from result even if no results are expected.
 //
-// The last parameter to RunGlobal is a slice of tasks with length equal to
-// the return from InitGlobal. GlobalTask has an ID field which may be
-// set and modified by GlobalMethod, and must not be modified by the caller.
+// The last parameter to Run is a slice of tasks with length equal to
+// the return from Init. Task has an ID field which may be
+// set and modified by Method, and must not be modified by the caller.
 // The first element of tasks contains information about the initial location.
 // The Location.X field is always valid. The Operation field specifies which
 // other values of Location are known. If Operation == NoOperation, none of
 // the values should be used, otherwise the Evaluation operations will be
-// composed to specify the valid fields. GlobalMethods are free to use or
+// composed to specify the valid fields. Methods are free to use or
 // ignore these values.
 //
-// GlobalMethod may have its own specific convergence criteria, which can
+// Method may have its own specific convergence criteria, which can
 // be communicated using a MethodDone operation. This will trigger a
 // PostIteration to be sent on result, and the MethodDone task will not be
-// returned on result. The GlobalMethod must implement Statuser, and the
+// returned on result. The Method must implement Statuser, and the
 // call to Status must return a Status other than NotTerminated.
 //
 // The operation and result tasks are guaranteed to have a buffer length
-// equal to the return from InitGlobal.
-RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask)
+// equal to the return from Init.
+Run(operation chan<- Task, result <-chan Task, tasks []Task)
 }

 // Global uses a global optimizer to search for the global minimum of a
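Taken together, the hunks above leave the renamed task and method types reading roughly as follows (doc comments abridged; assembled from the diff hunks, not copied from the full source file):

```go
// Task is a type to communicate between the Method and the outer
// calling script.
type Task struct {
	ID int
	Op Operation
	*Location
}

// Method is a type which can search for a global optimum for an
// objective function. The full communication contract is in the
// doc comment shown in the diff above.
type Method interface {
	Needser
	// Init takes the problem dimension and the number of available
	// concurrent tasks, and returns the number of concurrent
	// processes to be used, which must be at most tasks.
	Init(dim, tasks int) int
	// Run runs a global optimization, communicating with the caller
	// over the operation and result channels.
	Run(operation chan<- Task, result <-chan Task, tasks []Task)
}
```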
@@ -106,13 +106,13 @@ type GlobalMethod interface {
 // Global returns a Result struct and any error that occurred. See the
 // documentation of Result for more information.
 //
-// See the documentation for GlobalMethod for the details on implementing a method.
+// See the documentation for Method for the details on implementing a method.
 //
 // Be aware that the default behavior of Global is to find the minimum.
 // For certain functions and optimization methods, this process can take many
 // function evaluations. The Settings input struct can be used to limit this,
 // for example by modifying the maximum runtime or maximum function evaluations.
-func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Result, error) {
+func Global(p Problem, dim int, settings *Settings, method Method) (*Result, error) {
 startTime := time.Now()
 if method == nil {
 method = getDefaultMethod(&p)
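For callers of the package only the type name in the signature changes. A minimal usage sketch against the renamed API; the quadratic objective, the Grad signature, and starting from the default settings are illustrative assumptions rather than content of this commit:

```go
package main

import (
	"fmt"

	"gonum.org/v1/gonum/optimize"
)

func main() {
	// Hypothetical objective: f(x) = x0^2 + x1^2 with its gradient.
	p := optimize.Problem{
		Func: func(x []float64) float64 { return x[0]*x[0] + x[1]*x[1] },
		Grad: func(grad, x []float64) {
			grad[0] = 2 * x[0]
			grad[1] = 2 * x[1]
		},
	}
	// Passing a nil Method lets Global pick a default; since the problem
	// provides a gradient, getDefaultMethod returns &BFGS{} (see the diff).
	result, err := optimize.Global(p, 2, optimize.DefaultSettingsGlobal(), nil)
	if err != nil {
		fmt.Println("optimization error:", err)
		return
	}
	fmt.Println("minimum at", result.X, "value", result.F)
}
```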
@@ -161,7 +161,7 @@ func Global(p Problem, dim int, settings *Settings, method GlobalMethod) (*Resul
 }, err
 }

-func getDefaultMethod(p *Problem) GlobalMethod {
+func getDefaultMethod(p *Problem) Method {
 if p.Grad != nil {
 return &BFGS{}
 }
@@ -170,30 +170,30 @@ func getDefaultMethod(p *Problem) GlobalMethod {

 // minimizeGlobal performs a Global optimization. minimizeGlobal updates the
 // settings and optLoc, and returns the final Status and error.
-func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
+func minimizeGlobal(prob *Problem, method Method, settings *Settings, stats *Stats, initOp Operation, initLoc, optLoc *Location, startTime time.Time) (Status, error) {
 dim := len(optLoc.X)
 nTasks := settings.Concurrent
 if nTasks == 0 {
 nTasks = 1
 }
-newNTasks := method.InitGlobal(dim, nTasks)
+newNTasks := method.Init(dim, nTasks)
 if newNTasks > nTasks {
-panic("global: too many tasks returned by GlobalMethod")
+panic("global: too many tasks returned by Method")
 }
 nTasks = newNTasks

 // Launch the method. The method communicates tasks using the operations
 // channel, and results is used to return the evaluated results.
-operations := make(chan GlobalTask, nTasks)
-results := make(chan GlobalTask, nTasks)
+operations := make(chan Task, nTasks)
+results := make(chan Task, nTasks)
 go func() {
-tasks := make([]GlobalTask, nTasks)
+tasks := make([]Task, nTasks)
 tasks[0].Location = initLoc
 tasks[0].Op = initOp
 for i := 1; i < len(tasks); i++ {
 tasks[i].Location = newLocation(dim, method)
 }
-method.RunGlobal(operations, results, tasks)
+method.Run(operations, results, tasks)
 }()

 // Algorithmic Overview:
@@ -218,15 +218,15 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
 // method that all results have been collected. At this point, the method
 // may send MajorIteration(s) to update an optimum location based on these
 // last returned results, and then the method will close the operations channel.
-// The GlobalMethod must ensure that the closing of results happens before the
+// The Method must ensure that the closing of results happens before the
 // closing of operations in order to ensure proper shutdown order.
 // Now that no more tasks will be commanded by the method, the distributor
 // closes statsChan, and with no more statistics to update the optimization
 // concludes.

-workerChan := make(chan GlobalTask) // Delegate tasks to the workers.
-statsChan := make(chan GlobalTask) // Send evaluation updates.
+workerChan := make(chan Task) // Delegate tasks to the workers.
+statsChan := make(chan Task) // Send evaluation updates.
 done := make(chan struct{}) // Communicate the optimization is done.

 // Read tasks from the method and distribute as appropriate.
 distributor := func() {
@@ -235,9 +235,9 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
 case task := <-operations:
 switch task.Op {
 case InitIteration:
-panic("optimize: GlobalMethod returned InitIteration")
+panic("optimize: Method returned InitIteration")
 case PostIteration:
-panic("optimize: GlobalMethod returned PostIteration")
+panic("optimize: Method returned PostIteration")
 case NoOperation, MajorIteration, MethodDone:
 statsChan <- task
 default:
@@ -270,7 +270,7 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
 statsChan <- task
 }
 // Signal successful worker completion.
-statsChan <- GlobalTask{Op: signalDone}
+statsChan <- Task{Op: signalDone}
 }
 for i := 0; i < nTasks; i++ {
 go worker()
@@ -324,7 +324,7 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
 default:
 finalStatus = status
 finalError = err
-results <- GlobalTask{
+results <- Task{
 Op: PostIteration,
 }
 close(done)
@@ -337,15 +337,15 @@ func minimizeGlobal(prob *Problem, method GlobalMethod, settings *Settings, stat
 }
 }
 // This code block is here rather than above to ensure Status() is not called
-// before Method.RunGlobal closes operations.
+// before Method.Run closes operations.
 if methodDone {
 statuser, ok := method.(Statuser)
 if !ok {
-panic("optimize: global method returned MethodDone but is not a Statuser")
+panic("optimize: method returned MethodDone but is not a Statuser")
 }
 finalStatus, finalError = statuser.Status()
 if finalStatus == NotTerminated {
-panic("optimize: global method returned MethodDone but a NotTerminated status")
+panic("optimize: method returned MethodDone but a NotTerminated status")
 }
 }
 return finalStatus, finalError
@@ -26,14 +26,14 @@ func (g *GradientDescent) Status() (Status, error) {
 return g.status, g.err
 }

-func (g *GradientDescent) InitGlobal(dim, tasks int) int {
+func (g *GradientDescent) Init(dim, tasks int) int {
 g.status = NotTerminated
 g.err = nil
 return 1
 }

-func (g *GradientDescent) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
-g.status, g.err = localOptimizer{}.runGlobal(g, operation, result, tasks)
+func (g *GradientDescent) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
+g.status, g.err = localOptimizer{}.run(g, operation, result, tasks)
 close(operation)
 return
 }
@@ -23,7 +23,7 @@ func (g *GuessAndCheck) Needs() struct{ Gradient, Hessian bool } {
 return struct{ Gradient, Hessian bool }{false, false}
 }

-func (g *GuessAndCheck) InitGlobal(dim, tasks int) int {
+func (g *GuessAndCheck) Init(dim, tasks int) int {
 if dim <= 0 {
 panic(nonpositiveDimension)
 }
@@ -35,13 +35,13 @@ func (g *GuessAndCheck) InitGlobal(dim, tasks int) int {
 return tasks
 }

-func (g *GuessAndCheck) sendNewLoc(operation chan<- GlobalTask, task GlobalTask) {
+func (g *GuessAndCheck) sendNewLoc(operation chan<- Task, task Task) {
 g.Rander.Rand(task.X)
 task.Op = FuncEvaluation
 operation <- task
 }

-func (g *GuessAndCheck) updateMajor(operation chan<- GlobalTask, task GlobalTask) {
+func (g *GuessAndCheck) updateMajor(operation chan<- Task, task Task) {
 // Update the best value seen so far, and send a MajorIteration.
 if task.F < g.bestF {
 g.bestF = task.F
@@ -54,7 +54,7 @@ func (g *GuessAndCheck) updateMajor(operation chan<- GlobalTask, task GlobalTask
 operation <- task
 }

-func (g *GuessAndCheck) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
+func (g *GuessAndCheck) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
 // Send initial tasks to evaluate
 for _, task := range tasks {
 g.sendNewLoc(operation, task)
@@ -48,14 +48,14 @@ func (l *LBFGS) Status() (Status, error) {
 return l.status, l.err
 }

-func (l *LBFGS) InitGlobal(dim, tasks int) int {
+func (l *LBFGS) Init(dim, tasks int) int {
 l.status = NotTerminated
 l.err = nil
 return 1
 }

-func (l *LBFGS) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
-l.status, l.err = localOptimizer{}.runGlobal(l, operation, result, tasks)
+func (l *LBFGS) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
+l.status, l.err = localOptimizer{}.run(l, operation, result, tasks)
 close(operation)
 return
 }
@@ -30,7 +30,7 @@ func (*ListSearch) Needs() struct{ Gradient, Hessian bool } {

 // InitGlobal initializes the method for optimization. The input dimension
 // must match the number of columns of Locs.
-func (l *ListSearch) InitGlobal(dim, tasks int) int {
+func (l *ListSearch) Init(dim, tasks int) int {
 if dim <= 0 {
 panic(nonpositiveDimension)
 }
@@ -51,7 +51,7 @@ func (l *ListSearch) InitGlobal(dim, tasks int) int {
 return min(r, tasks)
 }

-func (l *ListSearch) sendNewLoc(operation chan<- GlobalTask, task GlobalTask) {
+func (l *ListSearch) sendNewLoc(operation chan<- Task, task Task) {
 task.Op = FuncEvaluation
 task.ID = l.eval
 mat.Row(task.X, l.eval, l.Locs)
@@ -59,7 +59,7 @@ func (l *ListSearch) sendNewLoc(operation chan<- GlobalTask, task GlobalTask) {
 operation <- task
 }

-func (l *ListSearch) updateMajor(operation chan<- GlobalTask, task GlobalTask) {
+func (l *ListSearch) updateMajor(operation chan<- Task, task Task) {
 // Update the best value seen so far, and send a MajorIteration.
 if task.F < l.bestF {
 l.bestF = task.F
@@ -79,7 +79,7 @@ func (l *ListSearch) Status() (Status, error) {
 return MethodConverge, nil
 }

-func (l *ListSearch) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
+func (l *ListSearch) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
 // Send initial tasks to evaluate
 for _, task := range tasks {
 l.sendNewLoc(operation, task)
@@ -11,11 +11,11 @@ import (
 // localOptimizer is a helper type for running an optimization using a LocalMethod.
 type localOptimizer struct{}

-// RunGlobal controls the optimization run for a localMethod. The calling method
+// run controls the optimization run for a localMethod. The calling method
 // must close the operation channel at the conclusion of the optimization. This
 // provides a happens before relationship between the return of status and the
 // closure of operation, and thus a call to method.Status (if necessary).
-func (l localOptimizer) runGlobal(method localMethod, operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) (Status, error) {
+func (l localOptimizer) run(method localMethod, operation chan<- Task, result <-chan Task, tasks []Task) (Status, error) {
 // Local methods start with a fully-specified initial location.
 task := tasks[0]
 task = l.initialLocation(operation, result, task, method)
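The built-in local methods (BFGS, CG, LBFGS, GradientDescent, NelderMead, Newton) obtain this ordering by calling localOptimizer{}.run and then close(operation), as the hunks above and below show. A Method implemented outside the package has to perform the same drain-then-close shutdown itself. The following sketch is a deliberately trivial, hypothetical method (not part of this commit) that only illustrates that handshake:

```go
package sketch

import "gonum.org/v1/gonum/optimize"

// trivialMethod is a hypothetical Method used only to illustrate the
// required shutdown ordering; it evaluates the initial location once
// and then reports that it is done.
type trivialMethod struct{}

func (trivialMethod) Needs() struct{ Gradient, Hessian bool } {
	return struct{ Gradient, Hessian bool }{false, false}
}

func (trivialMethod) Init(dim, tasks int) int { return 1 }

// Status satisfies Statuser, which the Method documentation requires of
// any method that sends a MethodDone operation.
func (trivialMethod) Status() (optimize.Status, error) {
	return optimize.MethodConverge, nil
}

func (trivialMethod) Run(operation chan<- optimize.Task, result <-chan optimize.Task, tasks []optimize.Task) {
	// Evaluate the starting location, then signal method convergence.
	task := tasks[0]
	task.Op = optimize.FuncEvaluation
	operation <- task
	task = <-result

	task.Op = optimize.MethodDone
	operation <- task
	<-result

	// Drain result until the caller closes it, then close operation.
	// Ranging over result establishes the happens-before relationship
	// between result being closed and operation being closed that the
	// Method documentation requires.
	for range result {
	}
	close(operation)
}
```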
@@ -67,7 +67,7 @@ Loop:

 // initialOperation returns the Operation needed to fill the initial location
 // based on the needs of the method and the values already supplied.
-func (localOptimizer) initialOperation(task GlobalTask, needser Needser) Operation {
+func (localOptimizer) initialOperation(task Task, needser Needser) Operation {
 var newOp Operation
 op := task.Op
 if op&FuncEvaluation == 0 {
@@ -85,13 +85,13 @@ func (localOptimizer) initialOperation(task GlobalTask, needser Needser) Operati

 // initialLocation fills the initial location based on the needs of the method.
 // The task passed to initialLocation should be the first task sent in RunGlobal.
-func (l localOptimizer) initialLocation(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask, needser Needser) GlobalTask {
+func (l localOptimizer) initialLocation(operation chan<- Task, result <-chan Task, task Task, needser Needser) Task {
 task.Op = l.initialOperation(task, needser)
 operation <- task
 return <-result
 }

-func (localOptimizer) checkStartingLocation(task GlobalTask) (Status, error) {
+func (localOptimizer) checkStartingLocation(task Task) (Status, error) {
 if math.IsInf(task.F, 1) || math.IsNaN(task.F) {
 return Failure, ErrFunc(task.F)
 }
@@ -104,7 +104,7 @@ func (localOptimizer) checkStartingLocation(task GlobalTask) (Status, error) {
 }

 // finish completes the channel operations to finish an optimization.
-func (localOptimizer) finish(operation chan<- GlobalTask, result <-chan GlobalTask) {
+func (localOptimizer) finish(operation chan<- Task, result <-chan Task) {
 // Guarantee that result is closed before operation is closed.
 for range result {
 }
@@ -112,7 +112,7 @@ func (localOptimizer) finish(operation chan<- GlobalTask, result <-chan GlobalTa

 // finishMethodDone sends a MethodDone signal on operation, reads the result,
 // and completes the channel operations to finish an optimization.
-func (l localOptimizer) finishMethodDone(operation chan<- GlobalTask, result <-chan GlobalTask, task GlobalTask) {
+func (l localOptimizer) finishMethodDone(operation chan<- Task, result <-chan Task, task Task) {
 task.Op = MethodDone
 operation <- task
 task = <-result
@@ -89,14 +89,14 @@ func (n *NelderMead) Status() (Status, error) {
 return n.status, n.err
 }

-func (n *NelderMead) InitGlobal(dim, tasks int) int {
+func (n *NelderMead) Init(dim, tasks int) int {
 n.status = NotTerminated
 n.err = nil
 return 1
 }

-func (n *NelderMead) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
-n.status, n.err = localOptimizer{}.runGlobal(n, operation, result, tasks)
+func (n *NelderMead) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
+n.status, n.err = localOptimizer{}.run(n, operation, result, tasks)
 close(operation)
 return
 }
@@ -60,14 +60,14 @@ func (n *Newton) Status() (Status, error) {
 return n.status, n.err
 }

-func (n *Newton) InitGlobal(dim, tasks int) int {
+func (n *Newton) Init(dim, tasks int) int {
 n.status = NotTerminated
 n.err = nil
 return 1
 }

-func (n *Newton) RunGlobal(operation chan<- GlobalTask, result <-chan GlobalTask, tasks []GlobalTask) {
-n.status, n.err = localOptimizer{}.runGlobal(n, operation, result, tasks)
+func (n *Newton) Run(operation chan<- Task, result <-chan Task, tasks []Task) {
+n.status, n.err = localOptimizer{}.run(n, operation, result, tasks)
 close(operation)
 return
 }
@@ -1154,7 +1154,7 @@ func TestNewton(t *testing.T) {
 testLocal(t, newtonTests, &Newton{})
 }

-func testLocal(t *testing.T, tests []unconstrainedTest, method GlobalMethod) {
+func testLocal(t *testing.T, tests []unconstrainedTest, method Method) {
 for cas, test := range tests {
 if test.long && testing.Short() {
 continue