optimize: Change initialization, remove Needser, and update Problem function calls (#779)

* optimize: Change initialization, remove Needser, and update Problem function calls

We need a better way to express the Hessian function call so that sparse Hessians can be provided. This change updates the Problem function definitions to accept an arbitrary Symmetric matrix. As a consequence, the way Location is used must also change so that we do not unconditionally allocate a SymDense. With Location handled this way, Needser is no longer needed to allocate the appropriate memory; that allocation moves to initialization, further simplifying the interfaces.
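
The convention is visible throughout the diff below: Grad and Hess accept a possibly-nil destination, allocate it when nil, and return it, and Hess now takes a mat.Symmetric rather than a mat.MutableSymmetric. A minimal, self-contained sketch of the same pattern on a toy quadratic objective (the Quad type here is a hypothetical illustration, not part of this commit):

package main

import (
    "fmt"

    "gonum.org/v1/gonum/mat"
)

// Quad is a toy objective f(x) = x[0]^2 + x[1]^2 used only to
// illustrate the allocate-when-nil convention from this change.
type Quad struct{}

func (Quad) Func(x []float64) float64 {
    return x[0]*x[0] + x[1]*x[1]
}

// Grad allocates the gradient when grad is nil and returns it,
// mirroring the updated test-function signatures in the diff.
func (Quad) Grad(grad, x []float64) []float64 {
    if grad == nil {
        grad = make([]float64, len(x))
    }
    if len(grad) != len(x) {
        panic("incorrect size of the gradient")
    }
    grad[0] = 2 * x[0]
    grad[1] = 2 * x[1]
    return grad
}

// Hess accepts an arbitrary mat.Symmetric, allocating a SymDense
// when hess is nil, and returns the filled matrix.
func (Quad) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
    if hess == nil {
        hess = mat.NewSymDense(len(x), nil)
    }
    if hess.Symmetric() != len(x) {
        panic("incorrect size of the Hessian")
    }
    h := hess.(*mat.SymDense)
    h.SetSym(0, 0, 2)
    h.SetSym(0, 1, 0)
    h.SetSym(1, 1, 2)
    return h
}

func main() {
    x := []float64{1, 2}
    g := Quad{}.Grad(nil, x) // nil triggers allocation
    h := Quad{}.Hess(nil, x)
    fmt.Println(g)
    fmt.Println(mat.Formatted(h))
}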

A 'fake' Problem is passed to Method so that it remains impossible for a Method to call the functions directly.
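
The 'fake' Problem mechanism can be pictured with a small sketch (the types and names below are hypothetical stand-ins, not gonum's actual implementation): the value handed to the Method has the shape of a Problem, but its function fields panic, so evaluations can only go through the outer optimization loop.

package main

import "fmt"

// problem is a hypothetical stand-in for optimize.Problem, used only
// to illustrate the idea of a "fake" Problem; it is not gonum's type.
type problem struct {
    Func func(x []float64) float64
}

// fakeProblem returns a problem whose function field panics when
// called, so a Method handed this value cannot evaluate the
// objective itself and must request evaluations from the outer loop.
func fakeProblem() problem {
    return problem{
        Func: func(x []float64) float64 {
            panic("optimize: Method called Func directly")
        },
    }
}

func main() {
    p := fakeProblem()
    defer func() {
        if r := recover(); r != nil {
            fmt.Println("recovered:", r) // the direct call below panics as intended
        }
    }()
    p.Func([]float64{1, 2})
}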

Fixes #727, #593.
Author: Brendan Tracey
Date: 2019-02-01 15:26:26 +00:00
Committed by: GitHub
Parent: 199b7405a3
Commit: c07f678f3f
20 changed files with 427 additions and 192 deletions


@@ -36,10 +36,13 @@ func (Beale) Func(x []float64) float64 {
return f1*f1 + f2*f2 + f3*f3
}
func (Beale) Grad(grad, x []float64) {
func (Beale) Grad(grad, x []float64) []float64 {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -54,12 +57,16 @@ func (Beale) Grad(grad, x []float64) {
grad[0] = -2 * (f1*t1 + f2*t2 + f3*t3)
grad[1] = 2 * x[0] * (f1 + 2*f2*x[1] + 3*f3*x[1]*x[1])
return grad
}
func (Beale) Hess(hess mat.MutableSymmetric, x []float64) {
func (Beale) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if hess == nil {
hess = mat.NewSymDense(len(x), nil)
}
if len(x) != hess.Symmetric() {
panic("incorrect size of the Hessian")
}
@@ -74,9 +81,11 @@ func (Beale) Hess(hess mat.MutableSymmetric, x []float64) {
h00 := 2 * (t1*t1 + t2*t2 + t3*t3)
h01 := 2 * (f1 + x[1]*(2*f2+3*x[1]*f3) - x[0]*(t1+x[1]*(2*t2+3*x[1]*t3)))
h11 := 2 * x[0] * (x[0] + 2*f2 + x[1]*(6*f3+x[0]*x[1]*(4+9*x[1]*x[1])))
hess.SetSym(0, 0, h00)
hess.SetSym(0, 1, h01)
hess.SetSym(1, 1, h11)
h := hess.(*mat.SymDense)
h.SetSym(0, 0, h00)
h.SetSym(0, 1, h01)
h.SetSym(1, 1, h11)
return h
}
func (Beale) Minima() []Minimum {
@@ -113,10 +122,13 @@ func (BiggsEXP2) Func(x []float64) (sum float64) {
return sum
}
func (BiggsEXP2) Grad(grad, x []float64) {
func (BiggsEXP2) Grad(grad, x []float64) []float64 {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -135,6 +147,7 @@ func (BiggsEXP2) Grad(grad, x []float64) {
grad[0] += 2 * f * dfdx0
grad[1] += 2 * f * dfdx1
}
return grad
}
func (BiggsEXP2) Minima() []Minimum {
@@ -171,10 +184,13 @@ func (BiggsEXP3) Func(x []float64) (sum float64) {
return sum
}
func (BiggsEXP3) Grad(grad, x []float64) {
func (BiggsEXP3) Grad(grad, x []float64) []float64 {
if len(x) != 3 {
panic("dimension of the problem must be 3")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -195,6 +211,7 @@ func (BiggsEXP3) Grad(grad, x []float64) {
grad[1] += 2 * f * dfdx1
grad[2] += 2 * f * dfdx2
}
return grad
}
func (BiggsEXP3) Minima() []Minimum {
@@ -231,10 +248,13 @@ func (BiggsEXP4) Func(x []float64) (sum float64) {
return sum
}
func (BiggsEXP4) Grad(grad, x []float64) {
func (BiggsEXP4) Grad(grad, x []float64) []float64 {
if len(x) != 4 {
panic("dimension of the problem must be 4")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -257,6 +277,7 @@ func (BiggsEXP4) Grad(grad, x []float64) {
grad[2] += 2 * f * dfdx2
grad[3] += 2 * f * dfdx3
}
return grad
}
func (BiggsEXP4) Minima() []Minimum {
@@ -293,10 +314,13 @@ func (BiggsEXP5) Func(x []float64) (sum float64) {
return sum
}
func (BiggsEXP5) Grad(grad, x []float64) {
func (BiggsEXP5) Grad(grad, x []float64) []float64 {
if len(x) != 5 {
panic("dimension of the problem must be 5")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -321,6 +345,7 @@ func (BiggsEXP5) Grad(grad, x []float64) {
grad[3] += 2 * f * dfdx3
grad[4] += 2 * f * dfdx4
}
return grad
}
func (BiggsEXP5) Minima() []Minimum {
@@ -360,10 +385,13 @@ func (BiggsEXP6) Func(x []float64) (sum float64) {
return sum
}
func (BiggsEXP6) Grad(grad, x []float64) {
func (BiggsEXP6) Grad(grad, x []float64) []float64 {
if len(x) != 6 {
panic("dimension of the problem must be 6")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -390,6 +418,7 @@ func (BiggsEXP6) Grad(grad, x []float64) {
grad[4] += 2 * f * dfdx4
grad[5] += 2 * f * dfdx5
}
return grad
}
func (BiggsEXP6) Minima() []Minimum {
@@ -440,10 +469,13 @@ func (Box3D) Func(x []float64) (sum float64) {
return sum
}
func (Box3D) Grad(grad, x []float64) {
func (Box3D) Grad(grad, x []float64) []float64 {
if len(x) != 3 {
panic("dimension of the problem must be 3")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -459,6 +491,7 @@ func (Box3D) Grad(grad, x []float64) {
grad[1] += -2 * f * c * math.Exp(c*x[1])
grad[2] += -2 * f * y
}
return grad
}
func (Box3D) Minima() []Minimum {
@@ -543,10 +576,13 @@ func (BrownBadlyScaled) Func(x []float64) float64 {
return f1*f1 + f2*f2 + f3*f3
}
func (BrownBadlyScaled) Grad(grad, x []float64) {
func (BrownBadlyScaled) Grad(grad, x []float64) []float64 {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -556,12 +592,16 @@ func (BrownBadlyScaled) Grad(grad, x []float64) {
f3 := x[0]*x[1] - 2
grad[0] = 2*f1 + 2*f3*x[1]
grad[1] = 2*f2 + 2*f3*x[0]
return grad
}
func (BrownBadlyScaled) Hess(hess mat.MutableSymmetric, x []float64) {
func (BrownBadlyScaled) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if hess == nil {
hess = mat.NewSymDense(len(x), nil)
}
if len(x) != hess.Symmetric() {
panic("incorrect size of the Hessian")
}
@@ -569,9 +609,11 @@ func (BrownBadlyScaled) Hess(hess mat.MutableSymmetric, x []float64) {
h00 := 2 + 2*x[1]*x[1]
h01 := 4*x[0]*x[1] - 4
h11 := 2 + 2*x[0]*x[0]
hess.SetSym(0, 0, h00)
hess.SetSym(0, 1, h01)
hess.SetSym(1, 1, h11)
h := hess.(*mat.SymDense)
h.SetSym(0, 0, h00)
h.SetSym(0, 1, h01)
h.SetSym(1, 1, h11)
return h
}
func (BrownBadlyScaled) Minima() []Minimum {
@@ -612,10 +654,13 @@ func (BrownAndDennis) Func(x []float64) (sum float64) {
return sum
}
func (BrownAndDennis) Grad(grad, x []float64) {
func (BrownAndDennis) Grad(grad, x []float64) []float64 {
if len(x) != 4 {
panic("dimension of the problem must be 4")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -633,19 +678,24 @@ func (BrownAndDennis) Grad(grad, x []float64) {
grad[2] += 4 * f * f2
grad[3] += 4 * f * f2 * math.Sin(c)
}
return grad
}
func (BrownAndDennis) Hess(hess mat.MutableSymmetric, x []float64) {
func (BrownAndDennis) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
if len(x) != 4 {
panic("dimension of the problem must be 4")
}
if hess == nil {
hess = mat.NewSymDense(len(x), nil)
}
if len(x) != hess.Symmetric() {
panic("incorrect size of the Hessian")
}
h := hess.(*mat.SymDense)
for i := 0; i < 4; i++ {
for j := i; j < 4; j++ {
hess.SetSym(i, j, 0)
h.SetSym(i, j, 0)
}
}
for i := 1; i <= 20; i++ {
@@ -657,22 +707,23 @@ func (BrownAndDennis) Hess(hess mat.MutableSymmetric, x []float64) {
s3 := 2 * t1 * t2
r1 := t + 2*t1*t1
r2 := t + 2*t2*t2
hess.SetSym(0, 0, hess.At(0, 0)+r1)
hess.SetSym(0, 1, hess.At(0, 1)+d1*r1)
hess.SetSym(1, 1, hess.At(1, 1)+d1*d1*r1)
hess.SetSym(0, 2, hess.At(0, 2)+s3)
hess.SetSym(1, 2, hess.At(1, 2)+d1*s3)
hess.SetSym(2, 2, hess.At(2, 2)+r2)
hess.SetSym(0, 3, hess.At(0, 3)+d2*s3)
hess.SetSym(1, 3, hess.At(1, 3)+d1*d2*s3)
hess.SetSym(2, 3, hess.At(2, 3)+d2*r2)
hess.SetSym(3, 3, hess.At(3, 3)+d2*d2*r2)
h.SetSym(0, 0, h.At(0, 0)+r1)
h.SetSym(0, 1, h.At(0, 1)+d1*r1)
h.SetSym(1, 1, h.At(1, 1)+d1*d1*r1)
h.SetSym(0, 2, h.At(0, 2)+s3)
h.SetSym(1, 2, h.At(1, 2)+d1*s3)
h.SetSym(2, 2, h.At(2, 2)+r2)
h.SetSym(0, 3, h.At(0, 3)+d2*s3)
h.SetSym(1, 3, h.At(1, 3)+d1*d2*s3)
h.SetSym(2, 3, h.At(2, 3)+d2*r2)
h.SetSym(3, 3, h.At(3, 3)+d2*d2*r2)
}
for i := 0; i < 4; i++ {
for j := i; j < 4; j++ {
hess.SetSym(i, j, 4*hess.At(i, j))
h.SetSym(i, j, 4*h.At(i, j))
}
}
return h
}
func (BrownAndDennis) Minima() []Minimum {
@@ -716,10 +767,13 @@ func (ExtendedPowellSingular) Func(x []float64) (sum float64) {
return sum
}
func (ExtendedPowellSingular) Grad(grad, x []float64) {
func (ExtendedPowellSingular) Grad(grad, x []float64) []float64 {
if len(x)%4 != 0 {
panic("dimension of the problem must be a multiple of 4")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -736,6 +790,7 @@ func (ExtendedPowellSingular) Grad(grad, x []float64) {
grad[i+2] = 10*f2 - 8*f3*t1
grad[i+3] = -10*f2 - 40*f4*t2
}
return grad
}
func (ExtendedPowellSingular) Minima() []Minimum {
@@ -780,7 +835,10 @@ func (ExtendedRosenbrock) Func(x []float64) (sum float64) {
return sum
}
func (ExtendedRosenbrock) Grad(grad, x []float64) {
func (ExtendedRosenbrock) Grad(grad, x []float64) []float64 {
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -796,6 +854,7 @@ func (ExtendedRosenbrock) Grad(grad, x []float64) {
for i := 1; i < dim; i++ {
grad[i] += 200 * (x[i] - x[i-1]*x[i-1])
}
return grad
}
func (ExtendedRosenbrock) Minima() []Minimum {
@@ -902,10 +961,13 @@ func (g Gaussian) Func(x []float64) (sum float64) {
return sum
}
func (g Gaussian) Grad(grad, x []float64) {
func (g Gaussian) Grad(grad, x []float64) []float64 {
if len(x) != 3 {
panic("dimension of the problem must be 3")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -923,6 +985,7 @@ func (g Gaussian) Grad(grad, x []float64) {
grad[1] -= f * e * d * x[0]
grad[2] += 2 * f * e * x[0] * x[1] * b
}
return grad
}
func (Gaussian) Minima() []Minimum {
@@ -964,10 +1027,13 @@ func (GulfResearchAndDevelopment) Func(x []float64) (sum float64) {
return sum
}
func (GulfResearchAndDevelopment) Grad(grad, x []float64) {
func (GulfResearchAndDevelopment) Grad(grad, x []float64) []float64 {
if len(x) != 3 {
panic("dimension of the problem must be 3")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -989,6 +1055,7 @@ func (GulfResearchAndDevelopment) Grad(grad, x []float64) {
grad[0] *= 2 / x[0]
grad[1] *= 2 * x[2]
grad[2] *= 2
return grad
}
func (GulfResearchAndDevelopment) Minima() []Minimum {
@@ -1042,10 +1109,13 @@ func (HelicalValley) Func(x []float64) float64 {
return f1*f1 + f2*f2 + f3*f3
}
func (HelicalValley) Grad(grad, x []float64) {
func (HelicalValley) Grad(grad, x []float64) []float64 {
if len(x) != 3 {
panic("dimension of the problem must be 3")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1064,6 +1134,7 @@ func (HelicalValley) Grad(grad, x []float64) {
grad[0] = 200 * (5*s*q*x[1] + (h-1)*r*x[0])
grad[1] = 200 * (-5*s*q*x[0] + (h-1)*r*x[1])
grad[2] = 2 * (100*s + x[2])
return grad
}
func (HelicalValley) Minima() []Minimum {
@@ -1083,7 +1154,7 @@ func (Linear) Func(x []float64) float64 {
return floats.Sum(x)
}
func (Linear) Grad(grad, x []float64) {
func (Linear) Grad(grad, x []float64) []float64 {
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1091,6 +1162,7 @@ func (Linear) Grad(grad, x []float64) {
for i := range grad {
grad[i] = 1
}
return grad
}
// PenaltyI implements the first penalty function by Gill, Murray and Pitfield.
@@ -1120,7 +1192,10 @@ func (PenaltyI) Func(x []float64) (sum float64) {
return sum
}
func (PenaltyI) Grad(grad, x []float64) {
func (PenaltyI) Grad(grad, x []float64) []float64 {
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1132,6 +1207,7 @@ func (PenaltyI) Grad(grad, x []float64) {
for i, v := range x {
grad[i] = 2 * (2*s*v + 1e-5*(v-1))
}
return grad
}
func (PenaltyI) Minima() []Minimum {
@@ -1185,7 +1261,10 @@ func (PenaltyII) Func(x []float64) (sum float64) {
return sum
}
func (PenaltyII) Grad(grad, x []float64) {
func (PenaltyII) Grad(grad, x []float64) []float64 {
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1209,6 +1288,7 @@ func (PenaltyII) Grad(grad, x []float64) {
grad[i] += 1e-5 * f * math.Exp(x[i]/10) / 5
}
grad[0] += 2 * (x[0] - 0.2)
return grad
}
func (PenaltyII) Minima() []Minimum {
@@ -1254,10 +1334,13 @@ func (PowellBadlyScaled) Func(x []float64) float64 {
return f1*f1 + f2*f2
}
func (PowellBadlyScaled) Grad(grad, x []float64) {
func (PowellBadlyScaled) Grad(grad, x []float64) []float64 {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1266,12 +1349,16 @@ func (PowellBadlyScaled) Grad(grad, x []float64) {
f2 := math.Exp(-x[0]) + math.Exp(-x[1]) - 1.0001
grad[0] = 2 * (1e4*f1*x[1] - f2*math.Exp(-x[0]))
grad[1] = 2 * (1e4*f1*x[0] - f2*math.Exp(-x[1]))
return grad
}
func (PowellBadlyScaled) Hess(hess mat.MutableSymmetric, x []float64) {
func (PowellBadlyScaled) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
if len(x) != 2 {
panic("dimension of the problem must be 2")
}
if hess == nil {
hess = mat.NewSymDense(len(x), nil)
}
if len(x) != hess.Symmetric() {
panic("incorrect size of the Hessian")
}
@@ -1281,12 +1368,14 @@ func (PowellBadlyScaled) Hess(hess mat.MutableSymmetric, x []float64) {
s2 := math.Exp(-x[1])
t2 := s1 + s2 - 1.0001
h := hess.(*mat.SymDense)
h00 := 2 * (1e8*x[1]*x[1] + s1*(s1+t2))
h01 := 2 * (1e4*(1+2*t1) + s1*s2)
h11 := 2 * (1e8*x[0]*x[0] + s2*(s2+t2))
hess.SetSym(0, 0, h00)
hess.SetSym(0, 1, h01)
hess.SetSym(1, 1, h11)
h.SetSym(0, 0, h00)
h.SetSym(0, 1, h01)
h.SetSym(1, 1, h11)
return h
}
func (PowellBadlyScaled) Minima() []Minimum {
@@ -1324,7 +1413,10 @@ func (Trigonometric) Func(x []float64) (sum float64) {
return sum
}
func (Trigonometric) Grad(grad, x []float64) {
func (Trigonometric) Grad(grad, x []float64) []float64 {
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1344,6 +1436,7 @@ func (Trigonometric) Grad(grad, x []float64) {
for i, v := range x {
grad[i] += 2 * s2 * math.Sin(v)
}
return grad
}
func (Trigonometric) Minima() []Minimum {
@@ -1396,7 +1489,10 @@ func (VariablyDimensioned) Func(x []float64) (sum float64) {
return sum
}
func (VariablyDimensioned) Grad(grad, x []float64) {
func (VariablyDimensioned) Grad(grad, x []float64) []float64 {
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1408,6 +1504,7 @@ func (VariablyDimensioned) Grad(grad, x []float64) {
for i, v := range x {
grad[i] = 2 * (v - 1 + s*float64(i+1)*(1+2*s*s))
}
return grad
}
func (VariablyDimensioned) Minima() []Minimum {
@@ -1480,7 +1577,10 @@ func (Watson) Func(x []float64) (sum float64) {
return sum
}
func (Watson) Grad(grad, x []float64) {
func (Watson) Grad(grad, x []float64) []float64 {
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1516,17 +1616,21 @@ func (Watson) Grad(grad, x []float64) {
t := x[1] - x[0]*x[0] - 1
grad[0] += x[0] * (2 - 4*t)
grad[1] += 2 * t
return grad
}
func (Watson) Hess(hess mat.MutableSymmetric, x []float64) {
func (Watson) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
dim := len(x)
if hess == nil {
hess = mat.NewSymDense(len(x), nil)
}
if dim != hess.Symmetric() {
panic("incorrect size of the Hessian")
}
h := hess.(*mat.SymDense)
for j := 0; j < dim; j++ {
for k := j; k < dim; k++ {
hess.SetSym(j, k, 0)
h.SetSym(j, k, 0)
}
}
for i := 1; i <= 29; i++ {
@@ -1553,16 +1657,17 @@ func (Watson) Hess(hess mat.MutableSymmetric, x []float64) {
v := float64(j) - s3
d3 := 1 / d1
for k := 0; k <= j; k++ {
hess.SetSym(k, j, hess.At(k, j)+d2*d3*(v*(float64(k)-s3)-th))
h.SetSym(k, j, h.At(k, j)+d2*d3*(v*(float64(k)-s3)-th))
d3 *= d1
}
d2 *= d1
}
}
t1 := x[1] - x[0]*x[0] - 1
hess.SetSym(0, 0, hess.At(0, 0)+8*x[0]*x[0]+2-4*t1)
hess.SetSym(0, 1, hess.At(0, 1)-4*x[0])
hess.SetSym(1, 1, hess.At(1, 1)+2)
h.SetSym(0, 0, h.At(0, 0)+8*x[0]*x[0]+2-4*t1)
h.SetSym(0, 1, h.At(0, 1)-4*x[0])
h.SetSym(1, 1, h.At(1, 1)+2)
return h
}
func (Watson) Minima() []Minimum {
@@ -1618,10 +1723,13 @@ func (Wood) Func(x []float64) (sum float64) {
return 100*f1*f1 + f2*f2 + 90*f3*f3 + f4*f4 + 10*f5*f5 + 0.1*f6*f6
}
func (Wood) Grad(grad, x []float64) {
func (Wood) Grad(grad, x []float64) []float64 {
if len(x) != 4 {
panic("dimension of the problem must be 4")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1636,26 +1744,32 @@ func (Wood) Grad(grad, x []float64) {
grad[1] = 2 * (100*f1 + 10*f5 + 0.1*f6)
grad[2] = -2 * (180*f3*x[2] + f4)
grad[3] = 2 * (90*f3 + 10*f5 - 0.1*f6)
return grad
}
func (Wood) Hess(hess mat.MutableSymmetric, x []float64) {
func (Wood) Hess(hess mat.Symmetric, x []float64) mat.Symmetric {
if len(x) != 4 {
panic("dimension of the problem must be 4")
}
if hess == nil {
hess = mat.NewSymDense(len(x), nil)
}
if len(x) != hess.Symmetric() {
panic("incorrect size of the Hessian")
}
h := hess.(*mat.SymDense)
hess.SetSym(0, 0, 400*(3*x[0]*x[0]-x[1])+2)
hess.SetSym(0, 1, -400*x[0])
hess.SetSym(1, 1, 220.2)
hess.SetSym(0, 2, 0)
hess.SetSym(1, 2, 0)
hess.SetSym(2, 2, 360*(3*x[2]*x[2]-x[3])+2)
hess.SetSym(0, 3, 0)
hess.SetSym(1, 3, 19.8)
hess.SetSym(2, 3, -360*x[2])
hess.SetSym(3, 3, 200.2)
h.SetSym(0, 0, 400*(3*x[0]*x[0]-x[1])+2)
h.SetSym(0, 1, -400*x[0])
h.SetSym(1, 1, 220.2)
h.SetSym(0, 2, 0)
h.SetSym(1, 2, 0)
h.SetSym(2, 2, 360*(3*x[2]*x[2]-x[3])+2)
h.SetSym(0, 3, 0)
h.SetSym(1, 3, 19.8)
h.SetSym(2, 3, -360*x[2])
h.SetSym(3, 3, 200.2)
return h
}
func (Wood) Minima() []Minimum {
@@ -1683,7 +1797,7 @@ func (ConcaveRight) Func(x []float64) float64 {
return -x[0] / (x[0]*x[0] + 2)
}
func (ConcaveRight) Grad(grad, x []float64) {
func (ConcaveRight) Grad(grad, x []float64) []float64 {
if len(x) != 1 {
panic("dimension of the problem must be 1")
}
@@ -1692,6 +1806,7 @@ func (ConcaveRight) Grad(grad, x []float64) {
}
xSqr := x[0] * x[0]
grad[0] = (xSqr - 2) / (xSqr + 2) / (xSqr + 2)
return grad
}
// ConcaveLeft implements an univariate function that is concave to the left of
@@ -1709,14 +1824,18 @@ func (ConcaveLeft) Func(x []float64) float64 {
return math.Pow(x[0]+0.004, 4) * (x[0] - 1.996)
}
func (ConcaveLeft) Grad(grad, x []float64) {
func (ConcaveLeft) Grad(grad, x []float64) []float64 {
if len(x) != 1 {
panic("dimension of the problem must be 1")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
grad[0] = math.Pow(x[0]+0.004, 3) * (5*x[0] - 7.98)
return grad
}
// Plassmann implements an univariate oscillatory function where the value of L
@@ -1753,10 +1872,13 @@ func (f Plassmann) Func(x []float64) float64 {
return r
}
func (f Plassmann) Grad(grad, x []float64) {
func (f Plassmann) Grad(grad, x []float64) []float64 {
if len(x) != 1 {
panic("dimension of the problem must be 1")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1772,6 +1894,7 @@ func (f Plassmann) Grad(grad, x []float64) {
default: // a > 1+b
grad[0]++
}
return grad
}
// YanaiOzawaKaneko is an univariate convex function where the values of Beta1
@@ -1802,10 +1925,13 @@ func (f YanaiOzawaKaneko) Func(x []float64) float64 {
return g1*math.Sqrt((a-1)*(a-1)+b2*b2) + g2*math.Sqrt(a*a+b1*b1)
}
func (f YanaiOzawaKaneko) Grad(grad, x []float64) {
func (f YanaiOzawaKaneko) Grad(grad, x []float64) []float64 {
if len(x) != 1 {
panic("dimension of the problem must be 1")
}
if grad == nil {
grad = make([]float64, len(x))
}
if len(x) != len(grad) {
panic("incorrect size of the gradient")
}
@@ -1815,4 +1941,5 @@ func (f YanaiOzawaKaneko) Grad(grad, x []float64) {
g1 := math.Sqrt(1+b1*b1) - b1
g2 := math.Sqrt(1+b2*b2) - b2
grad[0] = g1*(a-1)/math.Sqrt(b2*b2+(a-1)*(a-1)) + g2*a/math.Sqrt(b1*b1+a*a)
return grad
}