From 2f77175663e6da5841869d7295eca2c1cdb9349f Mon Sep 17 00:00:00 2001
From: btoutee
Date: Thu, 14 Apr 2022 20:18:21 -0400
Subject: [PATCH 1/3] Added Gaussian problem from Testing Unconstrained Optimization Software, done for the course MTH8408 at Polytechnique

---
 src/ADNLPProblems/gaussian.jl | 14 ++++++++++++++
 src/Meta/gaussian.jl          | 25 +++++++++++++++++++++++++
 src/PureJuMP/gaussian.jl      | 34 ++++++++++++++++++++++++++++++++++
 3 files changed, 73 insertions(+)
 create mode 100644 src/ADNLPProblems/gaussian.jl
 create mode 100644 src/Meta/gaussian.jl
 create mode 100644 src/PureJuMP/gaussian.jl

diff --git a/src/ADNLPProblems/gaussian.jl b/src/ADNLPProblems/gaussian.jl
new file mode 100644
index 00000000..2c8b95f5
--- /dev/null
+++ b/src/ADNLPProblems/gaussian.jl
@@ -0,0 +1,14 @@
+export gaussian
+
+function gaussian(; n::Int = default_nvar, type::Val{T} = Val(Float64), kwargs...) where {T}
+  function f(x)
+    n = 3
+    m = 15
+    t(i)=(8-i)/2
+    y = [0.0009, 0.0044, 0.0175, 0.054, 0.1295, 0.2420, 0.3521, 0.3989, 0.3521, 0.2420, 0.1295, 0.054, 0.0175, 0.0044, 0.0009]
+
+    return sum((x[1]*exp((-x[2]*(t(i)-x[3]^2))/2)-y[i])^2 for i = 1:m)
+  end
+  x0 = [0.4, 1, 0]
+  return ADNLPModels.ADNLPModel(f, x0, name = "gaussian"; kwargs...)
+end
diff --git a/src/Meta/gaussian.jl b/src/Meta/gaussian.jl
new file mode 100644
index 00000000..d5250333
--- /dev/null
+++ b/src/Meta/gaussian.jl
@@ -0,0 +1,25 @@
+gaussian_meta = Dict(
+  :nvar => 3,
+  :variable_nvar => false,
+  :ncon => 0,
+  :variable_ncon => false,
+  :minimize => true,
+  :name => "gaussian",
+  :has_equalities_only => false,
+  :has_inequalities_only => false,
+  :has_bounds => false,
+  :has_fixed_variables => false,
+  :objtype => :other,
+  :contype => :unconstrained,
+  :best_known_lower_bound => -Inf,
+  :best_known_upper_bound => 500.0,
+  :is_feasible => true,
+  :defined_everywhere => missing,
+  :origin => :unknown,
+)
+get_gaussian_nvar(; n::Integer = default_nvar, kwargs...) = 1 * n + 0
+get_gaussian_ncon(; n::Integer = default_nvar, kwargs...) = 0
+get_gaussian_nlin(; n::Integer = default_nvar, kwargs...) = 0
+get_gaussian_nnln(; n::Integer = default_nvar, kwargs...) = 0
+get_gaussian_nequ(; n::Integer = default_nvar, kwargs...) = 0
+get_gaussian_nineq(; n::Integer = default_nvar, kwargs...) = 0
diff --git a/src/PureJuMP/gaussian.jl b/src/PureJuMP/gaussian.jl
new file mode 100644
index 00000000..68e1d0bd
--- /dev/null
+++ b/src/PureJuMP/gaussian.jl
@@ -0,0 +1,34 @@
+# Linear function - full rank
+#
+# Source: problem 9 in
+# J.J. More', B.S. Garbow and K.E. Hillstrom,
+# "Testing Unconstrained Optimization Software",
+# ACM Transactions on Mathematical Software, vol. 7(1), pp. 17-41, 1981.
+#
+# See also Buckley#80 (with different N and M)
+#
+# classification SUR2-AN-V-0
+
+export gaussian
+
+"Linear function with `n` parameters and `m` observations - full rank"
+function gaussian(args...; n::Int = default_nvar, m::Int = 2n, kwargs...)
+  n = 3
+  m = 15
+
+
+  t(i)=(8-i)/2
+  y = [0.0009, 0.0044, 0.0175, 0.054, 0.1295, 0.2420, 0.3521, 0.3989, 0.3521, 0.2420, 0.1295, 0.054, 0.0175, 0.0044, 0.0009]
+
+  nlp = Model()
+
+  @variable(nlp, x[j = 1:n], start = [0.4, 1, 0])
+
+  @NLobjective(
+    nlp,
+    Min,
+    sum((x[1]*exp((-x[2]*(t(i)-x[3]^2))/2)-y[i])^2 for i = 1:m)
+  )
+
+  return nlp
+end
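The ADNLPProblems entry in this first patch can be smoke-tested outside the package by restating the objective by hand. A minimal sketch, assuming ADNLPModels and NLPModels are installed; the data, objective, and starting point are copied verbatim from the patch, so default_nvar is not needed:

using ADNLPModels, NLPModels

# Data and objective copied from src/ADNLPProblems/gaussian.jl above.
t(i) = (8 - i) / 2
y = [0.0009, 0.0044, 0.0175, 0.054, 0.1295, 0.2420, 0.3521, 0.3989,
     0.3521, 0.2420, 0.1295, 0.054, 0.0175, 0.0044, 0.0009]
f(x) = sum((x[1] * exp((-x[2] * (t(i) - x[3]^2)) / 2) - y[i])^2 for i = 1:15)

nlp = ADNLPModel(f, [0.4, 1.0, 0.0], name = "gaussian")
println(obj(nlp, nlp.meta.x0))   # objective value at the standard starting point
println(grad(nlp, nlp.meta.x0))  # gradient computed by automatic differentiation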
From 975474100a184b69710f72ed52eed55f8646e2f8 Mon Sep 17 00:00:00 2001
From: btoutee
Date: Thu, 14 Apr 2022 20:34:42 -0400
Subject: [PATCH 2/3] Added Kowalik and Osborne problem (#15 in Testing Unconstrained Optimization Software), done for the course MTH8408 at Polytechnique

---
 src/ADNLPProblems/kowalikosborne.jl | 14 ++++++++++++++
 src/Meta/kowalikosborne.jl          | 25 +++++++++++++++++++++++++
 2 files changed, 39 insertions(+)
 create mode 100644 src/ADNLPProblems/kowalikosborne.jl
 create mode 100644 src/Meta/kowalikosborne.jl

diff --git a/src/ADNLPProblems/kowalikosborne.jl b/src/ADNLPProblems/kowalikosborne.jl
new file mode 100644
index 00000000..37bc5e75
--- /dev/null
+++ b/src/ADNLPProblems/kowalikosborne.jl
@@ -0,0 +1,14 @@
+export kowalikosborne
+
+function kowalikosborne(; n::Int = default_nvar, type::Val{T} = Val(Float64), kwargs...) where {T}
+  function f(x)
+    n = 4
+    m = 11
+    y = [0.1957, 0.1947, 0.1735, 0.1600, 0.0844, 0.0627, 0.0456, 0.0342, 0.0323, 0.0235, 0.0246]
+    u = [4, 2, 1, 0.5, 0.25, 0.167, 0.125, 0.1, 0.833, 0.0714, 0.0625]
+
+    return sum((y[i] - (x[1]*(u[i]^2 + u[i]*x[2]))/(u[i]^2 + u[i]*x[3] + x[4]))^2 for i = 1:m)
+  end
+  x0 = [0.25, 0.39, 0.415, 0.39]
+  return ADNLPModels.ADNLPModel(f, x0, name = "kowalikosborne"; kwargs...)
+end
diff --git a/src/Meta/kowalikosborne.jl b/src/Meta/kowalikosborne.jl
new file mode 100644
index 00000000..b66eb5c4
--- /dev/null
+++ b/src/Meta/kowalikosborne.jl
@@ -0,0 +1,25 @@
+kowalikosborne_meta = Dict(
+  :nvar => 4,
+  :variable_nvar => false,
+  :ncon => 0,
+  :variable_ncon => false,
+  :minimize => true,
+  :name => "kowalikosborne",
+  :has_equalities_only => false,
+  :has_inequalities_only => false,
+  :has_bounds => false,
+  :has_fixed_variables => false,
+  :objtype => :other,
+  :contype => :unconstrained,
+  :best_known_lower_bound => -Inf,
+  :best_known_upper_bound => 500.0,
+  :is_feasible => true,
+  :defined_everywhere => missing,
+  :origin => :unknown,
+)
+get_kowalikosborne_nvar(; n::Integer = default_nvar, kwargs...) = 1 * n + 0
+get_kowalikosborne_ncon(; n::Integer = default_nvar, kwargs...) = 0
+get_kowalikosborne_nlin(; n::Integer = default_nvar, kwargs...) = 0
+get_kowalikosborne_nnln(; n::Integer = default_nvar, kwargs...) = 0
+get_kowalikosborne_nequ(; n::Integer = default_nvar, kwargs...) = 0
+get_kowalikosborne_nineq(; n::Integer = default_nvar, kwargs...) = 0
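Because the Kowalik and Osborne objective is a plain sum of squared residuals, it could also be exposed through the least-squares constructor of ADNLPModels. A minimal sketch, assuming ADNLSModel is available and that the NLPModels least-squares objective is ½‖F(x)‖² (data copied verbatim from the patch):

using ADNLPModels, NLPModels

y = [0.1957, 0.1947, 0.1735, 0.1600, 0.0844, 0.0627, 0.0456, 0.0342, 0.0323, 0.0235, 0.0246]
u = [4, 2, 1, 0.5, 0.25, 0.167, 0.125, 0.1, 0.833, 0.0714, 0.0625]

# Residuals y_i - x1 (u_i^2 + u_i x2) / (u_i^2 + u_i x3 + x4), i = 1, ..., 11.
F(x) = [y[i] - x[1] * (u[i]^2 + u[i] * x[2]) / (u[i]^2 + u[i] * x[3] + x[4]) for i = 1:11]

x0 = [0.25, 0.39, 0.415, 0.39]
nls = ADNLSModel(F, x0, 11, name = "kowalikosborne-nls")

# obj of a least-squares model is ½‖F(x)‖², so twice this value should match
# the sum-of-squares objective of kowalikosborne at the same point.
println(2 * obj(nls, x0))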
From 83b7453718e2861dd2f59f009492a71b5057a57e Mon Sep 17 00:00:00 2001
From: btoutee
Date: Thu, 14 Apr 2022 20:43:36 -0400
Subject: [PATCH 3/3] Shortened name to kowosb, added meta and JuMP model, and removed gaussian models that were not supposed to be part of this branch

---
 src/ADNLPProblems/gaussian.jl       | 14 ------------
 src/ADNLPProblems/kowalikosborne.jl | 14 ------------
 src/ADNLPProblems/kowosb.jl         | 14 ++++++++++++
 src/Meta/gaussian.jl                | 25 ---------------------
 src/Meta/kowalikosborne.jl          | 25 ---------------------
 src/Meta/kowosb.jl                  | 25 +++++++++++++++++++++
 src/PureJuMP/gaussian.jl            | 34 -----------------------------
 src/PureJuMP/kowosb.jl              | 21 ++++++++++++++++++
 8 files changed, 60 insertions(+), 112 deletions(-)
 delete mode 100644 src/ADNLPProblems/gaussian.jl
 delete mode 100644 src/ADNLPProblems/kowalikosborne.jl
 create mode 100644 src/ADNLPProblems/kowosb.jl
 delete mode 100644 src/Meta/gaussian.jl
 delete mode 100644 src/Meta/kowalikosborne.jl
 create mode 100644 src/Meta/kowosb.jl
 delete mode 100644 src/PureJuMP/gaussian.jl
 create mode 100644 src/PureJuMP/kowosb.jl

diff --git a/src/ADNLPProblems/gaussian.jl b/src/ADNLPProblems/gaussian.jl
deleted file mode 100644
index 2c8b95f5..00000000
--- a/src/ADNLPProblems/gaussian.jl
+++ /dev/null
@@ -1,14 +0,0 @@
-export gaussian
-
-function gaussian(; n::Int = default_nvar, type::Val{T} = Val(Float64), kwargs...) where {T}
-  function f(x)
-    n = 3
-    m = 15
-    t(i)=(8-i)/2
-    y = [0.0009, 0.0044, 0.0175, 0.054, 0.1295, 0.2420, 0.3521, 0.3989, 0.3521, 0.2420, 0.1295, 0.054, 0.0175, 0.0044, 0.0009]
-
-    return sum((x[1]*exp((-x[2]*(t(i)-x[3]^2))/2)-y[i])^2 for i = 1:m)
-  end
-  x0 = [0.4, 1, 0]
-  return ADNLPModels.ADNLPModel(f, x0, name = "gaussian"; kwargs...)
-end
diff --git a/src/ADNLPProblems/kowalikosborne.jl b/src/ADNLPProblems/kowalikosborne.jl
deleted file mode 100644
index 37bc5e75..00000000
--- a/src/ADNLPProblems/kowalikosborne.jl
+++ /dev/null
@@ -1,14 +0,0 @@
-export kowalikosborne
-
-function kowalikosborne(; n::Int = default_nvar, type::Val{T} = Val(Float64), kwargs...) where {T}
-  function f(x)
-    n = 4
-    m = 11
-    y = [0.1957, 0.1947, 0.1735, 0.1600, 0.0844, 0.0627, 0.0456, 0.0342, 0.0323, 0.0235, 0.0246]
-    u = [4, 2, 1, 0.5, 0.25, 0.167, 0.125, 0.1, 0.833, 0.0714, 0.0625]
-
-    return sum((y[i] - (x[1]*(u[i]^2 + u[i]*x[2]))/(u[i]^2 + u[i]*x[3] + x[4]))^2 for i = 1:m)
-  end
-  x0 = [0.25, 0.39, 0.415, 0.39]
-  return ADNLPModels.ADNLPModel(f, x0, name = "kowalikosborne"; kwargs...)
-end
diff --git a/src/ADNLPProblems/kowosb.jl b/src/ADNLPProblems/kowosb.jl
new file mode 100644
index 00000000..2d3d65a9
--- /dev/null
+++ b/src/ADNLPProblems/kowosb.jl
@@ -0,0 +1,14 @@
+export kowosb
+
+function kowosb(args...; n::Int = default_nvar, type::Val{T} = Val(Float64), kwargs...) where {T}
+  function f(x)
+    n = 4
+    m = 11
+    y = T[0.1957, 0.1947, 0.1735, 0.1600, 0.0844, 0.0627, 0.0456, 0.0342, 0.0323, 0.0235, 0.0246]
+    u = T[4, 2, 1, 0.5, 0.25, 0.167, 0.125, 0.1, 0.0833, 0.0714, 0.0625]
+
+    return sum((y[i] - (x[1]*(u[i]^2 + u[i]*x[2]))/(u[i]^2 + u[i]*x[3] + x[4]))^2 for i = 1:m)
+  end
+  x0 = T[0.25, 0.39, 0.415, 0.39]
+  return ADNLPModels.ADNLPModel(f, x0, name = "kowosb"; kwargs...)
+end
diff --git a/src/Meta/gaussian.jl b/src/Meta/gaussian.jl
deleted file mode 100644
index d5250333..00000000
--- a/src/Meta/gaussian.jl
+++ /dev/null
@@ -1,25 +0,0 @@
-gaussian_meta = Dict(
-  :nvar => 3,
-  :variable_nvar => false,
-  :ncon => 0,
-  :variable_ncon => false,
-  :minimize => true,
-  :name => "gaussian",
-  :has_equalities_only => false,
-  :has_inequalities_only => false,
-  :has_bounds => false,
-  :has_fixed_variables => false,
-  :objtype => :other,
-  :contype => :unconstrained,
-  :best_known_lower_bound => -Inf,
-  :best_known_upper_bound => 500.0,
-  :is_feasible => true,
-  :defined_everywhere => missing,
-  :origin => :unknown,
-)
-get_gaussian_nvar(; n::Integer = default_nvar, kwargs...) = 1 * n + 0
-get_gaussian_ncon(; n::Integer = default_nvar, kwargs...) = 0
-get_gaussian_nlin(; n::Integer = default_nvar, kwargs...) = 0
-get_gaussian_nnln(; n::Integer = default_nvar, kwargs...) = 0
-get_gaussian_nequ(; n::Integer = default_nvar, kwargs...) = 0
-get_gaussian_nineq(; n::Integer = default_nvar, kwargs...) = 0
diff --git a/src/Meta/kowalikosborne.jl b/src/Meta/kowalikosborne.jl
deleted file mode 100644
index b66eb5c4..00000000
--- a/src/Meta/kowalikosborne.jl
+++ /dev/null
@@ -1,25 +0,0 @@
-kowalikosborne_meta = Dict(
-  :nvar => 4,
-  :variable_nvar => false,
-  :ncon => 0,
-  :variable_ncon => false,
-  :minimize => true,
-  :name => "kowalikosborne",
-  :has_equalities_only => false,
-  :has_inequalities_only => false,
-  :has_bounds => false,
-  :has_fixed_variables => false,
-  :objtype => :other,
-  :contype => :unconstrained,
-  :best_known_lower_bound => -Inf,
-  :best_known_upper_bound => 500.0,
-  :is_feasible => true,
-  :defined_everywhere => missing,
-  :origin => :unknown,
-)
-get_kowalikosborne_nvar(; n::Integer = default_nvar, kwargs...) = 1 * n + 0
-get_kowalikosborne_ncon(; n::Integer = default_nvar, kwargs...) = 0
-get_kowalikosborne_nlin(; n::Integer = default_nvar, kwargs...) = 0
-get_kowalikosborne_nnln(; n::Integer = default_nvar, kwargs...) = 0
-get_kowalikosborne_nequ(; n::Integer = default_nvar, kwargs...) = 0
-get_kowalikosborne_nineq(; n::Integer = default_nvar, kwargs...) = 0
diff --git a/src/Meta/kowosb.jl b/src/Meta/kowosb.jl
new file mode 100644
index 00000000..8c535650
--- /dev/null
+++ b/src/Meta/kowosb.jl
@@ -0,0 +1,25 @@
+kowosb_meta = Dict(
+  :nvar => 4,
+  :variable_nvar => false,
+  :ncon => 0,
+  :variable_ncon => false,
+  :minimize => true,
+  :name => "kowosb",
+  :has_equalities_only => false,
+  :has_inequalities_only => false,
+  :has_bounds => false,
+  :has_fixed_variables => false,
+  :objtype => :other,
+  :contype => :unconstrained,
+  :best_known_lower_bound => -Inf,
+  :best_known_upper_bound => 0.026497849149796696,
+  :is_feasible => true,
+  :defined_everywhere => missing,
+  :origin => :unknown,
+)
+get_kowosb_nvar(; n::Integer = default_nvar, kwargs...) = 4
+get_kowosb_ncon(; n::Integer = default_nvar, kwargs...) = 0
+get_kowosb_nlin(; n::Integer = default_nvar, kwargs...) = 0
+get_kowosb_nnln(; n::Integer = default_nvar, kwargs...) = 0
+get_kowosb_nequ(; n::Integer = default_nvar, kwargs...) = 0
+get_kowosb_nineq(; n::Integer = default_nvar, kwargs...) = 0
diff --git a/src/PureJuMP/gaussian.jl b/src/PureJuMP/gaussian.jl
deleted file mode 100644
index 68e1d0bd..00000000
--- a/src/PureJuMP/gaussian.jl
+++ /dev/null
@@ -1,34 +0,0 @@
-# Linear function - full rank
-#
-# Source: problem 9 in
-# J.J. More', B.S. Garbow and K.E. Hillstrom,
-# "Testing Unconstrained Optimization Software",
-# ACM Transactions on Mathematical Software, vol. 7(1), pp. 17-41, 1981.
-#
-# See also Buckley#80 (with different N and M)
-#
-# classification SUR2-AN-V-0
-
-export gaussian
-
-"Linear function with `n` parameters and `m` observations - full rank"
-function gaussian(args...; n::Int = default_nvar, m::Int = 2n, kwargs...)
-  n = 3
-  m = 15
-
-
-  t(i)=(8-i)/2
-  y = [0.0009, 0.0044, 0.0175, 0.054, 0.1295, 0.2420, 0.3521, 0.3989, 0.3521, 0.2420, 0.1295, 0.054, 0.0175, 0.0044, 0.0009]
-
-  nlp = Model()
-
-  @variable(nlp, x[j = 1:n], start = [0.4, 1, 0])
-
-  @NLobjective(
-    nlp,
-    Min,
-    sum((x[1]*exp((-x[2]*(t(i)-x[3]^2))/2)-y[i])^2 for i = 1:m)
-  )
-
-  return nlp
-end
diff --git a/src/PureJuMP/kowosb.jl b/src/PureJuMP/kowosb.jl
new file mode 100644
index 00000000..225c4054
--- /dev/null
+++ b/src/PureJuMP/kowosb.jl
@@ -0,0 +1,21 @@
+# Source: Problem 15 in
+# J.J. More', B.S. Garbow and K.E. Hillstrom,
+# "Testing Unconstrained Optimization Software",
+# ACM Transactions on Mathematical Software, vol. 7(1), pp. 17-41, 1981.
+
+# classification SUR2-MN-4-0
+export kowosb
+
+function kowosb(args...; n::Int = default_nvar, kwargs...)
+  nlp = Model()
+
+  x0 = [0.25, 0.39, 0.415, 0.39]
+  @variable(nlp, x[i=1:4], start=x0[i])
+
+  m = 11
+  y = [0.1957, 0.1947, 0.1735, 0.1600, 0.0844, 0.0627, 0.0456, 0.0342, 0.0323, 0.0235, 0.0246]
+  u = [4, 2, 1, 0.5, 0.25, 0.167, 0.125, 0.1, 0.0833, 0.0714, 0.0625]
+
+  @NLobjective(nlp, Min, sum((y[i] - (x[1]*(u[i]^2 + u[i]*x[2]))/(u[i]^2 + u[i]*x[3] + x[4]))^2 for i = 1:m))
+  return nlp
+end
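Once this last patch is applied, the two kowosb formulations can be cross-checked against each other. A minimal sketch, assuming the enclosing package is OptimizationProblems.jl (suggested by the src/ADNLPProblems, src/Meta, and src/PureJuMP layout and the default_nvar convention) and that NLPModelsJuMP is available to wrap the JuMP model:

using ADNLPModels, JuMP, NLPModels, NLPModelsJuMP, OptimizationProblems

nlp_ad = OptimizationProblems.ADNLPProblems.kowosb()                # ADNLPModel version
nlp_jump = MathOptNLPModel(OptimizationProblems.PureJuMP.kowosb())  # JuMP version wrapped as an NLPModel

x0 = nlp_ad.meta.x0

# Both formulations should return the same objective value at the common starting point.
println(obj(nlp_ad, x0))
println(obj(nlp_jump, x0))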