From 0ca4c6fb3d168d30782f1e5d43264c3c3e1b1efd Mon Sep 17 00:00:00 2001
From: odow
Date: Fri, 25 Oct 2024 12:18:41 +1300
Subject: [PATCH] Update

---
 src/predictors/Sigmoid.jl  | 10 +---------
 src/predictors/SoftPlus.jl | 12 +++---------
 src/predictors/Tanh.jl     |  8 +-------
 src/utilities.jl           | 12 ++++++++++++
 4 files changed, 17 insertions(+), 25 deletions(-)

diff --git a/src/predictors/Sigmoid.jl b/src/predictors/Sigmoid.jl
index 8c94c02..5ed6cbe 100644
--- a/src/predictors/Sigmoid.jl
+++ b/src/predictors/Sigmoid.jl
@@ -58,17 +58,9 @@ ReducedSpace(Sigmoid())
 """
 struct Sigmoid <: AbstractPredictor end
 
-_eval(::Sigmoid, x::Real) = 1 / (1 + exp(-x))
-
 function add_predictor(model::JuMP.AbstractModel, predictor::Sigmoid, x::Vector)
     y = JuMP.@variable(model, [1:length(x)], base_name = "moai_Sigmoid")
-    cons = Any[]
-    for i in 1:length(x)
-        x_l, x_u = _get_variable_bounds(x[i])
-        y_l = x_l === nothing ? 0 : _eval(predictor, x_l)
-        y_u = x_u === nothing ? 1 : _eval(predictor, x_u)
-        _set_bounds_if_finite(cons, y[i], y_l, y_u)
-    end
+    cons = _set_direct_bounds(x -> 1 / (1 + exp(-x)), 0, 1, x, y)
     append!(cons, JuMP.@constraint(model, y .== 1 ./ (1 .+ exp.(-x))))
     return y, Formulation(predictor, y, cons)
 end
diff --git a/src/predictors/SoftPlus.jl b/src/predictors/SoftPlus.jl
index 870e74f..d29af1e 100644
--- a/src/predictors/SoftPlus.jl
+++ b/src/predictors/SoftPlus.jl
@@ -61,22 +61,16 @@ struct SoftPlus <: AbstractPredictor
     SoftPlus(; beta::Float64 = 1.0) = new(beta)
 end
 
-_eval(f::SoftPlus, x::Real) = log(1 + exp(f.beta * x)) / f.beta
+_softplus(f::SoftPlus, x::Real) = log(1 + exp(f.beta * x)) / f.beta
 
 function add_predictor(
     model::JuMP.AbstractModel,
     predictor::SoftPlus,
     x::Vector,
 )
-    y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftPlus")
-    cons = Any[]
-    for i in 1:length(x)
-        x_l, x_u = _get_variable_bounds(x[i])
-        y_l = x_l === nothing ? 0 : _eval(predictor, x_l)
-        y_u = x_u === nothing ? nothing : _eval(predictor, x_u)
-        _set_bounds_if_finite(cons, y[i], y_l, y_u)
-    end
     beta = predictor.beta
+    y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftPlus")
+    cons = _set_direct_bounds(x -> _softplus(predictor, x), 0, nothing, x, y)
     append!(
         cons,
         JuMP.@constraint(model, y .== log.(1 .+ exp.(beta .* x)) ./ beta),
diff --git a/src/predictors/Tanh.jl b/src/predictors/Tanh.jl
index ddfe02c..cd1b913 100644
--- a/src/predictors/Tanh.jl
+++ b/src/predictors/Tanh.jl
@@ -62,13 +62,7 @@ _eval(::Tanh, x::Real) = tanh(x)
 
 function add_predictor(model::JuMP.AbstractModel, predictor::Tanh, x::Vector)
     y = JuMP.@variable(model, [1:length(x)], base_name = "moai_Tanh")
-    cons = Any[]
-    for i in 1:length(x)
-        x_l, x_u = _get_variable_bounds(x[i])
-        y_l = x_l === nothing ? -1 : _eval(predictor, x_l)
-        y_u = x_u === nothing ? 1 : _eval(predictor, x_u)
-        _set_bounds_if_finite(cons, y[i], y_l, y_u)
-    end
+    cons = _set_direct_bounds(tanh, -1, 1, x, y)
     append!(cons, JuMP.@constraint(model, y .== tanh.(x)))
     return y, Formulation(predictor, y, cons)
 end
diff --git a/src/utilities.jl b/src/utilities.jl
index 311fc8b..241d185 100644
--- a/src/utilities.jl
+++ b/src/utilities.jl
@@ -43,3 +43,15 @@ _get_variable_bounds(::Any) = -Inf, Inf
 
 # Default fallback: skip setting variable bound
 _set_bounds_if_finite(::Vector, ::Any, ::Any, ::Any) = nothing
+
+
+function _set_direct_bounds(f::F, l, u, x::Vector, y::Vector) where {F}
+    cons = Any[]
+    for (xi, yi) in zip(x, y)
+        x_l, x_u = _get_variable_bounds(xi)
+        y_l = x_l === nothing ? l : f(x_l)
+        y_u = x_u === nothing ? u : f(x_u)
+        _set_bounds_if_finite(cons, yi, y_l, y_u)
+    end
+    return cons
+end
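
A note on the refactor (not part of the patch): `_set_direct_bounds` evaluates
the activation at the input's bounds, which is valid here only because sigmoid,
softplus, and tanh are all nondecreasing. Below is a minimal usage sketch,
assuming the `add_predictor` API shown above and that `_set_bounds_if_finite`
applies finite results as variable bounds; the model and variable names are
illustrative:

    using JuMP, MathOptAI

    model = Model()
    @variable(model, -1 <= x[1:2] <= 2)
    # The Sigmoid formulation calls
    # _set_direct_bounds(x -> 1 / (1 + exp(-x)), 0, 1, x, y), so each y[i]
    # inherits the bounds sigmoid(-1) ≈ 0.269 and sigmoid(2) ≈ 0.881 from x[i],
    # rather than the fallback bounds (0, 1) used for unbounded inputs.
    y, formulation = MathOptAI.add_predictor(model, MathOptAI.Sigmoid(), x)
    lower_bound.(y), upper_bound.(y)  # ≈ ([0.269, 0.269], [0.881, 0.881])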