Skip to content

Commit

Permalink
Add bound constraints to the Formulation object
Browse files Browse the repository at this point in the history
  • Loading branch information
odow committed Oct 24, 2024
1 parent 296e206 commit b88ade3
Showing 10 changed files with 106 additions and 66 deletions.
11 changes: 7 additions & 4 deletions src/predictors/Affine.jl
Original file line number Diff line number Diff line change
@@ -22,7 +22,7 @@ julia> using JuMP, MathOptAI
julia> model = Model();
julia> @variable(model, x[1:2]);
julia> @variable(model, 0 <= x[i in 1:2] <= i);
julia> f = MathOptAI.Affine([2.0 3.0], [4.0])
Affine(A, b) [input: 2, output: 1]
@@ -37,7 +37,9 @@ julia> formulation
Affine(A, b) [input: 2, output: 1]
├ variables [1]
│ └ moai_Affine[1]
└ constraints [1]
└ constraints [3]
├ moai_Affine[1] ≥ 4
├ moai_Affine[1] ≤ 12
└ 2 x[1] + 3 x[2] - moai_Affine[1] = -4
julia> y, formulation =
@@ -75,6 +77,7 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Affine, x::Vector)
m = size(predictor.A, 1)
y = JuMP.@variable(model, [1:m], base_name = "moai_Affine")
bounds = _get_variable_bounds.(x)
cons = Any[]
for i in 1:size(predictor.A, 1)
y_lb, y_ub = predictor.b[i], predictor.b[i]
for j in 1:size(predictor.A, 2)
@@ -83,9 +86,9 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Affine, x::Vector)
y_ub += a_ij * ifelse(a_ij >= 0, ub, lb)
y_lb += a_ij * ifelse(a_ij >= 0, lb, ub)
end
_set_bounds_if_finite(y[i], y_lb, y_ub)
_set_bounds_if_finite(cons, y[i], y_lb, y_ub)
end
cons = JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y)
append!(cons, JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y))
return y, Formulation(predictor, y, cons)
end

4 changes: 3 additions & 1 deletion src/predictors/Pipeline.jl
Original file line number Diff line number Diff line change
@@ -46,7 +46,9 @@ ReLUQuadratic()
├ variables [2]
│ ├ moai_ReLU[1]
│ └ moai_z[1]
└ constraints [2]
└ constraints [4]
├ moai_ReLU[1] ≥ 0
├ moai_z[1] ≥ 0
├ moai_Affine[1] - moai_ReLU[1] + moai_z[1] = 0
└ moai_ReLU[1]*moai_z[1] = 0
```
84 changes: 54 additions & 30 deletions src/predictors/ReLU.jl
Original file line number Diff line number Diff line change
@@ -17,7 +17,7 @@ julia> using JuMP, MathOptAI
julia> model = Model();
julia> @variable(model, x[1:2]);
julia> @variable(model, -1 <= x[i in 1:2] <= i);
julia> f = MathOptAI.ReLU()
ReLU()
@@ -34,9 +34,11 @@ ReLU()
├ variables [2]
│ ├ moai_ReLU[1]
│ └ moai_ReLU[2]
└ constraints [4]
└ constraints [6]
├ moai_ReLU[1] ≥ 0
├ moai_ReLU[1] ≤ 1
├ moai_ReLU[2] ≥ 0
├ moai_ReLU[2] ≤ 2
├ moai_ReLU[1] - max(0.0, x[1]) = 0
└ moai_ReLU[2] - max(0.0, x[2]) = 0
@@ -59,10 +61,10 @@ struct ReLU <: AbstractPredictor end
function add_predictor(model::JuMP.AbstractModel, predictor::ReLU, x::Vector)
ub = last.(_get_variable_bounds.(x))
y = JuMP.@variable(model, [1:length(x)], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, max.(0, ub))
cons = JuMP.@constraint(model, y .== max.(0, x))
constraints = Any[JuMP.LowerBoundRef.(y); cons]
return y, Formulation(predictor, y, constraints)
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, max.(0, ub))
append!(cons, JuMP.@constraint(model, y .== max.(0, x)))
return y, Formulation(predictor, y, cons)
end

function add_predictor(
@@ -103,17 +105,21 @@ ReLUBigM(100.0)
├ variables [4]
│ ├ moai_ReLU[1]
│ ├ moai_ReLU[2]
│ ├ _[5]
│ └ _[6]
└ constraints [8]
├ _[5] binary
│ ├ moai_z[1]
│ └ moai_z[2]
└ constraints [12]
├ moai_ReLU[1] ≥ 0
├ moai_ReLU[1] ≤ 1
├ moai_ReLU[2] ≥ 0
├ moai_ReLU[2] ≤ 2
├ moai_z[1] binary
├ -x[1] + moai_ReLU[1] ≥ 0
├ moai_ReLU[1] - _[5] ≤ 0
├ -x[1] + moai_ReLU[1] + 3 _[5] ≤ 3
├ _[6] binary
├ moai_ReLU[1] - moai_z[1] ≤ 0
├ -x[1] + moai_ReLU[1] + 3 moai_z[1] ≤ 3
├ moai_z[2] binary
├ -x[2] + moai_ReLU[2] ≥ 0
├ moai_ReLU[2] - 2 _[6] ≤ 0
└ -x[2] + moai_ReLU[2] + 3 _[6] ≤ 3
├ moai_ReLU[2] - 2 moai_z[2] ≤ 0
└ -x[2] + moai_ReLU[2] + 3 moai_z[2] ≤ 3
```
"""
struct ReLUBigM <: AbstractPredictor
@@ -128,12 +134,14 @@ function add_predictor(
m = length(x)
bounds = _get_variable_bounds.(x)
y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, max.(0, last.(bounds)))
formulation = Formulation(predictor)
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, max.(0, last.(bounds)))
formulation = Formulation(predictor, Any[], cons)
append!(formulation.variables, y)
for i in 1:m
lb, ub = bounds[i]
z = JuMP.@variable(model, binary = true)
JuMP.set_name(z, "moai_z[$i]")
push!(formulation.variables, z)
push!(formulation.constraints, JuMP.BinaryRef(z))
c = JuMP.@constraint(model, y[i] >= x[i])
@@ -167,7 +175,7 @@ julia> using JuMP, MathOptAI
julia> model = Model();
julia> @variable(model, x[1:2] >= -1);
julia> @variable(model, -1 <= x[i in 1:2] <= i);
julia> f = MathOptAI.ReLUSOS1()
ReLUSOS1()
@@ -186,7 +194,13 @@ ReLUSOS1()
│ ├ moai_ReLU[2]
│ ├ moai_z[1]
│ └ moai_z[2]
└ constraints [4]
└ constraints [10]
├ moai_ReLU[1] ≥ 0
├ moai_ReLU[1] ≤ 1
├ moai_ReLU[2] ≥ 0
├ moai_ReLU[2] ≤ 2
├ moai_z[1] ≤ 1
├ moai_z[2] ≤ 1
├ x[1] - moai_ReLU[1] + moai_z[1] = 0
├ x[2] - moai_ReLU[2] + moai_z[2] = 0
├ [moai_ReLU[1], moai_z[1]] ∈ MathOptInterface.SOS1{Float64}([1.0, 2.0])
@@ -203,11 +217,12 @@ function add_predictor(
m = length(x)
bounds = _get_variable_bounds.(x)
y = JuMP.@variable(model, [i in 1:m], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, max.(0, last.(bounds)))
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, max.(0, last.(bounds)))
z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "moai_z")
_set_bounds_if_finite.(z, nothing, -first.(bounds))
cons = JuMP.@constraint(model, x .== y - z)
formulation = Formulation(predictor, Any[y; z], Any[cons;])
_set_bounds_if_finite.(Ref(cons), z, nothing, -first.(bounds))
append!(cons, JuMP.@constraint(model, x .== y - z))
formulation = Formulation(predictor, Any[y; z], cons)
for i in 1:m
c = JuMP.@constraint(model, [y[i], z[i]] in MOI.SOS1([1.0, 2.0]))
push!(formulation.constraints, c)
@@ -235,7 +250,7 @@ julia> using JuMP, MathOptAI
julia> model = Model();
julia> @variable(model, x[1:2] >= -1);
julia> @variable(model, -1 <= x[i in 1:2] <= i);
julia> f = MathOptAI.ReLUQuadratic()
ReLUQuadratic()
@@ -254,7 +269,15 @@ ReLUQuadratic()
│ ├ moai_ReLU[2]
│ ├ moai_z[1]
│ └ moai_z[2]
└ constraints [4]
└ constraints [12]
├ moai_ReLU[1] ≥ 0
├ moai_ReLU[1] ≤ 1
├ moai_ReLU[2] ≥ 0
├ moai_ReLU[2] ≤ 2
├ moai_z[1] ≥ 0
├ moai_z[1] ≤ 1
├ moai_z[2] ≥ 0
├ moai_z[2] ≤ 1
├ x[1] - moai_ReLU[1] + moai_z[1] = 0
├ x[2] - moai_ReLU[2] + moai_z[2] = 0
├ moai_ReLU[1]*moai_z[1] = 0
@@ -271,10 +294,11 @@ function add_predictor(
m = length(x)
bounds = _get_variable_bounds.(x)
y = JuMP.@variable(model, [1:m], base_name = "moai_ReLU")
_set_bounds_if_finite.(y, 0, max.(0, last.(bounds)))
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, max.(0, last.(bounds)))
z = JuMP.@variable(model, [1:m], base_name = "moai_z")
_set_bounds_if_finite.(z, 0, max.(0, -first.(bounds)))
c1 = JuMP.@constraint(model, x .== y - z)
c2 = JuMP.@constraint(model, y .* z .== 0)
return y, Formulation(predictor, Any[y; z], Any[c1; c2])
_set_bounds_if_finite.(Ref(cons), z, 0, max.(0, -first.(bounds)))
append!(cons, JuMP.@constraint(model, x .== y - z))
append!(cons, JuMP.@constraint(model, y .* z .== 0))
return y, Formulation(predictor, Any[y; z], cons)
end
16 changes: 12 additions & 4 deletions src/predictors/Scale.jl
Original file line number Diff line number Diff line change
@@ -22,7 +22,7 @@ julia> using JuMP, MathOptAI
julia> model = Model();
julia> @variable(model, x[1:2]);
julia> @variable(model, 0 <= x[i in 1:2] <= i);
julia> f = MathOptAI.Scale([2.0, 3.0], [4.0, 5.0])
Scale(scale, bias)
@@ -39,7 +39,11 @@ Scale(scale, bias)
├ variables [2]
│ ├ moai_Scale[1]
│ └ moai_Scale[2]
└ constraints [2]
└ constraints [6]
├ moai_Scale[1] ≥ 4
├ moai_Scale[1] ≤ 6
├ moai_Scale[2] ≥ 5
├ moai_Scale[2] ≤ 11
├ 2 x[1] - moai_Scale[1] = -4
└ 3 x[2] - moai_Scale[2] = -5
@@ -70,14 +74,18 @@ function add_predictor(model::JuMP.AbstractModel, predictor::Scale, x::Vector)
m = length(predictor.scale)
y = JuMP.@variable(model, [1:m], base_name = "moai_Scale")
bounds = _get_variable_bounds.(x)
cons = Any[]
for (i, scale) in enumerate(predictor.scale)
y_lb = y_ub = predictor.bias[i]
lb, ub = bounds[i]
y_ub += scale * ifelse(scale >= 0, ub, lb)
y_lb += scale * ifelse(scale >= 0, lb, ub)
_set_bounds_if_finite(y[i], y_lb, y_ub)
_set_bounds_if_finite(cons, y[i], y_lb, y_ub)
end
cons = JuMP.@constraint(model, predictor.scale .* x .+ predictor.bias .== y)
append!(
cons,
JuMP.@constraint(model, predictor.scale .* x .+ predictor.bias .== y),
)
return y, Formulation(predictor, y, cons)
end

10 changes: 5 additions & 5 deletions src/predictors/Sigmoid.jl
Original file line number Diff line number Diff line change
@@ -36,8 +36,8 @@ Sigmoid()
│ └ moai_Sigmoid[2]
└ constraints [6]
├ moai_Sigmoid[1] ≥ 0
├ moai_Sigmoid[2] ≥ 0
├ moai_Sigmoid[1] ≤ 1
├ moai_Sigmoid[2] ≥ 0
├ moai_Sigmoid[2] ≤ 1
├ moai_Sigmoid[1] - (1.0 / (1.0 + exp(-x[1]))) = 0
└ moai_Sigmoid[2] - (1.0 / (1.0 + exp(-x[2]))) = 0
@@ -60,10 +60,10 @@ struct Sigmoid <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::Sigmoid, x::Vector)
y = JuMP.@variable(model, [1:length(x)], base_name = "moai_Sigmoid")
_set_bounds_if_finite.(y, 0, 1)
cons = JuMP.@constraint(model, y .== 1 ./ (1 .+ exp.(-x)))
constraints = Any[JuMP.LowerBoundRef.(y); JuMP.UpperBoundRef.(y); cons]
return y, Formulation(predictor, y, constraints)
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, 1)
append!(cons, JuMP.@constraint(model, y .== 1 ./ (1 .+ exp.(-x))))
return y, Formulation(predictor, y, cons)
end

function add_predictor(
19 changes: 7 additions & 12 deletions src/predictors/SoftMax.jl
Original file line number Diff line number Diff line change
@@ -37,8 +37,8 @@ SoftMax()
│ └ moai_SoftMax[2]
└ constraints [8]
├ moai_SoftMax[1] ≥ 0
├ moai_SoftMax[2] ≥ 0
├ moai_SoftMax[1] ≤ 1
├ moai_SoftMax[2] ≥ 0
├ moai_SoftMax[2] ≤ 1
├ moai_SoftMax_denom ≥ 0
├ moai_SoftMax_denom - (0.0 + exp(x[2]) + exp(x[1])) = 0
@@ -66,19 +66,14 @@ struct SoftMax <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::SoftMax, x::Vector)
y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftMax")
_set_bounds_if_finite.(y, 0, 1)
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, 1)
denom = JuMP.@variable(model, base_name = "moai_SoftMax_denom")
JuMP.set_lower_bound(denom, 0)
d_con = JuMP.@constraint(model, denom == sum(exp.(x)))
cons = JuMP.@constraint(model, y .== exp.(x) ./ denom)
constraints = [
JuMP.LowerBoundRef.(y)
JuMP.UpperBoundRef.(y)
JuMP.LowerBoundRef(denom)
d_con
cons
]
return y, Formulation(predictor, [denom; y], constraints)
push!(cons, JuMP.LowerBoundRef(denom))
push!(cons, JuMP.@constraint(model, denom == sum(exp.(x))))
append!(cons, JuMP.@constraint(model, y .== exp.(x) ./ denom))
return y, Formulation(predictor, [denom; y], cons)
end

function add_predictor(
10 changes: 7 additions & 3 deletions src/predictors/SoftPlus.jl
Original file line number Diff line number Diff line change
@@ -65,10 +65,14 @@ function add_predictor(
x::Vector,
)
y = JuMP.@variable(model, [1:length(x)], base_name = "moai_SoftPlus")
_set_bounds_if_finite.(y, 0, nothing)
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, 0, nothing)
beta = predictor.beta
cons = JuMP.@constraint(model, y .== log.(1 .+ exp.(beta .* x)) ./ beta)
return y, Formulation(predictor, y, Any[JuMP.LowerBoundRef.(y); cons])
append!(
cons,
JuMP.@constraint(model, y .== log.(1 .+ exp.(beta .* x)) ./ beta),
)
return y, Formulation(predictor, y, cons)
end

function add_predictor(
10 changes: 5 additions & 5 deletions src/predictors/Tanh.jl
Original file line number Diff line number Diff line change
@@ -36,8 +36,8 @@ Tanh()
│ └ moai_Tanh[2]
└ constraints [6]
├ moai_Tanh[1] ≥ -1
├ moai_Tanh[2] ≥ -1
├ moai_Tanh[1] ≤ 1
├ moai_Tanh[2] ≥ -1
├ moai_Tanh[2] ≤ 1
├ moai_Tanh[1] - tanh(x[1]) = 0
└ moai_Tanh[2] - tanh(x[2]) = 0
@@ -60,10 +60,10 @@ struct Tanh <: AbstractPredictor end

function add_predictor(model::JuMP.AbstractModel, predictor::Tanh, x::Vector)
y = JuMP.@variable(model, [1:length(x)], base_name = "moai_Tanh")
_set_bounds_if_finite.(y, -1, 1)
cons = JuMP.@constraint(model, y .== tanh.(x))
constraints = Any[JuMP.LowerBoundRef.(y); JuMP.UpperBoundRef.(y); cons]
return y, Formulation(predictor, y, constraints)
cons = Any[]
_set_bounds_if_finite.(Ref(cons), y, -1, 1)
append!(cons, JuMP.@constraint(model, y .== tanh.(x)))
return y, Formulation(predictor, y, cons)
end

function add_predictor(
5 changes: 4 additions & 1 deletion src/utilities.jl
Original file line number Diff line number Diff line change
@@ -22,15 +22,18 @@ function _get_variable_bounds(x::JuMP.GenericVariableRef{T}) where {T}
end

function _set_bounds_if_finite(
cons::Vector,
x::JuMP.GenericVariableRef{T},
l::Union{Nothing,Real},
u::Union{Nothing,Real},
) where {T}
if l !== nothing && l > typemin(T)
JuMP.set_lower_bound(x, l)
push!(cons, JuMP.LowerBoundRef(x))
end
if u !== nothing && u < typemax(T)
JuMP.set_upper_bound(x, u)
push!(cons, JuMP.UpperBoundRef(x))
end
return
end
@@ -39,4 +42,4 @@ end
_get_variable_bounds(::Any) = -Inf, Inf

# Default fallback: skip setting variable bound
_set_bounds_if_finite(::Any, ::Any, ::Any) = nothing
_set_bounds_if_finite(::Vector, ::Any, ::Any, ::Any) = nothing
Loading

0 comments on commit b88ade3

Please sign in to comment.