Skip to content

Commit

Permalink
Propagate variable bounds where possible (#11)
Browse files Browse the repository at this point in the history
  • Loading branch information
odow authored Jun 6, 2024
1 parent 35b07c4 commit 1814e40
Show file tree
Hide file tree
Showing 5 changed files with 52 additions and 4 deletions.
20 changes: 20 additions & 0 deletions src/Omelette.jl
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,26 @@ julia> print(model)
"""
function add_predictor end

"""
    _get_variable_bounds(x::Vector{JuMP.VariableRef})

Return a tuple `(lb, ub)` of `Vector{Float64}` giving the lower and upper
bounds of each variable in `x`, with `-Inf` / `Inf` used when a bound is
not set.

A fixed variable takes its fix value as both bounds (overriding any explicit
bounds), and a binary variable's bounds are intersected with `[0, 1]`.
"""
function _get_variable_bounds(x::Vector{JuMP.VariableRef})
    lb, ub = fill(-Inf, length(x)), fill(Inf, length(x))
    for i in eachindex(x)
        xi = x[i]
        if JuMP.has_lower_bound(xi)
            lb[i] = JuMP.lower_bound(xi)
        end
        if JuMP.has_upper_bound(xi)
            ub[i] = JuMP.upper_bound(xi)
        end
        if JuMP.is_fixed(xi)
            # Fixing dominates any explicit lower/upper bounds.
            lb[i] = ub[i] = JuMP.fix_value(xi)
        end
        if JuMP.is_binary(xi)
            # Intersect with [0, 1], keeping any tighter existing bound.
            lb[i] = max(0.0, lb[i])
            ub[i] = min(1.0, ub[i])
        end
    end
    return lb, ub
end

for file in readdir(joinpath(@__DIR__, "models"); join = true)
if endswith(file, ".jl")
include(file)
Expand Down
15 changes: 15 additions & 0 deletions src/models/LinearRegression.jl
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,21 @@ function add_predictor(
)
m = size(predictor.A, 1)
y = JuMP.@variable(model, [1:m], base_name = "omelette_y")
lb, ub = _get_variable_bounds(x)
for i in 1:size(predictor.A, 1)
y_lb, y_ub = predictor.b[i], predictor.b[i]
for j in 1:size(predictor.A, 2)
a_ij = predictor.A[i, j]
y_ub += a_ij * ifelse(a_ij >= 0, ub[j], lb[j])
y_lb += a_ij * ifelse(a_ij >= 0, lb[j], ub[j])
end
if isfinite(y_lb)
JuMP.set_lower_bound(y[i], y_lb)
end
if isfinite(y_ub)
JuMP.set_upper_bound(y[i], y_ub)
end
end
JuMP.@constraint(model, predictor.A * x .+ predictor.b .== y)
return y
end
4 changes: 4 additions & 0 deletions src/models/LogisticRegression.jl
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ julia> print(model)
Feasibility
Subject to
(1.0 / (1.0 + exp(-2 x[1] - 3 x[2]))) - omelette_y[1] = 0
omelette_y[1] ≥ 0
omelette_y[1] ≤ 1
```
"""
struct LogisticRegression <: AbstractPredictor
Expand All @@ -49,6 +51,8 @@ function add_predictor(
)
m = size(predictor.parameters, 1)
y = JuMP.@variable(model, [1:m], base_name = "omelette_y")
JuMP.set_lower_bound.(y, 0.0)
JuMP.set_upper_bound.(y, 1.0)
JuMP.@constraint(model, 1 ./ (1 .+ exp.(-predictor.parameters * x)) .== y)
return y
end
6 changes: 5 additions & 1 deletion src/models/ReLU.jl
Original file line number Diff line number Diff line change
Expand Up @@ -108,8 +108,12 @@ function add_predictor(
x::Vector{JuMP.VariableRef},
)
m = length(x)
y = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "omelette_y")
lb, ub = _get_variable_bounds(x)
y = JuMP.@variable(model, [i in 1:m], base_name = "omelette_y")
JuMP.set_lower_bound.(y, 0.0)
JuMP.set_upper_bound.(y, ub)
z = JuMP.@variable(model, [1:m], lower_bound = 0, base_name = "_z")
JuMP.set_upper_bound.(z, -lb)
JuMP.@constraint(model, x .== y - z)
for i in 1:m
JuMP.@constraint(model, [y[i], z[i]] in MOI.SOS1([1.0, 2.0]))
Expand Down
11 changes: 8 additions & 3 deletions test/test_ReLU.jl
Original file line number Diff line number Diff line change
Expand Up @@ -40,14 +40,19 @@ function test_ReLU_BigM()
end

# Test the SOS1 formulation of ReLU: y - z = x with SOS1(y, z) forces at most
# one of the pair to be nonzero, so y = max(0, x) at any feasible vertex.
function test_ReLU_SOS1()
    model = Model(HiGHS.Optimizer)
    set_silent(model)
    # Finite bounds on x are required so the SOS1 reformulation can bound
    # the auxiliary variables y and z.
    @variable(model, -2 <= x[1:2] <= 2)
    f = Omelette.ReLUSOS1()
    y = Omelette.add_predictor(model, f, x)
    @test length(y) == 2
    @test num_variables(model) == 6
    @test num_constraints(model, Vector{VariableRef}, MOI.SOS1{Float64}) == 2
    @objective(model, Min, sum(y))
    # Force x[2] to 2 (its upper bound) while x[1] may go negative, so the
    # optimal ReLU outputs are [0, 2].
    @constraint(model, x .>= [-1, 2])
    optimize!(model)
    # Use @test rather than @assert: @assert is for internal invariants and
    # may be disabled; a solve failure should surface as a test failure.
    @test is_solved_and_feasible(model)
    @test value.(y) ≈ [0.0, 2.0]
    return
end

Expand Down

0 comments on commit 1814e40

Please sign in to comment.