From 3a6e0a6d2fa993324fdd40957028d501e81765af Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Beno=C3=AEt=20Legat?=
Date: Sat, 2 Nov 2019 12:58:21 +0100
Subject: [PATCH] Fixes detected thanks to SumOfSquares tests

---
 Project.toml        |  2 +-
 src/MOI_wrapper.jl  | 42 ++++++++++++++++++++++++++----------------
 src/SDPT3.jl        |  3 ++-
 test/MOI_wrapper.jl |  5 ++++-
 4 files changed, 33 insertions(+), 19 deletions(-)

diff --git a/Project.toml b/Project.toml
index 8099e08..0c62cde 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "SDPT3"
 uuid = "e33b2407-87ff-50a0-8b27-f0fe7855237d"
 repo = "https://github.com/JuliaOpt/SDPT3.jl.git"
-version = "0.0.1"
+version = "0.0.2"
 
 [deps]
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl
index 2fc8883..d9fac1c 100644
--- a/src/MOI_wrapper.jl
+++ b/src/MOI_wrapper.jl
@@ -51,7 +51,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
     free_Acon::Vector{Int}
     free_Aval::Vector{Float64}
 
-    objective_sign::Int
+    objective_sense::MOI.OptimizationSense
     objective_constant::Float64
 
     primal_objective_value::Float64
@@ -79,7 +79,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
             Int[], Vector{Int}[], Vector{Float64}[], Vector{Int}[], Vector{Int}[], Vector{Float64}[],
             0, Int[], Int[], Float64[], Int[], Int[], Float64[],
             0, Int[], Float64[], Int[], Int[], Float64[],
-            1, 0.0,
+            MOI.FEASIBILITY_SENSE, 0.0,
             NaN, NaN,
             Float64[], Float64[], Vector{Float64}[], Vector{Float64}[], # X
             Float64[], # y
@@ -88,7 +88,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
             nothing, NaN, false, Dict{Symbol, Any}())
 
         for (key, value) in kwargs
-            MOI.set(optimizer, MOI.RawParameter(key), value)
+            MOI.set(optimizer, MOI.RawParameter(string(key)), value)
         end
         return optimizer
     end
@@ -101,11 +101,11 @@ function MOI.set(optimizer::Optimizer, param::MOI.RawParameter, value)
     if !MOI.supports(optimizer, param)
         throw(MOI.UnsupportedAttribute(param))
     end
-    optimizer.options[param.name] = value
+    optimizer.options[Symbol(param.name)] = value
 end
 function MOI.get(optimizer::Optimizer, param::MOI.RawParameter)
     # TODO: This gives a poor error message if the name of the parameter is invalid.
-    return optimizer.options[param.name]
+    return optimizer.options[Symbol(param.name)]
 end
 
 MOI.supports(::Optimizer, ::MOI.Silent) = true
@@ -126,7 +126,7 @@ function MOI.is_empty(optimizer::Optimizer)
     iszero(optimizer.quad_dims) &&
     iszero(optimizer.num_nneg) &&
     iszero(optimizer.num_free) &&
-    isone(optimizer.objective_sign) &&
+    optimizer.objective_sense == MOI.FEASIBILITY_SENSE &&
     iszero(optimizer.objective_constant)
 end
 
@@ -164,7 +164,7 @@ function MOI.empty!(optimizer::Optimizer)
     empty!(optimizer.free_Acon)
     empty!(optimizer.free_Aval)
 
-    optimizer.objective_sign = 1
+    optimizer.objective_sense = MOI.FEASIBILITY_SENSE
     optimizer.objective_constant = 0.0
 
     optimizer.primal_objective_value = NaN
@@ -262,9 +262,13 @@ function MOI.add_constrained_variables(optimizer::Optimizer, set::SupportedSets)
 end
 
 # Objective
+function MOI.get(optimizer::Optimizer, ::MOI.ObjectiveSense)
+    return optimizer.objective_sense
+end
+sense_to_sign(sense::MOI.OptimizationSense) = sense == MOI.MAX_SENSE ? -1 : 1
 function MOI.set(optimizer::Optimizer, ::MOI.ObjectiveSense, sense::MOI.OptimizationSense)
-    sign = sense == MOI.MAX_SENSE ? -1 : 1
-    if sign != optimizer.objective_sign
+    if sense != optimizer.objective_sense
+        sign = sense_to_sign(sense)
         rmul!(optimizer.free_Cval, -1)
         rmul!(optimizer.nneg_Cval, -1)
         for i in eachindex(optimizer.quad_dims)
@@ -274,7 +278,7 @@ function MOI.set(optimizer::Optimizer, ::MOI.ObjectiveSense, sense::MOI.Optimiza
             rmul!(optimizer.psdc_Cval[i], -1)
         end
     end
-    optimizer.objective_sign = sign
+    optimizer.objective_sense = sense
 end
 
 function MOI.set(optimizer::Optimizer, ::MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}, func::MOI.ScalarAffineFunction{Float64})
@@ -291,21 +295,22 @@ function MOI.set(optimizer::Optimizer, ::MOI.ObjectiveFunction{MOI.ScalarAffineF
         empty!(optimizer.psdc_Cvar[i])
         empty!(optimizer.psdc_Cval[i])
     end
+    sign = sense_to_sign(optimizer.objective_sense)
     for term in func.terms
         info = optimizer.variable_info[term.variable_index.value]
         if info.variable_type == FREE
             push!(optimizer.free_Cvar, info.index_in_cone)
-            push!(optimizer.free_Cval, optimizer.objective_sign * term.coefficient)
+            push!(optimizer.free_Cval, sign * term.coefficient)
         elseif info.variable_type == NNEG
             push!(optimizer.nneg_Cvar, info.index_in_cone)
-            push!(optimizer.nneg_Cval, optimizer.objective_sign * term.coefficient)
+            push!(optimizer.nneg_Cval, sign * term.coefficient)
         elseif info.variable_type == QUAD
             push!(optimizer.quad_Cvar[info.cone_index], info.index_in_cone)
-            push!(optimizer.quad_Cval[info.cone_index], optimizer.objective_sign * term.coefficient)
+            push!(optimizer.quad_Cval[info.cone_index], sign * term.coefficient)
         else
             @assert info.variable_type == PSD
             push!(optimizer.psdc_Cvar[info.cone_index], info.index_in_cone)
-            push!(optimizer.psdc_Cval[info.cone_index], optimizer.objective_sign * term.coefficient)
+            push!(optimizer.psdc_Cval[info.cone_index], sign * term.coefficient)
         end
     end
 end
@@ -351,6 +356,9 @@ function MOI.add_constraint(optimizer::Optimizer, func::MOI.ScalarAffineFunction
     return AFFEQ(con)
 end
 
+# TODO could do something more efficient here
+# `SparseMatrixCSC` is returned in SumOfSquares.jl test `sos_horn`
+symvec(Q::SparseMatrixCSC) = symvec(Matrix(Q))
 function symvec(Q::Matrix)
     n = LinearAlgebra.checksquare(Q)
     vec_dim = MOI.dimension(MOI.PositiveSemidefiniteConeTriangle(n))
@@ -539,11 +547,13 @@ end
 MOI.get(::Optimizer, ::MOI.ResultCount) = 1
 function MOI.get(optimizer::Optimizer, attr::MOI.ObjectiveValue)
     MOI.check_result_index_bounds(optimizer, attr)
-    return optimizer.objective_sign * optimizer.primal_objective_value + optimizer.objective_constant
+    sign = sense_to_sign(optimizer.objective_sense)
+    return sign * optimizer.primal_objective_value + optimizer.objective_constant
 end
 function MOI.get(optimizer::Optimizer, attr::MOI.DualObjectiveValue)
     MOI.check_result_index_bounds(optimizer, attr)
-    return optimizer.objective_sign * optimizer.dual_objective_value + optimizer.objective_constant
+    sign = sense_to_sign(optimizer.objective_sense)
+    return sign * optimizer.dual_objective_value + optimizer.objective_constant
 end
 
 function MOI.get(optimizer::Optimizer, attr::MOI.VariablePrimal, vi::MOI.VariableIndex)
diff --git a/src/SDPT3.jl b/src/SDPT3.jl
index ac9f6e5..12b00c9 100644
--- a/src/SDPT3.jl
+++ b/src/SDPT3.jl
@@ -33,7 +33,8 @@ const ALLOWED_OPTIONS = [
     "schurfun_par"
 ]
 
-_array(x::Matrix) = x
+# `SparseMatrixCSC` is returned in SumOfSquares.jl test `sos_horn`
+_array(x::AbstractMatrix) = x
 _array(x::Vector) = x
 _array(x::Float64) = [x]
 
diff --git a/test/MOI_wrapper.jl b/test/MOI_wrapper.jl
index 12bb8a6..f1f579b 100644
--- a/test/MOI_wrapper.jl
+++ b/test/MOI_wrapper.jl
@@ -26,7 +26,10 @@ const BRIDGED = MOIB.full_bridge_optimizer(CACHED, Float64)
 const CONFIG = MOIT.TestConfig(atol=1e-4, rtol=1e-4)
 
 @testset "Options" begin
-    param = MOI.RawParameter(:bad_option)
+    optimizer = SDPT3.Optimizer(printlevel = 1)
+    @test MOI.get(optimizer, MOI.RawParameter("printlevel")) == 1
+
+    param = MOI.RawParameter("bad_option")
     err = MOI.UnsupportedAttribute(param)
     @test_throws err SDPT3.Optimizer(bad_option = 1)
 end
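
Note: the snippet below is a minimal usage sketch, not part of the patch. It illustrates the two user-visible changes, string-keyed `MOI.RawParameter` options and the new `MOI.ObjectiveSense` getter, and assumes SDPT3.jl with this patch applied, a MathOptInterface release from the 0.9 series, and a working MATLAB-based SDPT3 installation (needed to load the package). It only sets and reads attributes, so the solver itself is never called.

    using MathOptInterface
    const MOI = MathOptInterface
    using SDPT3

    # Keyword options are now stored under string keys, so the raw parameter
    # can be read back using the string name of the SDPT3 option.
    optimizer = SDPT3.Optimizer(printlevel = 1)
    @show MOI.get(optimizer, MOI.RawParameter("printlevel"))  # 1

    # The objective sense is now stored as a MOI.OptimizationSense (instead of
    # an Int sign) and can be queried back through the new getter.
    MOI.set(optimizer, MOI.ObjectiveSense(), MOI.MAX_SENSE)
    @show MOI.get(optimizer, MOI.ObjectiveSense())  # MAX_SENSE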