diff --git a/Project.toml b/Project.toml index 982faad2..78909455 100644 --- a/Project.toml +++ b/Project.toml @@ -8,7 +8,9 @@ version = "0.16.1" Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" +Revise = "295af30f-e4ad-537b-8983-00126c2a3abe" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" +XpressPSR = "3a57e2e4-7268-4354-91dd-d57606666fa4" [compat] MathOptInterface = "1" diff --git a/src/MOI/MOI_wrapper.jl b/src/MOI/MOI_wrapper.jl index 8e520318..7cbae6e1 100644 --- a/src/MOI/MOI_wrapper.jl +++ b/src/MOI/MOI_wrapper.jl @@ -43,6 +43,7 @@ const CleverDicts = MOI.Utilities.CleverDicts SINGLE_VARIABLE, SCALAR_AFFINE, SCALAR_QUADRATIC, + NLP_OBJECTIVE, ) @enum( @@ -132,6 +133,17 @@ mutable struct ConstraintInfo ConstraintInfo(row::Int, set::MOI.AbstractSet, type::ConstraintType) = new(row, set, "", type) end +mutable struct NLPConstraintInfo + expression::Expr + lower_bound::Union{Float64, Nothing} + upper_bound::Union{Float64, Nothing} + name::Union{String, Nothing} +end + +function NLPConstraintInfo() + NLPConstraintInfo(:(), nothing, nothing, nothing) +end + mutable struct CachedSolution variable_primal::Vector{Float64} variable_dual::Vector{Float64} @@ -197,6 +209,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer # An enum to remember what objective is currently stored in the model. objective_type::ObjectiveType + objective_expr::Union{Nothing, Real, Expr} # track whether objective function is set and the state of objective sense is_objective_set::Bool @@ -211,7 +224,6 @@ mutable struct Optimizer <: MOI.AbstractOptimizer typeof(CleverDicts.key_to_index), typeof(CleverDicts.index_to_key), } - # An index that is incremented for each new constraint (regardless of type). # We can check if a constraint is valid by checking if it is in the correct # xxx_constraint_info. We should _not_ reset this to zero, since then new @@ -227,6 +239,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer # Note: we do not have a singlevariable_constraint_info dictionary. Instead, # data associated with these constraints are stored in the VariableInfo # objects. + nlp_constraint_info::Vector{NLPConstraintInfo} # Mappings from variable and constraint names to their indices. These are # lazily built on-demand, so most of the time, they are `nothing`. 
@@ -265,6 +278,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer message_callback::Union{Nothing, Tuple{Ptr{Nothing}, _CallbackUserData}} params::Dict{Any, Any} + + nlp_block_data::Union{Nothing, MOI.NLPBlockData} """ Optimizer() @@ -297,6 +312,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer model.variable_info = CleverDicts.CleverDict{MOI.VariableIndex, VariableInfo}() model.affine_constraint_info = Dict{Int, ConstraintInfo}() model.sos_constraint_info = Dict{Int, ConstraintInfo}() + model.nlp_constraint_info = NLPConstraintInfo[] + model.nlp_block_data=nothing MOI.empty!(model) # inner is initialized here @@ -329,12 +346,14 @@ function MOI.empty!(model::Optimizer) model.name = "" model.objective_type = SCALAR_AFFINE + model.objective_expr = nothing model.is_objective_set = false model.objective_sense = nothing empty!(model.variable_info) model.last_constraint_index = 0 empty!(model.affine_constraint_info) empty!(model.sos_constraint_info) + empty!(model.nlp_constraint_info) model.name_to_variable = nothing model.name_to_constraint_index = nothing @@ -362,6 +381,8 @@ function MOI.empty!(model::Optimizer) model.callback_data = nothing # model.message_callback = nothing + model.nlp_block_data = nothing + for (name, value) in model.params MOI.set(model, name, value) end @@ -371,11 +392,13 @@ end function MOI.is_empty(model::Optimizer) !isempty(model.name) && return false model.objective_type != SCALAR_AFFINE && return false + model.objective_expr !== nothing && return false model.is_objective_set == true && return false model.objective_sense !== nothing && return false !isempty(model.variable_info) && return false length(model.affine_constraint_info) != 0 && return false length(model.sos_constraint_info) != 0 && return false + length(model.nlp_constraint_info) != 0 && return false model.name_to_variable !== nothing && return false model.name_to_constraint_index !== nothing && return false @@ -402,12 +425,14 @@ function MOI.is_empty(model::Optimizer) # model.message_callback !== nothing && return false # otherwise jump complains it is not empty + model.nlp_block_data !== nothing && return false + return true end function reset_cached_solution(model::Optimizer) num_variables = length(model.variable_info) - num_affine = length(model.affine_constraint_info) + num_affine = length(model.affine_constraint_info)+length(model.nlp_constraint_info) if model.cached_solution === nothing model.cached_solution = CachedSolution( fill(NaN, num_variables), @@ -660,6 +685,9 @@ function MOI.get(model::Optimizer, ::MOI.ListOfModelAttributesSet) if model.objective_sense !== nothing push!(attributes, MOI.ObjectiveSense()) end + if model.nlp_block_data !== nothing + push!(attributes, MOI.NLPBlock()) + end typ = MOI.get(model, MOI.ObjectiveFunctionType()) if typ !== nothing push!(attributes, MOI.ObjectiveFunction{typ}()) @@ -797,6 +825,7 @@ function _info(model::Optimizer, key::MOI.VariableIndex) end function MOI.add_variable(model::Optimizer) + # Initialize `VariableInfo` with a dummy `VariableIndex` and a column, # because we need `add_item` to tell us what the `VariableIndex` is. 
index = CleverDicts.add_item( @@ -2729,6 +2758,7 @@ function MOI.optimize!(model::Optimizer) MOI.set(model, CallbackFunction(), default_moi_callback(model)) model.has_generic_callback = false # because it is set as true in the above end + pre_solve_reset(model) # cache rhs: must be done before hand because it cant be # properly queried if the problem ends up in a presolve state @@ -2738,7 +2768,96 @@ function MOI.optimize!(model::Optimizer) _update_MIP_start!(model) end start_time = time() - if is_mip(model) + if is_nlp(model) + ncols=n_variables(model.inner) + # Getting problem's variables + x=collect(keys(model.variable_info)) + # Creating a NULL Objective function to allow its modification by XPRSnlp + c=[0.0 for i = 1:ncols] + if model.objective_expr === nothing + MOI.set(model, + MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), + MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(c, x), 0.0), + ); + end + # Changing names to the right format adapted to XPRSnlp + names=["x$idx" for idx = 1:ncols] + MOI.set(model, MOI.VariableName(), x, names) + Xpress._pass_variable_names_to_solver(model) + + keys_index=collect(keys(model.affine_constraint_info)) + count_nl_affine=0 + for i=1:length(model.affine_constraint_info) + if model.affine_constraint_info[keys_index[i]].name== "nl_constraint_to_remove" + count_nl_affine += 1 + end + end + + + + #Case with NL Objective and no NL Constraints + if length(model.nlp_constraint_info)==0 + # Delete existing constraints when optimize! is called again + if count_nl_affine>0 + for i in 1:count_nl_affine + pop!(model.affine_constraint_info,keys_index[filter(i->collect(values(model.affine_constraint_info))[i].name=="nl_constraint_to_remove",reverse([j for j=1:length(model.affine_constraint_info)]))[1]]) + end + end + + #Case with NL Constraints + + #New NL Constraints + elseif count_nl_affine != length(model.nlp_constraint_info) + # Delete existing constraints when optimize! 
is called again + for i in 1:count_nl_affine + popfirst!(model.nlp_constraint_info) + pop!(model.affine_constraint_info,keys_index[filter(i->collect(values(model.affine_constraint_info))[i].name=="nl_constraint_to_remove",reverse([j for j=1:length(model.affine_constraint_info)]))[1]]) + end + + + # Creating NULL constraints to allow their modification by XPRSnlp + for cons in model.nlp_constraint_info + c_set=to_constraint_set(cons) + if length(c_set)==2 + added_1=MOI.add_constraint( + model, + MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(c, x), 0.0), + c_set[1], + ); + MOI.set(model, MOI.ConstraintName(), added_1, "nl_constraint_to_remove") + + added_2=MOI.add_constraint( + model, + MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(c, x), 0.0), + c_set[2], + ); + MOI.set(model, MOI.ConstraintName(), added_2, "nl_constraint_to_remove") + elseif c_set !== nothing + added=MOI.add_constraint( + model, + MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(c, x), 0.0), + c_set[1], + ); + MOI.set(model, MOI.ConstraintName(), added, "nl_constraint_to_remove") + end + end + + + # Passing constraints to the solver + for i in 0:length(model.nlp_constraint_info)-1 + Lib.XPRSnlpchgformulastring(model.inner, Cint(i), join(["+"," ",to_str(model.nlp_constraint_info[i+1].expression)])) + end + end + + + # Passing objective to the solver + Lib.XPRSnlpchgobjformulastring(model.inner, join(["+"," ",to_str(model.objective_expr)])) + + solvestatus = Ref{Cint}(0) + solstatus = Ref{Cint}(0) + Lib.XPRSoptimize(model.inner, "", solvestatus, solstatus) + + elseif is_mip(model) @checked Lib.XPRSmipoptimize(model.inner, model.solve_method) else @checked Lib.XPRSlpoptimize(model.inner, model.solve_method) @@ -2751,32 +2870,43 @@ function MOI.optimize!(model::Optimizer) if model.post_solve @checked Lib.XPRSpostsolve(model.inner) end - - model.termination_status = _cache_termination_status(model) + + if model.termination_status != MOI.INVALID_MODEL + model.termination_status = _cache_termination_status(model) + end model.primal_status = _cache_primal_status(model) model.dual_status = _cache_dual_status(model) # TODO: add @checked here - must review statuses - if is_mip(model) - # TODO @checked (only works if not in [MOI.NO_SOLUTION, MOI.INFEASIBILITY_CERTIFICATE, MOI.INFEASIBLE_POINT]) - Lib.XPRSgetmipsol( - model.inner, - model.cached_solution.variable_primal, - model.cached_solution.linear_primal, + if is_nlp(model) + Xpress.Lib.XPRSgetnlpsol( + model.inner, + model.cached_solution.variable_primal, + model.cached_solution.linear_primal, + model.cached_solution.linear_dual, + model.cached_solution.variable_dual, ) - fill!(model.cached_solution.linear_dual, NaN) - fill!(model.cached_solution.variable_dual, NaN) else - Lib.XPRSgetlpsol( - model.inner, - model.cached_solution.variable_primal, - model.cached_solution.linear_primal, - model.cached_solution.linear_dual, - model.cached_solution.variable_dual, - ) + if is_mip(model) + # TODO @checked (only works if not in [MOI.NO_SOLUTION, MOI.INFEASIBILITY_CERTIFICATE, MOI.INFEASIBLE_POINT]) + Lib.XPRSgetmipsol( + model.inner, + model.cached_solution.variable_primal, + model.cached_solution.linear_primal, + ) + fill!(model.cached_solution.linear_dual, NaN) + fill!(model.cached_solution.variable_dual, NaN) + else + Lib.XPRSgetlpsol( + model.inner, + model.cached_solution.variable_primal, + model.cached_solution.linear_primal, + model.cached_solution.linear_dual, + model.cached_solution.variable_dual, + ) + end + model.cached_solution.linear_primal .= rhs .- 
model.cached_solution.linear_primal end - model.cached_solution.linear_primal .= rhs .- model.cached_solution.linear_primal - status = MOI.get(model, MOI.PrimalStatus()) if status == MOI.INFEASIBILITY_CERTIFICATE has_Ray = Int64[0] @@ -2804,7 +2934,10 @@ function MOI.get(model::Optimizer, attr::MOI.RawStatusString) _throw_if_optimize_in_progress(model, attr) stop = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_STOPSTATUS, _)::Int stop_str = STOPSTATUS_STRING[stop] - if is_mip(model) + if is_nlp(model) + stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_NLPSTATUS, _)::Int + return Xpress.NLPSTATUS_STRING[stat] * " - " * stop_str + elseif is_mip(model) stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_MIPSTATUS, _)::Int return Xpress.MIPSTATUS_STRING[stat] * " - " * stop_str else @@ -2826,7 +2959,14 @@ function _cache_termination_status(model::Optimizer) return MOI.ITERATION_LIMIT elseif stop == Lib.XPRS_STOP_MIPGAP stat = Lib.XPRS_MIP_NOT_LOADED - if is_mip(model) + if is_nlp(model) + stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_NLPSTATUS, _)::Int + if stat == Lib.XPRS_NLPSTATUS_OPTIMAL + return MOI.OPTIMAL + else + return MOI.OTHER_ERROR + end + elseif is_mip(model) stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_MIPSTATUS, _)::Int if stat == Lib.XPRS_MIP_OPTIMAL return MOI.OPTIMAL @@ -2853,7 +2993,37 @@ function _cache_termination_status(model::Optimizer) XPRS_STOP_USER user interrupt =# end # else: - if is_mip(model) + if is_nlp(model) + stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_NLPSTATUS, _)::Int + if stat == Lib.XPRS_NLPSTATUS_UNSTARTED + return MOI.OPTIMIZE_NOT_CALLED + elseif stat == Lib.XPRS_NLPSTATUS_SOLUTION # is a STOP + return MOI.LOCALLY_SOLVED + elseif stat == Lib.XPRS_NLPSTATUS_OPTIMAL + return MOI.OPTIMAL + elseif stat == Lib.XPRS_NLPSTATUS_NOSOLUTION # is a STOP + return MOI.OTHER_ERROR + elseif stat == Lib.XPRS_NLPSTATUS_INFEASIBLE + return MOI.INFEASIBLE + elseif stat == Lib.XPRS_NLPSTATUS_UNBOUNDED + return MOI.DUAL_INFEASIBLE + elseif stat == Lib.XPRS_NLPSTATUS_UNFINISHED # is a STOP + return MOI.OTHER_ERROR + else + @assert stat == Lib.XPRS_NLPSTATUS_UNSOLVED + return MOI.NUMERICAL_ERROR + end + #= + 0 Unstarted ( XPRS_NLPSTATUS_UNSTARTED). + 1 Global search incomplete - an integer solution has been found ( XPRS_NLPSTATUS_SOLUTION). + 2 Optimal ( XPRS_NLPSTATUS_OPTIMAL). + 3 Global search complete - No solution found ( XPRS_NLPSTATUS_NOSOLUTION).. + 4 Infeasible ( XPRS_NLPSTATUS_INFEASIBLE). + 5 Unbounded ( XPRS_NLPSTATUS_UNBOUNDED). + 6 Unfinished ( XPRS_NLPSTATUS_UNFINISHED).. + 7 Problem could not be solved due to numerical issues. ( XPRS_NLPSTATUS_UNSOLVED). 
+ =# + elseif is_mip(model) stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_MIPSTATUS, _)::Int if stat == Lib.XPRS_MIP_NOT_LOADED return MOI.OPTIMIZE_NOT_CALLED @@ -2954,6 +3124,26 @@ function _cache_primal_status(model) if _has_primal_ray(model) return MOI.INFEASIBILITY_CERTIFICATE end + elseif is_nlp(model) + stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_NLPSTATUS, _)::Int + if stat == Lib.XPRS_NLPSTATUS_UNSTARTED + return MOI.NO_SOLUTION + elseif stat == Lib.XPRS_NLPSTATUS_SOLUTION # is a STOP + return MOI.FEASIBLE_POINT + elseif stat == Lib.XPRS_NLPSTATUS_OPTIMAL + return MOI.FEASIBLE_POINT + elseif stat == Lib.XPRS_NLPSTATUS_NOSOLUTION # is a STOP + return MOI.NO_SOLUTION + elseif stat == Lib.XPRS_NLPSTATUS_INFEASIBLE + return MOI.INFEASIBLE_POINT + elseif stat == Lib.XPRS_NLPSTATUS_UNBOUNDED + return MOI.NO_SOLUTION + elseif stat == Lib.XPRS_NLPSTATUS_UNFINISHED # is a STOP + return MOI.NO_SOLUTION + else + @assert stat == Lib.XPRS_NLPSTATUS_UNSOLVED + return MOI.NO_SOLUTION + end elseif is_mip(model) stat = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_MIPSTATUS, _)::Int if stat == Lib.XPRS_MIP_NOT_LOADED @@ -2974,6 +3164,12 @@ function _cache_primal_status(model) return MOI.NO_SOLUTION #? DUAL_INFEASIBLE? end end + if is_nlp(model) + nlp_sols = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_SLPMIPSOLS, _)::Int + if nlp_sols > 0 + return MOI.FEASIBLE_POINT + end + end if is_mip(model) mip_sols = @_invoke Lib.XPRSgetintattrib(model.inner, Lib.XPRS_MIPSOLS, _)::Int if mip_sols > 0 @@ -3196,7 +3392,9 @@ end function MOI.get(model::Optimizer, attr::MOI.ObjectiveValue) _throw_if_optimize_in_progress(model, attr) MOI.check_result_index_bounds(model, attr) - if is_mip(model) + if is_nlp(model) + return @_invoke Lib.XPRSgetdblattrib(model.inner, Lib.XPRS_NLPOBJVAL, _)::Float64 + elseif is_mip(model) return @_invoke Lib.XPRSgetdblattrib(model.inner, Lib.XPRS_MIPOBJVAL, _)::Float64 else return @_invoke Lib.XPRSgetdblattrib(model.inner, Lib.XPRS_LPOBJVAL, _)::Float64 @@ -3205,7 +3403,9 @@ end function MOI.get(model::Optimizer, attr::MOI.ObjectiveBound) _throw_if_optimize_in_progress(model, attr) - if is_mip(model) + if is_nlp(model) + return @_invoke Lib.XPRSgetdblattrib(model.inner, Lib.XPRS_NLPOBJVAL, _)::Float64 + elseif is_mip(model) return @_invoke Lib.XPRSgetdblattrib(model.inner, Lib.XPRS_BESTBOUND, _)::Float64 else return @_invoke Lib.XPRSgetdblattrib(model.inner, Lib.XPRS_LPOBJVAL, _)::Float64 @@ -3650,7 +3850,7 @@ function MOI.get( ) where {S <: SCALAR_SETS} row = _info(model, c).row basis_status = model.basis_status - if basis_status == nothing + if basis_status === nothing _generate_basis_status(model::Optimizer) basis_status = model.basis_status end @@ -3675,7 +3875,7 @@ function MOI.get( ) column = _info(model, x).column basis_status = model.basis_status - if basis_status == nothing + if basis_status === nothing _generate_basis_status(model::Optimizer) basis_status = model.basis_status end @@ -4165,7 +4365,7 @@ end col_type_char(::Type{MOI.LessThan{Float64}}) = 'U' col_type_char(::Type{MOI.GreaterThan{Float64}}) = 'L' col_type_char(::Type{MOI.EqualTo{Float64}}) = 'F' -# col_type_char(::Type{MOI.Interval{Float64}}) = 'T' +col_type_char(::Type{MOI.Interval{Float64}}) = 'T' col_type_char(::Type{MOI.ZeroOne}) = 'B' col_type_char(::Type{MOI.Integer}) = 'I' col_type_char(::Type{MOI.Semicontinuous{Float64}}) = 'S' @@ -4246,6 +4446,7 @@ function MOI.supports( end include("MOI_callbacks.jl") +include("nlp.jl") function extension(str::String) try diff 
--git a/src/MOI/nlp.jl b/src/MOI/nlp.jl
new file mode 100644
index 00000000..006ce3d2
--- /dev/null
+++ b/src/MOI/nlp.jl
@@ -0,0 +1,223 @@
+MOI.supports(::Optimizer, ::MOI.NLPBlock) = true
+
+function walk_and_strip_variable_index!(expr::Expr)
+    for i in 1:length(expr.args)
+        if expr.args[i] isa MOI.VariableIndex
+            expr.args[i] = expr.args[i].value
+        end
+        walk_and_strip_variable_index!(expr.args[i])
+    end
+    return
+end
+
+walk_and_strip_variable_index!(not_expr) = nothing
+
+function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData)
+    if model.nlp_block_data !== nothing
+        model.nlp_block_data = nothing
+        # error("Nonlinear block already set; cannot overwrite. Create a new model instead.")
+    end
+    model.nlp_block_data = nlp_data
+
+    nlp_eval = nlp_data.evaluator
+
+    MOI.initialize(nlp_eval, [:ExprGraph])
+
+    if nlp_data.has_objective
+        # According to test_nonlinear_objective_and_moi_objective_test from
+        # MOI 0.10.9, a linear objective is simply ignored when a nonlinear one exists.
+        # if model.inner.objective_expr !== nothing
+        #     error("Two objectives set: One linear, one nonlinear.")
+        # end
+        obj = verify_support(MOI.objective_expr(nlp_eval))
+        walk_and_strip_variable_index!(obj)
+        if obj == :NaN
+            model.objective_expr = 0.0
+            model.termination_status = MOI.INVALID_MODEL
+        else
+            model.objective_expr = obj
+        end
+        model.objective_type = NLP_OBJECTIVE
+    else
+        model.objective_expr = 0.0
+    end
+
+    for i in 1:length(nlp_data.constraint_bounds)
+        expr = verify_support(MOI.constraint_expr(nlp_eval, i))
+        @assert expr.head == :call
+        lb = nlp_data.constraint_bounds[i].lower
+        ub = nlp_data.constraint_bounds[i].upper
+        if expr.args[1] == :(==)
+            @assert lb == ub == expr.args[3]
+        elseif expr.args[1] == :(<=)
+            @assert lb == -Inf
+            lb = nothing
+            @assert ub == expr.args[3]
+        elseif expr.args[1] == :(>=)
+            @assert lb == expr.args[3]
+            @assert ub == Inf
+            ub = nothing
+        else
+            error("Unexpected expression $expr.")
+        end
+        expr = expr.args[2]
+        walk_and_strip_variable_index!(expr)
+        push!(model.nlp_constraint_info, Xpress.NLPConstraintInfo(expr, lb, ub, nothing))
+    end
+    return
+end
+
+# Convert expressions into the formula strings expected by XPRSnlpchgformulastring
+# and XPRSnlpchgobjformulastring.
+wrap_with_parens(x::String) = string("( ", x, " )")
+
+to_str(x) = string(x)
+
+function to_str(c::Expr)
+    if c.head == :comparison
+        if length(c.args) == 3
+            return join([to_str(c.args[1]), " ", c.args[2], " ", c.args[3]])
+        elseif length(c.args) == 5
+            return join([c.args[1], " ", c.args[2], " ", to_str(c.args[3]), " ",
+                         c.args[4], " ", c.args[5]])
+        else
+            throw(UnrecognizedExpressionException("comparison", c))
+        end
+    elseif c.head == :call
+        if c.args[1] in (:<=, :>=, :(==))
+            if length(c.args) == 3
+                return join([to_str(c.args[2]), " ", to_str(c.args[1]), " ", to_str(c.args[3])])
+            elseif length(c.args) == 5
+                return join([to_str(c.args[1]), " ", to_str(c.args[2]), " ", to_str(c.args[3]), " ", to_str(c.args[4]), " ", to_str(c.args[5])])
+            else
+                throw(UnrecognizedExpressionException("comparison", c))
+            end
+        elseif c.args[1] in (:+, :-, :*, :/)
+            if all(d -> isa(d, Real), c.args[2:end]) # purely numeric call: fold it
+                return wrap_with_parens(string(eval(c)))
+            elseif c.args[1] == :- && length(c.args) == 2 # unary minus
+                return wrap_with_parens(string("( - $(to_str(c.args[2])) )"))
+            else
+                return wrap_with_parens(string(join([to_str(d) for d in c.args[2:end]], join([" ", string(c.args[1]), " "]))))
+            end
+        elseif c.args[1] == :^
+            if length(c.args) != 3
+                throw(UnrecognizedExpressionException("function call", c))
+            end
+            return wrap_with_parens(join([to_str(c.args[2]), " ", to_str(c.args[1]), " ", to_str(c.args[3])]))
+        elseif c.args[1] in (:exp, :log, :sin, :cos, :abs, :tan, :sqrt)
+            if length(c.args) != 2
+                throw(UnrecognizedExpressionException("function call", c))
+            end
+            return wrap_with_parens(string(join([uppercase(string(c.args[1])), " "]), wrap_with_parens(to_str(c.args[2]))))
+        else
+            throw(UnrecognizedExpressionException("function call", c))
+        end
+    elseif c.head == :ref
+        if c.args[1] == :x
+            idx = c.args[2]
+            @assert isa(idx, Int)
+            # TODO: decide whether to use the user-defined names instead.
+            # That might be messy, because a user can name a variable "sin".
+            return "x$idx"
+        else
+            throw(UnrecognizedExpressionException("reference", c))
+        end
+    end
+end
+
+verify_support(c) = c
+
+function verify_support(c::Real)
+    if isfinite(c) # blocks NaN and +/-Inf
+        return c
+    end
+    error("Expected number but got $c")
+end
+
+function verify_support(c::Expr)
+    if c.head == :comparison
+        map(verify_support, c.args)
+        return c
+    end
+    if c.head == :call
+        if c.args[1] in (:+, :-, :*, :/, :exp, :log)
+            return c
+        elseif c.args[1] in (:<=, :>=, :(==))
+            map(verify_support, c.args[2:end])
+            return c
+        elseif c.args[1] == :^
+            @assert isa(c.args[2], Real) || isa(c.args[3], Real)
+            return c
+        else # TODO: do automatic transformation for x^y, |x|
+            error("Unsupported expression $c")
+        end
+    end
+    return c
+end
+
+function set_lower_bound(info::NLPConstraintInfo, value::Union{Number, Nothing})
+    if value !== nothing
+        info.lower_bound !== nothing && throw(ArgumentError("Lower bound has already been set"))
+        info.lower_bound = value
+    end
+    return
+end
+
+function set_upper_bound(info::NLPConstraintInfo, value::Union{Number, Nothing})
+    if value !== nothing
+        info.upper_bound !== nothing && throw(ArgumentError("Upper bound has already been set"))
+        info.upper_bound = value
+    end
+    return
+end
+
+function set_bounds(info::NLPConstraintInfo, set::MOI.EqualTo)
+    set_lower_bound(info, set.value)
+    set_upper_bound(info, set.value)
+end
+
+function set_bounds(info::NLPConstraintInfo, set::MOI.GreaterThan)
+    set_lower_bound(info, set.lower)
+end
+
+function set_bounds(info::NLPConstraintInfo, set::MOI.LessThan)
+    set_upper_bound(info, set.upper)
+end
+
+function set_bounds(info::NLPConstraintInfo, set::MOI.Interval)
+    set_lower_bound(info, set.lower)
+    set_upper_bound(info, set.upper)
+end
+
+# Transform NL constraints into the MOI sets used to create placeholder affine constraints in `optimize!`.
+function to_constraint_set(c::Xpress.NLPConstraintInfo)
+    if c.lower_bound !== nothing || c.upper_bound !== nothing
+        if c.upper_bound === nothing
+            return [MOI.GreaterThan(c.lower_bound)]
+        elseif c.lower_bound === nothing
+            return [MOI.LessThan(c.upper_bound)]
+        elseif c.lower_bound == c.upper_bound
+            return [MOI.EqualTo(c.lower_bound)]
+        else
+            return [MOI.GreaterThan(c.lower_bound), MOI.LessThan(c.upper_bound)]
+        end
+    end
+end
+
+# The problem is nonlinear if an NLPBlockData has been set.
+function is_nlp(model)
+    return model.nlp_block_data !== nothing
+end
+
+MOI.supports(::Optimizer, ::MOI.NLPBlockDual) = true
+
+function MOI.get(model::Optimizer, attr::MOI.NLPBlockDual)
+    MOI.check_result_index_bounds(model, attr)
+    s = _dual_multiplier(model)
+    return s .* model.cached_solution.linear_dual[1:length(model.nlp_constraint_info)]
+end
+
+
diff --git a/src/api.jl b/src/api.jl
index 9829eece..36878c5b 100644
--- a/src/api.jl
+++ b/src/api.jl
@@ -4964,3 +4964,4 @@ end
 function _bo_validate(obranch, p_status)
     @checked Lib.XPRS_bo_validate(obranch, p_status)
 end
+
diff --git a/src/common.jl b/src/common.jl
index c78bbb8b..cdb89f59 100644
--- a/src/common.jl
+++ b/src/common.jl
@@ -186,7 +186,6 @@ const XPRS_DUALIZE = 8144
 const XPRS_DUALGRADIENT = 8145
 const XPRS_SBITERLIMIT = 8146
 const XPRS_SBBEST = 8147
-const XPRS_MAXCUTTIME = 8149 # kept for compatibility (removed in v41)
 const XPRS_ACTIVESET = 8152 # kept for compatibility (removed in v41)
 const XPRS_BARINDEFLIMIT = 8153
 const XPRS_HEURSTRATEGY = 8154 # kept for compatibility (removed in v41)
@@ -297,7 +296,6 @@ const XPRS_TUNERMODE = 8359
 const XPRS_TUNERMETHOD = 8360
 const XPRS_TUNERTARGET = 8362
 const XPRS_TUNERTHREADS = 8363
-const XPRS_TUNERMAXTIME = 8364 # kept for compatibility (removed in v41)
 const XPRS_TUNERHISTORY = 8365
 const XPRS_TUNERPERMUTE = 8366
 const XPRS_TUNERROOTALG = 8367 # kept for compatibility (removed in v41)
@@ -418,8 +416,8 @@ const XPRS_BRANCHVAR = 1036
 const XPRS_MIPTHREADID = 1037
 const XPRS_ALGORITHM = 1049
 const XPRS_SOLSTATUS = 1053
-const XPRS_TIME = 1122 # kept for compatibility (removed in v41)
 const XPRS_ORIGINALROWS = 1124
+const NLPORIGINALROWS = 1125
 const XPRS_CALLBACKCOUNT_OPTNODE = 1136
 const XPRS_CALLBACKCOUNT_CUTMGR = 1137
 const XPRS_ORIGINALQELEMS = 1157
diff --git a/src/helper.jl b/src/helper.jl
index c35cf2bf..a37d2605 100644
--- a/src/helper.jl
+++ b/src/helper.jl
@@ -234,6 +234,7 @@ n_quadratic_elements(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob,
 n_quadratic_row_coefficients(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob, Lib.XPRS_ORIGINALQCELEMS, _)::Int
 n_entities(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob, Lib.XPRS_ORIGINALMIPENTS, _)::Int
 n_setmembers(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob, Lib.XPRS_ORIGINALSETMEMBERS, _)::Int
+n_nonlinear_coefs(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob, Lib.XPRS_SLPCOEFFICIENTS, _)::Int
 n_original_variables(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob, Lib.XPRS_ORIGINALCOLS, _)::Int
 n_original_constraints(prob::XpressProblem) = @_invoke Lib.XPRSgetintattrib(prob, Lib.XPRS_ORIGINALROWS, _)::Int
 
@@ -243,11 +244,17 @@ objective_sense(prob::XpressProblem) = obj_sense(prob) == Lib.XPRS_OBJ_MINIMIZE
 
 # derived attribute functions
 
+"""
+    n_nonlinear_constraints(prob::XpressProblem)
+Return the number of nonlinear constraints in the XpressProblem
+"""
+n_nonlinear_constraints(prob::XpressProblem) = max(n_nonlinear_coefs(prob) - 1, 0)
+
 """
     n_linear_constraints(prob::XpressProblem)
 Return the number of purely linear contraints in the XpressProblem
 """
-n_linear_constraints(prob::XpressProblem) = n_constraints(prob) - n_quadratic_constraints(prob)
+n_linear_constraints(prob::XpressProblem) = n_constraints(prob) - n_quadratic_constraints(prob) - n_nonlinear_constraints(prob)
 
 """
     is_qcp(prob::XpressProblem)
@@ -261,6 +268,12 @@ Return `true` if there are integer entities in the XpressProblem
 """
 is_mixedinteger(prob::XpressProblem) = (n_entities(prob) + n_special_ordered_sets(prob)) > 0
 
+"""
+    is_nonlinear(prob::XpressProblem)
+Return `true` if there are nonlinear (SLP) coefficients in the XpressProblem
+"""
+is_nonlinear(prob::XpressProblem) = n_nonlinear_coefs(prob) > 0
+
 """
     is_quadratic_objective(prob::XpressProblem)
 Return `true` if there are quadratic terms in the objective in the XpressProblem
@@ -273,6 +286,7 @@ Return a symbol enconding the type of the problem.]
 Options are: `:LP`, `:QP` and `:QCP`
 """
 function problem_type(prob::XpressProblem)
+    is_nonlinear(prob) ? (:NLP) :
     is_quadratic_constraints(prob) ? (:QCP) :
     is_quadratic_objective(prob) ? (:QP) : (:LP)
 end
@@ -293,12 +307,31 @@ function Base.show(io::IO, prob::XpressProblem)
     println(io, " number of linear constraints = $(n_linear_constraints(prob))")
     println(io, " number of quadratic constraints = $(n_quadratic_constraints(prob))")
     println(io, " number of sos constraints = $(n_special_ordered_sets(prob))")
+    println(io, " number of nonlinear constraints = $(n_nonlinear_constraints(prob))")
     println(io, " number of non-zero coeffs = $(n_non_zero_elements(prob))")
     println(io, " number of non-zero qp objective terms = $(n_quadratic_elements(prob))")
     println(io, " number of non-zero qp constraint terms = $(n_quadratic_row_coefficients(prob))")
     println(io, " number of integer entities = $(n_entities(prob))")
 end
+const NLPSTATUS_STRING = Dict{Int,String}(
+    Lib.XPRS_NLPSTATUS_UNSTARTED => "0 Unstarted ( XPRS_NLPSTATUS_UNSTARTED).",
+    Lib.XPRS_NLPSTATUS_SOLUTION => "1 Global search incomplete - an integer solution has been found ( XPRS_NLPSTATUS_SOLUTION).",
+    Lib.XPRS_NLPSTATUS_OPTIMAL => "2 Optimal ( XPRS_NLPSTATUS_OPTIMAL).",
+    Lib.XPRS_NLPSTATUS_NOSOLUTION => "3 Global search complete - No solution found ( XPRS_NLPSTATUS_NOSOLUTION).",
+    Lib.XPRS_NLPSTATUS_INFEASIBLE => "4 Infeasible ( XPRS_NLPSTATUS_INFEASIBLE).",
+    Lib.XPRS_NLPSTATUS_UNBOUNDED => "5 Unbounded ( XPRS_NLPSTATUS_UNBOUNDED).",
+    Lib.XPRS_NLPSTATUS_UNFINISHED => "6 Unfinished ( XPRS_NLPSTATUS_UNFINISHED).",
+    Lib.XPRS_NLPSTATUS_UNSOLVED => "7 Problem could not be solved due to numerical issues.
( XPRS_NLPSTATUS_UNSOLVED).", +) + +function nlp_solve_complete(stat) + stat in [Lib.XPRS_NLPSTATUS_INFEASIBLE, Lib.XPRS_NLPSTATUS_OPTIMAL] +end +function nlp_solve_stopped(stat) + stat in [Lib.XPRS_NLPSTATUS_INFEASIBLE, Lib.XPRS_NLPSTATUS_OPTIMAL] +end + const MIPSTATUS_STRING = Dict{Int,String}( Lib.XPRS_MIP_NOT_LOADED => "0 Problem has not been loaded ( XPRS_MIP_NOT_LOADED).", Lib.XPRS_MIP_LP_NOT_OPTIMAL => "1 Global search incomplete - the initial continuous relaxation has not been solved and no integer solution has been found ( XPRS_MIP_LP_NOT_OPTIMAL).", diff --git a/src/lib.jl b/src/lib.jl index 965a2a24..33c55df1 100644 --- a/src/lib.jl +++ b/src/lib.jl @@ -2419,3 +2419,12 @@ end function XPRSremovecbgloballog(prob, globallog, data) ccall((:XPRSremovecbgloballog, libxprs), Cint, (XPRSprob, Ptr{Cvoid}, Ptr{Cvoid}), prob, globallog, data) end + +function XSLPcreateprob(prob, _probholder) + ccall((:XSLPcreateprob, libxprs), Cint, (XPRSprob, Ptr{XPRSprob},), prob, _probholder) +end + +function XSLPinit() + ccall((:XSLPinit, libxprs), Cint, ()) +end + diff --git a/src/utils.jl b/src/utils.jl index 9030b27c..636a14b1 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -144,4 +144,4 @@ function _check(prob, val::Cint) throw(XpressError(val, "Xpress internal error:\n\n$e.\n")) end return nothing -end \ No newline at end of file +end diff --git a/test/MathOptInterface/MOI_callbacks.jl b/test/MathOptInterface/MOI_callbacks.jl index a64b6919..d442363a 100644 --- a/test/MathOptInterface/MOI_callbacks.jl +++ b/test/MathOptInterface/MOI_callbacks.jl @@ -7,9 +7,9 @@ const MOI = MathOptInterface function callback_simple_model() model = Xpress.Optimizer( - HEURSTRATEGY = 0, # before v41 + HEURSTRATEGY = 0, HEUREMPHASIS = 0, - OUTPUTLOG = 0, + OUTPUTLOG = 0 ) MOI.Utilities.loadfromstring!(model, """ @@ -28,7 +28,7 @@ end function callback_knapsack_model() model = Xpress.Optimizer( OUTPUTLOG = 0, - HEURSTRATEGY = 0, # before v41 + HEURSTRATEGY = 0, HEUREMPHASIS = 0, CUTSTRATEGY = 0, PRESOLVE = 0, diff --git a/test/MathOptInterface/MOI_wrapper.jl b/test/MathOptInterface/MOI_wrapper.jl index 49a6f728..0bd5ba09 100644 --- a/test/MathOptInterface/MOI_wrapper.jl +++ b/test/MathOptInterface/MOI_wrapper.jl @@ -2,6 +2,7 @@ module TestMOIWrapper using Xpress using MathOptInterface +using JuMP using Test const MOI = MathOptInterface @@ -57,6 +58,12 @@ function test_runtests() "test_constraint_PrimalStart_DualStart_SecondOrderCone", "_RotatedSecondOrderCone_", "_GeometricMeanCone_", + "_nonlinear_hs071_NLPBlockDual", + "_nonlinear_objective", + "_nonlinear_objective_and_moi_objective_test", + "_nonlinear_without_objective", + "_nlp1", + "_nlp2", # Xpress cannot handle nonconvex quadratic constraint "test_quadratic_nonconvex_", ], @@ -75,12 +82,33 @@ function test_runtests() "_SecondOrderCone_", "test_constraint_PrimalStart_DualStart_SecondOrderCone", "_RotatedSecondOrderCone_", - "_GeometricMeanCone_" + "_GeometricMeanCone_", + ] ) return end +function test_runtests_nlp() + + optimizer = Xpress.Optimizer(OUTPUTLOG = 0, PRESOLVE = 0) + model = MOI.Bridges.full_bridge_optimizer(optimizer, Float64) + MOI.set(model, MOI.Silent(), true) + MOI.Test.runtests( + model, + MOI.Test.Config(atol = 0.2, rtol = 0.2, optimal_status = MOI.LOCALLY_SOLVED), + include = String[ + # tested with PRESOLVE=0 below + "_nonlinear_hs071_NLPBlockDual", + "_nonlinear_objective", + "_nonlinear_objective_and_moi_objective_test", + "_nonlinear_without_objective", + ], + ) + return +end + + function test_Binaryfixing() @testset "Binary Equal to 
1" begin T = Float64 @@ -892,7 +920,8 @@ function test_name_empty_names() end function test_dummy_nlp() - model = Xpress.Optimizer(OUTPUTLOG = 0); + + model = Xpress.Optimizer(OUTPUTLOG = 1); x = MOI.add_variables(model, 2); c = [1.0, 2.0] MOI.set( @@ -905,7 +934,7 @@ function test_dummy_nlp() Xpress._pass_variable_names_to_solver(model) - MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE); b1 = MOI.add_constraint.( model, @@ -922,7 +951,7 @@ function test_dummy_nlp() model, MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([1.0, 0.0], x), 0.0), MOI.EqualTo(0.0), - ) + ); c3 = MOI.add_constraint( model, @@ -957,6 +986,7 @@ function test_dummy_nlp() ret = Xpress.Lib.XPRSnlpchgformulastring(model.inner, Cint(0), "- 0.5 * x1 - 3") @test ret == 0 + # to optimize NLPs we need: XPRSoptimize solvestatus = Ref{Cint}(0) solstatus = Ref{Cint}(0) @@ -976,6 +1006,192 @@ function test_dummy_nlp() return nothing end +function test_optimize_lp() + + model = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + + @variable(model,x[i=1:2]) + set_lower_bound(x[1],0.0) + set_lower_bound(x[2],0.0) + set_upper_bound(x[2],3.0) + @NLobjective(model, Min, 12x[1] + 20x[2]) + @NLconstraint(model, c1, 6x[1] + 8x[2] >= 100) + @NLconstraint(model, c2, 7x[1] + 12x[2] >= 120) + optimize!(model) + + k=collect(keys(model.moi_backend.optimizer.model.variable_info)) + @test MOI.get(model.moi_backend.optimizer.model, MOI.VariableName(), k) == ["x1", "x2"] + + @test model.moi_backend.optimizer.model.termination_status == OPTIMAL::TerminationStatusCode + @test model.moi_backend.optimizer.model.primal_status == FEASIBLE_POINT::ResultStatusCode + @test model.moi_backend.optimizer.model.dual_status == FEASIBLE_POINT::ResultStatusCode + + @test objective_value(model) == 205.00000000000003 + @test value(x[1]) == 14.999999999999993 + @test value(x[2]) == 1.2500000000000053 + @test dual(c1) == 0.25000000000000167 + @test dual(c2) == 1.4999999999999987 + +end + +function test_delete_constraints() + + model = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + + @variable(model,x[i=1:2]) + set_lower_bound(x[1],0.0) + set_lower_bound(x[2],0.0) + set_upper_bound(x[2],3.0) + @NLobjective(model, Min, 12x[1] + 20x[2]) + @NLconstraint(model, c1, 6x[1] + 8x[2] >= 100) + @NLconstraint(model, c2, 7x[1] + 12x[2] >= 120) + optimize!(model) + optimize!(model) + + @test length(model.moi_backend.optimizer.model.nlp_constraint_info) == 2 + @test length(model.moi_backend.optimizer.model.affine_constraint_info) == 2 + +end + +function test_change_objective() + + model = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + + @variable(model,x[i=1:2]) + set_lower_bound(x[1],0.0) + set_lower_bound(x[2],0.0) + set_upper_bound(x[2],3.0) + @NLobjective(model, Min, 12x[1] + 20x[2]) + @NLconstraint(model, c1, 6x[1] + 8x[2] >= 100) + optimize!(model) + + obj_1=objective_value(model) + + @NLconstraint(model, c2, 7x[1] + 12x[2] >= 120) + + optimize!(model) + + obj_2=objective_value(model) + + @test obj_1 != obj_2 + +end + +function test_formula_string() + + model = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + + @variable(model,x[i=1:2]) + set_lower_bound(x[1],0.0) + set_lower_bound(x[2],0.0) + set_upper_bound(x[2],3.0) + @NLobjective(model, Min, 12x[1] + 20x[2]) + @NLconstraint(model, c1, 6x[1] + 8x[2] >= 100) + @NLconstraint(model, c2, 7x[1] + 12x[2] >= 120) + optimize!(model) + + buffer = 
Array{Cchar}(undef, 80) + buffer_p = pointer(buffer) + out = Cstring(buffer_p) + ret=Xpress.Lib.XPRSnlpgetformulastring(model.moi_backend.optimizer.model.inner, Cint(0), out , 80) + @test ret == 0 + version = unsafe_string(out)::String + @test version == "6 * x1 + 8 * x2 - 100" + + buffer = Array{Cchar}(undef, 80) + buffer_p = pointer(buffer) + out = Cstring(buffer_p) + ret=Xpress.Lib.XPRSnlpgetformulastring(model.moi_backend.optimizer.model.inner, Cint(1), out , 80) + @test ret == 0 + version = unsafe_string(out)::String + @test version == "7 * x1 + 12 * x2 - 120" + + buffer = Array{Cchar}(undef, 80) + buffer_p = pointer(buffer) + out = Cstring(buffer_p) + ret=Xpress.Lib.XPRSnlpgetobjformulastring(model.moi_backend.optimizer.model.inner, out , 80) + @test ret == 0 + version = unsafe_string(out)::String + @test version == "12 * x1 + 20 * x2" + +end + +function test_optimize_nlp() + + model = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + @variable(model,x[i=1:4]) + set_start_value(x[1],2.1) + set_start_value(x[2],2.2) + set_start_value(x[3],2.3) + set_start_value(x[4],2.4) + set_lower_bound(x[1],1.1) + set_lower_bound(x[2],1.2) + set_lower_bound(x[3],1.3) + set_lower_bound(x[4],1.4) + set_upper_bound(x[1],5.1) + set_upper_bound(x[2],5.2) + set_upper_bound(x[3],5.3) + set_upper_bound(x[4],5.4) + @NLconstraint(model, c1, (x[1]*x[2]*x[3]*x[4])>=25.0) + @NLconstraint(model, c2, (x[1]^2)+(x[2]^2)+(x[3]^2)+(x[4]^2)==40.0) + @NLobjective(model,Min,x[1]*x[4]*(x[1]+x[2]+x[3])+x[3]) + optimize!(model) + + k=collect(keys(model.moi_backend.optimizer.model.variable_info)) + @test MOI.get(model.moi_backend.optimizer.model, MOI.VariableName(), k) == ["x1", "x2", "x3", "x4"] + + @test model.moi_backend.optimizer.model.termination_status == LOCALLY_SOLVED::TerminationStatusCode + @test model.moi_backend.optimizer.model.primal_status == FEASIBLE_POINT::ResultStatusCode + + @test objective_value(model) == 17.649399912766466 + @test value(x[1]) == 1.1 + @test value(x[2]) == 5.2 + @test value(x[3]) == 3.128897603451363 + @test value(x[4]) == 1.4 + @test dual(c1) == 0.0 + @test dual(c2) == 0.40583391082953174 + +end + +function test_nlp1() + m = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + ub = [6,4] + @variable(m, 0 ≤ x[i=1:2] ≤ ub[i]) + + @NLconstraint(m, x[1]*x[2] ≤ 4) + + @NLobjective(m, Min, -x[1] - x[2]) + + optimize!(m) + + @test_broken isapprox(value(x[1]), 6, rtol=1e-6) + @test_broken isapprox(value(x[2]), 2/3, rtol=2e-6) + @test_broken isapprox(objective_value(m), -20/3, rtol=1e-6) + +end + +function test_nlp2() + m = Model(()->Xpress.Optimizer(DEFAULTALG=2, PRESOLVE=0, logfile = "output.log")) + ub = [9.422, 5.9023, 267.417085245] + @variable(m, 0 ≤ x[i=1:3] ≤ ub[i]) + + @NLconstraints(m, begin + 250 + 30x[1] - 6x[1]^2 == x[3] + 300 + 20x[2] - 12x[2]^2 == x[3] + 150 + 0.5*(x[1]+x[2])^2 == x[3] + end) + + @objective(m, Min, -x[3]) + + optimize!(m) + + @test_broken isapprox(value(x[1]), 6.2934300, rtol=1e-6) + @test_broken isapprox(value(x[2]), 3.8218391, rtol=1e-6) + @test_broken isapprox(value(x[3]), 201.1593341, rtol=1e-5) + @test_broken isapprox(objective_value(m), -201.1593341, rtol=1e-6) + +end + end TestMOIWrapper.runtests()
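
Editorial note (not part of the patch): a minimal sketch of the expression-to-string conversion that the new src/MOI/nlp.jl performs before handing formulas to the solver. It assumes the patched Xpress package is loadable and that, as in MOI.set for MOI.NLPBlock above, walk_and_strip_variable_index! has already replaced MOI.VariableIndex objects by their integer indices; the exact output string is an expectation based on wrap_with_parens/to_str, not solver output.

using Xpress

# Objective expression as it looks after walk_and_strip_variable_index!.
ex = :(x[1] * x[4] * (x[1] + x[2] + x[3]) + x[3])

# to_str wraps every call in parentheses and prints x[i] references as "x<i>",
# so this should yield "( ( x1 * x4 * ( x1 + x2 + x3 ) ) + x3 )".
formula = Xpress.to_str(ex)

# In optimize!, the string is prefixed with "+ " and passed to
# Lib.XPRSnlpchgobjformulastring (objective) or Lib.XPRSnlpchgformulastring (row i).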