diff --git a/docs/src/manual/standard_form.md b/docs/src/manual/standard_form.md index 1a2af4e2ae..2d8855e21a 100644 --- a/docs/src/manual/standard_form.md +++ b/docs/src/manual/standard_form.md @@ -37,6 +37,7 @@ The function types implemented in MathOptInterface.jl are: | [`VariableIndex`](@ref) | ``x_j``, the projection onto a single coordinate defined by a variable index ``j``. | | [`VectorOfVariables`](@ref) | The projection onto multiple coordinates (that is, extracting a sub-vector). | | [`ScalarAffineFunction`](@ref) | ``a^T x + b``, where ``a`` is a vector and ``b`` scalar. | +| [`ScalarNonlinearFunction`](@ref) | ``f(x)``, where ``f`` is a nonlinear function. | | [`VectorAffineFunction`](@ref) | ``A x + b``, where ``A`` is a matrix and ``b`` is a vector. | | [`ScalarQuadraticFunction`](@ref) | ``\frac{1}{2} x^T Q x + a^T x + b``, where ``Q`` is a symmetric matrix, ``a`` is a vector, and ``b`` is a constant. | | [`VectorQuadraticFunction`](@ref) | A vector of scalar-valued quadratic functions. 
| diff --git a/docs/src/reference/errors.md b/docs/src/reference/errors.md index 28c8b4278e..9df8874006 100644 --- a/docs/src/reference/errors.md +++ b/docs/src/reference/errors.md @@ -75,6 +75,7 @@ ModifyObjectiveNotAllowed DeleteNotAllowed UnsupportedSubmittable SubmitNotAllowed +UnsupportedNonlinearOperator ``` Note that setting the [`ConstraintFunction`](@ref) of a [`VariableIndex`](@ref) diff --git a/docs/src/reference/models.md b/docs/src/reference/models.md index 115106f4ec..6405e17d0c 100644 --- a/docs/src/reference/models.md +++ b/docs/src/reference/models.md @@ -50,6 +50,8 @@ ListOfOptimizerAttributesSet ListOfModelAttributesSet ListOfVariableAttributesSet ListOfConstraintAttributesSet +UserDefinedFunction +ListOfSupportedNonlinearOperators ``` ## Optimizer interface diff --git a/docs/src/reference/standard_form.md b/docs/src/reference/standard_form.md index ab123deefb..e4f77fe341 100644 --- a/docs/src/reference/standard_form.md +++ b/docs/src/reference/standard_form.md @@ -25,6 +25,7 @@ ScalarAffineTerm ScalarAffineFunction ScalarQuadraticTerm ScalarQuadraticFunction +ScalarNonlinearFunction ``` ## Vector functions diff --git a/src/Bridges/Objective/bridges/slack.jl b/src/Bridges/Objective/bridges/slack.jl index c325f3ec8b..e85892324b 100644 --- a/src/Bridges/Objective/bridges/slack.jl +++ b/src/Bridges/Objective/bridges/slack.jl @@ -70,7 +70,7 @@ function bridge_objective( end constraint = MOI.Utilities.normalize_and_add_constraint(model, f, set) MOI.set(model, MOI.ObjectiveFunction{MOI.VariableIndex}(), slack) - return SlackBridge{T,F,G}(slack, constraint, MOI.constant(f)) + return SlackBridge{T,F,G}(slack, constraint, MOI.constant(f, T)) end function supports_objective_function( @@ -166,7 +166,11 @@ function MOI.get( bridge::SlackBridge{T,F,G}, ) where {T,F,G<:MOI.AbstractScalarFunction} func = MOI.get(model, MOI.ConstraintFunction(), bridge.constraint) - f = MOI.Utilities.operate(+, T, func, bridge.constant) + f = if !iszero(bridge.constant) + 
MOI.Utilities.operate(+, T, func, bridge.constant) + else + func + end g = MOI.Utilities.remove_variable(f, bridge.slack) return MOI.Utilities.convert_approx(G, g) end diff --git a/src/Nonlinear/model.jl b/src/Nonlinear/model.jl index 9ed5f83013..cc7c466402 100644 --- a/src/Nonlinear/model.jl +++ b/src/Nonlinear/model.jl @@ -327,3 +327,7 @@ function evaluate( end return storage[1] end + +function MOI.get(model::Model, attr::MOI.ListOfSupportedNonlinearOperators) + return MOI.get(model.operators, attr) +end diff --git a/src/Nonlinear/operators.jl b/src/Nonlinear/operators.jl index aca69a952f..7e51f108fc 100644 --- a/src/Nonlinear/operators.jl +++ b/src/Nonlinear/operators.jl @@ -74,6 +74,35 @@ end DEFAULT_UNIVARIATE_OPERATORS The list of univariate operators that are supported by default. + +## Example + +```jldoctest +julia> import MathOptInterface as MOI + +julia> MOI.Nonlinear.DEFAULT_UNIVARIATE_OPERATORS +72-element Vector{Symbol}: + :+ + :- + :abs + :sqrt + :cbrt + :abs2 + :inv + :log + :log10 + :log2 + ⋮ + :airybi + :airyaiprime + :airybiprime + :besselj0 + :besselj1 + :bessely0 + :bessely1 + :erfcx + :dawson +``` """ const DEFAULT_UNIVARIATE_OPERATORS = first.(SYMBOLIC_UNIVARIATE_EXPRESSIONS) @@ -81,6 +110,24 @@ const DEFAULT_UNIVARIATE_OPERATORS = first.(SYMBOLIC_UNIVARIATE_EXPRESSIONS) DEFAULT_MULTIVARIATE_OPERATORS The list of multivariate operators that are supported by default. 
+ +## Example + +```jldoctest +julia> import MathOptInterface as MOI + +julia> MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS +9-element Vector{Symbol}: + :+ + :- + :* + :^ + :/ + :ifelse + :atan + :min + :max +``` """ const DEFAULT_MULTIVARIATE_OPERATORS = [:+, :-, :*, :^, :/, :ifelse, :atan, :min, :max] @@ -140,6 +187,19 @@ struct OperatorRegistry end end +function MOI.get( + registry::OperatorRegistry, + ::MOI.ListOfSupportedNonlinearOperators, +) + ops = vcat( + registry.univariate_operators, + registry.multivariate_operators, + registry.logic_operators, + registry.comparison_operators, + ) + return unique(ops) +end + const _FORWARD_DIFF_METHOD_ERROR_HELPER = raw""" Common reasons for this include: diff --git a/src/Nonlinear/parse.jl b/src/Nonlinear/parse.jl index cc5c522d87..dd2ca479f4 100644 --- a/src/Nonlinear/parse.jl +++ b/src/Nonlinear/parse.jl @@ -36,6 +36,58 @@ function parse_expression(::Model, ::Expression, x::Any, ::Int) ) end +function parse_expression( + data::Model, + expr::Expression, + x::MOI.ScalarNonlinearFunction, + parent_index::Int, +) + stack = Tuple{Int,Any}[(parent_index, x)] + while !isempty(stack) + parent_node, arg = pop!(stack) + if arg isa MOI.ScalarNonlinearFunction + _parse_without_recursion_inner(stack, data, expr, arg, parent_node) + else + # We can use recursion here, because ScalarNonlinearFunction only + # occur in other ScalarNonlinearFunction. 
+ parse_expression(data, expr, arg, parent_node) + end + end + return +end + +function _get_node_type(data, x) + id = get(data.operators.univariate_operator_to_id, x.head, nothing) + if length(x.args) == 1 && id !== nothing + return id, MOI.Nonlinear.NODE_CALL_UNIVARIATE + end + id = get(data.operators.multivariate_operator_to_id, x.head, nothing) + if id !== nothing + return id, MOI.Nonlinear.NODE_CALL_MULTIVARIATE + end + id = get(data.operators.comparison_operator_to_id, x.head, nothing) + if id !== nothing + return id, MOI.Nonlinear.NODE_COMPARISON + end + id = get(data.operators.logic_operator_to_id, x.head, nothing) + if id !== nothing + return id, MOI.Nonlinear.NODE_LOGIC + end + return throw(MOI.UnsupportedNonlinearOperator(x.head)) +end + +function _parse_without_recursion_inner(stack, data, expr, x, parent) + id, node_type = _get_node_type(data, x) + push!(expr.nodes, Node(node_type, id, parent)) + parent = length(expr.nodes) + # Args need to be pushed onto the stack in reverse because the stack is a + # first-in last-out datastructure. 
+ for arg in reverse(x.args) + push!(stack, (parent, arg)) + end + return +end + function parse_expression( data::Model, expr::Expression, @@ -108,7 +160,7 @@ function _parse_univariate_expression( _parse_multivariate_expression(stack, data, expr, x, parent_index) return end - error("Unable to parse: $x") + throw(MOI.UnsupportedNonlinearOperator(x.args[1])) end push!(expr.nodes, Node(NODE_CALL_UNIVARIATE, id, parent_index)) push!(stack, (length(expr.nodes), x.args[2])) @@ -200,6 +252,28 @@ function parse_expression( return end +function parse_expression( + data::Model, + expr::Expression, + x::MOI.ScalarAffineFunction, + parent_index::Int, +) + f = convert(MOI.ScalarNonlinearFunction, x) + parse_expression(data, expr, f, parent_index) + return +end + +function parse_expression( + data::Model, + expr::Expression, + x::MOI.ScalarQuadraticFunction, + parent_index::Int, +) + f = convert(MOI.ScalarNonlinearFunction, x) + parse_expression(data, expr, f, parent_index) + return +end + function parse_expression(::Model, expr::Expression, x::Real, parent_index::Int) push!(expr.values, convert(Float64, x)::Float64) push!(expr.nodes, Node(NODE_VALUE, length(expr.values), parent_index)) diff --git a/src/Test/test_basic_constraint.jl b/src/Test/test_basic_constraint.jl index 1dd4211add..061f0821f4 100644 --- a/src/Test/test_basic_constraint.jl +++ b/src/Test/test_basic_constraint.jl @@ -66,6 +66,17 @@ function _function( ) end +function _function( + ::Type{T}, + ::Type{MOI.ScalarNonlinearFunction}, + x::Vector{MOI.VariableIndex}, +) where {T} + return MOI.ScalarNonlinearFunction( + :+, + Any[MOI.ScalarNonlinearFunction(:^, Any[xi, 2]) for xi in x], + ) +end + # Default fallback. 
_set(::Any, ::Type{S}) where {S} = _set(S) @@ -316,7 +327,12 @@ for s in [ ] S = getfield(MOI, s) functions = if S <: MOI.AbstractScalarSet - (:VariableIndex, :ScalarAffineFunction, :ScalarQuadraticFunction) + ( + :VariableIndex, + :ScalarAffineFunction, + :ScalarQuadraticFunction, + :ScalarNonlinearFunction, + ) else (:VectorOfVariables, :VectorAffineFunction, :VectorQuadraticFunction) end diff --git a/src/Test/test_nonlinear.jl b/src/Test/test_nonlinear.jl index c5dc71ac30..48bd4c87f3 100644 --- a/src/Test/test_nonlinear.jl +++ b/src/Test/test_nonlinear.jl @@ -1115,3 +1115,560 @@ function setup_test( ) return end + +function test_nonlinear_expression_hs071( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) + F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + @requires MOI.supports_constraint(model, F, MOI.GreaterThan{Float64}) + MOI.Utilities.loadfromstring!( + model, + """ +variables: w, x, y, z +minobjective: ScalarNonlinearFunction(w * z * (w + x + y) + y) +c2: ScalarNonlinearFunction(w * x * y * z) >= 25.0 +c3: ScalarNonlinearFunction(w^2 + x^2 + y^2 + z^2) == 40.0 +w in Interval(1.0, 5.0) +x in Interval(1.0, 5.0) +y in Interval(1.0, 5.0) +z in Interval(1.0, 5.0) +""", + ) + x = MOI.get(model, MOI.ListOfVariableIndices()) + MOI.set.(model, MOI.VariablePrimalStart(), x, [1.0, 5.0, 5.0, 1.0]) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈( + MOI.get.(model, MOI.VariablePrimal(), x), + [1.0, 4.742999, 3.821150, 1.379408], + config, + ) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_hs071), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + MOI.Utilities.set_mock_optimize!( + model, + (mock) -> begin + MOI.Utilities.mock_optimize!( + mock, + config.optimal_status, + [1.0, 
4.742999, 3.821150, 1.379408], + ) + end, + ) + return +end + +function test_nonlinear_expression_hs071_epigraph( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) + F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + @requires MOI.supports_constraint(model, F, MOI.GreaterThan{Float64}) + MOI.Utilities.loadfromstring!( + model, + """ +variables: t, w, x, y, z +minobjective: t +c1: ScalarNonlinearFunction(t - (w * z * (w + x + y) + y)) >= 0.0 +c2: ScalarNonlinearFunction(w * x * y * z) >= 25.0 +c3: ScalarNonlinearFunction(w^2 + x^2 + y^2 + z^2) == 40.0 +w in Interval(1.0, 5.0) +x in Interval(1.0, 5.0) +y in Interval(1.0, 5.0) +z in Interval(1.0, 5.0) +""", + ) + x = MOI.get(model, MOI.ListOfVariableIndices()) + MOI.set.(model, MOI.VariablePrimalStart(), x, [16.0, 1.0, 5.0, 5.0, 1.0]) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈( + MOI.get.(model, MOI.VariablePrimal(), x), + [17.014013643792, 1.0, 4.742999, 3.821150, 1.379408], + config, + ) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_hs071_epigraph), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + MOI.Utilities.set_mock_optimize!( + model, + (mock) -> begin + MOI.Utilities.mock_optimize!( + mock, + config.optimal_status, + [17.014013643792, 1.0, 4.742999, 3.821150, 1.379408], + ) + end, + ) + return +end + +function test_nonlinear_expression_hs109( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) 
+ F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + @requires MOI.supports_constraint(model, F, MOI.GreaterThan{Float64}) + MOI.Utilities.loadfromstring!( + model, + """ +variables: x1, x2, x3, x4, x5, x6, x7, x8, x9 +minobjective: ScalarNonlinearFunction(3 * x1 + 1e-6 * x1^3 + 2 * x2 + 0.522074e-6 * x2^3) +x1 >= 0.0 +x2 >= 0.0 +x3 in Interval(-0.55, 0.55) +x4 in Interval(-0.55, 0.55) +x5 in Interval(196.0, 252.0) +x6 in Interval(196.0, 252.0) +x7 in Interval(196.0, 252.0) +x8 in Interval(-400.0, 800.0) +x9 in Interval(-400.0, 800.0) +c1: ScalarNonlinearFunction(x4 - x3 + 0.55) >= 0.0 +c2: ScalarNonlinearFunction(x3 - x4 + 0.55) >= 0.0 +c3: ScalarNonlinearFunction(2250000 - x1^2 - x8^2) >= 0.0 +c4: ScalarNonlinearFunction(2250000 - x2^2 - x9^2) >= 0.0 +c5: ScalarNonlinearFunction(x5 * x6 * sin(-x3 - 0.25) + x5 * x7 * sin(-x4 - 0.25) + 2 * 0.24740395925452294 * x5^2 - 50.176 * x1 + 400 * 50.176) == 0.0 +c6: ScalarNonlinearFunction(x5 * x6 * sin(x3 - 0.25) + x6 * x7 * sin(x3 - x4 - 0.25) + 2 * 0.24740395925452294 * x6^2 - 50.176 * x2 + 400 * 50.176) == 0.0 +c7: ScalarNonlinearFunction(x5 * x7 * sin(x4 - 0.25) + x6 * x7 * sin(x4 - x3 - 0.25) + 2 * 0.24740395925452294 * x7^2 + 881.779 * 50.176) == 0.0 +c8: ScalarNonlinearFunction(50.176 * x8 + x5 * x6 * cos(-x3 - 0.25) + x5 * x7 * cos(-x4 - 0.25) - 200 * 50.176 - 2 * 0.9689124217106447 * x5^2 + 0.7533e-3 * 50.176 * x5^2) == 0.0 +c9: ScalarNonlinearFunction(50.176 * x9 + x5 * x6 * cos(x3 - 0.25) + x6 * x7 * cos(x3 - x4 - 0.25) - 2 * 0.9689124217106447 * x6^2 + 0.7533e-3 * 50.176 * x6^2 - 200 * 50.176) == 0.0 +c10: ScalarNonlinearFunction(x5 * x7 * cos(x4 - 0.25) + x6 * x7 * cos(x4 - x3 - 0.25) - 2 * 0.9689124217106447 * x7^2 + 22.938 * 50.176 + 0.7533e-3 * 50.176 * x7^2) == 0.0 +""", + ) + x = MOI.get(model, MOI.ListOfVariableIndices()) + MOI.set.(model, MOI.VariablePrimalStart(), x, 0.0) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == 
config.optimal_status + @test ≈(MOI.get(model, MOI.ObjectiveValue()), 5326.851310161077, config) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_hs109), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + flag = model.eval_objective_value + model.eval_objective_value = false + MOI.Utilities.set_mock_optimize!( + model, + (mock) -> begin + MOI.Utilities.mock_optimize!( + mock, + config.optimal_status, + MOI.FEASIBLE_POINT, + ) + MOI.set(mock, MOI.ObjectiveValue(), 5326.851310161077) + end, + ) + return () -> model.eval_objective_value = flag +end + +function test_nonlinear_expression_hs110( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) + F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + f = "log(x1 - 2)^2 + log(10 - x1)^2" + for i in 2:10 + f *= " + log(x$i - 2)^2 + log(10 - x$i)^2" + end + f *= " - (x1 * x2 * x3 * x4 * x5 * x6 * x7 * x8 * x9 * x10)^0.2" + MOI.Utilities.loadfromstring!( + model, + """ +variables: x1, x2, x3, x4, x5, x6, x7, x8, x9, x10 +minobjective: ScalarNonlinearFunction($f) +x1 in Interval(-2.001, 9.999) +x2 in Interval(-2.001, 9.999) +x3 in Interval(-2.001, 9.999) +x4 in Interval(-2.001, 9.999) +x5 in Interval(-2.001, 9.999) +x6 in Interval(-2.001, 9.999) +x7 in Interval(-2.001, 9.999) +x8 in Interval(-2.001, 9.999) +x9 in Interval(-2.001, 9.999) +x10 in Interval(-2.001, 9.999) +""", + ) + x = MOI.get(model, MOI.ListOfVariableIndices()) + MOI.set.(model, MOI.VariablePrimalStart(), x, 9.0) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈(MOI.get(model, MOI.ObjectiveValue()), -45.77846971, config) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_hs110), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != 
Float64 + return # Skip for non-Float64 solvers + end + flag = model.eval_objective_value + model.eval_objective_value = false + MOI.Utilities.set_mock_optimize!( + model, + (mock) -> begin + MOI.Utilities.mock_optimize!( + mock, + config.optimal_status, + MOI.FEASIBLE_POINT, + ) + MOI.set(mock, MOI.ObjectiveValue(), -45.77846971) + end, + ) + return () -> model.eval_objective_value = flag +end + +function test_nonlinear_expression_quartic( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) + F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + MOI.Utilities.loadfromstring!( + model, + """ +variables: x +minobjective: ScalarNonlinearFunction(x^4) +x >= -1.0 +""", + ) + x = MOI.get(model, MOI.ListOfVariableIndices()) + MOI.set.(model, MOI.VariablePrimalStart(), x, [-0.5]) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈(MOI.get(model, MOI.ObjectiveValue()), 0.0, config) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_quartic), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + flag = model.eval_objective_value + model.eval_objective_value = false + MOI.Utilities.set_mock_optimize!( + model, + (mock) -> begin + MOI.Utilities.mock_optimize!( + mock, + config.optimal_status, + MOI.FEASIBLE_POINT, + ) + MOI.set(mock, MOI.ObjectiveValue(), 0.0) + end, + ) + return () -> model.eval_objective_value = flag +end + +function test_nonlinear_expression_overrides_objective( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) 
+ F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + x = MOI.add_variables(model, 2) + MOI.add_constraint.(model, x, MOI.GreaterThan(0.0)) + MOI.add_constraint(model, 1.0 * x[1] + 2.0 * x[2], MOI.LessThan(1.0)) + MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) + f = MOI.ScalarNonlinearFunction(:+, Any[x[1], x[2]]) + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈(MOI.get.(model, MOI.VariablePrimal(), x), [1.0, 0.0], config) + @test ≈(MOI.get(model, MOI.ObjectiveValue()), 1.0, config) + f = 2.0 * x[1] + x[2] + MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈(MOI.get.(model, MOI.VariablePrimal(), x), [1.0, 0.0], config) + @test ≈(MOI.get(model, MOI.ObjectiveValue()), 2.0, config) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_overrides_objective), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + flag = model.eval_objective_value + model.eval_objective_value = false + MOI.Utilities.set_mock_optimize!( + model, + (mock) -> begin + MOI.Utilities.mock_optimize!(mock, config.optimal_status, [1.0, 0.0]) + MOI.set(mock, MOI.ObjectiveValue(), 1.0) + end, + (mock) -> begin + MOI.Utilities.mock_optimize!(mock, config.optimal_status, [1.0, 0.0]) + MOI.set(mock, MOI.ObjectiveValue(), 2.0) + end, + ) + return () -> model.eval_objective_value = flag +end + +function test_nonlinear_expression_univariate_function( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) 
+ F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + @requires MOI.supports(model, MOI.UserDefinedFunction(:my_square, 1)) + my_square(x) = (x - 1)^2 + MOI.set(model, MOI.UserDefinedFunction(:my_square, 1), (my_square,)) + x = MOI.add_variable(model) + obj = MOI.ScalarNonlinearFunction(:my_square, Any[x]) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + MOI.set(model, MOI.ObjectiveFunction{typeof(obj)}(), obj) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈(MOI.get(model, MOI.VariablePrimal(), x), T(1), config) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_univariate_function), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + MOI.Utilities.set_mock_optimize!( + model, + mock -> MOI.Utilities.mock_optimize!(mock, config.optimal_status, [1]), + ) + return +end + +function test_nonlinear_expression_multivariate_function( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) 
+ F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + @requires MOI.supports(model, MOI.UserDefinedFunction(:my_square, 2)) + f(x, y) = (x - 1)^2 + (y - 2)^2 + function ∇f(g, x, y) + g[1] = 2 * (x - 1) + g[2] = 2 * (y - 2) + return + end + MOI.set(model, MOI.UserDefinedFunction(:my_square, 2), (f, ∇f)) + x = MOI.add_variable(model) + y = MOI.add_variable(model) + obj = MOI.ScalarNonlinearFunction(:my_square, Any[x, y]) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + MOI.set(model, MOI.ObjectiveFunction{typeof(obj)}(), obj) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == config.optimal_status + @test ≈(MOI.get(model, MOI.VariablePrimal(), x), T(1), config) + @test ≈(MOI.get(model, MOI.VariablePrimal(), y), T(2), config) + return +end + +function setup_test( + ::typeof(test_nonlinear_expression_multivariate_function), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + MOI.Utilities.set_mock_optimize!( + model, + mock -> MOI.Utilities.mock_optimize!( + mock, + config.optimal_status, + [1.0, 2.0], + ), + ) + return +end + +""" + test_nonlinear_duals(model::MOI.ModelLike, config::MOI.Test.Config) + +Tests dual solutions with `ScalarNonlinearFunction`. We use a linear program +so that the duals are easy to compute. +""" +function test_nonlinear_duals( + model::MOI.ModelLike, + config::MOI.Test.Config{T}, +) where {T} + @requires T == Float64 + @requires _supports(config, MOI.optimize!) 
+ F = MOI.ScalarNonlinearFunction + @requires MOI.supports(model, MOI.ObjectiveFunction{F}()) + MOI.Utilities.loadfromstring!( + model, + """ +variables: x, y, z, r3, r4, r5, r6 +minobjective: ScalarNonlinearFunction(-((x + y) / 2.0 + 3.0) / 3.0 - z - r3) +x >= 0.0 +y <= 5.0 +z in Interval(2.0, 4.0) +r3 in Interval(0.0, 3.0) +r4 in Interval(0.0, 4.0) +r5 in Interval(0.0, 5.0) +r6 in Interval(0.0, 6.0) +cons1: ScalarNonlinearFunction(x + y) >= 2.0 +cons2: ScalarNonlinearFunction(r3 + r4 + r5 + x / 2) <= 1.0 +cons3: ScalarNonlinearFunction(7.0 * y + - (z + r6 / 1.9)) <= 0.0 +""", + ) + x = MOI.get(model, MOI.ListOfVariableIndices()) + MOI.optimize!(model) + @test MOI.get(model, MOI.PrimalStatus()) == MOI.FEASIBLE_POINT + @test MOI.get(model, MOI.DualStatus()) == MOI.FEASIBLE_POINT + @test ≈( + MOI.get(model, MOI.VariablePrimal(), x), + [0.9774436, 1.0225564, 4.0, 0.5112782, 0.0, 0.0, 6.0], + config, + ) + cons1 = MOI.get(model, MOI.ConstraintIndex, "cons1") + cons2 = MOI.get(model, MOI.ConstraintIndex, "cons2") + cons3 = MOI.get(model, MOI.ConstraintIndex, "cons3") + LB = MOI.ConstraintIndex{MOI.VariableIndex,MOI.GreaterThan{Float64}} + UB = MOI.ConstraintIndex{MOI.VariableIndex,MOI.LessThan{Float64}} + IV = MOI.ConstraintIndex{MOI.VariableIndex,MOI.Interval{Float64}} + @test ≈(MOI.get(model, MOI.ConstraintDual(), cons1), 1 / 3, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), cons2), -1, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), cons3), -0.0714286, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), LB(x[1].value)), 0.0, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), UB(x[2].value)), 0.0, config) + @test ≈( + MOI.get(model, MOI.ConstraintDual(), [IV(xi.value) for xi in x[3:end]]), + [-1.0714286, 0.0, 1.0, 1.0, -0.03759398], + config, + ) + @test ≈(MOI.get(model, MOI.ObjectiveValue()), -5.8446115, config) + f = MOI.get(model, MOI.ObjectiveFunction{F}()) + MOI.set( + model, + MOI.ObjectiveFunction{F}(), + MOI.ScalarNonlinearFunction(:-, 
Any[f]), + ) + MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) + MOI.optimize!(model) + @test MOI.get(model, MOI.PrimalStatus()) == MOI.FEASIBLE_POINT + @test MOI.get(model, MOI.DualStatus()) == MOI.FEASIBLE_POINT + @test ≈( + MOI.get(model, MOI.VariablePrimal(), x), + [0.9774436, 1.0225564, 4.0, 0.5112782, 0.0, 0.0, 6.0], + config, + ) + @test ≈(MOI.get(model, MOI.ConstraintDual(), cons1), 1 / 3, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), cons2), -1, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), cons3), -0.0714286, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), LB(x[1].value)), 0.0, config) + @test ≈(MOI.get(model, MOI.ConstraintDual(), UB(x[2].value)), 0.0, config) + @test ≈( + MOI.get(model, MOI.ConstraintDual(), [IV(xi.value) for xi in x[3:end]]), + [-1.0714286, 0.0, 1.0, 1.0, -0.03759398], + config, + ) + @test ≈(MOI.get(model, MOI.ObjectiveValue()), 5.8446115, config) + return +end + +function setup_test( + ::typeof(test_nonlinear_duals), + model::MOIU.MockOptimizer, + config::Config{T}, +) where {T} + if T != Float64 + return # Skip for non-Float64 solvers + end + MOI.Utilities.set_mock_optimize!( + model, + mock -> begin + MOI.Utilities.mock_optimize!( + mock, + [0.9774436, 1.0225564, 4.0, 0.5112782, 0.0, 0.0, 6.0], + (MOI.ScalarNonlinearFunction, MOI.GreaterThan{T}) => T[1/3], + (MOI.ScalarNonlinearFunction, MOI.LessThan{T}) => + T[-1, -0.0714286], + (MOI.VariableIndex, MOI.GreaterThan{T}) => [0.0], + (MOI.VariableIndex, MOI.LessThan{T}) => [0.0], + (MOI.VariableIndex, MOI.Interval{T}) => + [-1.0714286, 0.0, 1.0, 1.0, -0.03759398], + ) + MOI.set(mock, MOI.ObjectiveValue(), -5.8446115) + end, + mock -> begin + MOI.Utilities.mock_optimize!( + mock, + [0.9774436, 1.0225564, 4.0, 0.5112782, 0.0, 0.0, 6.0], + (MOI.ScalarNonlinearFunction, MOI.GreaterThan{T}) => T[1/3], + (MOI.ScalarNonlinearFunction, MOI.LessThan{T}) => + T[-1, -0.0714286], + (MOI.VariableIndex, MOI.GreaterThan{T}) => [0.0], + (MOI.VariableIndex, 
MOI.LessThan{T}) => [0.0], + (MOI.VariableIndex, MOI.Interval{T}) => + [-1.0714286, 0.0, 1.0, 1.0, -0.03759398], + ) + MOI.set(mock, MOI.ObjectiveValue(), 5.8446115) + end, + ) + flag = model.eval_variable_constraint_dual + obj_flag = model.eval_objective_value + model.eval_variable_constraint_dual = false + model.eval_objective_value = false + return () -> begin + model.eval_variable_constraint_dual = flag + model.eval_objective_value = obj_flag + end +end diff --git a/src/Utilities/copy.jl b/src/Utilities/copy.jl index 231fd0b2a0..9af59c7df7 100644 --- a/src/Utilities/copy.jl +++ b/src/Utilities/copy.jl @@ -9,6 +9,10 @@ include("copy/index_map.jl") +_sort_priority(::Any) = 2 +_sort_priority(::MOI.UserDefinedFunction) = 0 +_sort_priority(::MOI.ObjectiveSense) = 1 + """ pass_attributes( dest::MOI.ModelLike, @@ -23,7 +27,12 @@ function pass_attributes( src::MOI.ModelLike, index_map::IndexMap, ) - for attr in MOI.get(src, MOI.ListOfModelAttributesSet()) + attrs = MOI.get(src, MOI.ListOfModelAttributesSet()) + # We need to deal with the UserDefinedFunctions first, so that they are in + # the model before we deal with the objective function or the constraints. + # We also need `ObjectiveSense` to be set before `ObjectiveFunction`. + sort!(attrs; by = _sort_priority) + for attr in attrs if !MOI.supports(dest, attr) if attr == MOI.Name() continue # Skipping names is okay. 
diff --git a/src/Utilities/functions.jl b/src/Utilities/functions.jl index 1d3df5ab79..4946acdf67 100644 --- a/src/Utilities/functions.jl +++ b/src/Utilities/functions.jl @@ -196,11 +196,8 @@ function map_indices(index_map::F, ci::MOI.ConstraintIndex) where {F<:Function} return index_map(ci) end -function map_indices( - index_map::F, - array::AbstractArray{<:MOI.Index}, -) where {F<:Function} - return map(index_map, array) +function map_indices(index_map::F, x::AbstractArray) where {F<:Function} + return [map_indices(index_map, xi) for xi in x] end map_indices(::F, block::MOI.NLPBlockData) where {F<:Function} = block @@ -264,6 +261,16 @@ function map_indices( return typeof(f)(quadratic_terms, affine_terms, MOI.constant(f)) end +function map_indices( + index_map::F, + f::MOI.ScalarNonlinearFunction, +) where {F<:Function} + return MOI.ScalarNonlinearFunction( + f.head, + Any[map_indices(index_map, arg) for arg in f.args], + ) +end + # Function changes function map_indices( @@ -865,6 +872,8 @@ function canonicalize!( return f end +canonicalize!(f::MOI.ScalarNonlinearFunction) = f + """ canonicalize!(f::Union{ScalarQuadraticFunction, VectorQuadraticFunction}) @@ -1043,6 +1052,39 @@ function filter_variables( ) end +function filter_variables(keep::Function, f::MOI.ScalarNonlinearFunction) + args = Any[] + first_arg_deleted = false + for (i, arg) in enumerate(f.args) + if arg isa MOI.VariableIndex + if keep(arg) + push!(args, arg) + else + if i == 1 + first_arg_deleted = true + end + if !(f.head in (:+, :-, :*)) + error("Unable to delete variable in `$(f.head) operation.") + end + end + elseif arg isa Number + push!(args, arg) + else + push!(args, filter_variables(keep, arg)) + end + end + if f.head == :- + if first_arg_deleted + # -(x, y...) has become -(y...), but it should be -(0, y...) + pushfirst!(args, 0) + elseif length(f.args) > 1 && length(args) == 1 + # -(x, y...) 
has become -(x), but it should be +(x) + return f.args[1] + end + end + return MOI.ScalarNonlinearFunction(f.head, args) +end + """ remove_variable(f::AbstractFunction, vi::VariableIndex) @@ -1772,6 +1814,37 @@ function operate( return operate!(op, T, copy(f), g) end +### ScalarNonlinearFunction + +function promote_operation( + ::typeof(-), + ::Type{T}, + ::Type{MOI.ScalarNonlinearFunction}, + ::Type{T}, +) where {T} + return MOI.ScalarNonlinearFunction +end + +function promote_operation( + ::typeof(-), + ::Type{T}, + ::Type{MOI.ScalarNonlinearFunction}, + ::Type{MOI.VariableIndex}, +) where {T} + return MOI.ScalarNonlinearFunction +end + +function operate( + op::Union{typeof(+),typeof(-)}, + ::Type{T}, + f::MOI.ScalarNonlinearFunction, + g::ScalarQuadraticLike{T}, +) where {T} + return MOI.ScalarNonlinearFunction(Symbol(op), Any[f, g]) +end + +### Base methods + _eltype(args::Tuple) = _eltype(first(args), Base.tail(args)) _eltype(::Tuple{}) = nothing _eltype(::MOI.Utilities.TypedScalarLike{T}, tail) where {T} = T @@ -3347,6 +3420,8 @@ is_coefficient_type(::Type{<:TypedLike{T}}, ::Type{T}) where {T} = true is_coefficient_type(::Type{<:TypedLike}, ::Type) = false +is_coefficient_type(::Type{<:MOI.ScalarNonlinearFunction}, ::Type) = true + similar_type(::Type{F}, ::Type{T}) where {F,T} = F function similar_type(::Type{<:MOI.ScalarAffineFunction}, ::Type{T}) where {T} diff --git a/src/Utilities/model.jl b/src/Utilities/model.jl index a25e2a6b2d..128c9b20d0 100644 --- a/src/Utilities/model.jl +++ b/src/Utilities/model.jl @@ -817,7 +817,7 @@ const LessThanIndicatorZero{T} = MOI.BinPacking, MOI.HyperRectangle, ), - (), + (MOI.ScalarNonlinearFunction,), (MOI.ScalarAffineFunction, MOI.ScalarQuadraticFunction), (MOI.VectorOfVariables,), (MOI.VectorAffineFunction, MOI.VectorQuadraticFunction) diff --git a/src/Utilities/mutable_arithmetics.jl b/src/Utilities/mutable_arithmetics.jl index 638d63f869..efaddea617 100644 --- a/src/Utilities/mutable_arithmetics.jl +++ 
b/src/Utilities/mutable_arithmetics.jl @@ -52,8 +52,8 @@ function MA.isequal_canonical( ) end -function MA.iszero!!(f::TypedScalarLike) - return iszero(MOI.constant(f)) && _is_constant(canonicalize!(f)) +function MA.iszero!!(f::TypedScalarLike{T}) where {T} + return iszero(MOI.constant(f, T)) && _is_constant(canonicalize!(f)) end function MA.scaling(f::TypedScalarLike{T}) where {T} @@ -61,7 +61,7 @@ function MA.scaling(f::TypedScalarLike{T}) where {T} if !_is_constant(g) throw(InexactError(:convert, T, f)) end - return MA.scaling(MOI.constant(g)) + return MA.scaling(MOI.constant(g, T)) end function MA.promote_operation( @@ -245,7 +245,7 @@ end _constant(::Type{T}, α::T) where {T} = α _constant(::Type{T}, ::MOI.VariableIndex) where {T} = zero(T) -_constant(::Type{T}, func::TypedScalarLike{T}) where {T} = MOI.constant(func) +_constant(::Type{T}, func::TypedScalarLike{T}) where {T} = MOI.constant(func, T) _affine_terms(f::MOI.ScalarAffineFunction) = f.terms _affine_terms(f::MOI.ScalarQuadraticFunction) = f.affine_terms diff --git a/src/Utilities/objective_container.jl b/src/Utilities/objective_container.jl index 9bb77df7d9..412cf4a7e8 100644 --- a/src/Utilities/objective_container.jl +++ b/src/Utilities/objective_container.jl @@ -17,6 +17,7 @@ mutable struct ObjectiveContainer{T} <: MOI.ModelLike single_variable::Union{Nothing,MOI.VariableIndex} scalar_affine::Union{Nothing,MOI.ScalarAffineFunction{T}} scalar_quadratic::Union{Nothing,MOI.ScalarQuadraticFunction{T}} + scalar_nonlinear::Union{Nothing,MOI.ScalarNonlinearFunction} vector_variables::Union{Nothing,MOI.VectorOfVariables} vector_affine::Union{Nothing,MOI.VectorAffineFunction{T}} vector_quadratic::Union{Nothing,MOI.VectorQuadraticFunction{T}} @@ -34,6 +35,7 @@ function MOI.empty!(o::ObjectiveContainer{T}) where {T} o.single_variable = nothing o.scalar_affine = nothing o.scalar_quadratic = nothing + o.scalar_nonlinear = nothing o.vector_variables = nothing o.vector_affine = nothing o.vector_quadratic = nothing 
@@ -75,6 +77,8 @@ function MOI.get( return MOI.VariableIndex elseif o.scalar_quadratic !== nothing return MOI.ScalarQuadraticFunction{T} + elseif o.scalar_nonlinear !== nothing + return MOI.ScalarNonlinearFunction elseif o.vector_variables !== nothing return MOI.VectorOfVariables elseif o.vector_affine !== nothing @@ -97,6 +101,7 @@ function MOI.supports( MOI.VariableIndex, MOI.ScalarAffineFunction{T}, MOI.ScalarQuadraticFunction{T}, + MOI.ScalarNonlinearFunction, MOI.VectorOfVariables, MOI.VectorAffineFunction{T}, MOI.VectorQuadraticFunction{T}, @@ -116,6 +121,8 @@ function MOI.get( return convert(F, o.single_variable) elseif o.scalar_quadratic !== nothing return convert(F, o.scalar_quadratic) + elseif o.scalar_nonlinear !== nothing + return convert(F, o.scalar_nonlinear) elseif o.vector_variables !== nothing return convert(F, o.vector_variables) elseif o.vector_affine !== nothing @@ -167,6 +174,17 @@ function MOI.set( return end +function MOI.set( + o::ObjectiveContainer, + ::MOI.ObjectiveFunction{MOI.ScalarNonlinearFunction}, + f::MOI.ScalarNonlinearFunction, +) + _empty_keeping_sense(o) + o.is_function_set = true + o.scalar_nonlinear = copy(f) + return +end + function MOI.set( o::ObjectiveContainer, ::MOI.ObjectiveFunction{MOI.VectorOfVariables}, @@ -231,6 +249,14 @@ function MOI.modify( o.scalar_affine = modify_function!(o.scalar_affine, change) elseif o.scalar_quadratic !== nothing o.scalar_quadratic = modify_function!(o.scalar_quadratic, change) + elseif o.scalar_nonlinear !== nothing + throw( + MOI.ModifyObjectiveNotAllowed( + change, + "Cannot modify objective when there is a " * + "`ScalarNonlinearFunction` objective", + ), + ) elseif o.vector_variables !== nothing o.vector_variables = modify_function!(o.vector_variables, change) elseif o.vector_quadratic !== nothing @@ -259,6 +285,14 @@ function MOI.delete(o::ObjectiveContainer, x::MOI.VariableIndex) o.scalar_affine = remove_variable(o.scalar_affine, x) elseif o.scalar_quadratic !== nothing 
o.scalar_quadratic = remove_variable(o.scalar_quadratic, x) + elseif o.scalar_nonlinear !== nothing + throw( + MOI.DeleteNotAllowed( + x, + "Cannot delete variable when there is a " * + "`ScalarNonlinearFunction` objective", + ), + ) elseif o.vector_variables !== nothing o.vector_variables = remove_variable(o.vector_variables, x) if isempty(o.vector_variables.variables) @@ -282,6 +316,14 @@ function MOI.delete(o::ObjectiveContainer, x::Vector{MOI.VariableIndex}) o.scalar_affine = filter_variables(keep, o.scalar_affine) elseif o.scalar_quadratic !== nothing o.scalar_quadratic = filter_variables(keep, o.scalar_quadratic) + elseif o.scalar_nonlinear !== nothing + throw( + MOI.DeleteNotAllowed( + first(x), + "Cannot delete variable when there is a " * + "`ScalarNonlinearFunction` objective", + ), + ) elseif o.vector_variables !== nothing o.vector_variables = filter_variables(keep, o.vector_variables) if isempty(o.vector_variables.variables) diff --git a/src/Utilities/parser.jl b/src/Utilities/parser.jl index ebe80ba654..63f4645246 100644 --- a/src/Utilities/parser.jl +++ b/src/Utilities/parser.jl @@ -113,6 +113,9 @@ function _parse_function(ex, ::Type{T} = Float64) where {T} end end else + if isexpr(ex, :call, 2) && ex.args[1] == :ScalarNonlinearFunction + return ex + end # For simplicity, only accept Expr(:call, :+, ...); no recursive # expressions if isexpr(ex, :call) && ex.args[1] == :* @@ -235,6 +238,19 @@ _parsed_to_moi(model, s::Vector) = _parsed_to_moi.(model, s) _parsed_to_moi(model, s::Number) = s +function _parsed_to_moi(model, s::Expr) + if isexpr(s, :call, 2) && s.args[1] == :ScalarNonlinearFunction + return _parsed_scalar_to_moi(model, s.args[2]) + end + args = Any[_parsed_to_moi(model, arg) for arg in s.args[2:end]] + return MOI.ScalarNonlinearFunction(s.args[1], args) +end + +function _parsed_scalar_to_moi(model, s::Expr) + args = Any[_parsed_to_moi(model, arg) for arg in s.args[2:end]] + return MOI.ScalarNonlinearFunction(s.args[1], args) +end + for 
typename in [ :_ParsedScalarAffineTerm, :_ParsedScalarAffineFunction, diff --git a/src/Utilities/print.jl b/src/Utilities/print.jl index d84c863930..cbe6cb480d 100644 --- a/src/Utilities/print.jl +++ b/src/Utilities/print.jl @@ -75,6 +75,15 @@ _to_string(::_PrintOptions, ::typeof(in)) = @static Sys.iswindows() ? "in" : " # Functions #------------------------------------------------------------------------ +function _to_string(options::_PrintOptions, model::MOI.ModelLike, x::Vector) + args = [_to_string(options, model, xi) for xi in x] + return string("[", join(args, ", "), "]") +end + +function _to_string(options::_PrintOptions, ::MOI.ModelLike, c::Real) + return _shorten(options, c) +end + function _to_string( options::_PrintOptions, model::MOI.ModelLike, @@ -227,6 +236,34 @@ function _to_string( return s end +function _to_string( + options::_PrintOptions, + model::MOI.ModelLike, + f::MOI.ScalarNonlinearFunction, +) + io, stack, is_open = IOBuffer(), Any[f], true + while !isempty(stack) + arg = pop!(stack) + if !is_open && arg != ')' + print(io, ", ") + end + if arg isa MOI.ScalarNonlinearFunction + print(io, arg.head, "(") + push!(stack, ')') + for i in length(arg.args):-1:1 + push!(stack, arg.args[i]) + end + elseif arg isa Char + print(io, arg) + else + print(io, _to_string(options, model, arg)) + end + is_open = arg isa MOI.ScalarNonlinearFunction + end + seekstart(io) + return read(io, String) +end + function _to_string( options::_PrintOptions, model::MOI.ModelLike, diff --git a/src/Utilities/results.jl b/src/Utilities/results.jl index 1646199381..e693a7b7ee 100644 --- a/src/Utilities/results.jl +++ b/src/Utilities/results.jl @@ -46,7 +46,7 @@ function constraint_constant( }, T::Type, ) - return MOI.constant(MOI.get(model, MOI.ConstraintFunction(), ci)) + return MOI.constant(MOI.get(model, MOI.ConstraintFunction(), ci), T) end function constraint_constant( model::MOI.ModelLike, diff --git a/src/attributes.jl b/src/attributes.jl index ed620cdaa8..864cccc317 
100644 --- a/src/attributes.jl +++ b/src/attributes.jl @@ -1856,6 +1856,110 @@ function attribute_value_type(::ConstraintConflictStatus) return ConflictParticipationStatusCode end +""" + UserDefinedFunction(name::Symbol, arity::Int) <: AbstractModelAttribute + +Set this attribute to register a user-defined function by the name of `name` +with `arity` arguments. + +Once registered, `name` will appear in [`ListOfSupportedNonlinearOperators`](@ref). + +You cannot register multiple `UserDefinedFunction`s with the same `name` but +different `arity`. + +## Value type + +The value to be set is a tuple containing one, two, or three functions to +evaluate the function, the first-order derivative, and the second-order +derivative respectively. Both derivatives are optional, but if you pass the +second-order derivative you must also pass the first-order derivative. + +For univariate functions with `arity == 1`, the functions in the tuple must +have the form: + + * `f(x::T)::T`: returns the value of the function at `x` + * `∇f(x::T)::T`: returns the first-order derivative of `f` with respect to `x` + * `∇²f(x::T)::T`: returns the second-order derivative of `f` with respect to + `x`. + +For multivariate functions with `arity > 1`, the functions in the tuple must +have the form: + + * `f(x::T...)::T`: returns the value of the function at `x` + * `∇f(g::AbstractVector{T}, x::T...)::Nothing`: fills the components of `g`, + with `g[i]` being the first-order partial derivative of `f` with respect to + `x[i]` + * `∇²f(H::AbstractMatrix{T}, x::T...)::Nothing`: fills the non-zero components + of `H`, with `H[i, j]` being the second-order partial derivative of `f` with + respect to `x[i]` and then `x[j]`. `H` is initialized to the zero matrix, + so you do not need to set any zero elements. 
+ +## Examples + +```jldoctest +julia> import MathOptInterface as MOI + +julia> f(x, y) = x^2 + y^2 +f (generic function with 1 method) + +julia> function ∇f(g, x, y) + g .= 2 * x, 2 * y + return + end +∇f (generic function with 1 method) + +julia> function ∇²f(H, x...) + H[1, 1] = H[2, 2] = 2.0 + return + end +∇²f (generic function with 1 method) + +julia> model = MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()) +MOIU.UniversalFallback{MOIU.Model{Float64}} +fallback for MOIU.Model{Float64} + +julia> MOI.set(model, MOI.UserDefinedFunction(:f, 2), (f,)) + +julia> MOI.set(model, MOI.UserDefinedFunction(:g, 2), (f, ∇f)) + +julia> MOI.set(model, MOI.UserDefinedFunction(:h, 2), (f, ∇f, ∇²f)) + +julia> x = MOI.add_variables(model, 2) +2-element Vector{MathOptInterface.VariableIndex}: + MOI.VariableIndex(1) + MOI.VariableIndex(2) + +julia> MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + +julia> obj_f = MOI.ScalarNonlinearFunction(:f, Any[x[1], x[2]]) +f(MOI.VariableIndex(1), MOI.VariableIndex(2)) + +julia> MOI.set(model, MOI.ObjectiveFunction{typeof(obj_f)}(), obj_f) + +julia> print(model) +Minimize ScalarNonlinearFunction: + f(v[1], v[2]) + +Subject to: + +``` +""" +struct UserDefinedFunction <: AbstractModelAttribute + name::Symbol + arity::Int +end + +""" + ListOfSupportedNonlinearOperators() <: AbstractOptimizerAttribute + +When queried with [`get`](@ref), return a `Vector{Symbol}` listing the operators +supported by the model.
+ +```julia +``` +""" +struct ListOfSupportedNonlinearOperators <: AbstractOptimizerAttribute end + """ TerminationStatusCode diff --git a/src/functions.jl b/src/functions.jl index 994c21b3e7..c966ae2274 100644 --- a/src/functions.jl +++ b/src/functions.jl @@ -278,6 +278,121 @@ function Base.copy(f::ScalarQuadraticFunction) ) end +""" + ScalarNonlinearFunction(head::Symbol, args::Vector{Any}) + +The scalar-valued nonlinear function `head(args...)`, represented as a symbolic +expression tree, with the call operator `head` and ordered arguments in `args`. + +## `head` + +The `head::Symbol` must be an operator supported by the model. + +The default list of supported univariate operators is given by: + + * [`Nonlinear.DEFAULT_UNIVARIATE_OPERATORS`](@ref) + +and the default list of supported multivariate operators is given by: + + * [`Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS`](@ref) + +Additional operators can be registered by setting a [`UserDefinedFunction`](@ref) +attribute. + +See the full list of operators supported by a [`ModelLike`](@ref) by querying +[`ListOfSupportedNonlinearOperators`](@ref). + +## `args` + +The vector `args` contains the arguments to the nonlinear function. If the +operator is univariate, it must contain one element. Otherwise, it may contain +multiple elements. + +Each element must be one of the following: + + * A constant value of type `T<:Real` + * A [`VariableIndex`](@ref) + * A [`ScalarAffineFunction`](@ref) + * A [`ScalarQuadraticFunction`](@ref) + * A [`ScalarNonlinearFunction`](@ref) + +## Unsupported operators + +If the optimizer does not support `head`, an [`UnsupportedNonlinearOperator`](@ref) +error will be thrown. + +There is no guarantee about when this error will be thrown; it may be thrown +when the function is first added to the model, or it may be thrown when +[`optimize!`](@ref) is called. 
+ +## Example + +To represent the function ``f(x) = sin(x)^2``, do: + +```jldoctest +julia> import MathOptInterface as MOI + +julia> x = MOI.VariableIndex(1) +MOI.VariableIndex(1) + +julia> MOI.ScalarNonlinearFunction( + :^, + Any[MOI.ScalarNonlinearFunction(:sin, Any[x]), 2], + ) +^(sin(MOI.VariableIndex(1)), (2)) +``` +""" +struct ScalarNonlinearFunction <: AbstractScalarFunction + head::Symbol + args::Vector{Any} + + function ScalarNonlinearFunction(head::Symbol, args::AbstractVector) + # TODO(odow): should we do this? + # for arg in args + # if !(arg isa Real || arg isa AbstractScalarFunction) + # error("Unsupported object in nonlinear expression: $arg") + # end + # end + return new(head, convert(Vector{Any}, args)) + end +end + +function Base.copy(f::ScalarNonlinearFunction) + return ScalarNonlinearFunction(f.head, copy(f.args)) +end + +constant(f::ScalarNonlinearFunction, ::Type{T} = Float64) where {T} = zero(T) + +""" + UnsupportedNonlinearOperator(head::Symbol[, message::String]) <: UnsupportedError + +An error thrown by optimizers if they do not support the operator `head` in a +[`ScalarNonlinearFunction`](@ref). + +## Example + +```jldoctest +julia> import MathOptInterface as MOI + +julia> throw(MOI.UnsupportedNonlinearOperator(:black_box)) +ERROR: MathOptInterface.UnsupportedNonlinearOperator: The nonlinear operator `:black_box` is not supported by the model. +Stacktrace: +[...] +``` +""" +struct UnsupportedNonlinearOperator <: UnsupportedError + head::Symbol + message::String + + function UnsupportedNonlinearOperator(head::Symbol, message::String = "") + return new(head, message) + end +end + +function element_name(err::UnsupportedNonlinearOperator) + return "The nonlinear operator `:$(err.head)`" +end + """ abstract type AbstractVectorFunction <: AbstractFunction @@ -694,6 +809,29 @@ function Base.isapprox( ) end +_is_approx(x, y; kwargs...) = isapprox(x, y; kwargs...) + +function _is_approx(x::AbstractArray, y::AbstractArray; kwargs...) 
+ return size(x) == size(y) && + all(z -> _is_approx(z[1], z[2]; kwargs...), zip(x, y)) +end + +function Base.isapprox( + f::ScalarNonlinearFunction, + g::ScalarNonlinearFunction; + kwargs..., +) + if f.head != g.head || length(f.args) != length(g.args) + return false + end + for (fi, gi) in zip(f.args, g.args) + if !_is_approx(fi, gi; kwargs...) + return false + end + end + return true +end + ### ### Base.convert ### @@ -811,6 +949,48 @@ function Base.convert( ) end +# ScalarNonlinearFunction + +function Base.convert(::Type{ScalarNonlinearFunction}, term::ScalarAffineTerm) + return ScalarNonlinearFunction(:*, Any[term.coefficient, term.variable]) +end + +function Base.convert(F::Type{ScalarNonlinearFunction}, f::ScalarAffineFunction) + args = Any[convert(ScalarNonlinearFunction, term) for term in f.terms] + if !iszero(f.constant) + push!(args, f.constant) + end + return ScalarNonlinearFunction(:+, args) +end + +function Base.convert( + ::Type{ScalarNonlinearFunction}, + term::ScalarQuadraticTerm, +) + coef = term.coefficient + if term.variable_1 == term.variable_2 + coef /= 2 + end + return ScalarNonlinearFunction( + :*, + Any[coef, term.variable_1, term.variable_2], + ) +end + +function Base.convert( + F::Type{ScalarNonlinearFunction}, + f::ScalarQuadraticFunction, +) + args = Any[convert(F, term) for term in f.quadratic_terms] + for term in f.affine_terms + push!(args, convert(F, term)) + end + if !iszero(f.constant) + push!(args, f.constant) + end + return ScalarNonlinearFunction(:+, args) +end + # VectorOfVariables function Base.convert(::Type{VectorOfVariables}, g::VariableIndex) diff --git a/test/Bridges/Constraint/interval.jl b/test/Bridges/Constraint/interval.jl index f3b73119eb..a654c380b4 100644 --- a/test/Bridges/Constraint/interval.jl +++ b/test/Bridges/Constraint/interval.jl @@ -456,6 +456,18 @@ function test_runtests() """, eltype = Rational{Int}, ) + MOI.Bridges.runtests( + MOI.Bridges.Constraint.SplitIntervalBridge, + """ + variables: x + 
ScalarNonlinearFunction(log(x)) in Interval(1.0, 2.0) + """, + """ + variables: x + ScalarNonlinearFunction(log(x)) >= 1.0 + ScalarNonlinearFunction(log(x)) <= 2.0 + """, + ) return end diff --git a/test/Bridges/Constraint/slack.jl b/test/Bridges/Constraint/slack.jl index c33fbb780c..1f89f984e9 100644 --- a/test/Bridges/Constraint/slack.jl +++ b/test/Bridges/Constraint/slack.jl @@ -324,6 +324,18 @@ function test_runtests() y <= 2.0 """, ) + MOI.Bridges.runtests( + MOI.Bridges.Constraint.ScalarSlackBridge, + """ + variables: x + ScalarNonlinearFunction(log(x)) <= 2.0 + """, + """ + variables: x, y + ScalarNonlinearFunction(log(x) - y) == 0.0 + y <= 2.0 + """, + ) MOI.Bridges.runtests( MOI.Bridges.Constraint.ScalarSlackBridge, """ diff --git a/test/Bridges/Objective/slack.jl b/test/Bridges/Objective/slack.jl index 6b95a6bbcd..e1c978db25 100644 --- a/test/Bridges/Objective/slack.jl +++ b/test/Bridges/Objective/slack.jl @@ -471,6 +471,18 @@ function test_runtests() 1.1 * x + -1.0 * y >= -2.2 """, ) + MOI.Bridges.runtests( + MOI.Bridges.Objective.SlackBridge, + """ + variables: x + maxobjective: ScalarNonlinearFunction(log(x)) + """, + """ + variables: x, y + maxobjective: y + ScalarNonlinearFunction(log(x) - y) >= 0.0 + """, + ) return end diff --git a/test/Nonlinear/Nonlinear.jl b/test/Nonlinear/Nonlinear.jl index 238cb40b51..827fb6492d 100644 --- a/test/Nonlinear/Nonlinear.jl +++ b/test/Nonlinear/Nonlinear.jl @@ -40,7 +40,7 @@ function test_parse_unable() x = MOI.VariableIndex(1) input = :(f($x)) @test_throws( - ErrorException("Unable to parse: $input"), + MOI.UnsupportedNonlinearOperator(:f), Nonlinear.set_objective(model, input), ) return @@ -989,6 +989,101 @@ function test_parse_splat_no_reverse() return end +function test_scalar_nonlinear_function_parse_expression() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction( + :+, + Any[x, MOI.ScalarNonlinearFunction(:sin, Any[x])], + ) + nlp_model = 
MOI.Nonlinear.Model() + e1 = MOI.Nonlinear.add_expression(nlp_model, f) + e2 = MOI.Nonlinear.add_expression(nlp_model, :($x + sin($x))) + @test nlp_model[e1] == nlp_model[e2] + return +end + +function test_scalar_nonlinear_function_parse_scalaraffinefunction() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = 1.0 * x + 2.0 + nlp_model = MOI.Nonlinear.Model() + e1 = MOI.Nonlinear.add_expression(nlp_model, f) + e2 = MOI.Nonlinear.add_expression(nlp_model, :(1.0 * $x + 2.0)) + @test nlp_model[e1] == nlp_model[e2] + return +end + +function test_scalar_nonlinear_function_parse_scalarquadraticfunction() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + y = MOI.add_variable(model) + f = 1.5 * x * x + 2.5 * x * y + 3.5 * x + 2.0 + nlp_model = MOI.Nonlinear.Model() + e1 = MOI.Nonlinear.add_expression(nlp_model, f) + f_expr = :(1.5 * $x * $x + 2.5 * $x * $y + 3.5 * $x + 2.0) + e2 = MOI.Nonlinear.add_expression(nlp_model, f_expr) + @test nlp_model[e1] == nlp_model[e2] + return +end + +function test_scalar_nonlinear_function_parse_logic_or() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:||, Any[x, x]) + nlp_model = MOI.Nonlinear.Model() + e1 = MOI.Nonlinear.add_expression(nlp_model, f) + e2 = MOI.Nonlinear.add_expression(nlp_model, :($x || $x)) + @test nlp_model[e1] == nlp_model[e2] + return +end + +function test_scalar_nonlinear_function_parse_logic_and() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:&&, Any[x, x]) + nlp_model = MOI.Nonlinear.Model() + e1 = MOI.Nonlinear.add_expression(nlp_model, f) + e2 = MOI.Nonlinear.add_expression(nlp_model, :($x && $x)) + @test nlp_model[e1] == nlp_model[e2] + return +end + +function test_scalar_nonlinear_function_parse_comparison() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:<, Any[x, 1]) + nlp_model = 
MOI.Nonlinear.Model() + e1 = MOI.Nonlinear.add_expression(nlp_model, f) + e2 = MOI.Nonlinear.add_expression(nlp_model, :($x < 1)) + @test nlp_model[e1] == nlp_model[e2] + return +end + +function test_scalar_nonlinear_function_parse_unknown() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:foo, Any[x, 1]) + nlp_model = MOI.Nonlinear.Model() + @test_throws( + MOI.UnsupportedNonlinearOperator(:foo), + MOI.Nonlinear.add_expression(nlp_model, f), + ) + return +end + +function test_ListOfSupportedNonlinearOperators() + model = MOI.Nonlinear.Model() + ops = MOI.get(model, MOI.ListOfSupportedNonlinearOperators()) + @test ops isa Vector{Symbol} + @test length(ops) > 70 + @test :|| in ops + @test :sin in ops + @test :> in ops + @test :ifelse in ops + return +end + end TestNonlinear.runtests() diff --git a/test/Utilities/functions.jl b/test/Utilities/functions.jl index 15d1a93935..b6e3d8ff6f 100644 --- a/test/Utilities/functions.jl +++ b/test/Utilities/functions.jl @@ -1834,6 +1834,45 @@ function test_value_type() return end +function test_filter_variables_scalarnonlinearfunction() + x = MOI.VariableIndex(1) + y = MOI.VariableIndex(2) + f = MOI.ScalarNonlinearFunction(:+, Any[x, 2, y]) + new_f = MOI.Utilities.filter_variables(xi -> xi != x, f) + @test new_f ≈ MOI.ScalarNonlinearFunction(:+, Any[2, y]) + f = MOI.ScalarNonlinearFunction(:-, Any[x, 2, y]) + new_f = MOI.Utilities.filter_variables(xi -> xi != x, f) + @test new_f ≈ MOI.ScalarNonlinearFunction(:-, Any[0, 2, y]) + f = MOI.ScalarNonlinearFunction(:-, Any[2, x, y]) + new_f = MOI.Utilities.filter_variables(xi -> xi != x, f) + @test new_f ≈ MOI.ScalarNonlinearFunction(:-, Any[2, y]) + f = MOI.ScalarNonlinearFunction(:-, Any[2, x]) + new_f = MOI.Utilities.filter_variables(xi -> xi != x, f) + @test new_f ≈ 2 + f = MOI.ScalarNonlinearFunction(:+, Any[x, 2, y]) + f2 = MOI.ScalarNonlinearFunction(:+, Any[2, y]) + g = MOI.ScalarNonlinearFunction(:-, Any[f, x, 2]) + 
new_g = MOI.Utilities.filter_variables(xi -> xi != x, g) + @test new_g ≈ MOI.ScalarNonlinearFunction(:-, Any[f2, 2]) + return +end + +function test_ScalarNonlinearFunction_count_map_indices_and_print() + model = MOI.Utilities.CachingOptimizer( + MOI.Utilities.Model{Bool}(), + MOI.Utilities.MockOptimizer(MOI.Utilities.Model{Bool}()), + ) + MOI.Utilities.attach_optimizer(model) + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:!, Any[x]) + g = MOI.ScalarNonlinearFunction(:count, Any[Any[x, f]]) + @test sprint(io -> show(io, MIME("text/plain"), g)) == + "count([MOI.VariableIndex(1), !(MOI.VariableIndex(1))])" + c = MOI.add_constraint(model, g, MOI.EqualTo(true)) + @test MOI.get(model, MOI.ConstraintFunction(), c) ≈ g + return +end + end # module TestFunctions.runtests() diff --git a/test/Utilities/objective_container.jl b/test/Utilities/objective_container.jl index ddadb82ba6..865806feba 100644 --- a/test/Utilities/objective_container.jl +++ b/test/Utilities/objective_container.jl @@ -128,6 +128,42 @@ function test_delete_ScalarQuadraticFunction_plural() return end +function test_modify_ScalarNonlinearFunction() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:log, Any[x]) + MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) + attr = MOI.ObjectiveFunction{typeof(f)}() + MOI.set(model, attr, f) + @test_throws( + MOI.ModifyObjectiveNotAllowed, + MOI.modify(model, attr, MOI.ScalarConstantChange(3.0)), + ) + return +end + +function test_delete_variable_ScalarNonlinearFunction() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:log, Any[x]) + MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) + attr = MOI.ObjectiveFunction{typeof(f)}() + MOI.set(model, attr, f) + @test_throws MOI.DeleteNotAllowed MOI.delete(model, x) + return +end + +function test_delete_variables_ScalarNonlinearFunction() + model = MOI.Utilities.Model{Float64}() + x = 
MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction(:log, Any[x]) + MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE) + attr = MOI.ObjectiveFunction{typeof(f)}() + MOI.set(model, attr, f) + @test_throws MOI.DeleteNotAllowed MOI.delete(model, [x]) + return +end + end # module TestObjectiveContainer.runtests() diff --git a/test/Utilities/print.jl b/test/Utilities/print.jl index 4bc8fb4526..52d863d37a 100644 --- a/test/Utilities/print.jl +++ b/test/Utilities/print.jl @@ -701,6 +701,16 @@ function test_default_printing() return end +function test_scalar_nonlinear_function_print() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + MOI.set(model, MOI.VariableName(), x, "x") + f = MOI.ScalarNonlinearFunction(:+, Any[x, 2.0]) + options = MOI.Utilities._PrintOptions(MIME("text/plain")) + @test MOI.Utilities._to_string(options, model, f) == "+(x, 2.0)" + return +end + end TestPrint.runtests() diff --git a/test/attributes.jl b/test/attributes.jl index 2c5e03d3f8..ba4af69682 100644 --- a/test/attributes.jl +++ b/test/attributes.jl @@ -283,6 +283,33 @@ function test_issue_1777() return end +function test_scalar_nonlinear_function_ConstraintName() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction( + :+, + Any[x, MOI.ScalarNonlinearFunction(:sin, Any[x])], + ) + c = MOI.add_constraint(model, f, MOI.EqualTo(0.0)) + MOI.set(model, MOI.ConstraintName(), c, "c") + @test MOI.get(model, MOI.ConstraintName(), c) == "c" + return +end + +function test_scalar_nonlinear_function_set_objective() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction( + :+, + Any[x, MOI.ScalarNonlinearFunction(:sin, Any[x])], + ) + MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE) + attr = MOI.ObjectiveFunction{typeof(f)}() + MOI.set(model, attr, f) + @test isapprox(MOI.get(model, attr), f) + return +end + function runtests() for name in names(@__MODULE__; all = true) if 
startswith("$name", "test_") diff --git a/test/constraints.jl b/test/constraints.jl index 0629571b5e..a92f241825 100644 --- a/test/constraints.jl +++ b/test/constraints.jl @@ -78,6 +78,18 @@ function test_ScalarFunctionConstantNotZero_error() return end +function test_scalar_nonlinear_function_add_constraint() + model = MOI.Utilities.Model{Float64}() + x = MOI.add_variable(model) + f = MOI.ScalarNonlinearFunction( + :+, + Any[x, MOI.ScalarNonlinearFunction(:sin, Any[x])], + ) + c = MOI.add_constraint(model, f, MOI.EqualTo(0.0)) + @test isapprox(MOI.get(model, MOI.ConstraintFunction(), c), f) + return +end + end TestConstraints.runtests() diff --git a/test/errors.jl b/test/errors.jl index 23e2bb7f2d..7eb9accfe4 100644 --- a/test/errors.jl +++ b/test/errors.jl @@ -346,6 +346,12 @@ function test_get_fallback_error() return end +function test_unsupported_nonlinear_operator() + @test MOI.element_name(MOI.UnsupportedNonlinearOperator(:f)) == + "The nonlinear operator `:f`" + return +end + function runtests() for name in names(@__MODULE__; all = true) if startswith("$name", "test_") diff --git a/test/functions.jl b/test/functions.jl index 99ba1dd843..32127581a8 100644 --- a/test/functions.jl +++ b/test/functions.jl @@ -285,6 +285,25 @@ function test_convert_vectorofvariables() return end +function test_ScalarNonlinearFunction_constant() + x = MOI.VariableIndex(1) + f = MOI.ScalarNonlinearFunction(:log, Any[x]) + @test MOI.constant(f, Float64) === Float64(0) + @test MOI.constant(f, Int32) === Int32(0) + return +end + +function test_ScalarNonlinearFunction_isapprox() + x = MOI.VariableIndex(1) + f = MOI.ScalarNonlinearFunction(:log, Any[x]) + g = MOI.ScalarNonlinearFunction(:+, Any[f, 0.0]) + h = MOI.ScalarNonlinearFunction(:+, Any[f, 1.0]) + @test f ≈ f + @test !(f ≈ g) + @test !(g ≈ h) + return +end + function runtests() for name in names(@__MODULE__; all = true) if startswith("$name", "test_")