diff --git a/.github/workflows/TagBot.yml b/.github/workflows/TagBot.yml index bef2b7c1..f49313b6 100644 --- a/.github/workflows/TagBot.yml +++ b/.github/workflows/TagBot.yml @@ -1,17 +1,15 @@ name: TagBot on: - issue_comment: # THIS BIT IS NEW + issue_comment: types: - created workflow_dispatch: jobs: TagBot: - # THIS 'if' LINE IS NEW if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot' - # NOTHING BELOW HAS CHANGED runs-on: ubuntu-latest steps: - uses: JuliaRegistries/TagBot@v1 with: token: ${{ secrets.GITHUB_TOKEN }} - ssh: ${{ secrets.DOCUMENTER_KEY }} \ No newline at end of file + ssh: ${{ secrets.DOCUMENTER_KEY }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f0a489ff..204b95cb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -22,24 +22,16 @@ jobs: os: ubuntu-latest arch: x86 steps: - - uses: actions/checkout@v2 - - uses: julia-actions/setup-julia@v1 + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - uses: actions/cache@v1 - env: - cache-name: cache-artifacts - with: - path: ~/.julia/artifacts - key: ${{ runner.os }}-test-${{ env.cache-name }}-${{ hashFiles('**/Project.toml') }} - restore-keys: | - ${{ runner.os }}-test-${{ env.cache-name }}- - ${{ runner.os }}-test- - ${{ runner.os }}- + - uses: julia-actions/cache@v1 - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 - uses: julia-actions/julia-processcoverage@v1 - - uses: codecov/codecov-action@v1 + - uses: codecov/codecov-action@v4 with: - file: lcov.info \ No newline at end of file + file: lcov.info + token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 658af04c..d3ebd29f 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -9,7 +9,7 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - uses: julia-actions/setup-julia@latest with: # Build documentation on Julia 1.6 @@ -20,4 +20,4 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # For authentication with GitHub Actions token DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} # For authentication with SSH deploy key - run: julia --project=docs/ docs/make.jl \ No newline at end of file + run: julia --color=yes --project=docs/ docs/make.jl diff --git a/.github/workflows/format_check.yml b/.github/workflows/format_check.yml index ea39e1cf..e31cfc19 100644 --- a/.github/workflows/format_check.yml +++ b/.github/workflows/format_check.yml @@ -13,12 +13,12 @@ jobs: - uses: julia-actions/setup-julia@latest with: version: '1' - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Format check shell: julia --color=yes {0} run: | using Pkg - Pkg.add(PackageSpec(name="JuliaFormatter", version="0.22.2")) + Pkg.add(PackageSpec(name="JuliaFormatter", version="1")) using JuliaFormatter format("src", verbose=true) format("test", verbose=true) diff --git a/Project.toml b/Project.toml index f9563713..1d65f9c8 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "ParametricOptInterface" uuid = "0ce4ce61-57bf-432b-a095-efac525d185e" authors = ["Tomás Gutierrez "] -version = "0.7.0" +version = "0.8.2" [deps] MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" diff --git a/docs/src/Examples/example.md b/docs/src/Examples/example.md index cdad6370..813a4ca0 100644 --- a/docs/src/Examples/example.md +++ 
b/docs/src/Examples/example.md @@ -353,7 +353,7 @@ MOI.set( ) ``` -To multiply a parameter in a quadratic term, the user will +To multiply a parameter in a quadratic term, the user will need to use the `POI.QuadraticObjectiveCoef` model attribute. ```@example moi2 @@ -415,7 +415,7 @@ We use the same MOI function to add the parameter multiplied to the quadratic te MOI.set(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)), 2index(p)+3) ``` -If the user print the `model`, the term `(2p+3)*xy` won't show. +If the user prints the `model`, the term `(2p+3)*xy` won't be shown. It's possible to retrieve the parametric function multiplying the term `xy` with `MOI.get`. ```@example jump4 @@ -440,63 +440,3 @@ isapprox(objective_value(model), 128/9, atol=1e-4) isapprox(value(x), 4/3, atol=1e-4) isapprox(value(y), 4/3, atol=1e-4) ``` -## JuMP Example - Non Linear Programming (NLP) - -POI currently works with NLPs when users wish to add the parameters to the non-NL constraints or objective. This means that POI works with models like this one: - -```julia -@variable(model, x) -@variable(model, y) -@variable(model, z in MOI.Parameter(10)) -@constraint(model, x + y >= z) -@NLobjective(model, Min, x^2 + y^2) -``` - -but does not work with models that have parameters on the NL expressions like this one: - -```julia -@variable(model, x) -@variable(model, y) -@variable(model, z in MOI.Parameter(10)) -@constraint(model, x + y >= z) -@NLobjective(model, Min, x^2 + y^2 + z) # There is a parameter here -``` - -If users with to add parameters in NL expressions we strongly recommend them to read [this section on the JuMP documentation]((https://jump.dev/JuMP.jl/stable/manual/nlp/#Create-a-nonlinear-parameter)) - -Although POI works with NLPs there are some important information for users to keep in mind. All come from the fact that POI relies on the MOI interface for problem modifications and these are not common on NLP solvers, most solvers only allow users to modify variable bounds using their official APIs. This means that if users wish to make modifications on some constraint that is not a variable bound we are not allowed to call `MOI.modify` because the function is not supported in the MOI solver interface. The work-around to this is defining a [`POI.Optimizer`](@ref) on a caching optimizer: - -```julia -ipopt = Ipopt.Optimizer() -MOI.set(ipopt, MOI.RawOptimizerAttribute("print_level"), 0) -cached = - () -> MOI.Bridges.full_bridge_optimizer( - MOIU.CachingOptimizer( - MOIU.UniversalFallback(MOIU.Model{Float64}()), - ipopt, - ), - Float64, - ) -POI_cached_optimizer() = POI.Optimizer(cached()) -model = Model(() -> POI_cached_optimizer()) -@variable(model, x) -@variable(model, y) -@variable(model, z in MOI.Parameter(10)) -@constraint(model, x + y >= z) -@NLobjective(model, Min, x^2 + y^2) -``` - -This works but keep in mind that the model has an additional layer of between the solver and the [`POI.Optimizer`](@ref). This will make most operations slower than with the version without the caching optimizer. Keep in mind that since the official APIs of most solvers don't allow for modifications on linear constraints there should have no big difference between making a modification using POI or re-building the model from scratch. - -If users wish to make modifications on variable bounds the POI interface will help you save time between solves.
In this case you should use the [`ParametricOptInterface.ConstraintsInterpretation`](@ref) as we do in this example: - -```julia -model = Model(() -> POI.Optimizer(Ipopt.Optimizer())) -@variable(model, x) -@variable(model, z in MOI.Parameter(10)) -MOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_BOUNDS) -@constraint(model, x >= z) -@NLobjective(model, Min, x^2) -``` - -This use case should help users diminsh the time of making model modifications and re-solve the model. To increase the performance users that are familiar with [JuMP direct mode](https://jump.dev/JuMP.jl/stable/manual/models/#Direct-mode) can also use it. diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl index d8b7b744..6d4855b7 100644 --- a/src/MOI_wrapper.jl +++ b/src/MOI_wrapper.jl @@ -143,11 +143,23 @@ end # function MOI.is_valid(model::Optimizer, vi::MOI.VariableIndex) - return haskey(model.variables, vi) || haskey(model.parameters, p_idx(vi)) + if haskey(model.variables, vi) + return true + elseif haskey(model.parameters, p_idx(vi)) + return true + end + return false end -function MOI.is_valid(model::Optimizer, ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}) where {T} - return haskey(model.parameters, p_idx(MOI.VariableIndex(ci.value))) +function MOI.is_valid( + model::Optimizer, + ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, +) where {T} + vi = MOI.VariableIndex(ci.value) + if haskey(model.parameters, p_idx(vi)) + return true + end + return false end function MOI.supports( @@ -227,6 +239,24 @@ function _add_to_constraint_map!(model::Optimizer, ci) return end +function _add_to_constraint_map!( + model::Optimizer, + ci::MOI.ConstraintIndex{F,S}, +) where {F<:MOI.ScalarAffineFunction,S} + model.last_affine_added += 1 + model.constraint_outer_to_inner[ci] = ci + return +end + +function _add_to_constraint_map!( + model::Optimizer, + ci::MOI.ConstraintIndex{F,S}, +) where {F<:MOI.ScalarQuadraticFunction,S} + model.last_quad_add_added += 1 + model.constraint_outer_to_inner[ci] = ci + return +end + function MOI.supports( model::Optimizer, attr::MOI.AbstractVariableAttribute, @@ -291,11 +321,8 @@ function _delete_variable_index_constraint( value, ) inner = d[F, S] - for k in keys(inner) - if k.value == value - delete!(inner, k) - end - end + key = MOI.ConstraintIndex{F,S}(value) + delete!(inner, key) return end @@ -457,9 +484,9 @@ end function MOI.set( model::Optimizer, ::MOI.ConstraintFunction, - c::MOI.ConstraintIndex{F,S}, + c::MOI.ConstraintIndex{F}, f::F, -) where {F,S} +) where {F} MOI.set(model.optimizer, MOI.ConstraintFunction(), c, f) return end @@ -467,8 +494,8 @@ end function MOI.get( model::Optimizer, attr::MOI.ConstraintFunction, - ci::MOI.ConstraintIndex{F,S}, -) where {F,S} + ci::MOI.ConstraintIndex, +) if haskey(model.quadratic_outer_to_inner, ci) inner_ci = model.quadratic_outer_to_inner[ci] return _original_function(model.quadratic_constraint_cache[inner_ci]) @@ -506,8 +533,8 @@ end function MOI.get( model::Optimizer, attr::MOI.ConstraintSet, - ci::MOI.ConstraintIndex{F,S}, -) where {F,S} + ci::MOI.ConstraintIndex, +) if haskey(model.quadratic_outer_to_inner, ci) inner_ci = model.quadratic_outer_to_inner[ci] return model.quadratic_constraint_cache_set[inner_ci] @@ -587,7 +614,6 @@ function _add_constraint_with_parameters_on_function( set::S, ) where {T,S} pf = ParametricAffineFunction(f) - _cache_set_constant!(pf, set) if model.constraints_interpretation == ONLY_BOUNDS if length(pf.v) == 1 && isone(MOI.coefficient(pf.v[])) poi_ci = _add_vi_constraint(model, pf, set) @@ 
-597,24 +623,25 @@ function _add_constraint_with_parameters_on_function( ) end elseif model.constraints_interpretation == ONLY_CONSTRAINTS - poi_ci = _add_saf_constraint(model, pf, set) + poi_ci = MOI.add_constraint(model, pf, set) elseif model.constraints_interpretation == BOUNDS_AND_CONSTRAINTS if length(pf.v) == 1 && isone(MOI.coefficient(pf.v[])) poi_ci = _add_vi_constraint(model, pf, set) else - poi_ci = _add_saf_constraint(model, pf, set) + poi_ci = MOI.add_constraint(model, pf, set) end end return poi_ci end -function _add_saf_constraint( +function MOI.add_constraint( model::Optimizer, pf::ParametricAffineFunction{T}, set::S, ) where {T,S} + _cache_set_constant!(pf, set) _update_cache!(pf, model) - inner_ci = MOI.Utilities.normalize_and_add_constraint( + inner_ci = MOI.add_constraint( model.optimizer, MOI.ScalarAffineFunction{T}(pf.v, 0.0), _set_with_new_constant(set, pf.current_constant), @@ -635,8 +662,9 @@ function _add_vi_constraint( pf::ParametricAffineFunction{T}, set::S, ) where {T,S} + _cache_set_constant!(pf, set) _update_cache!(pf, model) - inner_ci = MOI.Utilities.normalize_and_add_constraint( + inner_ci = MOI.add_constraint( model.optimizer, pf.v[].variable, _set_with_new_constant(set, pf.current_constant), @@ -712,13 +740,10 @@ function _add_constraint_with_parameters_on_function( _update_cache!(pf, model) func = _current_function(pf) - f_quad = if !_is_affine(func) + if !_is_affine(func) fq = func - inner_ci = MOI.Utilities.normalize_and_add_constraint( - model.optimizer, - fq, - s, - ) + inner_ci = + MOI.Utilities.normalize_and_add_constraint(model.optimizer, fq, s) model.last_quad_add_added += 1 outer_ci = MOI.ConstraintIndex{MOI.ScalarQuadraticFunction{T},S}( model.last_quad_add_added, @@ -727,11 +752,8 @@ function _add_constraint_with_parameters_on_function( model.constraint_outer_to_inner[outer_ci] = inner_ci else fa = MOI.ScalarAffineFunction(func.affine_terms, func.constant) - inner_ci = MOI.Utilities.normalize_and_add_constraint( - model.optimizer, - fa, - s, - ) + inner_ci = + MOI.Utilities.normalize_and_add_constraint(model.optimizer, fa, s) model.last_quad_add_added += 1 outer_ci = MOI.ConstraintIndex{MOI.ScalarQuadraticFunction{T},S}( model.last_quad_add_added, @@ -953,18 +975,6 @@ function MOI.set( return end -# -# NLP -# - -function MOI.supports(model::Optimizer, ::MOI.NLPBlock) - return MOI.supports(model.optimizer, MOI.NLPBlock()) -end - -function MOI.set(model::Optimizer, ::MOI.NLPBlock, nlp_data::MOI.NLPBlockData) - return MOI.set(model.optimizer, MOI.NLPBlock(), nlp_data) -end - # # Other # @@ -1008,7 +1018,8 @@ function MOI.get( end function MOI.get(model::Optimizer, ::MOI.NumberOfVariables) - return length(model.parameters) + length(model.variables) + return MOI.get(model, NumberOfPureVariables()) + + MOI.get(model, NumberOfParameters()) end function MOI.get(model::Optimizer, ::MOI.NumberOfConstraints{F,S}) where {S,F} @@ -1016,7 +1027,10 @@ function MOI.get(model::Optimizer, ::MOI.NumberOfConstraints{F,S}) where {S,F} end function MOI.get(model::Optimizer, ::MOI.ListOfVariableIndices) - return MOI.get(model.optimizer, MOI.ListOfVariableIndices()) + return vcat( + MOI.get(model, ListOfPureVariableIndices()), + v_idx.(MOI.get(model, ListOfParameterIndices())), + ) end function MOI.get(model::Optimizer, ::MOI.ListOfConstraintTypesPresent) @@ -1101,29 +1115,11 @@ end function MOI.get( model::Optimizer, - attr::T, + attr::MOI.AbstractConstraintAttribute, c::MOI.ConstraintIndex, -) where { - 
T<:Union{MOI.ConstraintPrimal,MOI.ConstraintDual,MOI.ConstraintBasisStatus}, -} - return MOI.get(model.optimizer, attr, c) -end - -function MOI.get( - model::Optimizer, - attr::AT, - c::MOI.ConstraintIndex{MOI.ScalarAffineFunction{T},S}, -) where { - AT<:Union{ - MOI.ConstraintPrimal, - MOI.ConstraintDual, - MOI.ConstraintBasisStatus, - }, - T, - S<:MOI.AbstractScalarSet, -} - moi_ci = get(model.affine_outer_to_inner, c, c) - return MOI.get(model.optimizer, attr, moi_ci) +) + optimizer_ci = get(model.constraint_outer_to_inner, c, c) + return MOI.get(model.optimizer, attr, optimizer_ci) end # @@ -1199,17 +1195,29 @@ function MOI.get( ) where {F,S, P <: ParametricQuadraticFunction} return model.quadratic_constraint_cache[F, S] end + +struct NumberOfPureVariables <: MOI.AbstractModelAttribute end + +function MOI.get(model::Optimizer, ::NumberOfPureVariables) + return length(model.variables) +end struct ListOfPureVariableIndices <: MOI.AbstractModelAttribute end function MOI.get(model::Optimizer, ::ListOfPureVariableIndices) - return collect(keys(model.variables)) + return collect(keys(model.variables))::Vector{MOI.VariableIndex} +end + +struct NumberOfParameters <: MOI.AbstractModelAttribute end + +function MOI.get(model::Optimizer, ::NumberOfParameters) + return length(model.parameters) end struct ListOfParameterIndices <: MOI.AbstractModelAttribute end function MOI.get(model::Optimizer, ::ListOfParameterIndices) - return collect(keys(model.parameters)) + return collect(keys(model.parameters))::Vector{ParameterIndex} end """ @@ -1444,73 +1452,6 @@ function MOI.get( end end -# -# Copy -# - -function MOI.Utilities.default_copy_to( - dest::MOI.Bridges.LazyBridgeOptimizer{Optimizer{T,OT}}, - src::MOI.ModelLike, -) where {T,OT} - return _poi_default_copy_to(dest, src) -end - -function MOI.Utilities.default_copy_to( - dest::Optimizer{T,OT}, - src::MOI.ModelLike, -) where {T,OT} - return _poi_default_copy_to(dest, src) -end - -function _poi_default_copy_to(dest::T, src::MOI.ModelLike) where {T} - if !MOI.supports_incremental_interface(dest) - error("Model $(typeof(dest)) does not support copy_to.") - end - MOI.empty!(dest) - vis_src = MOI.get(src, MOI.ListOfVariableIndices()) - index_map = MOI.IndexMap() - # The `NLPBlock` assumes that the order of variables does not change (#849) - # Therefore, all VariableIndex and VectorOfVariable constraints are added - # seprately, and no variables constrained-on-creation are added. 
- - # This is not valid for NLPs with Parameters, they should enter - has_nlp = MOI.NLPBlock() in MOI.get(src, MOI.ListOfModelAttributesSet()) - constraints_not_added = if has_nlp - vcat( - Any[ - MOI.get(src, MOI.ListOfConstraintIndices{F,S}()) for - (F, S) in MOI.get(src, MOI.ListOfConstraintTypesPresent()) if - MOI.Utilities._is_variable_function(F) && - S != MOI.Parameter{Float64} - ], - Any[MOI.Utilities._try_constrain_variables_on_creation( - dest, - src, - index_map, - MOI.Parameter{Float64}, - )], - ) - else - Any[ - MOI.Utilities._try_constrain_variables_on_creation( - dest, - src, - index_map, - S, - ) for S in MOI.Utilities.sorted_variable_sets_by_cost(dest, src) - ] - end - MOI.Utilities._copy_free_variables(dest, index_map, vis_src) - # Copy variable attributes - MOI.Utilities.pass_attributes(dest, src, index_map, vis_src) - # Copy model attributes - MOI.Utilities.pass_attributes(dest, src, index_map) - # Copy constraints - MOI.Utilities._pass_constraints(dest, src, index_map, constraints_not_added) - MOI.Utilities.final_touch(dest, index_map) - return index_map -end - # # Optimize # @@ -1527,11 +1468,25 @@ function MOI.optimize!(model::Optimizer) _set_quadratic_product_in_obj!(model) end MOI.optimize!(model.optimizer) - if MOI.get(model, MOI.DualStatus()) == MOI.NO_SOLUTION && + if MOI.get(model, MOI.DualStatus()) != MOI.NO_SOLUTION && model.evaluate_duals - @warn "Dual solution not available, ignoring `evaluate_duals`" - elseif model.evaluate_duals _compute_dual_of_parameters!(model) end return end + +# +# compute_conflict! +# + +function MOI.compute_conflict!(model::Optimizer) + return MOI.compute_conflict!(model.optimizer) +end + +function MOI.get( + model::Optimizer, + attr::MOI.ConstraintConflictStatus, + ci::MOI.ConstraintIndex{MOI.VariableIndex,<:MOI.Parameter}, +) + return MOI.MAYBE_IN_CONFLICT +end diff --git a/src/ParametricOptInterface.jl b/src/ParametricOptInterface.jl index 2444a274..1b946854 100644 --- a/src/ParametricOptInterface.jl +++ b/src/ParametricOptInterface.jl @@ -28,6 +28,10 @@ function p_idx(vi::MOI.VariableIndex)::ParameterIndex return ParameterIndex(vi.value - PARAMETER_INDEX_THRESHOLD) end +function v_idx(pi::ParameterIndex)::MOI.VariableIndex + return MOI.VariableIndex(pi.index + PARAMETER_INDEX_THRESHOLD) +end + function p_val(vi::MOI.VariableIndex)::Int64 return vi.value - PARAMETER_INDEX_THRESHOLD end diff --git a/src/duals.jl b/src/duals.jl index e4d27276..73eecde9 100644 --- a/src/duals.jl +++ b/src/duals.jl @@ -126,6 +126,16 @@ function MOI.get( ::MOI.ConstraintDual, cp::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, ) where {T} + if !model.evaluate_duals + throw( + MOI.GetAttributeNotAllowed( + MOI.ConstraintDual(), + "$(MOI.ConstraintDual()) not available when " * + "evaluate_duals is set to false. 
" * + "Create an optimizer such as POI.Optimizer(HiGHS.Optimizer(); evaluate_duals = true) to enable this feature.", + ), + ) + end if !_is_additive(model, cp) error("Cannot compute the dual of a multiplicative parameter") end diff --git a/src/parametric_functions.jl b/src/parametric_functions.jl index 8ed9996e..ec80d595 100644 --- a/src/parametric_functions.jl +++ b/src/parametric_functions.jl @@ -1,5 +1,21 @@ +abstract type ParametricFunction{T} end -mutable struct ParametricQuadraticFunction{T} +function _cache_set_constant!( + f::ParametricFunction{T}, + s::Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, +) where {T} + f.set_constant = MOI.constant(s) + return +end + +function _cache_set_constant!( + ::ParametricFunction{T}, + ::MOI.AbstractScalarSet, +) where {T} + return +end + +mutable struct ParametricQuadraticFunction{T} <: ParametricFunction{T} # helper to efficiently update affine terms affine_data::Dict{MOI.VariableIndex,T} affine_data_np::Dict{MOI.VariableIndex,T} @@ -167,21 +183,6 @@ function _current_function(f::ParametricQuadraticFunction{T}) where {T} return MOI.ScalarQuadraticFunction{T}(f.vv, affine, f.current_constant) end -function _cache_set_constant!( - f::ParametricQuadraticFunction{T}, - s::Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, -) where {T} - f.set_constant = MOI.constant(s) - return -end - -function _cache_set_constant!( - ::ParametricQuadraticFunction{T}, - ::MOI.AbstractScalarSet, -) where {T} - return -end - function _parametric_constant( model, f::ParametricQuadraticFunction{T}, @@ -289,7 +290,7 @@ function _update_cache!(f::ParametricQuadraticFunction{T}, model) where {T} return nothing end -mutable struct ParametricAffineFunction{T} +mutable struct ParametricAffineFunction{T} <: ParametricFunction{T} # constant * parameter p::Vector{MOI.ScalarAffineTerm{T}} # constant * variable @@ -304,7 +305,21 @@ end function ParametricAffineFunction(f::MOI.ScalarAffineFunction{T}) where {T} v, p = _split_affine_terms(f.terms) - return ParametricAffineFunction{T}(p, v, f.constant, zero(T), zero(T)) + return ParametricAffineFunction(p, v, f.constant) +end + +function ParametricAffineFunction( + terms_p::Vector{MOI.ScalarAffineTerm{T}}, + terms_v::Vector{MOI.ScalarAffineTerm{T}}, + constant::T, +) where {T} + return ParametricAffineFunction{T}( + terms_p, + terms_v, + constant, + zero(T), + zero(T), + ) end function affine_parameter_terms(f::ParametricAffineFunction) @@ -356,21 +371,6 @@ function _current_function(f::ParametricAffineFunction{T}) where {T} return MOI.ScalarAffineFunction{T}(affine_variable_terms(f), f.current_constant) end -function _cache_set_constant!( - f::ParametricAffineFunction{T}, - s::Union{MOI.LessThan{T},MOI.GreaterThan{T},MOI.EqualTo{T}}, -) where {T} - f.set_constant = MOI.constant(s) - return -end - -function _cache_set_constant!( - f::ParametricAffineFunction{T}, - s::MOI.AbstractScalarSet, -) where {T} - return -end - function _parametric_constant(model, f::ParametricAffineFunction{T}) where {T} # do not add set_function here param_constant = f.c diff --git a/test/jump_tests.jl b/test/jump_tests.jl index 2dd27999..f167fb96 100644 --- a/test/jump_tests.jl +++ b/test/jump_tests.jl @@ -754,51 +754,12 @@ function test_jump_dual_delete_constraint() end function test_jump_nlp() - ipopt = Ipopt.Optimizer() - MOI.set(ipopt, MOI.RawOptimizerAttribute("print_level"), 0) - cached = - () -> MOI.Bridges.full_bridge_optimizer( - MOI.Utilities.CachingOptimizer( - MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()), - 
ipopt, - ), - Float64, - ) - POI_cached_optimizer() = ParametricOptInterface.Optimizer(cached()) - model = Model(() -> POI_cached_optimizer()) - @variable(model, x) - @variable(model, y) - @variable(model, z in MOI.Parameter(10.0)) - @constraint(model, x + y >= z) - @NLobjective(model, Min, x^2 + y^2) - optimize!(model) - objective_value(model) - @test value(x) ≈ 5 - MOI.get(model, ParametricOptInterface.ParameterDual(), z) - MOI.set(model, ParametricOptInterface.ParameterValue(), z, 2.0) - optimize!(model) - @test objective_value(model) ≈ 2 atol = 1e-3 - @test value(x) ≈ 1 - ipopt = Ipopt.Optimizer() - MOI.set(ipopt, MOI.RawOptimizerAttribute("print_level"), 0) - model = Model(() -> ParametricOptInterface.Optimizer(ipopt)) + model = Model(() -> ParametricOptInterface.Optimizer(Ipopt.Optimizer())) @variable(model, x) @variable(model, z in MOI.Parameter(10.0)) - MOI.set( - model, - ParametricOptInterface.ConstraintsInterpretation(), - ParametricOptInterface.ONLY_BOUNDS, - ) @constraint(model, x >= z) @NLobjective(model, Min, x^2) - optimize!(model) - objective_value(model) - @test value(x) ≈ 10 - MOI.get(model, ParametricOptInterface.ParameterDual(), z) - MOI.set(model, ParametricOptInterface.ParameterValue(), z, 2.0) - optimize!(model) - @test objective_value(model) ≈ 4 atol = 1e-3 - @test value(x) ≈ 2 + @test_throws ErrorException optimize!(model) return end @@ -1118,3 +1079,13 @@ function test_abstract_optimizer_attributes() @test get_attribute(model, "tm_lim") ≈ 60 * 1000 return end + +function test_get_quadratic_constraint() + model = Model(() -> POI.Optimizer(GLPK.Optimizer())) + @variable(model, x) + @variable(model, p in Parameter(2.0)) + @constraint(model, c, p * x <= 10) + optimize!(model) + @test value(c) ≈ 2.0 * value(x) + return +end diff --git a/test/moi_tests.jl b/test/moi_tests.jl index 4f8dae66..80c821b9 100644 --- a/test/moi_tests.jl +++ b/test/moi_tests.jl @@ -17,8 +17,14 @@ function test_basic_tests() MOI.set(optimizer, MOI.Silent(), true) x = MOI.add_variables(optimizer, 2) y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0)) + @test MOI.is_valid(optimizer, x[1]) + @test MOI.is_valid(optimizer, y) + @test MOI.is_valid(optimizer, cy) + @test MOI.get(optimizer, POI.ListOfPureVariableIndices()) == x + @test MOI.get(optimizer, MOI.ListOfVariableIndices()) == [x[1], x[2], y] z = MOI.VariableIndex(4) cz = MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{Float64}}(4) + @test !MOI.is_valid(optimizer, z) for x_i in x MOI.add_constraint(optimizer, x_i, MOI.GreaterThan(0.0)) end @@ -213,9 +219,6 @@ function test_moi_glpk() exclude = [ # GLPK returns INVALID_MODEL instead of INFEASIBLE "test_constraint_ZeroOne_bounds_3", - # Upstream issue: https://github.com/jump-dev/MathOptInterface.jl/issues/1431 - "test_model_LowerBoundAlreadySet", - "test_model_UpperBoundAlreadySet", ], ) return @@ -264,18 +267,7 @@ function test_moi_ipopt() # - Excluded because Ipopt returns LOCALLY_INFEASIBLE instead of # INFEASIBLE "INFEASIBLE", - "test_conic_linear_INFEASIBLE", - "test_conic_linear_INFEASIBLE_2", "test_solve_DualStatus_INFEASIBILITY_CERTIFICATE_", - # - Excluded due to upstream issue - "test_model_LowerBoundAlreadySet", - "test_model_UpperBoundAlreadySet", - # - CachingOptimizer does not throw if optimizer not attached - "test_model_copy_to_UnsupportedAttribute", - "test_model_copy_to_UnsupportedConstraint", - # - POI throws a ErrorException if user tries to modify parametric - # functions - "test_objective_get_ObjectiveFunction_ScalarAffineFunction", ], ) return @@ 
-1603,3 +1595,118 @@ function test_qp_objective_parameter_in_quadratic_part() @test MOI.get(model, MOI.VariablePrimal(), y) ≈ 4 / 3 atol = ATOL return end + +function test_compute_conflict!() + T = Float64 + mock = MOI.Utilities.MockOptimizer(MOI.Utilities.Model{T}()) + MOI.set(mock, MOI.ConflictStatus(), MOI.COMPUTE_CONFLICT_NOT_CALLED) + model = POI.Optimizer( + MOI.Utilities.CachingOptimizer(MOI.Utilities.Model{T}(), mock), + ) + x, x_ci = MOI.add_constrained_variable(model, MOI.GreaterThan(1.0)) + p, p_ci = MOI.add_constrained_variable(model, MOI.Parameter(2.0)) + ci = MOI.add_constraint(model, 2.0 * x + 3.0 * p, MOI.LessThan(0.0)) + @test MOI.get(model, MOI.ConflictStatus()) == + MOI.COMPUTE_CONFLICT_NOT_CALLED + MOI.Utilities.set_mock_optimize!( + mock, + mock::MOI.Utilities.MockOptimizer -> begin + MOI.Utilities.mock_optimize!( + mock, + MOI.INFEASIBLE, + MOI.NO_SOLUTION, + MOI.NO_SOLUTION; + constraint_conflict_status = [ + (MOI.VariableIndex, MOI.Parameter{T}) => + [MOI.MAYBE_IN_CONFLICT], + (MOI.VariableIndex, MOI.GreaterThan{T}) => + [MOI.IN_CONFLICT], + (MOI.ScalarAffineFunction{T}, MOI.LessThan{T}) => + [MOI.IN_CONFLICT], + ], + ) + MOI.set(mock, MOI.ConflictStatus(), MOI.CONFLICT_FOUND) + end, + ) + MOI.optimize!(model) + @test MOI.get(model, MOI.TerminationStatus()) == MOI.INFEASIBLE + MOI.compute_conflict!(model) + @test MOI.get(model, MOI.ConflictStatus()) == MOI.CONFLICT_FOUND + @test MOI.get(model, MOI.ConstraintConflictStatus(), x_ci) == + MOI.IN_CONFLICT + @test MOI.get(model, MOI.ConstraintConflictStatus(), p_ci) == + MOI.MAYBE_IN_CONFLICT + @test MOI.get(model, MOI.ConstraintConflictStatus(), ci) == MOI.IN_CONFLICT + return +end + +function test_duals_not_available() + optimizer = POI.Optimizer(GLPK.Optimizer(); evaluate_duals = false) + MOI.set(optimizer, MOI.Silent(), true) + x = MOI.add_variables(optimizer, 2) + y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0)) + z = MOI.VariableIndex(4) + cz = MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{Float64}}(4) + for x_i in x + MOI.add_constraint(optimizer, x_i, MOI.GreaterThan(0.0)) + end + cons1 = MOI.ScalarAffineFunction( + MOI.ScalarAffineTerm.([1.0, 1.0], [x[1], y]), + 0.0, + ) + c1 = MOI.add_constraint(optimizer, cons1, MOI.EqualTo(2.0)) + obj_func = MOI.ScalarAffineFunction( + MOI.ScalarAffineTerm.([1.0, 1.0], [x[1], y]), + 0.0, + ) + MOI.set( + optimizer, + MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), + obj_func, + ) + MOI.set(optimizer, MOI.ObjectiveSense(), MOI.MIN_SENSE) + MOI.optimize!(optimizer) + @test_throws MOI.GetAttributeNotAllowed MOI.get( + optimizer, + MOI.ConstraintDual(), + cy, + ) + return +end + +function test_duals_without_parameters() + optimizer = POI.Optimizer(GLPK.Optimizer()) + MOI.set(optimizer, MOI.Silent(), true) + x = MOI.add_variables(optimizer, 3) + y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0)) + z, cz = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0)) + cons1 = MOI.ScalarAffineFunction( + MOI.ScalarAffineTerm.([1.0, -1.0], [x[1], y]), + 0.0, + ) + c1 = MOI.add_constraint(optimizer, cons1, MOI.LessThan(0.0)) + cons2 = MOI.ScalarAffineFunction([MOI.ScalarAffineTerm(1.0, x[2])], 0.0) + c2 = MOI.add_constraint(optimizer, cons2, MOI.LessThan(1.0)) + cons3 = MOI.ScalarAffineFunction( + MOI.ScalarAffineTerm.([1.0, -1.0], [x[3], z]), + 0.0, + ) + c3 = MOI.add_constraint(optimizer, cons3, MOI.LessThan(0.0)) + obj_func = MOI.ScalarAffineFunction( + MOI.ScalarAffineTerm.([1.0, 2.0, 3.0], [x[1], x[2], x[3]]), + 0.0, + ) + 
MOI.set( + optimizer, + MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), + obj_func, + ) + MOI.set(optimizer, MOI.ObjectiveSense(), MOI.MAX_SENSE) + MOI.set(optimizer, MOI.ConstraintSet(), cy, MOI.Parameter(1.0)) + MOI.set(optimizer, MOI.ConstraintSet(), cz, MOI.Parameter(1.0)) + MOI.optimize!(optimizer) + @test ≈(MOI.get(optimizer, MOI.ConstraintDual(), c1), -1.0, atol = ATOL) + @test ≈(MOI.get(optimizer, MOI.ConstraintDual(), c2), -2.0, atol = ATOL) + @test ≈(MOI.get(optimizer, MOI.ConstraintDual(), c3), -3.0, atol = ATOL) + return +end