Skip to content

Commit

Permalink
Update to JuMP v0.22 and MOI v0.10 (#277)
Browse files Browse the repository at this point in the history
* Update to JuMP v0.22 and MOI v0.10

* Update docs Project.toml

* Fix

* Remove CDDLib dependency to docs
  • Loading branch information
blegat authored Nov 11, 2021
1 parent 83493da commit 302a346
Show file tree
Hide file tree
Showing 10 changed files with 62 additions and 39 deletions.
4 changes: 2 additions & 2 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
[compat]
GenericLinearAlgebra = "0.2"
GeometryBasics = "0.2, 0.3, 0.4"
JuMP = "0.21"
MutableArithmetics = "0.2"
JuMP = "0.22"
MutableArithmetics = "0.3"
RecipesBase = "0.7, 0.8, 1.0"
StaticArrays = "0.12, 1.0"
julia = "1"
Expand Down
3 changes: 1 addition & 2 deletions docs/Project.toml
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
[deps]
CDDLib = "3391f64e-dcde-5f30-b752-e11513730f60"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
Expand All @@ -9,4 +8,4 @@ Polyhedra = "67491407-f73d-577b-9b50-8179a7c68029"

[compat]
Documenter = "0.26"
JuMP = "0.21"
JuMP = "0.22"
8 changes: 1 addition & 7 deletions examples/Convex hull of a set of points.jl
Original file line number Diff line number Diff line change
Expand Up @@ -32,10 +32,4 @@ p = polyhedron(v)
removevredundancy!(p)
p

# We can also specify a library:

using CDDLib

p = polyhedron(v, CDDLib.Library())
removevredundancy!(p)
p
# We can also specify a library. For instance, to use `CDDLib`, write `using CDDLib` and then `p = polyhedron(v, CDDLib.Library())`.
2 changes: 1 addition & 1 deletion src/center.jl
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ function hchebyshevcenter(p::HRepresentation, solver=default_solver(p; T=Float64
MOI.add_constraint(model, func, set)
end
MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE)
MOI.set(model, MOI.ObjectiveFunction{MOI.SingleVariable}(), MOI.SingleVariable(r))
MOI.set(model, MOI.ObjectiveFunction{MOI.VariableIndex}(), r)
MOI.optimize!(model)
term = MOI.get(model, MOI.TerminationStatus())
    if term in [MOI.OPTIMAL, MOI.LOCALLY_SOLVED]
Expand Down
6 changes: 3 additions & 3 deletions src/lphrep.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@ export LPHRep
MOI.Utilities.@model(_MOIModel,
(), (MOI.EqualTo, MOI.LessThan,), (), (),
(), (MOI.ScalarAffineFunction,), (), ())
# We need the `SingleVariable` constraints to be bridged so we should say that
# We need the `VariableIndex` constraints to be bridged so we should say that
# they are not supported. We notably exclude `Integer` as we just ignore
# integrality constraints. Binary constraint should be bridged to integrality
# once https://github.com/jump-dev/MathOptInterface.jl/issues/704 is done.
function MOI.supports_constraint(
::_MOIModel{T}, ::Type{MOI.SingleVariable},
::_MOIModel{T}, ::Type{MOI.VariableIndex},
::Type{<:Union{MOI.EqualTo{T}, MOI.GreaterThan{T}, MOI.LessThan{T},
MOI.Interval{T}, MOI.ZeroOne}}) where T
return false
Expand Down Expand Up @@ -173,7 +173,7 @@ function Base.get(rep::LPHRep{T}, idx::HIndex{T}) where {T}
func = MOI.get(rep.model, MOI.ConstraintFunction(), ci)::MOI.ScalarAffineFunction{T}
# MOI uses `Int64` but `SparseArrays` uses `Int32` by default so `Int64` will create
# issues with, e.g. preimages with `spzeros(d, n)`, etc...
indices = Int[t.variable_index.value for t in func.terms]
indices = Int[t.variable.value for t in func.terms]
values = [t.coefficient for t in func.terms]
a = sparsevec(indices, values, FullDim(rep))
set = MOI.get(rep.model, MOI.ConstraintSet(), ci)
Expand Down
15 changes: 8 additions & 7 deletions src/opt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -42,10 +42,10 @@ end

abstract type AbstractPolyhedraOptimizer{T} <: MOI.AbstractOptimizer end

function MOI.copy_to(dest::AbstractPolyhedraOptimizer, src::MOI.ModelLike; kws...)
return MOI.Utilities.automatic_copy_to(dest, src; kws...)
function MOI.copy_to(dest::AbstractPolyhedraOptimizer, src::MOI.ModelLike)
return MOI.Utilities.default_copy_to(dest, src)
end
MOI.Utilities.supports_default_copy_to(optimizer::AbstractPolyhedraOptimizer, copy_names::Bool) = true
MOI.supports_incremental_interface(optimizer::AbstractPolyhedraOptimizer) = true

function MOI.add_variable(optimizer::AbstractPolyhedraOptimizer)
return MOI.add_variable(optimizer.lphrep.model)
Expand All @@ -56,7 +56,7 @@ end

function MOI.supports(::AbstractPolyhedraOptimizer{T},
::Union{MOI.ObjectiveSense,
MOI.ObjectiveFunction{MOI.SingleVariable},
MOI.ObjectiveFunction{MOI.VariableIndex},
MOI.ObjectiveFunction{MOI.ScalarAffineFunction{T}}}) where T
return true
end
Expand All @@ -66,15 +66,16 @@ end
function MOI.set(optimizer::AbstractPolyhedraOptimizer, ::MOI.ObjectiveSense,
sense::MOI.OptimizationSense)
optimizer.objective_sense = sense
return
end
function MOI.set(optimizer::AbstractPolyhedraOptimizer{T}, ::MOI.ObjectiveFunction,
func::MOI.SingleVariable) where T
func::MOI.VariableIndex) where T
MOI.set(optimizer, MOI.ObjectiveFunction{MOI.ScalarAffineFunction{T}}(),
convert(MOI.ScalarAffineFunction{T}, func))
end
function MOI.set(optimizer::AbstractPolyhedraOptimizer, ::MOI.ObjectiveFunction,
func::MOI.ScalarAffineFunction)
indices = [term.variable_index.value for term in func.terms]
indices = [term.variable.value for term in func.terms]
coefs = [term.coefficient for term in func.terms]
optimizer.objective_func = sparsevec(indices, coefs, fulldim(optimizer.lphrep))
optimizer.objective_constant = func.constant
Expand Down Expand Up @@ -143,7 +144,7 @@ function layered_optimizer(solver)
solver === nothing && error("No solver specified.\n", NO_SOLVER_HELP)
optimizer = MOI.instantiate(solver)
T = coefficient_type(optimizer)
if !MOIU.supports_default_copy_to(optimizer, false)
if !MOI.supports_incremental_interface(optimizer)
universal_fallback = MOIU.UniversalFallback(_MOIModel{T}())
optimizer = MOIU.CachingOptimizer(universal_fallback, optimizer)
end
Expand Down
2 changes: 1 addition & 1 deletion src/projection_opt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ function MOI.Bridges.Constraint.bridge_constraint(
N = fulldim(p.p.set)
variables = MOI.add_variables(model, N - fulldim(p.p))
for (i, j) in enumerate(setdiff(1:N, p.p.dimensions))
func[j] = MOI.SingleVariable(variables[i])
func[j] = variables[i]
end
constraint = MOI.add_constraint(model, MOI.Utilities.vectorize(func), PolyhedraOptSet(p.p.set))
return ProjectionBridge{T, F, RepT, I}(variables, constraint, p.p.dimensions)
Expand Down
7 changes: 2 additions & 5 deletions src/redundancy.jl
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ function _redundant_indices(rep::Representation, model::MOI.ModelLike, T::Type,
hull_con = MOI.add_constraint.(model, hull, MOI.EqualTo(zero(T)))
for (i, idx) in enumerate(indices)
        if cλ === nothing
fix_con = MOI.add_constraint(model, MOI.SingleVariable(λ[i]), MOI.EqualTo(zero(T)))
fix_con = MOI.add_constraint(model, λ[i], MOI.EqualTo(zero(T)))
else
fix_con = MOI.transform(model, cλ[i], MOI.EqualTo(zero(T)))
end
Expand Down Expand Up @@ -267,10 +267,7 @@ end
function _hull(model::MOI.ModelLike, ::Type{T}, hull::Vector{MOI.ScalarAffineFunction{T}}, rep, idxs, sum_one = idxs isa PointIndices) where T
λ = MOI.add_variables(model, length(idxs))
if !(idxs isa Union{HyperPlaneIndices, LineIndices})
        cλ = [
MOI.add_constraint(model, MOI.SingleVariable(λ), MOI.GreaterThan(zero(T)))
for λ in λ
]
        cλ = [MOI.add_constraint(model, λ, MOI.GreaterThan(zero(T))) for λ in λ]
else
        cλ = nothing
end
Expand Down
28 changes: 22 additions & 6 deletions src/vrep_optimizer.jl
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,11 @@ function MOI.optimize!(lpm::VRepOptimizer{T}) where T
@assert lpm.feasible_set isa Polyhedron
prob = vrep(lpm.feasible_set)
end
if lpm.objective_sense == MOI.FEASIBILITY_SENSE
obj = sparsevec(Int64[], T[], fulldim(lpm.lphrep))
else
obj = lpm.objective_func
end
N = fulldim(prob)
if !haspoints(prob) && !haslines(prob) && !hasrays(prob)
lpm.status = MOI.INFEASIBLE
Expand All @@ -89,7 +94,7 @@ function MOI.optimize!(lpm::VRepOptimizer{T}) where T
bestobjval = zero(T)
lpm.solution = nothing
for r in allrays(prob)
            objval = lpm.objective_func ⋅ r
            objval = obj ⋅ r
if _better(objval, bestobjval)
bestobjval = objval
lpm.solution = coord(r)
Expand All @@ -99,7 +104,7 @@ function MOI.optimize!(lpm::VRepOptimizer{T}) where T
lpm.status = MOI.DUAL_INFEASIBLE
else
for p in points(prob)
            objval = lpm.objective_func ⋅ p
            objval = obj ⋅ p
if lpm.solution === nothing || better(objval, bestobjval)
bestobjval = objval
lpm.solution = p
Expand All @@ -123,9 +128,14 @@ function MOI.get(lpm::VRepOptimizer{T}, attr::MOI.ConstraintPrimal,
ci::MOI.ConstraintIndex{MOI.ScalarAffineFunction{T},
<:Union{MOI.EqualTo{T},
MOI.LessThan{T}}}) where T
MOI.check_result_index_bounds(lpm, attr)
return MOIU.get_fallback(lpm, attr, ci)
end
function MOI.get(lpm::VRepOptimizer, ::MOI.ObjectiveValue)
function MOI.get(lpm::VRepOptimizer{T}, attr::MOI.ObjectiveValue) where T
MOI.check_result_index_bounds(lpm, attr)
if lpm.objective_sense == MOI.FEASIBILITY_SENSE
return zero(T)
end
if lpm.status == MOI.OPTIMAL
        return lpm.objective_func ⋅ lpm.solution + lpm.objective_constant
elseif lpm.status == MOI.DUAL_INFEASIBLE
Expand All @@ -134,16 +144,22 @@ function MOI.get(lpm::VRepOptimizer, ::MOI.ObjectiveValue)
error("No objective value available when termination status is $(lpm.status).")
end
end
function MOI.get(lpm::VRepOptimizer, ::MOI.PrimalStatus)
if lpm.status == MOI.OPTIMAL
function MOI.get(lpm::VRepOptimizer, ::MOI.DualStatus)
return MOI.NO_SOLUTION
end
function MOI.get(lpm::VRepOptimizer, attr::MOI.PrimalStatus)
if attr.result_index > MOI.get(lpm, MOI.ResultCount())
return MOI.NO_SOLUTION
elseif lpm.status == MOI.OPTIMAL
return MOI.FEASIBLE_POINT
elseif lpm.status == MOI.DUAL_INFEASIBLE
return MOI.INFEASIBILITY_CERTIFICATE
else
return MOI.NO_SOLUTION
end
end
function MOI.get(lpm::VRepOptimizer, ::MOI.VariablePrimal, vi::MOI.VariableIndex)
function MOI.get(lpm::VRepOptimizer, attr::MOI.VariablePrimal, vi::MOI.VariableIndex)
MOI.check_result_index_bounds(lpm, attr)
if lpm.status != MOI.OPTIMAL && lpm.status != MOI.DUAL_INFEASIBLE
error("No primal value available when termination status is $(lpm.status).")
end
Expand Down
26 changes: 21 additions & 5 deletions test/vrep_optimizer.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
using Test
using Polyhedra
using JuMP
const MOIT = MOI.Test
const MOIB = MOI.Bridges

@testset "Continuous Linear problems with VRepOptimizer" begin
Expand All @@ -12,10 +11,27 @@ const MOIB = MOI.Bridges
cache = MOIU.UniversalFallback(Polyhedra._MOIModel{Float64}())
cached = MOIU.CachingOptimizer(cache, optimizer)
bridged = MOIB.full_bridge_optimizer(cached, Float64)
config = MOIT.TestConfig(duals=false)
MOIT.contlineartest(bridged, config,
# linear8a and linear12 will be solved by https://github.com/jump-dev/MathOptInterface.jl/pull/702
["linear8a", "linear12", "partial_start"])
config = MOI.Test.Config(
exclude=Any[
MOI.ConstraintBasisStatus,
MOI.VariableBasisStatus,
MOI.ConstraintDual,
MOI.DualObjectiveValue,
MOI.ObjectiveBound,
],
)
MOI.Test.runtests(
bridged,
config,
exclude = String[
"test_attribute_RawStatusString",
"test_attribute_SolveTimeSec",
"test_attribute_SolverVersion",
# MathOptInterface.jl issue #1431
"test_model_LowerBoundAlreadySet",
"test_model_UpperBoundAlreadySet",
],
)
end
@testset "simplex chebyshev center with $T" for T in [Float64, Rational{BigInt}]
    h = HalfSpace([-1, 0], 0) ∩ HalfSpace([0, -1], 0) ∩ HyperPlane([1, 1], 1)
Expand Down

2 comments on commit 302a346

@blegat
Copy link
Member Author

@blegat blegat commented on 302a346 Nov 11, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@JuliaRegistrator
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Error while trying to register: "Tag with name v0.6.17 already exists and points to a different commit"

Please sign in to comment.