7 changes: 7 additions & 0 deletions .github/workflows/ci.yml
@@ -25,6 +25,13 @@ jobs:
version: ${{ matrix.version }}
arch: ${{ matrix.arch }}
- uses: julia-actions/cache@v2
- name: LRO
shell: julia --project=@. {0}
run: |
using Pkg
Pkg.add([
PackageSpec(url="https://github.com/blegat/LowRankOpt.jl/"),
])
- uses: julia-actions/julia-buildpkg@v1
- uses: julia-actions/julia-runtest@v1
- uses: julia-actions/julia-processcoverage@v1
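The new `LRO` step installs LowRankOpt into the job's project before the package is built and tested. A rough local equivalent, run from a Julia session started with `julia --project=@.` (assuming the package should still be taken from the URL used in the workflow):

using Pkg
Pkg.add(PackageSpec(url = "https://github.com/blegat/LowRankOpt.jl/"))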
1 change: 1 addition & 0 deletions Project.toml
@@ -6,6 +6,7 @@ version = "0.3.0"
[deps]
DSDP_jll = "1065e140-e56c-5613-be8b-7480bf7138df"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"

[compat]
2 changes: 1 addition & 1 deletion src/DSDP.jl
@@ -10,7 +10,7 @@ import DSDP_jll
using LinearAlgebra

macro dsdp_ccall(f, args...)
quote
return quote
# QuoteNode prevents the interpretation of the symbol
# and leaves it as a symbol
info =
108 changes: 80 additions & 28 deletions src/MOI_wrapper.jl
@@ -4,6 +4,17 @@
# in the LICENSE.md file or at https://opensource.org/licenses/MIT.

import MathOptInterface as MOI
import LowRankOpt as LRO

const _RankOneMatrix{F<:AbstractVector{Cdouble},D<:AbstractArray{Cdouble,0}} =
LRO.Factorization{Cdouble,F,D}

const _SetDotProd{F<:AbstractMatrix{Cdouble},D<:AbstractVector{Cdouble}} =
LRO.SetDotProducts{
LRO.WITH_SET,
MOI.PositiveSemidefiniteConeTriangle,
LRO.TriangleVectorization{Cdouble,_RankOneMatrix{F,D}},
}
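# Illustrative aside (not part of this change): `_SetDotProd` is understood here as
# constraining the dot products <A_i, X> of a PSD matrix X with given rank-one
# matrices A_i = d * f * f', together with X itself (`LRO.WITH_SET`). A hypothetical
# construction, assuming the `LRO.Factorization`, `LRO.TriangleVectorization`, and
# `LRO.SetDotProducts` constructors simply take the fields accessed later in this
# file (`factor`, `diagonal`, `matrix`, `set`, `vectors`):
#
#     f = [1.0, 2.0]                                      # rank-one factor
#     A = LRO.Factorization(reshape(f, 2, 1), fill(1.0))  # A = 1.0 * f * f'
#     set = LRO.SetDotProducts{LRO.WITH_SET}(
#         MOI.PositiveSemidefiniteConeTriangle(2),
#         [LRO.TriangleVectorization(A)],
#     )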

mutable struct Optimizer <: MOI.AbstractOptimizer
dsdp::DSDPT
@@ -15,6 +26,10 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
# * `-d` means a diagonal block with diagonal of length `d`
# * `d` means a symmetric `d x d` block
blockdims::Vector{Int}
# MOI variable index -> rank 1 matrix it corresponds to
rank_one::Vector{Union{Nothing,_RankOneMatrix}}
# To avoid it being free'd
cached_ind::Vector{Vector{Cint}}
varmap::Vector{Tuple{Int,Int,Int}} # Variable Index vi -> blk, i, j
# If `blockdims[i] < 0`, `blk[i]` is the offset in `lpdvars`.
# That is the **sum of length** of diagonal block before
@@ -32,6 +47,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
sdpdinds::Vector{Vector{Vector{Cint}}}
sdpdcoefs::Vector{Vector{Vector{Cdouble}}}
y::Vector{Cdouble}
solve_time::Float64
silent::Bool
options::Dict{Symbol,Any}

@@ -43,6 +59,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
1,
Cdouble[],
Int[],
Union{Nothing,_RankOneMatrix}[],
Vector{Cint}[],
Tuple{Int,Int,Int}[],
Int[],
C_NULL,
Expand All @@ -53,6 +71,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
Vector{Int}[],
Vector{Cdouble}[],
Cdouble[],
NaN,
false,
Dict{Symbol,Any}(),
)
@@ -80,6 +99,8 @@ function MOI.empty!(optimizer::Optimizer)
optimizer.objective_sign = 1
empty!(optimizer.b)
empty!(optimizer.blockdims)
empty!(optimizer.rank_one)
empty!(optimizer.cached_ind)
empty!(optimizer.varmap)
empty!(optimizer.blk)
optimizer.nlpdrows = 0
@@ -89,6 +110,7 @@ function MOI.empty!(optimizer::Optimizer)
empty!(optimizer.sdpdinds)
empty!(optimizer.sdpdcoefs)
empty!(optimizer.y)
optimizer.solve_time = NaN
return
end

@@ -219,7 +241,7 @@ end
MOI.supports_add_constrained_variables(::Optimizer, ::Type{MOI.Reals}) = false

const SupportedSets =
Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle}
Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle,_SetDotProd}

function MOI.supports_add_constrained_variables(
::Optimizer,
@@ -241,6 +263,7 @@ function new_block(optimizer::Optimizer, set::MOI.Nonnegatives)
blk = length(optimizer.blockdims)
for i in 1:MOI.dimension(set)
push!(optimizer.varmap, (blk, i, i))
push!(optimizer.rank_one, nothing)
end
return
end
@@ -254,14 +277,26 @@ function new_block(
for j in 1:set.side_dimension
for i in 1:j
push!(optimizer.varmap, (blk, i, j))
push!(optimizer.rank_one, nothing)
end
end
return
end

function new_block(model::Optimizer, set::_SetDotProd)
blk = length(model.blockdims) + 1
for i in eachindex(set.vectors)
push!(model.varmap, (blk, 0, 0))
push!(model.rank_one, set.vectors[i].matrix)
end
return new_block(model, set.set)
end

function _add_constrained_variables(optimizer::Optimizer, set::SupportedSets)
offset = length(optimizer.varmap)
new_block(optimizer, set)
@assert length(optimizer.varmap) == offset + MOI.dimension(set)
@assert length(optimizer.rank_one) == offset + MOI.dimension(set)
ci = MOI.ConstraintIndex{MOI.VectorOfVariables,typeof(set)}(offset + 1)
return [MOI.VariableIndex(i) for i in offset .+ (1:MOI.dimension(set))], ci
end
@@ -307,45 +342,59 @@ function constrain_variables_on_creation(
end

function _setcoefficient!(
m::Optimizer,
dest::Optimizer,
coef,
constr::Integer,
blk::Integer,
i::Integer,
j::Integer,
vi::MOI.VariableIndex,
)
if m.blockdims[blk] < 0
@assert i == j
push!(m.lpdvars, constr + 1)
push!(m.lpdrows, m.blk[blk] + i - 1) # -1 because indexing starts at 0 in DSDP
push!(m.lpcoefs, coef)
else
sdp = m.blk[blk]
push!(m.sdpdinds[end][sdp], i + (j - 1) * m.blockdims[blk] - 1)
if i != j
coef /= 2
blk, i, j = varmap(dest, vi)
rank_one = dest.rank_one[vi.value]
if isnothing(rank_one)
if dest.blockdims[blk] < 0
@assert i == j
push!(dest.lpdvars, constr + 1)
push!(dest.lpdrows, dest.blk[blk] + i - 1) # -1 because indexing starts at 0 in DSDP
push!(dest.lpcoefs, coef)
else
sdp = dest.blk[blk]
push!(
dest.sdpdinds[end][sdp],
i + (j - 1) * dest.blockdims[blk] - 1,
)
if i != j
coef /= 2
end
push!(dest.sdpdcoefs[end][sdp], coef)
end
push!(m.sdpdcoefs[end][sdp], coef)
else
d = Cint(dest.blockdims[blk])
push!(dest.cached_ind, collect(Cint(0):(d-1)))
# We use `Add` and not `Set` because I think (if I interpret the name correctly) that would allow mixing with sparse matrices for the same block and constraint
DSDP.SDPCone.SetARankOneMat(
dest.sdpcone,
dest.blk[blk] - 1,
constr,
d,
coef * rank_one.diagonal[],
0,
last(dest.cached_ind),
collect(eachcol(rank_one.factor)[]),
d,
)
end
return
end
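# Illustrative aside (not part of this change): in the rank-one branch above, the
# `SetARankOneMat` call is assumed to install, for constraint `constr` and block
# `blk`, the data matrix alpha * v * v', where alpha = coef * rank_one.diagonal[]
# and v is the single factor column, indexed densely by 0:d-1. A dense equivalent,
# purely for intuition:
#
#     v = rank_one.factor[:, 1]
#     A_dense = coef * rank_one.diagonal[] * v * v'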

# Loads objective coefficient α * vi
function load_objective_term!(
optimizer::Optimizer,
index_map,
α,
vi::MOI.VariableIndex,
)
blk, i, j = varmap(optimizer, vi)
function load_objective_term!(optimizer::Optimizer, α, vi::MOI.VariableIndex)
coef = optimizer.objective_sign * α
_setcoefficient!(optimizer, coef, 0, blk, i, j)
_setcoefficient!(optimizer, coef, 0, vi)
return
end

function _set_A_matrices(m::Optimizer, i)
for (blk, blkdim) in zip(m.blk, m.blockdims)
if blkdim > 0
if blkdim > 0 && !isempty(m.sdpdcoefs[end][blk])
SDPCone.SetASparseVecMat(
m.sdpcone,
blk - 1,
@@ -386,6 +435,7 @@ function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike)
index_map,
MOI.PositiveSemidefiniteConeTriangle,
)
constrain_variables_on_creation(dest, src, index_map, _SetDotProd)
vis_src = MOI.get(src, MOI.ListOfVariableIndices())
if length(vis_src) < length(index_map.var_map)
_error(
@@ -464,8 +514,7 @@ function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike)
_new_A_matrix(dest)
for t in func.terms
if !iszero(t.coefficient)
blk, i, j = varmap(dest, index_map[t.variable])
_setcoefficient!(dest, t.coefficient, k, blk, i, j)
_setcoefficient!(dest, t.coefficient, k, index_map[t.variable])
end
end
_set_A_matrices(dest, k)
@@ -508,7 +557,6 @@ function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike)
if !iszero(term.coefficient)
load_objective_term!(
dest,
index_map,
term.coefficient,
index_map[term.variable],
)
@@ -533,7 +581,9 @@ function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike)
end

function MOI.optimize!(m::Optimizer)
start_time = time()
Solve(m.dsdp)
m.solve_time = time() - start_time
# Calling `ComputeX` anywhere other than right after `Solve` seems to sometimes cause segfaults or weird Heisenbugs,
# so let's call it right away, which is what `DSDP/examples/readsdpa.c` does
ComputeX(m.dsdp)
Expand All @@ -543,6 +593,8 @@ function MOI.optimize!(m::Optimizer)
return
end

MOI.get(optimizer::Optimizer, ::MOI.SolveTimeSec) = optimizer.solve_time

function MOI.get(m::Optimizer, ::MOI.RawStatusString)
if m.dsdp == C_NULL
return "`optimize!` not called"
36 changes: 18 additions & 18 deletions src/sdpcone.jl
@@ -97,14 +97,14 @@ end

function SetARankOneMat(
sdpcone::SDPConeT,
arg2::Integer,
arg3::Integer,
arg4::Integer,
arg5::Cdouble,
arg6::Integer,
arg7::Vector{Cint},
arg8::Vector{Cdouble},
arg9::Integer,
blockj::Integer,
vari::Integer,
n::Integer,
alpha::Cdouble,
ishift::Integer,
ind::Vector{Cint},
val::Vector{Cdouble},
nnz::Integer,
)
@dsdp_ccall SDPConeSetARankOneMat (
SDPConeT,
Expand All @@ -116,7 +116,7 @@ function SetARankOneMat(
Ptr{Cint},
Ptr{Cdouble},
Cint,
) sdpcone arg2 arg3 arg4 arg5 arg6 arg7 arg8 arg9
) sdpcone blockj vari n alpha ishift ind val nnz
end
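# Illustrative usage sketch (not part of this change), mirroring the call made in
# `src/MOI_wrapper.jl`; `sdpcone`, `blockj`, and `vari` are placeholders, and the
# interpretation of the arguments (the matrix alpha * v * v' with v given sparsely
# by `ind`/`val`) follows the renamed parameters above, so treat it as an assumption:
#
#     d = Cint(3)
#     ind = collect(Cint(0):(d - 1))   # dense indexing of the factor vector
#     val = [1.0, 2.0, 3.0]            # factor v
#     SetARankOneMat(sdpcone, blockj, vari, d, 2.0, 0, ind, val, d)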

function SetConstantMat(
Expand Down Expand Up @@ -222,14 +222,14 @@ end

function AddARankOneMat(
sdpcone::SDPConeT,
arg2::Integer,
arg3::Integer,
arg4::Integer,
arg5::Cdouble,
arg6::Integer,
arg7::Vector{Cint},
arg8::Vector{Cdouble},
arg9::Integer,
blockj::Integer,
vari::Integer,
n::Integer,
alpha::Cdouble,
ishift::Integer,
ind::Vector{Cint},
val::Vector{Cdouble},
nnz::Integer,
)
@dsdp_ccall SDPConeAddARankOneMat (
SDPConeT,
Expand All @@ -241,7 +241,7 @@ function AddARankOneMat(
Ptr{Cint},
Ptr{Cdouble},
Cint,
) sdpcone arg2 arg3 arg4 arg5 arg6 arg7 arg8 arg9
) sdpcone blockj vari n alpha ishift ind val nnz
end

function AddSparseVecMat(
19 changes: 19 additions & 0 deletions test/MOI_wrapper.jl
@@ -7,6 +7,7 @@ module TestDSDP

using Test
import MathOptInterface as MOI
import LowRankOpt as LRO
import DSDP

function runtests()
@@ -170,6 +171,24 @@ function test_runtests()
return
end

function test_LRO_runtests()
T = Float64
model = MOI.instantiate(
DSDP.Optimizer,
with_bridge_type = T,
with_cache_type = T,
)
LRO.Bridges.add_all_bridges(model, T)
MOI.set(model, MOI.Silent(), true)
config = MOI.Test.Config(
rtol = 1e-2,
atol = 1e-2,
)
MOI.Test.runtests(model, config, test_module = LRO.Test)
return
end


end # module

TestDSDP.runtests()
5 changes: 5 additions & 0 deletions test/Project.toml
@@ -0,0 +1,5 @@
[deps]
DSDP = "2714ae6b-e930-5b4e-9c21-d0bacf577842"
LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
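With these test-specific dependencies declared, the new LowRankOpt tests run as part of the usual test suite. A minimal local sketch, assuming LowRankOpt resolves from a registry or has already been added from the URL used in the CI workflow:

using Pkg
Pkg.activate(".")   # the DSDP.jl checkout
Pkg.test()          # picks up test/Project.toml and runs test/MOI_wrapper.jl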