diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index d2a6e76..ef4350f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -15,12 +15,12 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        version: ['1.6', '1'] # Test against LTS
+        version: ['1.10', '1'] # Test against LTS
         os: [ubuntu-latest, macOS-latest, windows-latest]
         arch: [x64]
         include:
           # Also test against 32-bit Linux on LTS.
-          - version: '1.6'
+          - version: '1.10'
            os: ubuntu-latest
            arch: x86
     steps:
diff --git a/Project.toml b/Project.toml
index b5b4d53..e0c3e91 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,13 +1,15 @@
 name = "SDPLR"
 uuid = "56161740-ea4e-4253-9d15-43c62ff94d95"
 authors = ["Benoît Legat "]
-version = "0.1.2"
+version = "0.2.0"

 [deps]
+LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 SDPLR_jll = "3a057b76-36a0-51f0-a66f-6d580b8e8efd"

 [compat]
+LowRankOpt = "0.1"
 MathOptInterface = "1.24"
 SDPLR_jll = "=100.2.300"
-julia = "1.6"
+julia = "1.10"
diff --git a/README.md b/README.md
index 0fd391b..6241e2a 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,9 @@ To use SDPLR with [JuMP](https://github.com/jump-dev/JuMP.jl), use
 using JuMP, SDPLR
 model = Model(SDPLR.Optimizer)
 ```
+To use low-rank **constraints** (not to be confused with the low-rank
+**solutions** detailed in the next section), use the
+[LowRankOpt](https://github.com/blegat/LowRankOpt.jl/) extension of JuMP.

 ## Example: modifying the rank and checking optimality

diff --git a/src/MOI_wrapper.jl b/src/MOI_wrapper.jl
index 27ff835..1ab5f20 100644
--- a/src/MOI_wrapper.jl
+++ b/src/MOI_wrapper.jl
@@ -17,12 +17,23 @@ const PIECES_MAP = Dict{String,Int}(
     "overallsc" => 8,
 )

+const _LowRankMatrix{F<:AbstractMatrix{Cdouble},D<:AbstractVector{Cdouble}} =
+    LRO.Factorization{Cdouble,F,D}
+
+const _SetDotProd{F<:AbstractMatrix{Cdouble},D<:AbstractVector{Cdouble}} =
+    LRO.SetDotProducts{
+        LRO.WITH_SET,
+        MOI.PositiveSemidefiniteConeTriangle,
+        LRO.TriangleVectorization{Cdouble,_LowRankMatrix{F,D}},
+    }
+
 const SupportedSets =
-    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle}
+    Union{MOI.Nonnegatives,MOI.PositiveSemidefiniteConeTriangle,_SetDotProd}

 mutable struct Optimizer <: MOI.AbstractOptimizer
     objective_constant::Float64
     objective_sign::Int
+    dot_product::Vector{Union{Nothing,_LowRankMatrix}}
     blksz::Vector{Cptrdiff_t}
     blktype::Vector{Cchar}
     varmap::Vector{Tuple{Int,Int,Int}} # Variable Index vi -> blk, i, j
@@ -51,6 +62,7 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
         return new(
             0.0,
             1,
+            Union{Nothing,_LowRankMatrix}[],
             Cptrdiff_t[],
             Cchar[],
             Tuple{Int,Int,Int}[],
@@ -151,6 +163,7 @@ function _new_block(model::Optimizer, set::MOI.Nonnegatives)
     blk = length(model.blksz)
     for i in 1:MOI.dimension(set)
         push!(model.varmap, (blk, i, i))
+        push!(model.dot_product, nothing)
     end
     return
 end
@@ -162,11 +175,22 @@ function _new_block(model::Optimizer, set::MOI.PositiveSemidefiniteConeTriangle)
     for j in 1:set.side_dimension
         for i in 1:j
             push!(model.varmap, (blk, i, j))
+            push!(model.dot_product, nothing)
         end
     end
     return
 end

+function _new_block(model::Optimizer, set::_SetDotProd)
+    blk = length(model.blksz) + 1
+    for i in eachindex(set.vectors)
+        push!(model.varmap, (-blk, i, i))
+        push!(model.dot_product, set.vectors[i].matrix)
+    end
+    _new_block(model, set.set)
+    return
+end
+
 function MOI.add_constrained_variables(model::Optimizer, set::SupportedSets)
     reset_solution!(model)
     offset = length(model.varmap)
@@ -208,6 +232,9 @@ function _next(model::Optimizer, i)
     return length(model.Aent)
 end

+# The solver expects an entry in `entptr` and `type` for every block, even the
+# blocks that have no coefficient in the current function, so we fill in the
+# missing entries up to block `numblk` here.
 function _fill_until(
     model::Optimizer,
     numblk,
@@ -226,6 +253,42 @@ function _fill_until(
     return
 end

+# We have ∑ αᵢ Fᵢ Dᵢ Fᵢ' = [F₁ ⋯ Fₖ] * Diag(α₁D₁, …, αₖDₖ) * [F₁ ⋯ Fₖ]' where Fᵢ is the factor and Dᵢ = Diag(scaling) of the i-th low-rank term
+function merge_low_rank_terms(
+    ent,
+    row,
+    col,
+    type::Vector{Cchar},
+    mats::Vector{Tuple{Cdouble,_LowRankMatrix}},
+)
+    if isempty(mats)
+        return
+    end
+    type[end] = Cchar('l')
+    offset = 0
+    for (coef, mat) in mats
+        for i in eachindex(mat.scaling)
+            push!(ent, coef * mat.scaling[i])
+            push!(row, offset + i)
+            push!(col, offset + i)
+        end
+        offset += length(mat.scaling)
+    end
+    offset = 0
+    for (_, mat) in mats
+        for j in axes(mat.factor, 2)
+            for i in axes(mat.factor, 1)
+                push!(ent, mat.factor[i, j])
+                push!(row, i)
+                push!(col, offset + j)
+            end
+        end
+        offset += length(mat.scaling)
+    end
+    empty!(mats)
+    return
+end
+
 function _fill!(
     model,
     ent,
@@ -235,17 +298,28 @@ function _fill!(
     row,
     col,
     entptr::Vector{Csize_t},
     type::Vector{Cchar},
     func,
 )
+    prev_blk = 0
+    mats = Tuple{Cdouble,_LowRankMatrix}[]
     for t in MOI.Utilities.canonical(func).terms
         blk, i, j = model.varmap[t.variable.value]
-        _fill_until(model, blk, entptr, type, length(ent))
-        coef = t.coefficient
-        if i != j
-            coef /= 2
+        if blk != prev_blk
+            merge_low_rank_terms(ent, row, col, type, mats)
+        end
+        _fill_until(model, abs(blk), entptr, type, length(ent))
+        if blk < 0
+            prev_blk = blk
+            push!(mats, (t.coefficient, model.dot_product[t.variable.value]))
+        else
+            coef = t.coefficient
+            if i != j
+                coef /= 2
+            end
+            push!(ent, coef)
+            push!(row, i)
+            push!(col, j)
         end
-        push!(ent, coef)
-        push!(row, i)
-        push!(col, j)
     end
+    merge_low_rank_terms(ent, row, col, type, mats)
     _fill_until(model, length(model.blksz), entptr, type, length(ent))
     @assert length(entptr) == length(model.blksz)
     @assert length(type) == length(model.blksz)
@@ -366,6 +440,7 @@ end
 function MOI.empty!(optimizer::Optimizer)
     optimizer.objective_constant = 0.0
     optimizer.objective_sign = 1
+    empty!(optimizer.dot_product)
     empty!(optimizer.blksz)
     empty!(optimizer.blktype)
     empty!(optimizer.varmap)
@@ -479,9 +554,10 @@ function MOI.get(
     # The constraint index corresponds to the variable index of the `1, 1` entry
     blk, i, j = optimizer.varmap[ci.value]
     @assert i == j == 1
+    blk = abs(blk) # In the low-rank case, we just take the factor of the PSD matrix
     I = (optimizer.Rmap[blk]+1):optimizer.Rmap[blk+1]
     r = optimizer.R[I]
-    if S === MOI.PositiveSemidefiniteConeTriangle
+    if S === MOI.PositiveSemidefiniteConeTriangle || S <: _SetDotProd
         @assert optimizer.blktype[blk] == Cchar('s')
         d = optimizer.blksz[blk]
         return reshape(r, d, div(length(I), d))
@@ -497,12 +573,23 @@ function MOI.get(
     vi::MOI.VariableIndex,
 )
     MOI.check_result_index_bounds(optimizer, attr)
-    blk, i, j = optimizer.varmap[vi.value]
-    I = (optimizer.Rmap[blk]+1):optimizer.Rmap[blk+1]
+    _blk, i, j = optimizer.varmap[vi.value]
+    blk = abs(_blk)
+    I = (optimizer.Rmap[abs(blk)]+1):optimizer.Rmap[abs(blk)+1]
     if optimizer.blktype[blk] == Cchar('s')
         d = optimizer.blksz[blk]
         U = reshape(optimizer.R[I], d, div(length(I), d))
-        return U[i, :]' * U[j, :]
+        if _blk < 0
+            # `vi` is the dot product of the PSD matrix with a low-rank matrix
+            mat = optimizer.dot_product[vi.value]
+            return sum(eachindex(mat.scaling); init = 0.0) do i
+                v = mat.factor[:, i]
+                vU = U' * v
+                return mat.scaling[i] * (vU' * vU)
+            end
+        else
+            return U[i, :]' * U[j, :]
+        end
     else
         @assert optimizer.blktype[blk] == Cchar('d')
         return optimizer.R[I[i]]^2
diff --git a/src/SDPLR.jl b/src/SDPLR.jl
index ad96885..79310cb 100644
--- a/src/SDPLR.jl
+++ b/src/SDPLR.jl
@@ -6,6 +6,7 @@ module SDPLR

 import MathOptInterface as MOI
+import LowRankOpt as LRO
 import SDPLR_jll

 include("bounds.jl")

@@ -138,14 +139,17 @@ function solve(
         k += 1
         @assert CAinfo_entptr[k] <= CAinfo_entptr[k+1]
         for j in ((CAinfo_entptr[k]+1):CAinfo_entptr[k+1])
-            @assert blktype[blk] == CAinfo_type[k]
             @assert 1 <= CArow[j] <= blksz[blk]
             @assert 1 <= CAcol[j] <= blksz[blk]
             if CAinfo_type[k] == Cchar('s')
+                @assert blktype[blk] == Cchar('s')
                 @assert CArow[j] <= CAcol[j]
-            else
-                @assert CAinfo_type[k] == Cchar('d')
+            elseif CAinfo_type[k] == Cchar('d')
+                @assert blktype[blk] == Cchar('d')
                 @assert CArow[j] == CAcol[j]
+            else
+                @assert CAinfo_type[k] == Cchar('l')
+                @assert blktype[blk] == Cchar('s')
             end
         end
     end
diff --git a/test/Project.toml b/test/Project.toml
index b0cb0ca..40f3a37 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -1,5 +1,6 @@
 [deps]
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+LowRankOpt = "607ca3ad-272e-43c8-bcbe-fc71b56c935c"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 SDPLR = "56161740-ea4e-4253-9d15-43c62ff94d95"
diff --git a/test/runtests.jl b/test/runtests.jl
index 0077500..74b5c46 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -8,16 +8,15 @@ module TestSDPLR
 using Test
 import LinearAlgebra
 import MathOptInterface as MOI
+import LowRankOpt as LRO
 import Random
 import SDPLR

 function test_runtests()
-    model = MOI.Bridges.full_bridge_optimizer(
-        MOI.Utilities.CachingOptimizer(
-            MOI.Utilities.UniversalFallback(MOI.Utilities.Model{Float64}()),
-            SDPLR.Optimizer(),
-        ),
-        Float64,
+    model = MOI.instantiate(
+        SDPLR.Optimizer,
+        with_bridge_type = Float64,
+        with_cache_type = Float64,
     )
     MOI.set(model, MOI.Silent(), true)
     MOI.set(model, MOI.RawOptimizerAttribute("timelim"), 10)
@@ -49,6 +48,25 @@ function test_runtests()
     return
 end

+function test_LRO_runtests()
+    T = Float64
+    model = MOI.instantiate(
+        SDPLR.Optimizer,
+        with_bridge_type = T,
+        with_cache_type = T,
+    )
+    LRO.Bridges.add_all_bridges(model, T)
+    MOI.set(model, MOI.Silent(), true)
+    MOI.set(model, MOI.RawOptimizerAttribute("timelim"), 10)
+    config = MOI.Test.Config(
+        rtol = 1e-1,
+        atol = 1e-1,
+        optimal_status = MOI.LOCALLY_SOLVED,
+    )
+    MOI.Test.runtests(model, config, test_module = LRO.Test)
+    return
+end
+
 function test_RawOptimizerAttribute_UnsupportedAttribute()
     model = SDPLR.Optimizer()
     attr = MOI.RawOptimizerAttribute("FooBarBaz")
@@ -160,7 +178,7 @@ function test_factor()
     return
 end

-function _build_simple_model()
+function _build_simple_sparse_model()
     model = SDPLR.Optimizer()
     X, _ = MOI.add_constrained_variables(
         model,
@@ -174,6 +192,56 @@ function _build_simple_model()
     return model, X, c
 end

+function _build_simple_rankone_model()
+    model = SDPLR.Optimizer()
+    A1 = LRO.positive_semidefinite_factorization([-1.0; 1.0;;])
+    A2 = LRO.positive_semidefinite_factorization([1.0; 1.0;;])
+    set = LRO.SetDotProducts{LRO.WITH_SET}(
+        MOI.PositiveSemidefiniteConeTriangle(2),
+        LRO.TriangleVectorization.([A1, A2]),
+    )
+    @test set isa SDPLR._SetDotProd
+    @test MOI.supports_add_constrained_variables(model, typeof(set))
+    dot_prods_X, _ = MOI.add_constrained_variables(model, set)
+    dot_prods = dot_prods_X[1:2]
+    X = dot_prods_X[3:end]
+    c = MOI.add_constraint(
+        model,
+        -1/4 * dot_prods[1] + 1/4 * dot_prods[2],
+        MOI.EqualTo(1.0),
+    )
+    MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
+    obj = 1.0 * X[1] + 1.0 * X[3]
+    MOI.set(model, MOI.ObjectiveFunction{typeof(obj)}(), obj)
+    @test MOI.get(model, MOI.TerminationStatus()) == MOI.OPTIMIZE_NOT_CALLED
+    return model, X, c
+end
+
+function _build_simple_lowrank_model()
+    model = SDPLR.Optimizer()
+    A = LRO.Factorization(
+        [
+            -1.0 1.0
+            1.0 1.0
+        ],
+        [-1 / 4, 1 / 4],
+    )
+    set = LRO.SetDotProducts{LRO.WITH_SET}(
+        MOI.PositiveSemidefiniteConeTriangle(2),
+        [LRO.TriangleVectorization(A)],
+    )
+    @test set isa SDPLR._SetDotProd
+    @test set isa SDPLR._SetDotProd{Matrix{Float64},Vector{Float64}}
+    @test MOI.supports_add_constrained_variables(model, typeof(set))
+    X, _ = MOI.add_constrained_variables(model, set)
+    c = MOI.add_constraint(model, 1.0 * X[1], MOI.EqualTo(1.0))
+    MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
+    obj = 1.0 * X[2] + 1.0 * X[4]
+    MOI.set(model, MOI.ObjectiveFunction{typeof(obj)}(), obj)
+    @test MOI.get(model, MOI.TerminationStatus()) == MOI.OPTIMIZE_NOT_CALLED
+    return model, X[2:4], c
+end
+
 function _test_simple_model(model, X, c)
     atol = rtol = 1e-2
     @test MOI.get(model, MOI.TerminationStatus()) == MOI.LOCALLY_SOLVED
@@ -189,15 +257,29 @@ function _test_simple_model(model, X, c)
     return
 end

-function test_simple_MOI_wrapper()
-    model, X, c = _build_simple_model()
+function test_simple_sparse_MOI_wrapper()
+    model, X, c = _build_simple_sparse_model()
+    MOI.optimize!(model)
+    _test_simple_model(model, X, c)
+    return
+end
+
+function test_simple_lowrank_MOI_wrapper()
+    model, X, c = _build_simple_lowrank_model()
+    MOI.optimize!(model)
+    _test_simple_model(model, X, c)
+    return
+end
+
+function test_simple_rankone_MOI_wrapper()
+    model, X, c = _build_simple_rankone_model()
     MOI.optimize!(model)
     _test_simple_model(model, X, c)
     return
 end

 function _test_limit(attr, val, term)
-    model, _, _ = _build_simple_model()
+    model, _, _ = _build_simple_sparse_model()
     MOI.set(model, MOI.RawOptimizerAttribute(attr), val)
     MOI.optimize!(model)
     @test MOI.get(model, MOI.TerminationStatus()) == term
@@ -229,7 +311,7 @@ function totaltime()
 end

 function test_continuity_between_solve()
-    model, X, c = _build_simple_model()
+    model, X, c = _build_simple_sparse_model()
     MOI.set(model, MOI.RawOptimizerAttribute("majiter"), SDPLR.MAX_MAJITER - 2)
     @test MOI.get(model, MOI.RawOptimizerAttribute("majiter")) ==
           SDPLR.MAX_MAJITER - 2
@@ -307,15 +389,16 @@ function test_bounds()
     return
 end

-function test_solve_simple_with_sdplrlib()
+function _test_solve_simple_with_sdplrlib(;
+    CAinfo_entptr,
+    CAent,
+    CArow,
+    CAcol,
+    CAinfo_type,
+)
     blksz = Cptrdiff_t[2]
     blktype = Cchar['s']
     b = Cdouble[1]
-    CAinfo_entptr = Csize_t[0, 2, 3]
-    CAent = Cdouble[1, 1, 0.5]
-    CArow = Csize_t[1, 2, 1]
-    CAcol = Csize_t[1, 2, 2]
-    CAinfo_type = Cchar['s', 's']
     # The `925` seed is taken from SDPLR's `main.c`
     Random.seed!(925)
     ret, R, lambda, ranks, pieces = SDPLR.solve(
@@ -337,6 +420,28 @@
     return
 end

+function test_solve_simple_sparse_with_sdplrlib()
+    _test_solve_simple_with_sdplrlib(
+        CAinfo_entptr = Csize_t[0, 2, 3],
+        CAent = Cdouble[1, 1, 0.5],
+        CArow = Csize_t[1, 2, 1],
+        CAcol = Csize_t[1, 2, 2],
+        CAinfo_type = Cchar['s', 's'],
+    )
+    return
+end
+
+function test_solve_simple_lowrank_with_sdplrlib()
+    _test_solve_simple_with_sdplrlib(
+        CAinfo_entptr = Csize_t[0, 2, 8],
+        CAent = Cdouble[1, 1, -0.25, 0.25, -1, 1, 1, 1],
+        CArow = Csize_t[1, 2, 1, 2, 1, 2, 1, 2],
+        CAcol = Csize_t[1, 2, 1, 2, 1, 1, 2, 2],
+        CAinfo_type = Cchar['s', 'l'],
+    )
+    return
+end
+
 function test_solve_vibra_with_sdplr_executable()
     SDPLR.solve_sdpa_file("vibra1.dat-s")
     return
@@ -362,6 +467,37 @@
     @test ranks == Csize_t[9, 9, 1]
 end

+# Low-level SDPLR version of LowRankOpt's `test_conic_PositiveSemidefinite_RankOne_polynomial` test
+function test_solve_conic_PositiveSemidefinite_RankOne_polynomial()
+    blksz = [2, 2]
+    blktype = Cchar['d', 's']
+    b = [-3.0, 1.0]
+    CAent = [-1.0, 1.0, -1.0, 1.0, -1.0, 1.0, -1.0, -1.0, 1.0, -1.0, 1.0, 1.0]
+    CArow = Csize_t[1, 2, 1, 2, 1, 1, 2, 1, 2, 1, 1, 2]
+    CAcol = Csize_t[1, 2, 1, 2, 1, 1, 1, 1, 2, 1, 1, 1]
+    CAinfo_entptr = Csize_t[0, 2, 2, 4, 7, 9, 12]
+    CAinfo_type = Cchar['d', 's', 'd', 'l', 'd', 'l']
+    # The `925` seed is taken from SDPLR's `main.c`
+    Random.seed!(925)
+    ret, R, lambda, ranks, pieces = SDPLR.solve(
+        blksz,
+        blktype,
+        b,
+        CAent,
+        CArow,
+        CAcol,
+        CAinfo_entptr,
+        CAinfo_type,
+    )
+    @test iszero(ret)
+    U = reshape(R[3:end], 2, 2)
+    @test U * U' ≈ [1 -1; -1 1] rtol = 1e-3
+    @test lambda ≈ [0, 1] atol = 1e-3
+    @test pieces[1:5] == [7, 20, 1, 0, 0]
+    @test pieces[7:8] == [16, 1]
+    @test ranks == [1, 2]
+end
+
 function runtests()
     for name in names(@__MODULE__; all = true)
         if startswith("$(name)", "test_")
diff --git a/test/simple_lowrank.jl b/test/simple_lowrank.jl
new file mode 100644
index 0000000..3149e01
--- /dev/null
+++ b/test/simple_lowrank.jl
@@ -0,0 +1,8 @@
+blksz = Cptrdiff_t[2]
+blktype = Cchar['s']
+b = Cdouble[1]
+CAinfo_entptr = Csize_t[0, 2, 8]
+CAent = Cdouble[1, 1, -0.25, 0.25, -1, 1, 1, 1]
+CArow = Csize_t[1, 2, 1, 2, 1, 2, 1, 2]
+CAcol = Csize_t[1, 2, 1, 2, 1, 1, 2, 2]
+CAinfo_type = Cchar['s', 'l']
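
The low-rank constraint path added in this diff can be exercised end-to-end at the MOI level. The following sketch is adapted from the new `_build_simple_lowrank_model` in `test/runtests.jl`; it only uses calls that appear in the tests above (`LRO.Factorization`, `LRO.SetDotProducts{LRO.WITH_SET}`, `LRO.TriangleVectorization`) and reuses the same toy data, so it is an illustration rather than a recommended model.

```julia
# Minimal sketch of the new low-rank constraint support, adapted from
# `_build_simple_lowrank_model` in test/runtests.jl (same toy data).
import MathOptInterface as MOI
import LowRankOpt as LRO
import SDPLR

model = SDPLR.Optimizer()
# A = F * Diagonal(d) * F' with factor F and scaling d.
A = LRO.Factorization([-1.0 1.0; 1.0 1.0], [-1 / 4, 1 / 4])
set = LRO.SetDotProducts{LRO.WITH_SET}(
    MOI.PositiveSemidefiniteConeTriangle(2),
    [LRO.TriangleVectorization(A)],
)
# With `WITH_SET`, `X[1]` is ⟨A, Y⟩ and `X[2:4]` is the triangle of the PSD matrix Y.
X, _ = MOI.add_constrained_variables(model, set)
MOI.add_constraint(model, 1.0 * X[1], MOI.EqualTo(1.0))
MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
obj = 1.0 * X[2] + 1.0 * X[4]  # trace of Y
MOI.set(model, MOI.ObjectiveFunction{typeof(obj)}(), obj)
MOI.optimize!(model)
```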
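The identity stated above `merge_low_rank_terms` can be checked independently of the package; the sketch below uses only `LinearAlgebra` and randomly chosen sizes and coefficients of my own.

```julia
# Standalone check of the identity used by `merge_low_rank_terms`: a sum of
# scaled low-rank terms αᵢ Fᵢ Dᵢ Fᵢ' equals a single factorization with the
# factors stacked side by side and the coefficients absorbed into the scalings.
using LinearAlgebra

F1, d1, a1 = randn(4, 2), randn(2), 0.7
F2, d2, a2 = randn(4, 3), randn(3), -1.3

lhs = a1 * F1 * Diagonal(d1) * F1' + a2 * F2 * Diagonal(d2) * F2'
F = [F1 F2]             # stacked factor, one block of columns per term
d = [a1 * d1; a2 * d2]  # merged scaling with coefficients absorbed
@assert lhs ≈ F * Diagonal(d) * F'
```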
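For the low-level `solve` tests, the six `'l'`-typed entries in `test_solve_simple_lowrank_with_sdplrlib` (and in `test/simple_lowrank.jl`) follow the layout produced by `merge_low_rank_terms`: first the diagonal of `coef * scaling` with `row == col`, then the factor column by column. The sketch below, written under that assumption, reproduces those arrays from the factorization used in the test.

```julia
# Reproduce the 'l' (low-rank) entries of test/simple_lowrank.jl from
# A = F * Diagonal(d) * F' with constraint coefficient 1.0, following the
# entry layout of `merge_low_rank_terms`.
F = [-1.0 1.0; 1.0 1.0]
d = [-1 / 4, 1 / 4]

ent, row, col = Float64[], Int[], Int[]
for i in eachindex(d)  # first the diagonal Diag(coef * d)
    push!(ent, 1.0 * d[i])
    push!(row, i)
    push!(col, i)
end
for j in axes(F, 2), i in axes(F, 1)  # then the factor, column by column
    push!(ent, F[i, j])
    push!(row, i)
    push!(col, j)
end

@assert ent == [-0.25, 0.25, -1, 1, 1, 1]
@assert row == [1, 2, 1, 2, 1, 2]
@assert col == [1, 2, 1, 1, 2, 2]
```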