diff --git a/Project.toml b/Project.toml
index d4b30baf..f0f7c4d4 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "DiffOpt"
 uuid = "930fe3bc-9c6b-11ea-2d94-6184641e85e7"
 authors = ["Akshay Sharma", "Mathieu Besançon", "Joaquim Dias Garcia", "Benoît Legat", "Oscar Dowson", "Andrew Rosemberg"]
-version = "0.5.1"
+version = "0.5.2"

 [deps]
 BlockDiagonals = "0a1fb500-61f7-11e9-3c65-f5ef3456f9f0"
diff --git a/docs/src/usage.md b/docs/src/usage.md
index ef6445c7..a851f835 100644
--- a/docs/src/usage.md
+++ b/docs/src/usage.md
@@ -117,4 +117,70 @@ DiffOpt.reverse_differentiate!(model)
 @show MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)) == MOI.Parameter(direction_x * 3 / pc_val)
 @show abs(MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(pc)).value - -direction_x * 3 * p_val / pc_val^2) < 1e-5
-```
\ No newline at end of file
+```
+
+## Calculating objective sensitivity with respect to parameters (currently only supported for Nonlinear Programs)
+
+Consider a differentiable model with parameters `p` and `pc`, as in the previous example:
+
+```julia
+using JuMP, DiffOpt, Ipopt
+
+model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
+set_silent(model)
+
+p_val = 4.0
+pc_val = 2.0
+@variable(model, x)
+@variable(model, p in Parameter(p_val))
+@variable(model, pc in Parameter(pc_val))
+@constraint(model, cons, pc * x >= 3 * p)
+@objective(model, Min, x^4)
+optimize!(model)
+
+direction_p = 3.0
+MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(direction_p))
+DiffOpt.forward_differentiate!(model)
+```
+
+Using Lagrangian duality, the objective sensitivity with respect to a parameter that enters a constraint as a constant (e.g., `p` in `cons` above) is easy to compute: under strong duality, it is given by the optimal dual multiplier of that constraint.
+
+On the other hand, if the parameter appears as a coefficient of the constraints, the objective sensitivity with respect to the parameter can be computed from the sensitivities of the variables with respect to the parameter, ``\frac{\partial x}{\partial p}``, and the gradient of the objective with respect to the variables, ``\frac{\partial f}{\partial x}``:
+
+```math
+\frac{\partial f}{\partial p} = \frac{\partial f}{\partial x} \frac{\partial x}{\partial p}
+```
+
+This identity is a consequence of the chain rule.
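+
+As a quick sanity check (a hand derivation for this example, not an API call): at the optimum the constraint `cons` is binding, so ``x^\star = 3p/p_c`` and
+
+```math
+\frac{\partial f}{\partial p} = \frac{\partial f}{\partial x} \frac{\partial x}{\partial p} = 4 (x^\star)^3 \cdot \frac{3}{p_c},
+```
+
+and forward mode returns this quantity scaled by the input direction `direction_p`.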
+
+To compute the objective perturbation induced by the parameter perturbation vector, use the following code:
+
+```julia
+# Always a good practice to clear previously set sensitivities
+DiffOpt.empty_input_sensitivities!(model)
+
+MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(3.0))
+MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(pc), Parameter(3.0))
+DiffOpt.forward_differentiate!(model)
+
+MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+```
+
+In reverse mode, we can compute the parameter perturbation associated with a given objective perturbation:
+
+```julia
+# Always a good practice to clear previously set sensitivities
+DiffOpt.empty_input_sensitivities!(model)
+
+MOI.set(
+    model,
+    DiffOpt.ReverseObjectiveSensitivity(),
+    0.1,
+)
+
+DiffOpt.reverse_differentiate!(model)
+
+MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p))
+```
+
+Note that the reverse parameter perturbation obtained from an objective perturbation carries the same information as the one obtained from a solution perturbation (each can be computed from the other). Consequently, one cannot set both the objective sensitivity (`DiffOpt.ReverseObjectiveSensitivity`) and a solution sensitivity (e.g. `DiffOpt.ReverseVariablePrimal`) at the same time; the code throws an error if you try to do so.
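+
+For example, the following sketch (reusing the variable `x` from the model above) errors, because both an objective sensitivity and a solution sensitivity are set:
+
+```julia
+DiffOpt.empty_input_sensitivities!(model)
+MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.1)
+MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0)
+# DiffOpt.reverse_differentiate!(model) # throws ErrorException
+```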
diff --git a/src/ConicProgram/ConicProgram.jl b/src/ConicProgram/ConicProgram.jl
index ee58c312..abf21231 100644
--- a/src/ConicProgram/ConicProgram.jl
+++ b/src/ConicProgram/ConicProgram.jl
@@ -450,4 +450,16 @@ function MOI.get(
     return MOI.get(model.model, attr, ci)
 end

+function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
+    return error(
+        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    )
+end
+
+function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
+    return error(
+        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    )
+end
+
 end
diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl
index 6ef26847..f7a04b1e 100644
--- a/src/NonLinearProgram/NonLinearProgram.jl
+++ b/src/NonLinearProgram/NonLinearProgram.jl
@@ -27,10 +27,11 @@ end
 Base.@kwdef struct ForwCache
     primal_Δs::Dict{MOI.VariableIndex,Float64} # Sensitivity for primal variables (indexed by VariableIndex)
     dual_Δs::Vector{Float64} # Sensitivity for constraints and bounds (indexed by ConstraintIndex)
+    objective_sensitivity_p::Float64 # Objective sensitivity wrt parameters
 end

 Base.@kwdef struct ReverseCache
-    Δp::Vector{Float64} # Sensitivity for parameters
+    Δp::Dict{MOI.ConstraintIndex,Float64} # Sensitivity for parameters (indexed by the parameter's ConstraintIndex)
 end

 # Define the form of the NLP
@@ -525,15 +526,19 @@ function DiffOpt.forward_differentiate!(model::Model; tol = 1e-6)
     end

     # Compute Jacobian
-    Δs = _compute_sensitivity(model; tol = tol)
+    Δs, df_dp = _compute_sensitivity(model; tol = tol)

     # Extract primal and dual sensitivities
     primal_Δs = Δs[1:length(model.cache.primal_vars), :] * Δp # Exclude slacks
     dual_Δs = Δs[cache.index_duals, :] * Δp # Includes constraints and bounds

+    # Objective sensitivity wrt parameters
+    objective_sensitivity_p = df_dp * Δp
+
     model.forw_grad_cache = ForwCache(;
         primal_Δs = Dict(model.cache.primal_vars .=> primal_Δs),
         dual_Δs = dual_Δs,
+        objective_sensitivity_p = objective_sensitivity_p,
     )
     end
     return nothing
@@ -545,50 +550,53 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6)
     form = model.model

     # Compute Jacobian
-    Δs = _compute_sensitivity(model; tol = tol)
-    num_primal = length(cache.primal_vars)
-    # Fetch primal sensitivities
-    Δx = zeros(num_primal)
-    for (i, var_idx) in enumerate(cache.primal_vars)
-        if haskey(model.input_cache.dx, var_idx)
-            Δx[i] = model.input_cache.dx[var_idx]
+    Δs, df_dp = _compute_sensitivity(model; tol = tol)
+    Δp = if !iszero(model.input_cache.dobj)
+        model.input_cache.dobj * df_dp
+    else
+        num_primal = length(cache.primal_vars)
+        # Fetch primal sensitivities
+        Δx = zeros(num_primal)
+        for (i, var_idx) in enumerate(cache.primal_vars)
+            if haskey(model.input_cache.dx, var_idx)
+                Δx[i] = model.input_cache.dx[var_idx]
+            end
         end
-    end
-    # Fetch dual sensitivities
-    num_constraints = length(cache.cons)
-    num_up = length(cache.has_up)
-    num_low = length(cache.has_low)
-    Δdual = zeros(num_constraints + num_up + num_low)
-    for (i, ci) in enumerate(cache.cons)
-        idx = form.nlp_index_2_constraint[ci]
-        if haskey(model.input_cache.dy, idx)
-            Δdual[i] = model.input_cache.dy[idx]
+        # Fetch dual sensitivities
+        num_constraints = length(cache.cons)
+        num_up = length(cache.has_up)
+        num_low = length(cache.has_low)
+        Δdual = zeros(num_constraints + num_up + num_low)
+        for (i, ci) in enumerate(cache.cons)
+            idx = form.nlp_index_2_constraint[ci]
+            if haskey(model.input_cache.dy, idx)
+                Δdual[i] = model.input_cache.dy[idx]
+            end
         end
-    end
-    for (i, var_idx) in enumerate(cache.primal_vars[cache.has_low])
-        idx = form.constraint_lower_bounds[var_idx.value].value
-        if haskey(model.input_cache.dy, idx)
-            Δdual[num_constraints+i] = model.input_cache.dy[idx]
+        for (i, var_idx) in enumerate(cache.primal_vars[cache.has_low])
+            idx = form.constraint_lower_bounds[var_idx.value].value
+            if haskey(model.input_cache.dy, idx)
+                Δdual[num_constraints+i] = model.input_cache.dy[idx]
+            end
         end
-    end
-    for (i, var_idx) in enumerate(cache.primal_vars[cache.has_up])
-        idx = form.constraint_upper_bounds[var_idx.value].value
-        if haskey(model.input_cache.dy, idx)
-            Δdual[num_constraints+num_low+i] = model.input_cache.dy[idx]
+        for (i, var_idx) in enumerate(cache.primal_vars[cache.has_up])
+            idx = form.constraint_upper_bounds[var_idx.value].value
+            if haskey(model.input_cache.dy, idx)
+                Δdual[num_constraints+num_low+i] = model.input_cache.dy[idx]
+            end
         end
+        # Extract Parameter sensitivities
+        Δw = zeros(size(Δs, 1))
+        Δw[1:num_primal] = Δx
+        Δw[cache.index_duals] = Δdual
+        Δs' * Δw
     end
-    # Extract Parameter sensitivities
-    Δw = zeros(size(Δs, 1))
-    Δw[1:num_primal] = Δx
-    Δw[cache.index_duals] = Δdual
-    Δp = Δs' * Δw
-
-    # Order by ConstraintIndex
-    varorder =
-        sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value)
-    Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder]
-
-    model.back_grad_cache = ReverseCache(; Δp = Δp)
+
+    Δp_dict = Dict{MOI.ConstraintIndex,Float64}(
+        form.var2ci[var_idx] => Δp[form.var2param[var_idx].value]
+        for var_idx in keys(form.var2ci)
+    )
+    model.back_grad_cache = ReverseCache(; Δp = Δp_dict)
     end
     return nothing
 end
@@ -620,10 +628,16 @@ function MOI.get(
     ::DiffOpt.ReverseConstraintSet,
     ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}},
 ) where {T}
-    form = model.model
-    var_idx = MOI.VariableIndex(ci.value)
-    p_idx = form.var2param[var_idx].value
-    return MOI.Parameter{T}(model.back_grad_cache.Δp[p_idx])
+    return MOI.Parameter{T}(model.back_grad_cache.Δp[ci])
+end
+
+function MOI.get(model::Model, ::DiffOpt.ForwardObjectiveSensitivity)
+    return model.forw_grad_cache.objective_sensitivity_p
+end
+
+function MOI.set(model::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
+    model.input_cache.dobj = val
+    return
+end

 end # module NonLinearProgram
diff --git a/src/NonLinearProgram/nlp_utilities.jl b/src/NonLinearProgram/nlp_utilities.jl
index 01d59978..edcbe6e8 100644
--- a/src/NonLinearProgram/nlp_utilities.jl
+++ b/src/NonLinearProgram/nlp_utilities.jl
@@ -27,6 +27,13 @@ function _fill_off_diagonal(H::SparseMatrixCSC)
     return ret
 end

+function _compute_gradient(model::Model)
+    evaluator = model.cache.evaluator
+    grad = zeros(length(model.x))
+    MOI.eval_objective_gradient(evaluator, grad, model.x)
+    return grad
+end
+
 """
     _compute_optimal_hessian(evaluator::MOI.Nonlinear.Evaluator, rows::Vector{JuMP.ConstraintRef}, x::Vector{JuMP.VariableRef})
 """
@@ -104,7 +111,7 @@ function _create_evaluator(form::Form)
         backend,
         MOI.VariableIndex.(1:form.num_variables),
     )
-    MOI.initialize(evaluator, [:Hess, :Jac])
+    MOI.initialize(evaluator, [:Hess, :Jac, :Grad])
     return evaluator
 end

@@ -480,6 +487,11 @@ function _compute_sensitivity(model::Model; tol = 1e-6)
     # Dual bounds lower
     ∂s[(num_w+num_cons+1):(num_w+num_cons+num_lower), :] *= _sense_multiplier
     # Dual bounds upper
     ∂s[(num_w+num_cons+num_lower+1):end, :] *= -_sense_multiplier
-    return ∂s
+
+    # Objective gradient wrt parameters (chain rule: df/dp = df/dx * dx/dp)
+    primal_idx = [i.value for i in model.cache.primal_vars]
+    df_dx = _compute_gradient(model)[primal_idx]
+    df_dp = df_dx' * ∂s[1:num_vars, :]
+    return ∂s, df_dp
 end
diff --git a/src/QuadraticProgram/QuadraticProgram.jl b/src/QuadraticProgram/QuadraticProgram.jl
index 1f536d01..1ed58887 100644
--- a/src/QuadraticProgram/QuadraticProgram.jl
+++ b/src/QuadraticProgram/QuadraticProgram.jl
@@ -501,4 +501,16 @@ function MOI.set(model::Model, ::LinearAlgebraSolver, linear_solver)
     return model.linear_solver = linear_solver
 end

+function MOI.get(::Model, ::DiffOpt.ForwardObjectiveSensitivity)
+    return error(
+        "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    )
+end
+
+function MOI.set(::Model, ::DiffOpt.ReverseObjectiveSensitivity, val)
+    return error(
+        "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    )
+end
+
 end
diff --git a/src/diff_opt.jl b/src/diff_opt.jl
index 3bb4de79..83f9063d 100644
--- a/src/diff_opt.jl
+++ b/src/diff_opt.jl
@@ -15,6 +15,7 @@ Base.@kwdef mutable struct InputCache
     dx::Dict{MOI.VariableIndex,Float64} = Dict{MOI.VariableIndex,Float64}()# dz for QP
     dy::Dict{MOI.ConstraintIndex,Float64} = Dict{MOI.ConstraintIndex,Float64}() # Dual sensitivity currently only works for NonLinearProgram
+    dobj::Float64 = 0.0 # Objective input sensitivity for reverse differentiation
     # ds
     # dy #= [d\lambda, d\nu] for QP
     # FIXME Would it be possible to have a DoubleDict where the value depends
@@ -35,6 +36,7 @@ end
 function Base.empty!(cache::InputCache)
     empty!(cache.dx)
     empty!(cache.dy)
+    cache.dobj = 0.0
     empty!(cache.parameter_constraints)
     empty!(cache.scalar_constraints)
     empty!(cache.vector_constraints)
@@ -191,6 +193,20 @@ MOI.set(model, DiffOpt.ReverseConstraintDual(), ci, value)
 """
 struct ReverseConstraintDual <: MOI.AbstractConstraintAttribute end

+"""
+    ReverseObjectiveSensitivity <: MOI.AbstractModelAttribute
+
+A `MOI.AbstractModelAttribute` to set input data for reverse differentiation.
+
+For instance, to set the objective function perturbation that seeds the computation of
+parameter sensitivities in reverse mode, do the following:
+
+```julia
+MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), value)
+```
+"""
+struct ReverseObjectiveSensitivity <: MOI.AbstractModelAttribute end
+
 """
     ForwardConstraintDual <: MOI.AbstractConstraintAttribute
@@ -206,6 +222,21 @@ struct ForwardConstraintDual <: MOI.AbstractConstraintAttribute end

 MOI.is_set_by_optimize(::ForwardConstraintDual) = true

+"""
+    ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute
+
+A `MOI.AbstractModelAttribute` to get output objective sensitivity data from forward differentiation.
+
+For instance, to get the sensitivity of the objective function with respect to the parameter perturbation, do the following:
+
+```julia
+MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+```
+"""
+struct ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute end
+
+MOI.is_set_by_optimize(::ForwardObjectiveSensitivity) = true
+
 """
     ReverseObjectiveFunction <: MOI.AbstractModelAttribute
diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl
index 1b418273..06b0dcbc 100644
--- a/src/moi_wrapper.jl
+++ b/src/moi_wrapper.jl
@@ -553,6 +553,12 @@ function reverse_differentiate!(model::Optimizer)
             "Trying to compute the reverse differentiation on a model with termination status $(st)",
         )
     end
+    if !iszero(model.input_cache.dobj) &&
+       (!isempty(model.input_cache.dx) || !isempty(model.input_cache.dy))
+        error(
+            "Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.",
+        )
+    end
     diff = _diff(model)
     MOI.set(
         diff,
@@ -565,6 +571,9 @@
     for (vi, value) in model.input_cache.dy
         MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value)
     end
+    if !iszero(model.input_cache.dobj)
+        MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj)
+    end
     return reverse_differentiate!(diff)
 end
@@ -818,6 +827,10 @@ function MOI.get(
     )
 end

+function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity)
+    return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr)
+end
+
 function MOI.supports(
     ::Optimizer,
     ::ReverseVariablePrimal,
@@ -870,6 +883,11 @@ function MOI.set(
     return
 end

+function MOI.set(model::Optimizer, ::ReverseObjectiveSensitivity, val)
+    model.input_cache.dobj = val
+    return
+end
+
 function MOI.get(
     model::Optimizer,
     ::ReverseConstraintDual,
diff --git a/test/Project.toml b/test/Project.toml
index b7775fc1..2893cbd9 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -24,3 +24,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 HiGHS = "1"
 Ipopt = "1.0.2"
 SCS = "1"
+MLDatasets = "0.7.18"
diff --git a/test/conic_program.jl b/test/conic_program.jl
index 93f15261..9c26289b 100644
--- a/test/conic_program.jl
+++ b/test/conic_program.jl
@@ -841,6 +841,39 @@ function test_jump_psd_cone_with_parameter_pv_v_pv()
     @test dx ≈ 0.0 atol = 1e-4 rtol = 1e-4
 end

+function test_ObjectiveSensitivity()
+    model = DiffOpt.conic_diff_model(SCS.Optimizer)
+    @variable(model, x)
+    @variable(model, p in MOI.Parameter(1.0))
+    @constraint(
+        model,
+        con,
+        [p * x, (2 * x - 3), p * 3 * x] in
+        MOI.PositiveSemidefiniteConeTriangle(2)
+    )
+    @objective(model, Min, x)
+    optimize!(model)
+    direction_p = 2.0
+    DiffOpt.set_forward_parameter(model, p, direction_p)
+
+    DiffOpt.forward_differentiate!(model)
+
+    # TODO: Change when implemented
+    @test_throws ErrorException(
+        "ForwardObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+
+    # Clean up
+    DiffOpt.empty_input_sensitivities!(model)
+
+    # TODO: Change when implemented
+    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5)
+
+    @test_throws ErrorException(
+        "ReverseObjectiveSensitivity is not implemented for the Conic Optimization backend",
+    ) DiffOpt.reverse_differentiate!(model)
+end
+
 end # module

TestConicProgram.runtests()
diff --git a/test/nlp_program.jl b/test/nlp_program.jl
index 3d9eb60a..c9dbbc73 100644
--- a/test/nlp_program.jl
+++ b/test/nlp_program.jl
@@ -157,6 +157,11 @@ function test_analytical_simple(; P = 2) # Number of parameters
     # Compute derivatives
     DiffOpt.forward_differentiate!(m)

+    # Test objective sensitivity wrt parameters
+    df_dp = MOI.get(m, DiffOpt.ForwardObjectiveSensitivity())
+    @test isapprox(df_dp, dot(dual.(con), Δp); atol = 1e-4)
+    @test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8))
+
     # Test sensitivities
     @test_throws ErrorException MOI.get(
         m.moi_backend.optimizer.model.diff.model,
@@ -627,6 +632,146 @@ function test_compute_derivatives_Finite_Diff(;
     end
 end

+################################################
+#=
+# Test Objective Sensitivity wrt Parameters
+=#
+################################################
+
+function test_ObjectiveSensitivity_model1()
+    # Model 1
+    model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
+    set_silent(model)
+
+    # Parameters
+    @variable(model, p ∈ MOI.Parameter(1.5))
+
+    # Variables
+    @variable(model, x)
+
+    # Constraints
+    @constraint(model, x * sin(p) == 1)
+    @objective(model, Min, sum(x))
+
+    optimize!(model)
+    @assert is_solved_and_feasible(model)
+
+    # Set perturbations
+    Δp = 0.1
+    DiffOpt.set_forward_parameter(model, p, Δp)
+
+    # Compute derivatives
+    DiffOpt.forward_differentiate!(model)
+
+    # Test objective sensitivity wrt parameters:
+    # x = 1 / sin(p), so df/dp = -cos(p) / sin(p)^2 ≈ -0.0711, times Δp = 0.1
+    df_dp = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+    @test isapprox(df_dp, -0.0071092; atol = 1e-4)
+
+    # Clean up
+    DiffOpt.empty_input_sensitivities!(model)
+
+    # Setting both objective and solution sensitivities should error
+    Δf = 0.5
+    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), Δf)
+
+    MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0)
+
+    # Compute derivatives
+    @test_throws ErrorException DiffOpt.reverse_differentiate!(model)
+
+    DiffOpt.empty_input_sensitivities!(model)
+
+    # Set reverse objective sensitivity
+    Δf = 0.5
+    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), Δf)
+
+    # Compute derivatives
+    DiffOpt.reverse_differentiate!(model)
+
+    # Test parameter sensitivity wrt the objective perturbation: Δf * df/dp
+    dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value
+
+    @test isapprox(dp, -0.0355464; atol = 1e-4)
+end
+
+function test_ObjectiveSensitivity_model2()
+    # Model 2
+    model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
+    set_silent(model)
+
+    # Parameters
+    @variable(model, p ∈ MOI.Parameter(1.5))
+
+    # Variables
+    @variable(model, x)
+
+    # Constraints
+    @constraint(model, x * sin(p) >= 1)
+    @constraint(model, x + p >= 3)
+    @objective(model, Min, sum(x .^ 2))
+
+    optimize!(model)
+    @assert is_solved_and_feasible(model)
+
+    # Set perturbations
+    Δp = 0.1
+    DiffOpt.set_forward_parameter(model, p, Δp)
+
+    # Compute derivatives
+    DiffOpt.forward_differentiate!(model)
+
+    # Test objective sensitivity wrt parameters:
+    # the binding constraint x + p >= 3 gives x = 3 - p, so df/dp = -2x = -3, times Δp = 0.1
+    df_dp = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+    @test isapprox(df_dp, -0.3; atol = 1e-4)
+
+    # Clean up
+    DiffOpt.empty_input_sensitivities!(model)
+
+    # Set reverse objective sensitivity
+    Δf = 0.5
+    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), Δf)
+
+    # Compute derivatives
+    DiffOpt.reverse_differentiate!(model)
+
+    # Test parameter sensitivity wrt the objective perturbation: Δf * df/dp = 0.5 * (-3)
+    dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value
+
+    @test isapprox(dp, -1.5; atol = 1e-4)
+end
+
+function test_ObjectiveSensitivity_subset_parameters()
+    # Model with 10 parameters, differentiate only w.r.t. the 3rd and 7th
+    model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
+    set_silent(model)
+
+    # Parameters and proxies
+    @variable(model, p[1:10] ∈ MOI.Parameter.(1.5))
+
+    # Variables
+    @variable(model, x[1:10])
+
+    # Constraints (decoupled by index; gives us per-parameter duals)
+    @constraint(model, c[i=1:10], x[i] * sin(p[i]) == 1)
+    @objective(model, Min, sum(x))
+
+    optimize!(model)
+    @assert is_solved_and_feasible(model)
+
+    # Set perturbations only for indices 3 and 7
+    Δp3 = 0.1
+    Δp7 = -0.2
+    DiffOpt.set_forward_parameter(model, p[3], Δp3)
+    DiffOpt.set_forward_parameter(model, p[7], Δp7)
+
+    # Compute forward derivatives
+    DiffOpt.forward_differentiate!(model)
+
+    # The objective sensitivity should equal the sum over the selected parameters only:
+    # (Δp3 + Δp7) * (-cos(1.5) / sin(1.5)^2) ≈ 0.0071
+    df_dp = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+    @test isapprox(df_dp, 0.007109293; atol = 1e-4)
+end
+
 ################################################
 #=
 # Test Sensitivity through Reverse Mode
diff --git a/test/quadratic_program.jl b/test/quadratic_program.jl
index 31cfa914..205b3b1d 100644
--- a/test/quadratic_program.jl
+++ b/test/quadratic_program.jl
@@ -349,6 +349,34 @@ function test_differentiating_non_trivial_convex_qp_moi()
     return
 end

+function test_ObjectiveSensitivity()
+    model = DiffOpt.quadratic_diff_model(HiGHS.Optimizer)
+    @variable(model, x)
+    @variable(model, p in MOI.Parameter(1.0))
+    @constraint(model, x >= p)
+    @objective(model, Min, x)
+    optimize!(model)
+    direction_p = 2.0
+    DiffOpt.set_forward_parameter(model, p, direction_p)
+
+    DiffOpt.forward_differentiate!(model)
+
+    # TODO: Change when implemented
+    @test_throws ErrorException(
+        "ForwardObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    ) MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())
+
+    # Clean up
+    DiffOpt.empty_input_sensitivities!(model)
+
+    # TODO: Change when implemented
+    MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 0.5)
+
+    @test_throws ErrorException(
+        "ReverseObjectiveSensitivity is not implemented for the Quadratic Optimization backend",
+    ) DiffOpt.reverse_differentiate!(model)
+end
+
 end # module

TestQuadraticProgram.runtests()