Changes from all commits (23 commits):
- 70aae75 DAE based solvers (ParasPuneetSingh, Jul 10, 2025)
- 433e2b2 MOO tests and code updates (ParasPuneetSingh, Jul 10, 2025)
- 4e1a5f3 import changes (ParasPuneetSingh, Jul 20, 2025)
- 59c2254 MOO Docs updated blackboxoptim.md (ParasPuneetSingh, Sep 7, 2024)
- 8b347fc Update evolutionary.md (ParasPuneetSingh, Sep 20, 2024)
- 8f46ec0 Update metaheuristics.md (ParasPuneetSingh, Sep 20, 2024)
- d5f3ea5 Update docs/src/optimization_packages/blackboxoptim.md (Vaibhavdixit02, Sep 22, 2024)
- 716a626 Update Project.toml (ParasPuneetSingh, Sep 22, 2024)
- 2aadfdd Update Project.toml (ParasPuneetSingh, Sep 22, 2024)
- 01c2957 Update docs/src/optimization_packages/blackboxoptim.md (Vaibhavdixit02, Sep 22, 2024)
- 2caa0c1 Update metaheuristics.md (ParasPuneetSingh, Sep 23, 2024)
- 968e81a Update Project.toml (ParasPuneetSingh, Sep 23, 2024)
- a5f2efe Update blackboxoptim.md (ParasPuneetSingh, Sep 23, 2024)
- ce74e9c Update evolutionary.md (ParasPuneetSingh, Sep 25, 2024)
- d7b7819 Update docs/src/optimization_packages/metaheuristics.md (Vaibhavdixit02, Sep 27, 2024)
- 4254278 Update docs/src/optimization_packages/evolutionary.md (Vaibhavdixit02, Oct 26, 2024)
- 37357a1 Update metaheuristics.md (ParasPuneetSingh, Nov 9, 2024)
- beaa8a1 Update Project.toml (ParasPuneetSingh, Nov 9, 2024)
- 3ca4208 Update metaheuristics.md (ParasPuneetSingh, Nov 9, 2024)
- abad46a Update blackboxoptim.md (ParasPuneetSingh, Nov 9, 2024)
- 9df333e Update metaheuristics.md (ParasPuneetSingh, Nov 10, 2024)
- ce656a5 Update OptimizationBBO.jl (ParasPuneetSingh, Jan 29, 2025)
- 55a4fb9 Update runtests.jl (ParasPuneetSingh, Jan 29, 2025)
7 changes: 7 additions & 0 deletions docs/Project.toml
@@ -46,6 +46,9 @@ SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe"
Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7"
Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209"
Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898"
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"

[compat]
AmplNLWriter = "1"
@@ -93,3 +96,7 @@ SymbolicAnalysis = "0.3"
Symbolics = "6"
Tracker = ">= 0.2"
Zygote = ">= 0.5"
BlackBoxOptim = "0.6"
Metaheuristics = "3"
Evolutionary = "0.11"

18 changes: 18 additions & 0 deletions docs/src/optimization_packages/blackboxoptim.md
@@ -67,3 +67,21 @@ prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0,
sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000,
maxtime = 1000.0)
```

## Multi-objective optimization
The optimizer for multi-objective optimization is `BBO_borg_moea()`. Your objective function should return a vector (or tuple) of objective values, and you should specify the fitness scheme (typically Pareto) together with the number of objectives it encodes (the type parameter of `ParetoFitnessScheme`). Otherwise usage is similar to the single-objective case; here is an example:

```@example MOO-BBO
using OptimizationBBO, Optimization, BlackBoxOptim
using SciMLBase: MultiObjectiveOptimizationFunction
u0 = [0.25, 0.25]
opt = OptimizationBBO.BBO_borg_moea()
function multi_obj_func(x, p)
f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function
f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function
return (f1, f2)
end
mof = MultiObjectiveOptimizationFunction(multi_obj_func)
prob = Optimization.OptimizationProblem(mof, u0; lb = [0.0, 0.0], ub = [2.0, 2.0])
sol = solve(prob, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true))
```
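
The tests added in this PR read the final objective values off the solution object; a quick sanity check might look like the following sketch, continuing the example above:

```julia
@show sol.u          # decision variables of the returned point
@show sol.objective  # corresponding objective values (f1, f2)
```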
17 changes: 17 additions & 0 deletions docs/src/optimization_packages/evolutionary.md
@@ -41,3 +41,20 @@ f = OptimizationFunction(rosenbrock)
prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0])
sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100))
```

## Multi-objective optimization
The Rosenbrock and Ackley functions can be optimized using `Evolutionary.NSGA2()` as follows:

```@example MOO-Evolutionary
using Optimization, OptimizationEvolutionary, Evolutionary
function func(x, p=nothing)::Vector{Float64}
f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function
f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function
return [f1, f2]
end
initial_guess = [1.0, 1.0]
obj_func = MultiObjectiveOptimizationFunction(func)
algorithm = OptimizationEvolutionary.NSGA2()
problem = OptimizationProblem(obj_func, initial_guess)
result = solve(problem, algorithm)
```
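
`OptimizationProblem` accepts box constraints in the same way as the single-objective example earlier on this page; a sketch with hypothetical bounds:

```julia
# Hypothetical bounds chosen for illustration only.
bounded_problem = OptimizationProblem(obj_func, initial_guess; lb = [-2.0, -2.0], ub = [2.0, 2.0])
bounded_result = solve(bounded_problem, algorithm)
```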
43 changes: 43 additions & 0 deletions docs/src/optimization_packages/metaheuristics.md
@@ -70,3 +70,46 @@ sol = solve(prob, ECA(), use_initial = true, maxiters = 100000, maxtime = 1000.0
### With Constraint Equations

While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently does not relay these constraints.


## Multi-objective optimization
The ZDT1 benchmark function can be optimized using `Metaheuristics.jl` as follows:

```@example MOO-Metaheuristics
using Optimization, OptimizationEvolutionary, OptimizationMetaheuristics, Metaheuristics
using Statistics  # `mean` used in the ZDT1 definition below
function zdt1(x)
f1 = x[1]
g = 1 + 9 * mean(x[2:end])
h = 1 - sqrt(f1 / g)
f2 = g * h
# In this example, we have no constraints
gx = [0.0] # Inequality constraints (not used)
hx = [0.0] # Equality constraints (not used)
return [f1, f2], gx, hx
end
multi_obj_fun = MultiObjectiveOptimizationFunction((x, p) -> zdt1(x))

# Define the problem bounds
lower_bounds = [0.0, 0.0, 0.0]
upper_bounds = [1.0, 1.0, 1.0]

# Define the initial guess
initial_guess = [0.5, 0.5, 0.5]

# Create the optimization problem
prob = OptimizationProblem(multi_obj_fun, initial_guess; lb = lower_bounds, ub = upper_bounds)

nobjectives = 2
npartitions = 100

# reference points (Das and Dennis's method)
weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions)

# Choose the algorithm and solve the problem
sol1 = solve(prob, Metaheuristics.NSGA2(); maxiters = 100, use_initial = true)
sol2 = solve(prob, Metaheuristics.NSGA3(); maxiters = 100, use_initial = true)
sol3 = solve(prob, Metaheuristics.SPEA2(); maxiters = 100, use_initial = true)
sol4 = solve(prob, Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001)))
sol5 = solve(prob, Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true)
sol6 = solve(prob, Metaheuristics.SMS_EMOA(); maxiters = 100, use_initial = true)
```
16 changes: 12 additions & 4 deletions docs/src/optimization_packages/ode.md
@@ -28,8 +28,8 @@ p = []
f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!)
prob_manual = OptimizationProblem(f_manual, x0)

opt = ODEGradientDescent(dt=0.01)
sol = solve(prob_manual, opt; maxiters=50_000)
opt = ODEGradientDescent()
sol = solve(prob_manual, opt; dt=0.01, maxiters=50_000)

@show sol.u
@show sol.objective
@@ -39,15 +39,23 @@ sol = solve(prob_manual, opt; maxiters=50_000)

All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence:

* `ODEGradientDescent(dt=...)` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems.
* `ODEGradientDescent()` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems.

* `RKChebyshevDescent()` — uses the ROCK2 solver, a stabilized explicit Runge-Kutta method suitable for stiff problems. It allows larger step sizes while maintaining stability.

* `RKAccelerated()` — leverages the Tsit5 method, a 5th-order Runge-Kutta solver that achieves faster convergence for smooth problems by improving integration accuracy.

* `HighOrderDescent()` — applies Vern7, a high-order (7th-order) explicit Runge-Kutta method for even more accurate integration. This can be beneficial for problems requiring high precision.
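
For example, a minimal sketch reusing `prob_manual` from the snippet above (assuming the adaptive solvers choose their own step size, so no `dt` is passed):

```julia
# Hypothetical usage; RKAccelerated wraps Tsit5, an adaptive solver,
# so a fixed `dt` should not be required.
opt = RKAccelerated()
sol = solve(prob_manual, opt; maxiters = 10_000)
```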

You can also define a custom optimizer using the generic `ODEOptimizer(solver; dt=nothing)` constructor by supplying any ODE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/).
## DAE-based Optimizers

In addition to ODE-based optimizers, OptimizationODE.jl provides optimizers for differential-algebraic equation (DAE) constrained problems:

* `DAEMassMatrix()` — uses the Rodas5 solver (from OrdinaryDiffEq.jl) for DAE problems with a mass matrix formulation.

* `DAEIndexing()` — uses the IDA solver (from Sundials.jl) for DAE problems with index variable support.

You can also define a custom optimizer using the generic `ODEOptimizer(solver)` or `DAEOptimizer(solver)` constructor by supplying any ODE or DAE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/) or [Sundials.jl](https://github.com/SciML/Sundials.jl).
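
A minimal sketch, assuming the DAE optimizers accept the same `solve` interface as the ODE optimizers above (the exact setup for the mass-matrix or index-variable formulation may differ):

```julia
# Hypothetical usage; `prob_manual` is the problem defined earlier on this page.
opt = DAEMassMatrix()   # Rodas5-based; swap in DAEIndexing() for the IDA-based solver
sol = solve(prob_manual, opt; maxiters = 10_000)
```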

74 changes: 61 additions & 13 deletions lib/OptimizationBBO/src/OptimizationBBO.jl
@@ -55,13 +55,31 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO;
abstol::Union{Number, Nothing} = nothing,
reltol::Union{Number, Nothing} = nothing,
verbose::Bool = false,
num_dimensions::Union{Number, Nothing} = nothing,
fitness_scheme::Union{String, Nothing} = nothing,
kwargs...)
if !isnothing(reltol)
@warn "common reltol is currently not used by $(opt)"
end
mapped_args = (; kwargs...)
mapped_args = (; mapped_args..., Method = opt.method,
SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)])

# Determine number of objectives for multi-objective problems
if isa(prob.f, MultiObjectiveOptimizationFunction)
num_objectives = length(prob.f.cost_prototype)
mapped_args = (; kwargs...)
mapped_args = (; mapped_args..., Method = opt.method,
SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)],
NumDimensions = length(prob.lb),
NumObjectives = num_objectives)
# FitnessScheme should be in opt, not the function
if hasproperty(opt, :FitnessScheme)
mapped_args = (; mapped_args..., FitnessScheme = opt.FitnessScheme)
end
else
mapped_args = (; kwargs...)
mapped_args = (; mapped_args..., Method = opt.method,
SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)],
NumDimensions = length(prob.lb))
end

if !isnothing(callback)
mapped_args = (; mapped_args..., CallbackFunction = callback,
@@ -86,6 +104,16 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO;
mapped_args = (; mapped_args..., TraceMode = :silent)
end

if isa(prob.f, MultiObjectiveOptimizationFunction)
if isnothing(num_dimensions) && isnothing(fitness_scheme)
mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing=true))
elseif isnothing(num_dimensions)
mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = fitness_scheme)
elseif isnothing(fitness_scheme)
mapped_args = (; mapped_args..., NumDimensions = num_dimensions, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing=true))
end
end

return mapped_args
end

@@ -116,8 +144,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
LC,
UC,
S,
O <:
BBO,
O <: BBO,
D,
P,
C
@@ -151,16 +178,37 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{
maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters)
maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime)

_loss = function (θ)
cache.f(θ, cache.p)

# Multi-objective: use out-of-place or in-place as appropriate
if isa(cache.f, MultiObjectiveOptimizationFunction)
if is_inplace(cache.f)
_loss = θ -> (cost = similar(cache.f.cost_prototype); cache.f.f(cost, θ, cache.p); cost)
else
_loss = θ -> cache.f.f(θ, cache.p)
end
else
_loss = θ -> cache.f(θ, cache.p)
end

opt_args = __map_optimizer_args(cache, cache.opt;
callback = cache.callback === Optimization.DEFAULT_CALLBACK ?
nothing : _cb,
cache.solver_args...,
maxiters = maxiters,
maxtime = maxtime)
if isa(cache.f, MultiObjectiveOptimizationFunction)
opt_args = __map_optimizer_args(cache, cache.opt;
callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
cache.data === Optimization.DEFAULT_DATA ?
nothing : _cb,
cache.solver_args...,
maxiters = maxiters,
maxtime = maxtime,
num_dimensions = isnothing(cache.num_dimensions) ? nothing : cache.num_dimensions,
fitness_scheme = isnothing(cache.fitness_scheme) ? nothing : cache.fitness_scheme)
else
opt_args = __map_optimizer_args(cache, cache.opt;
callback = cache.callback === Optimization.DEFAULT_CALLBACK &&
cache.data === Optimization.DEFAULT_DATA ?
nothing : _cb,
cache.solver_args...,
maxiters = maxiters,
maxtime = maxtime)
end

opt_setup = BlackBoxOptim.bbsetup(_loss; opt_args...)

66 changes: 49 additions & 17 deletions lib/OptimizationBBO/test/runtests.jl
@@ -1,4 +1,5 @@
using OptimizationBBO, Optimization, BlackBoxOptim
using Optimization.SciMLBase
using Optimization.SciMLBase: MultiObjectiveOptimizationFunction
using Test

@@ -34,6 +35,43 @@ using Test
push!(loss_history, fitness)
return false
end

# Define the initial guess and bounds ONCE for all tests
u0 = [0.25, 0.25]
lb = [0.0, 0.0]
ub = [2.0, 2.0]
opt = OptimizationBBO.BBO_borg_moea()

@testset "In-place Multi-Objective Optimization" begin
function inplace_multi_obj!(cost, x, p)
cost[1] = sum(x .^ 2)
cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10)
return nothing
end
cost_prototype = zeros(2)
mof_inplace = MultiObjectiveOptimizationFunction{true}(inplace_multi_obj!, SciMLBase.NoAD(); cost_prototype=cost_prototype)
prob_inplace = Optimization.OptimizationProblem(mof_inplace, u0; lb=lb, ub=ub)
sol_inplace = solve(prob_inplace, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true))
@test sol_inplace ≠ nothing
@test length(sol_inplace.objective) == 2
@test sol_inplace.objective[1] ≈ 6.9905986e-18 atol=1e-3
@test sol_inplace.objective[2] ≈ 1.7763568e-15 atol=1e-3
end

@testset "Custom coalesce for Multi-Objective" begin
function multi_obj_tuple(x, p)
f1 = sum(x .^ 2)
f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10)
return (f1, f2)
end
mof_coalesce = MultiObjectiveOptimizationFunction{false}(multi_obj_tuple, SciMLBase.NoAD(); cost_prototype=zeros(2))
prob_coalesce = Optimization.OptimizationProblem(mof_coalesce, u0; lb=lb, ub=ub)
sol_coalesce = solve(prob_coalesce, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true))
@test sol_coalesce ≠ nothing
@test sol_coalesce.objective[1] ≈ 6.9905986e-18 atol=1e-3
@test sol_coalesce.objective[2] ≈ 1.7763568e-15 atol=1e-3
end

sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), callback = cb)
# println(fitness_progress_history)
@test !isempty(fitness_progress_history)
@@ -55,13 +93,7 @@ using Test
maxtime = 5)
end

# Define the initial guess and bounds
u0 = [0.25, 0.25]
lb = [0.0, 0.0]
ub = [2.0, 2.0]

# Define the optimizer
opt = OptimizationBBO.BBO_borg_moea()
# ...existing code...

@testset "Multi-Objective Optimization Tests" begin

@@ -73,10 +105,10 @@ using Test
return (f1, f2)
end

mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1)
mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2))
prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub)
sol_1 = solve(prob_1, opt, NumDimensions = 2,
FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
sol_1 = solve(prob_1, opt, num_dimensions = 2,
fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))

@test sol_1 ≠ nothing
println("Solution for Sphere and Rastrigin: ", sol_1)
@@ -100,7 +132,7 @@ using Test
return false
end

mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1)
mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2))
prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub)
sol_1 = solve(prob_1, opt, NumDimensions = 2,
FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true),
@@ -126,10 +158,10 @@ using Test
return (f1, f2)
end

mof_2 = MultiObjectiveOptimizationFunction(multi_obj_func_2)
mof_2 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_2, SciMLBase.NoAD(); cost_prototype=zeros(2))
prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb = lb, ub = ub)
sol_2 = solve(prob_2, opt, NumDimensions = 2,
FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
sol_2 = solve(prob_2, opt, num_dimensions = 2,
fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))

@test sol_2 ≠ nothing
println("Solution for Rosenbrock and Ackley: ", sol_2)
@@ -146,10 +178,10 @@ using Test
return (f1, f2)
end

mof_3 = MultiObjectiveOptimizationFunction(multi_obj_func_3)
mof_3 = SciMLBase.MultiObjectiveOptimizationFunction{false}(multi_obj_func_3, SciMLBase.NoAD(); cost_prototype=zeros(2))
prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb = lb, ub = ub)
sol_3 = solve(prob_3, opt, NumDimensions = 2,
FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true))
sol_3 = solve(prob_3, opt, num_dimensions = 2,
fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))

@test sol_3 ≠ nothing
println("Solution for ZDT1: ", sol_3)