Performance and data profiles #138

Open · wants to merge 2 commits into base: master
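Summary: this PR adds two benchmark scripts under benchmarks/profiles/. data-profile.jl builds a data profile of TR-R2 and TR-TRDH on randomly generated regularized NNMF problems, and perf-profile.jl builds performance profiles (number of gradient evaluations to reach first-order stationarity) of R2, TRDH and TR variants on NNMF or BPDN problems.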
benchmarks/profiles/data-profile.jl (new file, 86 additions, 0 deletions)
@@ -0,0 +1,86 @@
using PrettyTables, LaTeXStrings
using Random
using LinearAlgebra
using ProximalOperators
using Plots
using BenchmarkProfiles
using NLPModels,
  NLPModelsModifiers,
  RegularizedProblems,
  RegularizedOptimization,
  ShiftedProximalOperators,
  SolverBenchmark
using Printf

function data_prof_nnmf(
  solvers,
  solver_names,
  nb_prob::Int,
  random_seed::Int;
  measured::Symbol = :obj, # set to :grad to measure gradient evaluations instead of objective evaluations
)
  m, n, k = 100, 50, 5
  λ = 1.0e-1
  h = NormL1(λ)
  n_solvers = length(solvers)
  objdecs = Vector{Float64}[]

  nb_data_min = Int(10^16) # minimum history length over all solvers and problems
  for i = 1:nb_prob
    current_seed = random_seed + i - 1 # one seed per problem, as in perf-profile.jl
    Random.seed!(current_seed)
    model, nls_model, A, selected = nnmf_model(m, n, k)
    f = LSR1Model(model)
    @info "pb $i"
    for (solver, name) in zip(solvers, solver_names)
      @info "pb: $i solver: $name"
      solver_out = solver(f, h, selected)
      objdec = solver_out.solver_specific[:Fhist] + solver_out.solver_specific[:Hhist]
      measured == :grad && unique!(objdec)
      nb_data_min = min(nb_data_min, length(objdec))
      push!(objdecs, objdec)
      reset!(f)
    end
  end

  data = zeros(nb_data_min, nb_prob, n_solvers)
  idx_objdec = 1
  for i = 1:nb_prob
    for idx_solv = 1:n_solvers
      objdec = objdecs[idx_objdec]
      data[:, i, idx_solv] .= objdec[1:nb_data_min]
      idx_objdec += 1
    end
  end

  data_profile(PlotsBackend(), data, ones(nb_prob), solver_names, legend = :topleft, τ = 1.0e-2)
end
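# Note on the call above (my reading of the BenchmarkProfiles.jl data_profile API, worth
# double-checking): data[k, i, s] holds the composite objective f + h recorded after the
# k-th evaluation by solver s on problem i, ones(nb_prob) gives every problem the same cost
# per "data point", and τ = 1.0e-2 is the accuracy level used to declare a problem solved.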

ν = 1.0
ϵ = 1.0e-4
ϵi = 1.0e-4
ϵri = 1.0e-4
maxIter = 200
maxIter_inner = 100
function TR_R2(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, verbose = 0, maxIter = maxIter)
  sub_opt = ROSolverOptions(ϵa = ϵi, ϵr = ϵri, maxIter = maxIter_inner)
  TR(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, subsolver_options = sub_opt, selected = selected)
end

function TR_TRDH(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, verbose = 0, maxIter = maxIter)
  sub_opt = ROSolverOptions(ϵa = ϵi, ϵr = ϵri, maxIter = maxIter_inner)
  TR(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, subsolver_options = sub_opt, selected = selected, subsolver = TRDH)
end

data_prof_nnmf(
  [TR_R2, TR_TRDH],
  ["TR-R2", "TR-TRDH"],
  5,
  1234;
  measured = :grad, # count gradient evaluations (use :obj for objective evaluations)
)
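# Optional (untested sketch, assuming data_profile with PlotsBackend returns a Plots.jl plot):
# the figure could be captured and saved, e.g.
#   p = data_prof_nnmf([TR_R2, TR_TRDH], ["TR-R2", "TR-TRDH"], 5, 1234; measured = :grad)
#   savefig(p, "nnmf-data-profile.pdf")  # file name is arbitrary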
benchmarks/profiles/perf-profile.jl (new file, 125 additions, 0 deletions)
@@ -0,0 +1,125 @@
using PrettyTables, LaTeXStrings
using Random
using LinearAlgebra
using ProximalOperators
using Plots
using BenchmarkProfiles
using NLPModels,
  NLPModelsModifiers,
  RegularizedProblems,
  RegularizedOptimization,
  ShiftedProximalOperators,
  SolverBenchmark
using Printf

function benchmark_prof(
  pb::Symbol, # :nnmf or :bpdn
  solvers,
  solver_names,
  nb_prob::Int,
  random_seed::Int;
  measured::Symbol = :obj, # set to :grad to measure gradient evaluations instead of objective evaluations
)

  if pb == :nnmf
    m, n, k = 100, 50, 5
    λ = 1.0e-1
  elseif pb == :bpdn
    compound = 1
  else
    error("Problem not supported")
  end
  n_solvers = length(solvers)
  data = zeros(nb_prob, n_solvers)

  for i = 1:nb_prob
    current_seed = random_seed + i - 1
    Random.seed!(current_seed)
    if pb == :nnmf
      model, nls_model, A, selected = nnmf_model(m, n, k)
      h = NormL0(λ)
    elseif pb == :bpdn
      model, nls_model, sol = bpdn_model(compound, bounds = false)
      selected = 1:length(sol)
      λ = norm(grad(model, zeros(model.meta.nvar)), Inf) / 10
      reset!(model)
      h = NormL0(λ)
    end
    f = LSR1Model(model)
    @info "pb $i"
    for (j, solver, name) in zip(1:n_solvers, solvers, solver_names)
      solver_out = solver(f, h, selected)
      @info "pb: $i solver: $name status = $(solver_out.status) obj = $(solver_out.objective)"
      if solver_out.status == :first_order
        # record the cost of the successful run: gradient or objective evaluations
        data[i, j] = measured == :grad ? neval_grad(f) : neval_obj(f)
      else
        data[i, j] = +Inf
      end
      reset!(f)
    end
  end

  performance_profile(PlotsBackend(), data, solver_names, legend = :bottomright, title = String(measured))
end
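# Reading the profile (as I understand the Dolan and Moré performance-profile convention):
# data[i, j] is the evaluation count solver j needed to reach first-order stationarity on
# problem i, Inf marking a failure, and performance_profile plots, for each solver, the
# fraction of problems it solves within a given factor of the best observed cost.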

ν = 1.0
ϵ = 1.0e-5
ϵi = 1.0e-3
ϵri = 1.0e-6
maxIter = 2000
maxIter_inner = 100
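# The wrappers below differ mainly in the diagonal Hessian approximation passed to the TRDH
# subsolver (my reading of the option names, worth double-checking against ROSolverOptions):
# psb = true selects the diagonal PSB update, spectral = true the spectral scaling, and
# spectral = false with psb = false the Andrei-type diagonal update, hence the suffixes.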
function TR_R2(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, verbose = 0, maxIter = maxIter)
  sub_opt = ROSolverOptions(ϵa = ϵi, ϵr = ϵri, maxIter = maxIter_inner)
  TR(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, subsolver_options = sub_opt, selected = selected)
end

function TR_TRDH_PSB(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, verbose = 0, maxIter = maxIter)
  sub_opt = ROSolverOptions(ϵa = ϵi, ϵr = ϵri, maxIter = maxIter_inner, psb = true, reduce_TR = false)
  TR(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, subsolver_options = sub_opt, selected = selected, subsolver = TRDH)
end

function TR_TRDH_Andrei(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, verbose = 0, maxIter = maxIter)
  sub_opt = ROSolverOptions(ϵa = ϵi, ϵr = ϵri, maxIter = maxIter_inner, spectral = false, psb = false, reduce_TR = false)
  TR(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, subsolver_options = sub_opt, selected = selected, subsolver = TRDH)
end

function TR_TRDH_Spec(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, verbose = 0, maxIter = maxIter)
  sub_opt = ROSolverOptions(ϵa = ϵi, ϵr = ϵri, maxIter = maxIter_inner, spectral = true, reduce_TR = false)
  TR(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, subsolver_options = sub_opt, selected = selected, subsolver = TRDH)
end

function TRDH_Spec(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, maxIter = maxIter, spectral = true, reduce_TR = false)
  TRDH(f, h, NormLinf(1.0), opt, x0 = f.meta.x0, selected = selected)
end

function R2_None(f, h, selected; ϵ = ϵ, ϵi = ϵi, ϵri = ϵri, maxIter = maxIter, maxIter_inner = maxIter_inner)
  opt = ROSolverOptions(ν = ν, ϵa = ϵ, ϵr = ϵ, maxIter = maxIter, spectral = true, reduce_TR = false)
  R2(f, h, opt; x0 = f.meta.x0, selected = selected)
end


# benchmark_prof(
#   :nnmf,
#   [TRDH_Spec, TR_R2, TR_TRDH_PSB, TR_TRDH_Andrei, TR_TRDH_Spec],
#   ["TRDH-Spec", "TR-R2", "TR-TRDH-PSB", "TR-TRDH-Andrei", "TR-TRDH-Spec"],
#   50,
#   1234;
#   measured = :grad, # count gradient evaluations (use :obj for objective evaluations)
# )

benchmark_prof(
  :bpdn,
  [R2_None, TRDH_Spec, TR_R2, TR_TRDH_PSB],
  ["R2", "TRDH-Spec", "TR-R2", "TR-TRDH-PSB"],
  50,
  1234;
  measured = :grad, # count gradient evaluations (use :obj for objective evaluations)
)