Skip to content

Commit

Permalink
update cases function for test
Browse files Browse the repository at this point in the history
  • Loading branch information
metelkin committed Oct 24, 2024
1 parent c24c25f commit 1fa550c
Show file tree
Hide file tree
Showing 9 changed files with 45 additions and 30 deletions.
9 changes: 7 additions & 2 deletions src/cico_one_pass.jl
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,9 @@ function get_right_endpoint(
local_opt = Opt(local_alg, n_theta)
ftol_abs!(local_opt, scan_tol)
ftol_rel!(local_opt, scan_rtol)
#initial_step!(local_opt, fill(1., n_theta))
# XXX: testing
#is_auto = initial_step(local_opt, theta_init)
#initial_step!(local_opt, is_auto)

# flags to analyze fitting stop
out_of_bound::Bool = false
Expand All @@ -47,7 +49,7 @@ function get_right_endpoint(
# function constraints_func(x) # testing grad methods
# this part is necessary to understand the difference between
# "stop out of bounds" and "stop because of function call error"
# in NLopt >= 1.0.2 we need to throw ForcedStop() to stop optimization
# in NLopt >= 1.0.2 we need to throw ForcedStop() to stop optimization
loss_value = try
loss_func(x)
catch e
Expand Down Expand Up @@ -88,6 +90,9 @@ function get_right_endpoint(
# constrain optimizer
opt = Opt(:LN_AUGLAG, n_theta)
ftol_abs!(opt, scan_tol)
# XXX: testing
#is_auto_glob = initial_step(opt, theta_init)
#initial_step!(opt, is_auto_glob)

max_objective!(
opt,
Expand Down
4 changes: 2 additions & 2 deletions src/get_optimal.jl
Original file line number Diff line number Diff line change
Expand Up @@ -146,8 +146,8 @@ function get_optimal(

# initial step for optimization
theta_step_auto = initial_step(opt, theta_init_g) # for testing
theta_step = [(x == 0. ? 1. : copy(x)) for x in theta_init_g] # same as auto
initial_step!(opt, theta_step)
#theta_step = [(x == 0. ? 1. : copy(x)) for x in theta_init_g] # same as auto
initial_step!(opt, theta_step_auto)

# version 1: internal :LN_AUGLAG box constrains
theta_bounds_g = scaling.(theta_bounds, scale)
Expand Down
24 changes: 17 additions & 7 deletions test/cases_func.jl
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,13 @@ function test_alg_interval(
for (f_name, f) in func_dict
#println("Testing $f_name")
@testset "Case $f_name" begin
for i in eachindex(f.x0)
for i in eachindex(f.x1)
ep = get_interval(
f.x0,
f.x1,
i,
f.func,
:CICO_ONE_PASS;
theta_bounds=fill(bounds,length(f.x0)),
theta_bounds=fill(bounds,length(f.x1)),
scan_tol=1e-8,
local_alg = alg.algorithm,
loss_crit = f.loss_crit,
Expand All @@ -50,12 +50,12 @@ function test_alg_interval(
if isa(f.endpoints[i][1], Nothing)
@test isa(ep.result[1].value, Nothing) skip = should_skip
else
@test isapprox(ep.result[1].value, f.endpoints[i][1], atol=tol) skip = should_skip
@test isapprox(ep.result[1].value, f.endpoints[i][1], atol = tol * 10.) skip = should_skip
end
if isa(f.endpoints[i][2], Nothing)
@test isa(ep.result[2].value, Nothing) skip = should_skip
else
@test isapprox(ep.result[2].value, f.endpoints[i][2], atol=tol) skip = should_skip
@test isapprox(ep.result[2].value, f.endpoints[i][2], atol = tol * 10.) skip = should_skip
end
end
end
Expand Down Expand Up @@ -96,7 +96,7 @@ function test_alg_optimal(
@test (result.ret == :XTOL_REACHED || result.ret == :SUCCESS) skip = should_skip
for i in eachindex(f.x0)
if f.x_optim[i] !== nothing
@test isapprox(result.params[i], f.x_optim[i], atol = scan_tol * 10) skip = should_skip
@test isapprox(result.params[i], f.x_optim[i], atol = scan_tol * 10.) skip = should_skip
end
end
end
Expand All @@ -110,7 +110,8 @@ end
test_funcs = Dict(
:f_1p => (
func = f_1p,
x0 = [2.],
x0 = [2.], # to start optimization
x1 = [3.], # to start scan
endpoints = [(1.,5.)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL)],
loss_crit = 9.,
Expand All @@ -121,6 +122,7 @@ test_funcs = Dict(
:f_2p_1im => (
func = f_2p_1im,
x0 = [4.,1.],
x1 = [3.,1.5],
endpoints = [(1.,5.),(nothing,nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
Expand All @@ -132,6 +134,7 @@ test_funcs = Dict(
:f_2p => (
func = f_2p,
x0 = [4.,5.],
x1 = [3.,4.],
endpoints = [(1.,5.),
(2.,6.)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
Expand All @@ -144,6 +147,7 @@ test_funcs = Dict(
:f_3p_1im => (
func = f_3p_1im,
x0 = [4.,4.,1.1],
x1 = [3.,4.4,1.1],
endpoints = [(1.,5.),
(nothing,nothing),
(nothing,nothing)],
Expand All @@ -158,6 +162,7 @@ test_funcs = Dict(
:f_3p_1im_dep => (
func = f_3p_1im_dep,
x0 = [4., 3., 2.1],
x1 = [3., 2., 1.5],
endpoints = [(1.,5.),
(2.0-2.0*sqrt(2.),2.0+2.0*sqrt(2.)),
(nothing,nothing)],
Expand All @@ -172,6 +177,7 @@ test_funcs = Dict(
:f_4p_2im => (
func = f_4p_2im,
x0 = [4.,5.,1.1,1.1],
x1 = [3.,4.,1.5,1.5],
endpoints = [(1.,5.),
(2.,6.),
(nothing,nothing),
Expand All @@ -188,6 +194,7 @@ test_funcs = Dict(
:f_4p_3im => (
func = f_4p_3im,
x0 = [4.,4.,1.1,1.1],
x1 = [3.,4.4,1.1,1.5],
endpoints = [(1.,5.),
(nothing,nothing),
(nothing,nothing),
Expand All @@ -204,6 +211,7 @@ test_funcs = Dict(
:f_1p_ex => (
func = f_1p_ex,
x0 = [1.5, 2.],
x1 = [1e-8, 1.5],
endpoints = [(-2+1e-8,2+1e-8), (nothing, nothing)],
status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
loss_crit = 9.,
Expand All @@ -214,6 +222,7 @@ test_funcs = Dict(
:f_5p_3im => (
func = f_5p_3im,
x0 = [4., 0.5, 8., 2., 2.],
x1 = [3., 0.1, 8., 2., 1.5],
endpoints = [(1.,5.),
(nothing,log(3)),
(nothing,nothing),
Expand All @@ -232,6 +241,7 @@ test_funcs = Dict(
:f_3p_im => (
func = f_3p_im,
x0 = [4.,0.5,1,],
x1 = [3.,0.1,1.5],
endpoints = [(1.,5.),
(nothing,log(3)),
(nothing,nothing)],
Expand Down
4 changes: 2 additions & 2 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ println("Starting tests for get_optimal")

# experimental tests

#@testset "testing derivative-free algorithms" begin include("test_deriv_free_algs.jl") end
#@testset "gradient-based algorithms" begin include("test_grad_algs.jl") end
@testset "testing derivative-free algorithms" begin include("test_deriv_free_algs.jl") end
@testset "gradient-based algorithms" begin include("test_grad_algs.jl") end

@testset "get_optimal series" begin include("test_get_optimal_series.jl") end
10 changes: 5 additions & 5 deletions test/test_deriv_free_algs.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,12 +7,12 @@ dfo_algorithms = [
# good
(algorithm = :LN_NELDERMEAD, skip = []), # Nelder Mead
# unstable
(algorithm = :LN_PRAXIS, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # "PRAXIS" gradient-free local optimization via the "principal-axis method"
(algorithm = :LN_PRAXIS, skip = [:f_1p, :f_1p_ex]), # "PRAXIS" gradient-free local optimization via the "principal-axis method"
# errors
(algorithm = :LN_SBPLX, skip = [:f_3p_1im, :f_3p_1im_dep]), # Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
(algorithm = :LN_NEWUOA, skip = [:f_1p, :f_3p_1im, :f_4p_3im, :f_5p_3im, :f_2p_1im, :f_3p_im, :f_2p]),
(algorithm = :LN_BOBYQA, skip = [:f_1p, :f_3p_1im, :f_1p_ex, :f_4p_3im, :f_5p_3im, :f_2p_1im]), # BOBYQA algorithm for bound constrained optimization without derivatives
(algorithm = :LN_SBPLX, skip = [:f_3p_1im, :f_3p_1im_dep, :f_5p_3im]), # Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
(algorithm = :LN_NEWUOA, skip = [:f_1p, :f_3p_1im, :f_4p_2im, :f_1p_ex, :f_4p_3im, :f_5p_3im, :f_2p_1im]),
(algorithm = :LN_BOBYQA, skip = [:f_1p, :f_3p_1im, :f_1p_ex, :f_3p_im, :f_4p_3im, :f_5p_3im, :f_2p_1im]), # BOBYQA algorithm for bound constrained optimization without derivatives
#(algorithm = :LN_COBYLA, skip = []), # Constrained Optimization BY Linear Approximations
]

[test_alg(alg; bounds=(-Inf,Inf)) for alg in dfo_algorithms]
[test_alg_interval(alg; bounds=(-Inf,Inf)) for alg in dfo_algorithms]
2 changes: 1 addition & 1 deletion test/test_get_endpoint_errors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ end

@testset "wrong theta_init in log scale" begin
method = :CICO_ONE_PASS
@test_throws ArgumentError get_endpoint(
@test_throws ArgumentError get_endpoint( # DomainError instead of ArgumentError
[3., 2., -1],
1,
f_3p_1im_dep,
Expand Down
2 changes: 1 addition & 1 deletion test/test_get_endpoint_scan_func.jl
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ end
silent = true
)
@test res0.status == :BORDER_FOUND_BY_SCAN_TOL
@test res0.counter < 1200
@test res0.counter < 1000
@test isapprox(res0.value, 169.; atol=1e-2)
end

Expand Down
4 changes: 2 additions & 2 deletions test/test_get_optimal_series.jl
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,9 @@ end
end

@testset "scan" begin
[test_alg_optimal(alg; scan_tol = 1e-4) for alg in all_algorithms_scan]
[test_alg_optimal(alg; scan_tol = 1e-6) for alg in all_algorithms_scan]
end

@testset "scan :log" begin
[test_alg_optimal(alg; scan_tol = 1e-4, scale = :log, bounds = (0.,Inf)) for alg in all_algorithms_scan_log]
[test_alg_optimal(alg; scan_tol = 1e-5, scale = :log, bounds = (0.,Inf)) for alg in all_algorithms_scan_log]
end
16 changes: 8 additions & 8 deletions test/test_grad_algs.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,13 +6,13 @@ grad_algorithms_autodiff = [
# good
#:LD_MMA, # Method of Moving Asymptotes #tmp removed from tests
(algorithm = :LD_SLSQP, skip = []), # Sequential Least-Squares Quadratic Programming
(algorithm = :LD_CCSAQ, skip = []), # Conservative convex separable approximation
(algorithm = :LD_CCSAQ, skip = [:f_3p_1im, :f_4p_3im]), # Conservative convex separable approximation
# errors
(algorithm = :LD_LBFGS, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Low-storage BFGS
(algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Preconditioned truncated Newton
(algorithm = :LD_LBFGS, skip = [:f_3p_1im, :f_4p_2im, :f_3p_im, :f_4p_3im, :f_5p_3im, :f_2p]), # Low-storage BFGS
(algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_3p_im, :f_4p_3im, :f_5p_3im, :f_2p]), # Preconditioned truncated Newton
(algorithm = :LD_TNEWTON_PRECOND, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without restarting
(algorithm = :LD_TNEWTON_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_2p]), # Same without preconditioning
(algorithm = :LD_TNEWTON, skip = [:f_3p_1im, :f_4p_3im]), # Same without restarting or preconditioning
(algorithm = :LD_TNEWTON_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_3p_im, :f_5p_3im, :f_2p]), # Same without preconditioning
(algorithm = :LD_TNEWTON, skip = [:f_5p_3im]), # Same without restarting or preconditioning
(algorithm = :LD_VAR2, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Shifted limited-memory variable-metric (rank 2)
(algorithm = :LD_VAR1, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]) # Shifted limited-memory variable-metric (rank 1)
]
Expand All @@ -25,13 +25,13 @@ grad_algorithms_finite = [
(algorithm = :LD_SLSQP, skip = []), # Sequential Least-Squares Quadratic Programming
(algorithm = :LD_CCSAQ, skip = [:f_5p_3im]), # Conservative convex separable approximation
# errors
(algorithm = :LD_LBFGS, skip = [:f_1p, :f_3p_1im, :f_4p_3im, :f_5p_3im, :f_4p_2im, :f_2p_1im, :f_2p]), # Low-storage BFGS
(algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_3p_1im_dep, :f_4p_3im, :f_5p_3im]), # Preconditioned truncated Newton
(algorithm = :LD_LBFGS, skip = [:f_1p, :f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p_1im, :f_2p]), # Low-storage BFGS
(algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_3p_1im_dep, :f_3p_im, :f_4p_3im, :f_5p_3im]), # Preconditioned truncated Newton
(algorithm = :LD_TNEWTON_PRECOND, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without restarting
(algorithm = :LD_TNEWTON_RESTART, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without preconditioning
(algorithm = :LD_TNEWTON, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without restarting or preconditioning
(algorithm = :LD_VAR2, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Shifted limited-memory variable-metric (rank 2)
(algorithm = :LD_VAR1, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]) # Shifted limited-memory variable-metric (rank 1)
]

[test_alg_interval(alg; bounds=(-1e10,1e10), loss_grad=:FINITE) for alg in grad_algorithms_finite]
[test_alg_interval(alg; bounds=(-1e10,1e10), loss_grad=:FINITE) for alg in grad_algorithms_finite]

0 comments on commit 1fa550c

Please sign in to comment.