From 1fa550cfcd18f5767472cc7b062399c42abe73ca Mon Sep 17 00:00:00 2001
From: Evgeny Metelkin
Date: Thu, 24 Oct 2024 11:23:02 +0300
Subject: [PATCH] update cases function for test

---
 src/cico_one_pass.jl                |  9 +++++++--
 src/get_optimal.jl                  |  4 ++--
 test/cases_func.jl                  | 24 +++++++++++++++++-------
 test/runtests.jl                    |  4 ++--
 test/test_deriv_free_algs.jl        | 10 +++++-----
 test/test_get_endpoint_errors.jl    |  2 +-
 test/test_get_endpoint_scan_func.jl |  2 +-
 test/test_get_optimal_series.jl     |  4 ++--
 test/test_grad_algs.jl              | 16 ++++++++--------
 9 files changed, 45 insertions(+), 30 deletions(-)

diff --git a/src/cico_one_pass.jl b/src/cico_one_pass.jl
index e75c276..04825f6 100644
--- a/src/cico_one_pass.jl
+++ b/src/cico_one_pass.jl
@@ -38,7 +38,9 @@ function get_right_endpoint(
     local_opt = Opt(local_alg, n_theta)
     ftol_abs!(local_opt, scan_tol)
     ftol_rel!(local_opt, scan_rtol)
-    #initial_step!(local_opt, fill(1., n_theta))
+    # XXX: testing
+    #is_auto = initial_step(local_opt, theta_init)
+    #initial_step!(local_opt, is_auto)
 
     # flags to analyze fitting stop
     out_of_bound::Bool = false
@@ -47,7 +49,7 @@
     # function constraints_func(x) # testing grad methods
     # this part is necessary to understand the difference between
    # "stop out of bounds" and "stop because of function call error"
-    # in NLopt >= 1.0.2 we need to0 throw ForcedStop() to stop optimization
+    # in NLopt >= 1.0.2 we need to throw ForcedStop() to stop optimization
    loss_value = try
        loss_func(x)
    catch e
@@ -88,6 +90,9 @@
     # constrain optimizer
     opt = Opt(:LN_AUGLAG, n_theta)
     ftol_abs!(opt, scan_tol)
+    # XXX: testing
+    #is_auto_glob = initial_step(opt, theta_init)
+    #initial_step!(opt, is_auto_glob)
 
     max_objective!(
         opt,
diff --git a/src/get_optimal.jl b/src/get_optimal.jl
index bb8cc62..ece9fa7 100644
--- a/src/get_optimal.jl
+++ b/src/get_optimal.jl
@@ -146,8 +146,8 @@ function get_optimal(
 
     # initial step for optimization
     theta_step_auto = initial_step(opt, theta_init_g) # for testing
-    theta_step = [(x == 0. ? 1. : copy(x)) for x in theta_init_g] # same as auto
-    initial_step!(opt, theta_step)
+    #theta_step = [(x == 0. ? 1. : copy(x)) for x in theta_init_g] # same as auto
+    initial_step!(opt, theta_step_auto)
 
     # version 1: internal :LN_AUGLAG box constrains
     theta_bounds_g = scaling.(theta_bounds, scale)
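Note: both hunks above probe NLopt.jl's initial-step controls. initial_step(opt, x) queries the step the optimizer would take from the start point x, and initial_step!(opt, dx) fixes it explicitly; per the commented-out list comprehension in get_optimal.jl, the automatic step equals each nonzero component of the start point, with 1.0 substituted for zeros. A minimal standalone sketch of the getter/setter pair (the quadratic objective and start point are placeholders, not code from this repository):

using NLopt

# placeholder objective: minimize (x1 - 1)^2 + (x2 - 2)^2
opt = Opt(:LN_NELDERMEAD, 2)
min_objective!(opt, (x, grad) -> (x[1] - 1.0)^2 + (x[2] - 2.0)^2)
ftol_abs!(opt, 1e-8)

theta_init = [3.0, 0.0]
dx_auto = initial_step(opt, theta_init)  # step NLopt would take from theta_init
initial_step!(opt, dx_auto)              # pin it explicitly, as the commented lines do

minf, minx, ret = optimize(opt, theta_init)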
diff --git a/test/cases_func.jl b/test/cases_func.jl
index 11eeb82..d51e0a5 100644
--- a/test/cases_func.jl
+++ b/test/cases_func.jl
@@ -31,13 +31,13 @@ function test_alg_interval(
     for (f_name, f) in func_dict
         #println("Testing $f_name")
         @testset "Case $f_name" begin
-            for i in eachindex(f.x0)
+            for i in eachindex(f.x1)
                 ep = get_interval(
-                    f.x0,
+                    f.x1,
                     i,
                     f.func,
                     :CICO_ONE_PASS;
-                    theta_bounds=fill(bounds,length(f.x0)),
+                    theta_bounds=fill(bounds,length(f.x1)),
                     scan_tol=1e-8,
                     local_alg = alg.algorithm,
                     loss_crit = f.loss_crit,
@@ -50,12 +50,12 @@
                 if isa(f.endpoints[i][1], Nothing)
                     @test isa(ep.result[1].value, Nothing) skip = should_skip
                 else
-                    @test isapprox(ep.result[1].value, f.endpoints[i][1], atol=tol) skip = should_skip
+                    @test isapprox(ep.result[1].value, f.endpoints[i][1], atol = tol * 10.) skip = should_skip
                 end
                 if isa(f.endpoints[i][2], Nothing)
                     @test isa(ep.result[2].value, Nothing) skip = should_skip
                 else
-                    @test isapprox(ep.result[2].value, f.endpoints[i][2], atol=tol) skip = should_skip
+                    @test isapprox(ep.result[2].value, f.endpoints[i][2], atol = tol * 10.) skip = should_skip
                 end
             end
         end
@@ -96,7 +96,7 @@ function test_alg_optimal(
         @test (result.ret == :XTOL_REACHED || result.ret == :SUCCESS) skip = should_skip
         for i in eachindex(f.x0)
             if f.x_optim[i] !== nothing
-                @test isapprox(result.params[i], f.x_optim[i], atol = scan_tol * 10) skip = should_skip
+                @test isapprox(result.params[i], f.x_optim[i], atol = scan_tol * 10.) skip = should_skip
             end
         end
     end
@@ -110,7 +110,8 @@ end
 test_funcs = Dict(
     :f_1p => (
         func = f_1p,
-        x0 = [2.],
+        x0 = [2.], # to start optimization
+        x1 = [3.], # to start scan
         endpoints = [(1.,5.)],
         status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL)],
         loss_crit = 9.,
@@ -121,6 +122,7 @@ test_funcs = Dict(
     :f_2p_1im => (
         func = f_2p_1im,
         x0 = [4.,1.],
+        x1 = [3.,1.5],
         endpoints = [(1.,5.),(nothing,nothing)],
         status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
             (:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
@@ -132,6 +134,7 @@ test_funcs = Dict(
     :f_2p => (
         func = f_2p,
         x0 = [4.,5.],
+        x1 = [3.,4.],
         endpoints = [(1.,5.),
             (2.,6.)],
         status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),
@@ -144,6 +147,7 @@ test_funcs = Dict(
     :f_3p_1im => (
         func = f_3p_1im,
         x0 = [4.,4.,1.1],
+        x1 = [3.,4.4,1.1],
         endpoints = [(1.,5.),
             (nothing,nothing),
             (nothing,nothing)],
@@ -158,6 +162,7 @@ test_funcs = Dict(
     :f_3p_1im_dep => (
         func = f_3p_1im_dep,
         x0 = [4., 3., 2.1],
+        x1 = [3., 2., 1.5],
         endpoints = [(1.,5.),
             (2.0-2.0*sqrt(2.),2.0+2.0*sqrt(2.)),
             (nothing,nothing)],
@@ -172,6 +177,7 @@ test_funcs = Dict(
     :f_4p_2im => (
         func = f_4p_2im,
         x0 = [4.,5.,1.1,1.1],
+        x1 = [3.,4.,1.5,1.5],
         endpoints = [(1.,5.),
             (2.,6.),
             (nothing,nothing),
@@ -188,6 +194,7 @@ test_funcs = Dict(
     :f_4p_3im => (
         func = f_4p_3im,
         x0 = [4.,4.,1.1,1.1],
+        x1 = [3.,4.4,1.1,1.5],
         endpoints = [(1.,5.),
             (nothing,nothing),
             (nothing,nothing),
@@ -204,6 +211,7 @@ test_funcs = Dict(
     :f_1p_ex => (
         func = f_1p_ex,
         x0 = [1.5, 2.],
+        x1 = [1e-8, 1.5],
         endpoints = [(-2+1e-8,2+1e-8), (nothing, nothing)],
         status = [(:BORDER_FOUND_BY_SCAN_TOL,:BORDER_FOUND_BY_SCAN_TOL),(:SCAN_BOUND_REACHED,:SCAN_BOUND_REACHED)],
         loss_crit = 9.,
@@ -214,6 +222,7 @@ test_funcs = Dict(
     :f_5p_3im => (
         func = f_5p_3im,
         x0 = [4., 0.5, 8., 2., 2.],
+        x1 = [3., 0.1, 8., 2., 1.5],
         endpoints = [(1.,5.),
             (nothing,log(3)),
             (nothing,nothing),
@@ -232,6 +241,7 @@ test_funcs = Dict(
     :f_3p_im => (
         func = f_3p_im,
         x0 = [4.,0.5,1,],
+        x1 = [3.,0.1,1.5],
         endpoints = [(1.,5.),
             (nothing,log(3)),
             (nothing,nothing)],
diff --git a/test/runtests.jl b/test/runtests.jl
index e71aef4..0060456 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -39,7 +39,7 @@ println("Starting tests for get_optimal")
 
 # experimental tests
-#@testset "testing derivative-free algorithms" begin include("test_deriv_free_algs.jl") end
-#@testset "gradient-based algorithms" begin include("test_grad_algs.jl") end
+@testset "testing derivative-free algorithms" begin include("test_deriv_free_algs.jl") end
+@testset "gradient-based algorithms" begin include("test_grad_algs.jl") end
 
 @testset "get_optimal series" begin include("test_get_optimal_series.jl") end
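Note on the `skip = should_skip` keyword used throughout test/cases_func.jl: this is the conditional-skip form of @test from Julia's Test stdlib (available since Julia 1.7). When the condition is true the expression is not evaluated, and the test is recorded as skipped rather than failed. A minimal illustration, with should_skip standing in for membership in an algorithm's skip list:

using Test

should_skip = true  # stand-in: e.g. the current case is on the algorithm's skip list

@testset "conditional skip" begin
    @test isapprox(1.0, 2.0; atol = 1e-8) skip = should_skip  # recorded as skipped, not failed
    @test 1 + 1 == 2 skip = false                             # evaluated as usual
end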
diff --git a/test/test_deriv_free_algs.jl b/test/test_deriv_free_algs.jl
index 83fd7a7..47939a5 100644
--- a/test/test_deriv_free_algs.jl
+++ b/test/test_deriv_free_algs.jl
@@ -7,12 +7,12 @@ dfo_algorithms = [
     # good
     (algorithm = :LN_NELDERMEAD, skip = []), # Nelder Mead
     # unstable
-    (algorithm = :LN_PRAXIS, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # "PRAXIS" gradient-free local optimization via the "principal-axis method"
+    (algorithm = :LN_PRAXIS, skip = [:f_1p, :f_1p_ex]), # "PRAXIS" gradient-free local optimization via the "principal-axis method"
     # errors
-    (algorithm = :LN_SBPLX, skip = [:f_3p_1im, :f_3p_1im_dep]), # Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
-    (algorithm = :LN_NEWUOA, skip = [:f_1p, :f_3p_1im, :f_4p_3im, :f_5p_3im, :f_2p_1im, :f_3p_im, :f_2p]),
-    (algorithm = :LN_BOBYQA, skip = [:f_1p, :f_3p_1im, :f_1p_ex, :f_4p_3im, :f_5p_3im, :f_2p_1im]), # BOBYQA algorithm for bound constrained optimization without derivatives
+    (algorithm = :LN_SBPLX, skip = [:f_3p_1im, :f_3p_1im_dep, :f_5p_3im]), # Subplex (a variant of Nelder-Mead that uses Nelder-Mead on a sequence of subspaces)
+    (algorithm = :LN_NEWUOA, skip = [:f_1p, :f_3p_1im, :f_4p_2im, :f_1p_ex, :f_4p_3im, :f_5p_3im, :f_2p_1im]),
+    (algorithm = :LN_BOBYQA, skip = [:f_1p, :f_3p_1im, :f_1p_ex, :f_3p_im, :f_4p_3im, :f_5p_3im, :f_2p_1im]), # BOBYQA algorithm for bound constrained optimization without derivatives
     #(algorithm = :LN_COBYLA, skip = []), # Constrained Optimization BY Linear Approximations
 ]
 
-[test_alg(alg; bounds=(-Inf,Inf)) for alg in dfo_algorithms]
+[test_alg_interval(alg; bounds=(-Inf,Inf)) for alg in dfo_algorithms]
diff --git a/test/test_get_endpoint_errors.jl b/test/test_get_endpoint_errors.jl
index ae6b7e1..c142c37 100644
--- a/test/test_get_endpoint_errors.jl
+++ b/test/test_get_endpoint_errors.jl
@@ -40,7 +40,7 @@ end
 
 @testset "wrong theta_init in log scale" begin
     method = :CICO_ONE_PASS
-    @test_throws ArgumentError get_endpoint(
+    @test_throws ArgumentError get_endpoint( # DomainError instead of ArgumentError
         [3., 2., -1],
         1,
         f_3p_1im_dep,
diff --git a/test/test_get_endpoint_scan_func.jl b/test/test_get_endpoint_scan_func.jl
index 7721160..944b6e3 100644
--- a/test/test_get_endpoint_scan_func.jl
+++ b/test/test_get_endpoint_scan_func.jl
@@ -42,7 +42,7 @@
         silent = true
     )
     @test res0.status == :BORDER_FOUND_BY_SCAN_TOL
-    @test res0.counter < 1200
+    @test res0.counter < 1000
     @test isapprox(res0.value, 169.; atol=1e-2)
 end
diff --git a/test/test_get_optimal_series.jl b/test/test_get_optimal_series.jl
index b052ba6..98226c1 100644
--- a/test/test_get_optimal_series.jl
+++ b/test/test_get_optimal_series.jl
@@ -52,9 +52,9 @@
 end
 
 @testset "scan" begin
-    [test_alg_optimal(alg; scan_tol = 1e-4) for alg in all_algorithms_scan]
+    [test_alg_optimal(alg; scan_tol = 1e-6) for alg in all_algorithms_scan]
 end
 
 @testset "scan :log" begin
-    [test_alg_optimal(alg; scan_tol = 1e-4, scale = :log, bounds = (0.,Inf)) for alg in all_algorithms_scan_log]
+    [test_alg_optimal(alg; scan_tol = 1e-5, scale = :log, bounds = (0.,Inf)) for alg in all_algorithms_scan_log]
 end
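Note on the `# DomainError instead of ArgumentError` annotation in test/test_get_endpoint_errors.jl: assuming the :log scale path applies log directly to the raw parameter values, a negative component raises Julia's built-in DomainError before any explicit argument validation can throw ArgumentError. A minimal sketch of the distinction between the two error types:

using Test

@test_throws DomainError log(-1.0)          # what log-scaling a negative value raises
@test_throws ArgumentError parse(Int, "x")  # a typical explicitly thrown validation error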
diff --git a/test/test_grad_algs.jl b/test/test_grad_algs.jl
index eb39a49..dfe9dc8 100644
--- a/test/test_grad_algs.jl
+++ b/test/test_grad_algs.jl
@@ -6,13 +6,13 @@
 grad_algorithms_autodiff = [
     # good
     #:LD_MMA, # Method of Moving Asymptotes #tmp removed from tests
     (algorithm = :LD_SLSQP, skip = []), # Sequential Least-Squares Quadratic Programming
-    (algorithm = :LD_CCSAQ, skip = []), # Conservative convex separable approximation
+    (algorithm = :LD_CCSAQ, skip = [:f_3p_1im, :f_4p_3im]), # Conservative convex separable approximation
     # errors
-    (algorithm = :LD_LBFGS, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Low-storage BFGS
-    (algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Preconditioned truncated Newton
+    (algorithm = :LD_LBFGS, skip = [:f_3p_1im, :f_4p_2im, :f_3p_im, :f_4p_3im, :f_5p_3im, :f_2p]), # Low-storage BFGS
+    (algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_3p_im, :f_4p_3im, :f_5p_3im, :f_2p]), # Preconditioned truncated Newton
     (algorithm = :LD_TNEWTON_PRECOND, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without restarting
-    (algorithm = :LD_TNEWTON_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_2p]), # Same without preconditioning
-    (algorithm = :LD_TNEWTON, skip = [:f_3p_1im, :f_4p_3im]), # Same without restarting or preconditioning
+    (algorithm = :LD_TNEWTON_RESTART, skip = [:f_3p_1im, :f_4p_2im, :f_3p_im, :f_5p_3im, :f_2p]), # Same without preconditioning
+    (algorithm = :LD_TNEWTON, skip = [:f_5p_3im]), # Same without restarting or preconditioning
     (algorithm = :LD_VAR2, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]), # Shifted limited-memory variable-metric (rank 2)
     (algorithm = :LD_VAR1, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]) # Shifted limited-memory variable-metric (rank 1)
 ]
@@ -25,8 +25,8 @@ grad_algorithms_finite = [
     (algorithm = :LD_SLSQP, skip = []), # Sequential Least-Squares Quadratic Programming
     (algorithm = :LD_CCSAQ, skip = [:f_5p_3im]), # Conservative convex separable approximation
     # errors
-    (algorithm = :LD_LBFGS, skip = [:f_1p, :f_3p_1im, :f_4p_3im, :f_5p_3im, :f_4p_2im, :f_2p_1im, :f_2p]), # Low-storage BFGS
-    (algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_3p_1im_dep, :f_4p_3im, :f_5p_3im]), # Preconditioned truncated Newton
+    (algorithm = :LD_LBFGS, skip = [:f_1p, :f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p_1im, :f_2p]), # Low-storage BFGS
+    (algorithm = :LD_TNEWTON_PRECOND_RESTART, skip = [:f_3p_1im, :f_3p_1im_dep, :f_3p_im, :f_4p_3im, :f_5p_3im]), # Preconditioned truncated Newton
     (algorithm = :LD_TNEWTON_PRECOND, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without restarting
     (algorithm = :LD_TNEWTON_RESTART, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without preconditioning
     (algorithm = :LD_TNEWTON, skip = [:f_3p_1im, :f_4p_3im, :f_5p_3im]), # Same without restarting or preconditioning
@@ -34,4 +34,4 @@ grad_algorithms_finite = [
     (algorithm = :LD_VAR1, skip = [:f_3p_1im, :f_4p_2im, :f_4p_3im, :f_5p_3im, :f_2p]) # Shifted limited-memory variable-metric (rank 1)
 ]
 
-[test_alg_interval(alg; bounds=(-1e10,1e10), loss_grad=:FINITE) for alg in grad_algorithms_finite]
\ No newline at end of file
+[test_alg_interval(alg; bounds=(-1e10,1e10), loss_grad=:FINITE) for alg in grad_algorithms_finite]
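Note: many of the skips above mark gradient-based optimizers that abort with an error instead of stopping cleanly. This is what the comment fixed in src/cico_one_pass.jl refers to: with NLopt >= 1.0.2, a callback that needs to abort the run should throw NLopt.ForcedStop, which the optimizer turns into a :FORCED_STOP return code rather than a propagated exception. A minimal sketch of the pattern (standalone; the out-of-bounds guard and objective are placeholders for the package's own constraint logic):

using NLopt

function objective(x, grad)
    if abs(x[1]) > 1e3              # placeholder out-of-bounds guard
        throw(NLopt.ForcedStop())   # stops the run; optimize returns :FORCED_STOP
    end
    return (x[1] - 1.0)^2
end

opt = Opt(:LN_NELDERMEAD, 1)
min_objective!(opt, objective)
ftol_abs!(opt, 1e-8)

minf, minx, ret = optimize(opt, [0.5])  # ret == :FORCED_STOP if the guard fired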