From 40ff2bc1762b45d48fdff6a41e25cbf007e9fb2c Mon Sep 17 00:00:00 2001 From: Ping Date: Mon, 23 Aug 2021 13:08:35 -0500 Subject: [PATCH 1/2] fixing inconsistency in doc --- docs/src/algorithms/mts.md | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/src/algorithms/mts.md b/docs/src/algorithms/mts.md index 0f108a18..782dc9b1 100644 --- a/docs/src/algorithms/mts.md +++ b/docs/src/algorithms/mts.md @@ -3,15 +3,15 @@ ## Description MTS: Multiple Trajectory Search for Large-Scale Global Optimization, is a derivative-free heuristic optimization method presented in paper [Lin-Yu Tseng and Chun Chen, 2008](https://sci2s.ugr.es/sites/default/files/files/TematicWebSites/EAMHCO/contributionsCEC08/tseng08mts.pdf). -The main algorihtm `MTS` contains three subroutines `localsearch1`, `localsearch2` and `localsearch3`. This module implements all the optimization methods in the paper. People often use the entire `MTS` or only `localsearch1` to optimize functions, while `localsearch2` or `localsearch3` would rarely be used independently. Therefore, the module only exports `MTS` and `localsearch1`. +The main algorihtm `MTS` contains three subroutines `localsearch1`, `localsearch2` and `localsearch3`. This module implements all the optimization methods in the paper. People often use the entire `MTS` or only `localsearch1` to optimize functions, while `localsearch2` or `localsearch3` would rarely be used independently. Therefore, the module only exports `MTS` and `LS1` (referring `localsearch1`). ## Quick start Using default `MTSOptions()`. `MTS` is used for optimization. ```julia -alg = MTSAlg() # Or LS1Alg() -LS1_options = MTSOptions() +alg = MTSAlg() +options = MTSOptions() m = Model(f) lb = [0, 0] ub = [5, 5] @@ -20,17 +20,17 @@ addvar!(m, lb, ub) result = optimize(model, alg, x0, options = options) ``` -## Options +## Using LS1 -You can choose which algorithm to use by specifying `option.method`. 
Avaliable list is `[MTS (default), localsearch1, Nonconvex.localsearch2 (not recommended), Nonconvex.localsearch3 (not recommended)]`. +You can also use `LS1` through `LS1Alg` and `LS1Options`. ```julia -alg = MTSAlg() # Or LS1Alg() -LS1_options = MTSOptions(method=localsearch1) +alg = LS1Alg() +options = LS1Options() m = Model(f)) lb = [0, 0] ub = [5, 5] # Must have a box constraint. And (in)equality constraints are not supported in MTS methods. addvar!(m, lb, ub) -result = optimize(model, alg, x0, options = options +result = optimize(model, alg, x0, options = options) ``` From 83fa6125264634133ce333ba1a69ffc8f2b3ca8b Mon Sep 17 00:00:00 2001 From: Ping Date: Tue, 24 Aug 2021 13:30:13 -0500 Subject: [PATCH 2/2] rename LS1 to LocalSearch --- docs/src/algorithms/mts.md | 12 ++++++------ src/Nonconvex.jl | 4 ++-- src/algorithms/mts.jl | 30 +++++++++++++++--------------- test/mts.jl | 8 ++++---- 4 files changed, 27 insertions(+), 27 deletions(-) diff --git a/docs/src/algorithms/mts.md b/docs/src/algorithms/mts.md index 782dc9b1..42ce6d8a 100644 --- a/docs/src/algorithms/mts.md +++ b/docs/src/algorithms/mts.md @@ -3,7 +3,7 @@ ## Description MTS: Multiple Trajectory Search for Large-Scale Global Optimization, is a derivative-free heuristic optimization method presented in paper [Lin-Yu Tseng and Chun Chen, 2008](https://sci2s.ugr.es/sites/default/files/files/TematicWebSites/EAMHCO/contributionsCEC08/tseng08mts.pdf). -The main algorihtm `MTS` contains three subroutines `localsearch1`, `localsearch2` and `localsearch3`. This module implements all the optimization methods in the paper. People often use the entire `MTS` or only `localsearch1` to optimize functions, while `localsearch2` or `localsearch3` would rarely be used independently. Therefore, the module only exports `MTS` and `LS1` (referring `localsearch1`). +The main algorihtm `MTS` contains three subroutines `localsearch1`, `localsearch2` and `localsearch3`. 
This module implements all the optimization methods in the paper. People often use the entire `MTS` or only `localsearch1` to optimize functions, while `localsearch2` or `localsearch3` would rarely be used independently. Therefore, the module only exports `MTS` and `LocalSearch` (referring to `localsearch1`). ## Quick start @@ -20,14 +20,14 @@ addvar!(m, lb, ub) result = optimize(model, alg, x0, options = options) ``` -## Using LS1 +## Using LocalSearch -You can also use `LS1` through `LS1Alg` and `LS1Options`. +You can also use `LocalSearch` through `LocalSearchAlg` and `LocalSearchOptions`. ```julia -alg = LS1Alg() -options = LS1Options() -m = Model(f)) +alg = LocalSearchAlg() +options = LocalSearchOptions() +m = Model(f) lb = [0, 0] ub = [5, 5] # Must have a box constraint. And (in)equality constraints are not supported in MTS methods. addvar!(m, lb, ub) diff --git a/src/Nonconvex.jl b/src/Nonconvex.jl index a32db59d..83801b49 100644 --- a/src/Nonconvex.jl +++ b/src/Nonconvex.jl @@ -33,7 +33,7 @@ export Model, HyperoptAlg, BayesOptAlg, MTSAlg, - LS1Alg, + LocalSearchAlg, KKTCriteria, IpoptCriteria, FunctionWrapper, @@ -48,7 +48,7 @@ export Model, HyperoptOptions, BayesOptOptions, MTSOptions, - LS1Options, + LocalSearchOptions, Tolerance, @constructor, RandomSampler, diff --git a/src/algorithms/mts.jl b/src/algorithms/mts.jl index e315c313..1c55adc7 100644 --- a/src/algorithms/mts.jl +++ b/src/algorithms/mts.jl @@ -9,7 +9,7 @@ using Random: randperm # Algs struct MTSAlg <: AbstractOptimizer end -struct LS1Alg <: AbstractOptimizer end +struct LocalSearchAlg <: AbstractOptimizer end # Options @with_kw struct MTSOptions @@ -36,7 +36,7 @@ struct LS1Alg <: AbstractOptimizer end X2_INCR = 0.2 end -@with_kw struct LS1Options +@with_kw struct LocalSearchOptions M = 100 maxiter=200 search_range_tol=1e-15 @@ -66,11 +66,11 @@ end optimal_val::Real end -@params mutable struct LS1Workspace <: Workspace +@params mutable struct LocalSearchWorkspace <: Workspace model::VecModel x0::AbstractVector 
x::AbstractVector - options::LS1Options + options::LocalSearchOptions enable::BitVector improve::BitVector search_range::AbstractVector @@ -101,27 +101,27 @@ function MTSWorkspace(model::VecModel, x0::AbstractVector, options::MTSOptions; end -function LS1Workspace(model::VecModel, x0::AbstractVector, options::LS1Options; kwargs...) +function LocalSearchWorkspace(model::VecModel, x0::AbstractVector, options::LocalSearchOptions; kwargs...) @unpack box_min, box_max = model M = options.M # Initialize improve and serch range enable = trues(M) improve = trues(M) search_range = [(box_max-box_min) ./ 2 for _ in 1:M] - LS1Workspace(model, x0, copy(x0), options, enable, improve, search_range, x0[1], -1, Inf) + LocalSearchWorkspace(model, x0, copy(x0), options, enable, improve, search_range, x0[1], -1, Inf) end # Exposed workspace constructors -function Workspace(model::VecModel, optimizer::LS1Alg, x0::AbstractVector; options::LS1Options=LS1Options(), kwargs...,) +function Workspace(model::VecModel, optimizer::LocalSearchAlg, x0::AbstractVector; options::LocalSearchOptions=LocalSearchOptions(), kwargs...,) @assert length(x0) > 0 && x0[1] isa AbstractVector if length(model.ineq_constraints) > 0 || length(model.eq_constraints) > 0 - @warn "LS1 does not support (in)equality constraints. Your input would be ignored. " + @warn "LocalSearch does not support (in)equality constraints. Your input would be ignored. " end - return LS1Workspace(model, x0, options) + return LocalSearchWorkspace(model, x0, options) end -# LS1 Workspace constructor without x0 (use method in paper to initialize) -function Workspace(model::VecModel, optimizer::LS1Alg; options::LS1Options=LS1Options(), kwargs...) +# LocalSearch Workspace constructor without x0 (use method in paper to initialize) +function Workspace(model::VecModel, optimizer::LocalSearchAlg; options::LocalSearchOptions=LocalSearchOptions(), kwargs...) 
x0 = initialize_x(model, options) return Workspace(model, optimizer, x0; options=options) end @@ -132,7 +132,7 @@ end end # Tool functions -function initialize_x(model::VecModel, options::Union{MTSOptions, LS1Options}) +function initialize_x(model::VecModel, options::Union{MTSOptions, LocalSearchOptions}) @unpack box_min, box_max = model @unpack M = options n_vars = getdim(model)[2] @@ -421,15 +421,15 @@ function Workspace(model::VecModel, optimizer::MTSAlg; options::MTSOptions=MTSOp end # Export localsearch1 independently -function localsearch1(workspace::Union{MTSWorkspace, LS1Workspace}) +function localsearch1(workspace::Union{MTSWorkspace, LocalSearchWorkspace}) M = workspace.options.M for i in 1:M _localsearch1(workspace, i) end end -# Export LS1 independently -function optimize!(workspace::LS1Workspace) +# Export LocalSearch independently +function optimize!(workspace::LocalSearchWorkspace) options = workspace.options for iter in 1:options.maxiter if debugging[] && iter % 50 == 0 diff --git a/test/mts.jl b/test/mts.jl index dd1b49f9..1370e68e 100644 --- a/test/mts.jl +++ b/test/mts.jl @@ -30,16 +30,16 @@ test_dim = 2 end end -@testset "Localsearch1" begin - println("Testing Localsearch1... ") +@testset "LocalSearch" begin + println("Testing LocalSearch... ") for F in setdiff(TEST_FUNCTIONS, (ROSENBROCK, )) println("Testing nonconvex function: ", F) m = Model(x -> F(x)) lb = [lu_bounds(F)[1] for _ in 1:test_dim] ub = [lu_bounds(F)[2] for _ in 1:test_dim] addvar!(m, lb, ub) - alg = LS1Alg() - r = Nonconvex.optimize(m, alg, options=LS1Options()) + alg = LocalSearchAlg() + r = Nonconvex.optimize(m, alg, options=LocalSearchOptions()) println(r.minimizer) println(r.minimum) @test abs(r.minimum) < tol(F)