Skip to content

Fix inconsistency in doc #114

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 9 additions & 9 deletions docs/src/algorithms/mts.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,15 @@
## Description

MTS: Multiple Trajectory Search for Large-Scale Global Optimization, is a derivative-free heuristic optimization method presented in paper [Lin-Yu Tseng and Chun Chen, 2008](https://sci2s.ugr.es/sites/default/files/files/TematicWebSites/EAMHCO/contributionsCEC08/tseng08mts.pdf).
The main algorihtm `MTS` contains three subroutines `localsearch1`, `localsearch2` and `localsearch3`. This module implements all the optimization methods in the paper. People often use the entire `MTS` or only `localsearch1` to optimize functions, while `localsearch2` or `localsearch3` would rarely be used independently. Therefore, the module only exports `MTS` and `localsearch1`.
The main algorithm `MTS` contains three subroutines `localsearch1`, `localsearch2` and `localsearch3`. This module implements all the optimization methods in the paper. People often use the entire `MTS` or only `localsearch1` to optimize functions, while `localsearch2` or `localsearch3` would rarely be used independently. Therefore, the module only exports `MTS` and `LocalSearch` (referring to `localsearch1`).

## Quick start

Using the default `MTSOptions()`, the full `MTS` algorithm is used for optimization.

```julia
alg = MTSAlg() # Or LS1Alg()
LS1_options = MTSOptions()
alg = MTSAlg()
options = MTSOptions()
m = Model(f)
lb = [0, 0]
ub = [5, 5]
Expand All @@ -20,17 +20,17 @@ addvar!(m, lb, ub)
result = optimize(m, alg, x0, options = options)
```

## Options
## Using LocalSearch

You can choose which algorithm to use by specifying `option.method`. Avaliable list is `[MTS (default), localsearch1, Nonconvex.localsearch2 (not recommended), Nonconvex.localsearch3 (not recommended)]`.
You can also use `LocalSearch` through `LocalSearchAlg` and `LocalSearchOptions`.

```julia
alg = MTSAlg() # Or LS1Alg()
LS1_options = MTSOptions(method=localsearch1)
m = Model(f))
alg = LocalSearchAlg()
options = LocalSearchOptions()
m = Model(f)
lb = [0, 0]
ub = [5, 5]
# Must have a box constraint. And (in)equality constraints are not supported in MTS methods.
addvar!(m, lb, ub)
result = optimize(model, alg, x0, options = options
result = optimize(m, alg, x0, options = options)
```
4 changes: 2 additions & 2 deletions src/Nonconvex.jl
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ export Model,
HyperoptAlg,
BayesOptAlg,
MTSAlg,
LS1Alg,
LocalSearchAlg,
KKTCriteria,
IpoptCriteria,
FunctionWrapper,
Expand All @@ -48,7 +48,7 @@ export Model,
HyperoptOptions,
BayesOptOptions,
MTSOptions,
LS1Options,
LocalSearchOptions,
Tolerance,
@constructor,
RandomSampler,
Expand Down
30 changes: 15 additions & 15 deletions src/algorithms/mts.jl
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ using Random: randperm

# Algs
struct MTSAlg <: AbstractOptimizer end
struct LS1Alg <: AbstractOptimizer end
struct LocalSearchAlg <: AbstractOptimizer end

# Options
@with_kw struct MTSOptions
Expand All @@ -36,7 +36,7 @@ struct LS1Alg <: AbstractOptimizer end
X2_INCR = 0.2
end

@with_kw struct LS1Options
@with_kw struct LocalSearchOptions
M = 100
maxiter=200
search_range_tol=1e-15
Expand Down Expand Up @@ -66,11 +66,11 @@ end
optimal_val::Real
end

@params mutable struct LS1Workspace <: Workspace
@params mutable struct LocalSearchWorkspace <: Workspace
model::VecModel
x0::AbstractVector
x::AbstractVector
options::LS1Options
options::LocalSearchOptions
enable::BitVector
improve::BitVector
search_range::AbstractVector
Expand Down Expand Up @@ -101,27 +101,27 @@ function MTSWorkspace(model::VecModel, x0::AbstractVector, options::MTSOptions;
end


function LS1Workspace(model::VecModel, x0::AbstractVector, options::LS1Options; kwargs...)
function LocalSearchWorkspace(model::VecModel, x0::AbstractVector, options::LocalSearchOptions; kwargs...)
@unpack box_min, box_max = model
M = options.M
# Initialize improve and serch range
enable = trues(M)
improve = trues(M)
search_range = [(box_max-box_min) ./ 2 for _ in 1:M]
LS1Workspace(model, x0, copy(x0), options, enable, improve, search_range, x0[1], -1, Inf)
LocalSearchWorkspace(model, x0, copy(x0), options, enable, improve, search_range, x0[1], -1, Inf)
end

# Exposed workspace constructors
function Workspace(model::VecModel, optimizer::LS1Alg, x0::AbstractVector; options::LS1Options=LS1Options(), kwargs...,)
function Workspace(model::VecModel, optimizer::LocalSearchAlg, x0::AbstractVector; options::LocalSearchOptions=LocalSearchOptions(), kwargs...,)
@assert length(x0) > 0 && x0[1] isa AbstractVector
if length(model.ineq_constraints) > 0 || length(model.eq_constraints) > 0
@warn "LS1 does not support (in)equality constraints. Your input would be ignored. "
@warn "LocalSearch does not support (in)equality constraints. Your input would be ignored. "
end
return LS1Workspace(model, x0, options)
return LocalSearchWorkspace(model, x0, options)
end

# LS1 Workspace constructor without x0 (use method in paper to initialize)
function Workspace(model::VecModel, optimizer::LS1Alg; options::LS1Options=LS1Options(), kwargs...)
# LocalSearch Workspace constructor without x0 (use method in paper to initialize)
function Workspace(model::VecModel, optimizer::LocalSearchAlg; options::LocalSearchOptions=LocalSearchOptions(), kwargs...)
x0 = initialize_x(model, options)
return Workspace(model, optimizer, x0; options=options)
end
Expand All @@ -132,7 +132,7 @@ end
end

# Tool functions
function initialize_x(model::VecModel, options::Union{MTSOptions, LS1Options})
function initialize_x(model::VecModel, options::Union{MTSOptions, LocalSearchOptions})
@unpack box_min, box_max = model
@unpack M = options
n_vars = getdim(model)[2]
Expand Down Expand Up @@ -421,15 +421,15 @@ function Workspace(model::VecModel, optimizer::MTSAlg; options::MTSOptions=MTSOp
end

# Export localsearch1 independently
function localsearch1(workspace::Union{MTSWorkspace, LS1Workspace})
function localsearch1(workspace::Union{MTSWorkspace, LocalSearchWorkspace})
M = workspace.options.M
for i in 1:M
_localsearch1(workspace, i)
end
end

# Export LS1 independently
function optimize!(workspace::LS1Workspace)
# Export LocalSearch independently
function optimize!(workspace::LocalSearchWorkspace)
options = workspace.options
for iter in 1:options.maxiter
if debugging[] && iter % 50 == 0
Expand Down
8 changes: 4 additions & 4 deletions test/mts.jl
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,16 @@ test_dim = 2
end
end

@testset "Localsearch1" begin
println("Testing Localsearch1... ")
@testset "LocalSearch" begin
println("Testing LocalSearch... ")
for F in setdiff(TEST_FUNCTIONS, (ROSENBROCK, ))
println("Testing nonconvex function: ", F)
m = Model(x -> F(x))
lb = [lu_bounds(F)[1] for _ in 1:test_dim]
ub = [lu_bounds(F)[2] for _ in 1:test_dim]
addvar!(m, lb, ub)
alg = LS1Alg()
r = Nonconvex.optimize(m, alg, options=LS1Options())
alg = LocalSearchAlg()
r = Nonconvex.optimize(m, alg, options=LocalSearchOptions())
println(r.minimizer)
println(r.minimum)
@test abs(r.minimum) < tol(F)
Expand Down