Skip to content

Get rewrite up and running #59

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Jan 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,3 @@
.DS_Store
Manifest.toml
.vscode/settings.json
184 changes: 0 additions & 184 deletions Manifest.toml

This file was deleted.

11 changes: 4 additions & 7 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,19 +6,16 @@ version = "0.3.0"
[deps]
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"

[compat]
Cbc = "≥ 0.6.0"
JuMP = "~0.21"
MathOptInterface = "~0.9"
HiGHS = "1"
JuMP = "1"
julia = "1"

[extras]
Cbc = "9961bab8-2fa3-5c5a-9d89-47fab24efd76"
Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b"
HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test", "Cbc", "Gurobi"]
test = ["Test", "HiGHS"]
93 changes: 68 additions & 25 deletions src/PiecewiseLinearOpt.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@ __precompile__()
module PiecewiseLinearOpt

import JuMP
import MathOptInterface
const MOI = MathOptInterface
using LinearAlgebra
using Random

Expand All @@ -26,38 +24,45 @@ end

const VarOrAff = Union{JuMP.VariableRef,JuMP.AffExpr}

include(joinpath("methods", "util.jl"))
include("methods/util.jl")

export Incremental, LogarithmicEmbedding, LogarithmicIndependentBranching, NativeSOS2, ZigZagBinary, ZigZagInteger
include(joinpath("methods", "univariate", "incremental.jl"))
include(joinpath("methods", "univariate", "logarithmic_embedding.jl"))
include(joinpath("methods", "univariate", "logarithmic_independent_branching.jl"))
include(joinpath("methods", "univariate", "native_sos2.jl"))
include(joinpath("methods", "univariate", "zig_zag_binary.jl"))
include(joinpath("methods", "univariate", "zig_zag_integer.jl"))
include("methods/univariate/incremental.jl")

include("methods/univariate/logarithmic_embedding.jl")
include("methods/univariate/logarithmic_independent_branching.jl")
include("methods/univariate/native_sos2.jl")
include("methods/univariate/zig_zag_binary.jl")
include("methods/univariate/zig_zag_integer.jl")
# ConvexCombination has an SOS2 formulation, so defer this until after the
# multivariate formulations are defined
include(joinpath("methods", "univariate", "sos2_formulation_base.jl"))
include("methods/univariate/sos2_formulation_base.jl")

# Consider the colloquial "log" to refer to the embedding formulation
const Logarithmic = LogarithmicEmbedding
export Logarithmic

export K1, NineStencil, OptimalIndendentBranching, OptimalTriangleSelection, SixStencil, UnionJack
include(joinpath("methods", "bivariate", "k1.jl"))
include(joinpath("methods", "bivariate", "nine_stencil.jl"))
include(joinpath("methods", "bivariate", "optimal_independent_branching.jl"))
include(joinpath("methods", "bivariate", "optimal_triangle_selection.jl"))
include(joinpath("methods", "bivariate", "six_stencil.jl"))
include(joinpath("methods", "bivariate", "union_jack.jl"))
include(joinpath("methods", "bivariate", "common.jl"))

export ConvexCombination, DisaggregatedLogarithmic, MultipleChoice, OptimalIndependentBranching, OptimalTriangleSelection
include(joinpath("methods", "multivariate", "convex_combination.jl"))
include(joinpath("methods", "multivariate", "disaggregated_logarithmic.jl"))
include(joinpath("methods", "multivariate", "multiple_choice.jl"))

function formulate_pwl!(model::JuMP.Model, input_vals::Vector{NTuple{D,VarOrAff}}, output_vals::Vector{NTuple{F,VarOrAff}}, pwl::PWLFunction, method::Method, direction::DIRECTION) where {D,F}
export K1, NineStencil, OptimalIndependentBranching, OptimalTriangleSelection, SixStencil, UnionJack
include("methods/bivariate/k1.jl")
include("methods/bivariate/nine_stencil.jl")
include("methods/bivariate/optimal_independent_branching.jl")
include("methods/bivariate/optimal_triangle_selection.jl")
include("methods/bivariate/six_stencil.jl")
include("methods/bivariate/union_jack.jl")
include("methods/bivariate/common.jl")

export ConvexCombination, DisaggregatedLogarithmic, MultipleChoice
include("methods/multivariate/convex_combination.jl")
include("methods/multivariate/disaggregated_logarithmic.jl")
include("methods/multivariate/multiple_choice.jl")

"""
    formulate_pwl!(model, input_vals, output_vals, pwl, method, direction)

Generic fallback for piecewise-linear formulation. Dispatch lands here only
when no specialized `formulate_pwl!` method exists for the requested
input/output dimensions and formulation `method`; it always raises an
informative error rather than silently doing nothing.
"""
function formulate_pwl!(
    model::JuMP.Model,
    input_vals::Vector{NTuple{D,VarOrAff}},
    output_vals::Vector{NTuple{F,VarOrAff}},
    pwl::PWLFunction,
    method::Method,
    direction::DIRECTION,
) where {D,F}
    # No formulation is available for this (D, F, method) combination.
    return error("No support for a R^$D -> R^$F piecewise linear function using the $method method.")
end

Expand Down Expand Up @@ -93,4 +98,42 @@ function piecewiselinear(model::JuMP.Model,
return output_vars
end

"""
    piecewiselinear(model, input_var, pwl; method, direction, output_var)

Univariate convenience method: wraps the scalar input (and the optional
scalar output) in 1-tuples, delegates to the tuple-based `piecewiselinear`,
and unwraps the single resulting output variable.
"""
function piecewiselinear(
    model::JuMP.Model,
    input_var::VarOrAff,
    pwl::PWLFunction{1,1,SegmentPointRep{1,1}};
    method::Method = _default_method(Val(1)),
    direction::DIRECTION = Graph,
    output_var::Union{Nothing, VarOrAff} = nothing
)
    # Only lift the output into a tuple when the caller actually supplied one.
    wrapped_output = output_var === nothing ? nothing : (output_var,)
    outputs = piecewiselinear(
        model,
        (input_var,),
        pwl;
        method = method,
        direction = direction,
        output_vars = wrapped_output,
    )
    return outputs[1]
end

"""
    piecewiselinear(model, input_var_x, input_var_y, pwl; method, direction, output_var)

Bivariate convenience method: packs the two scalar inputs into a 2-tuple
(and the optional scalar output into a 1-tuple), delegates to the
tuple-based `piecewiselinear`, and unwraps the single output variable.
"""
function piecewiselinear(
    model::JuMP.Model,
    input_var_x::VarOrAff,
    input_var_y::VarOrAff,
    pwl::PWLFunction{2,1,SegmentPointRep{2,1}};
    method::Method = _default_method(Val(2)),
    direction::DIRECTION = Graph,
    output_var::Union{Nothing, VarOrAff} = nothing
)
    # Only lift the output into a tuple when the caller actually supplied one.
    wrapped_output = output_var === nothing ? nothing : (output_var,)
    outputs = piecewiselinear(
        model,
        (input_var_x, input_var_y),
        pwl;
        method = method,
        direction = direction,
        output_vars = wrapped_output,
    )
    return outputs[1]
end


end # module
8 changes: 6 additions & 2 deletions src/methods/bivariate/common.jl
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
const BivariateSOS2Method = Union{K1, OptimalTriangleSelection, NineStencil, SixStencil, UnionJack}

function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, output_vars::NTuple{F, VarOrAff}, pwl::BivariatePWLFunction{F}, method::BivariateSOS2Method, direction::DIRECTION) where {F}
function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, output_vars::NTuple{F, VarOrAff}, pwl::PWLFunctionPointRep{2, F}, method::BivariateSOS2Method, direction::DIRECTION) where {F}
initPWL!(model)
counter = model.ext[:PWL].counter
counter += 1
Expand Down Expand Up @@ -58,5 +58,9 @@ function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, outp
end
end

formulate_triangle_selection!(model, λ, triangle_direction, method)
formulate_triangle_selection!(model, λ, triangle_direction, method, pwl.structure)
end

"""
    formulate_triangle_selection!(model, λ, triangle_direction, method, structure)

Fallback triangle-selection formulation. Dispatch lands here only when
`method` has no specialized implementation for the given triangulation
`structure`; it always raises an informative error.
"""
function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::BivariateSOS2Method, structure::GridTriangulation)
    # Fixed typo in the error message ("suppported" -> "supported").
    return error("The triangulation structure $structure is not supported for method $method")
end
2 changes: 1 addition & 1 deletion src/methods/bivariate/k1.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ K1() = K1(Logarithmic())

axis_method(method::K1) = method.axis_method

function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::K1)
function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::K1, structure::K1Triangulation)
n_1, n_2 = size(λ)
@assert size(triangle_direction) == (n_1 - 1, n_2 - 1)
counter = model.ext[:PWL].counter
Expand Down
2 changes: 1 addition & 1 deletion src/methods/bivariate/nine_stencil.jl
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ NineStencil() = NineStencil(Logarithmic())
axis_method(method::NineStencil) = method.axis_method

# TODO: Unit tests for biclique cover
function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::NineStencil)
function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::NineStencil, _::GridTriangulation)
n_1, n_2 = size(λ)
@assert size(triangle_direction) == (n_1 - 1, n_2 - 1)
counter = model.ext[:PWL].counter
Expand Down
Loading