From 10d6e1e6e4ee99ee5229898c2cfd14db1baa8bef Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 13:57:17 +0100 Subject: [PATCH 1/9] Do not track manifest --- .gitignore | 2 + Manifest.toml | 184 -------------------------------------------------- 2 files changed, 2 insertions(+), 184 deletions(-) delete mode 100644 Manifest.toml diff --git a/.gitignore b/.gitignore index e43b0f9..128c98d 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,3 @@ .DS_Store +Manifest.toml +.vscode/settings.json diff --git a/Manifest.toml b/Manifest.toml deleted file mode 100644 index b4d1063..0000000 --- a/Manifest.toml +++ /dev/null @@ -1,184 +0,0 @@ -# This file is machine-generated - editing it directly is not advised - -[[Base64]] -uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" - -[[BinDeps]] -deps = ["Compat", "Libdl", "SHA", "URIParser"] -git-tree-sha1 = "12093ca6cdd0ee547c39b1870e0c9c3f154d9ca9" -uuid = "9e28174c-4ba2-5203-b857-d8d62c4213ee" -version = "0.8.10" - -[[BinaryProvider]] -deps = ["Libdl", "SHA"] -git-tree-sha1 = "c7361ce8a2129f20b0e05a89f7070820cfed6648" -uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232" -version = "0.5.4" - -[[Calculus]] -deps = ["Compat"] -git-tree-sha1 = "f60954495a7afcee4136f78d1d60350abd37a409" -uuid = "49dc2e85-a5d0-5ad3-a950-438e2897f1b9" -version = "0.4.1" - -[[CommonSubexpressions]] -deps = ["Test"] -git-tree-sha1 = "efdaf19ab11c7889334ca247ff4c9f7c322817b0" -uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" -version = "0.2.0" - -[[Compat]] -deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] -git-tree-sha1 = "84aa74986c5b9b898b0d1acaf3258741ee64754f" -uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "2.1.0" - -[[DataStructures]] -deps = ["InteractiveUtils", "OrderedCollections", "Random", "Serialization", 
"Test"] -git-tree-sha1 = "ca971f03e146cf144a9e2f2ce59674f5bf0e8038" -uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.15.0" - -[[Dates]] -deps = ["Printf"] -uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" - -[[DelimitedFiles]] -deps = ["Mmap"] -uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" - -[[DiffResults]] -deps = ["Compat", "StaticArrays"] -git-tree-sha1 = "34a4a1e8be7bc99bc9c611b895b5baf37a80584c" -uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" -version = "0.0.4" - -[[DiffRules]] -deps = ["Random", "Test"] -git-tree-sha1 = "dc0869fb2f5b23466b32ea799bd82c76480167f7" -uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "0.0.10" - -[[Distributed]] -deps = ["Random", "Serialization", "Sockets"] -uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" - -[[ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "InteractiveUtils", "LinearAlgebra", "NaNMath", "Random", "SparseArrays", "SpecialFunctions", "StaticArrays", "Test"] -git-tree-sha1 = "4c4d727f1b7e0092134fabfab6396b8945c1ea5b" -uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.3" - -[[InteractiveUtils]] -deps = ["Markdown"] -uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" - -[[JuMP]] -deps = ["Calculus", "DataStructures", "ForwardDiff", "LinearAlgebra", "MathOptInterface", "NaNMath", "Random", "SparseArrays", "Statistics"] -git-tree-sha1 = "a37fdb14ee3a04b4df44c20a73da89c57035bdf2" -uuid = "4076af6c-e467-56ae-b986-b466b2749572" -version = "0.19.2" - -[[LibGit2]] -uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" - -[[Libdl]] -uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" - -[[LinearAlgebra]] -deps = ["Libdl"] -uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" - -[[Logging]] -uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" - -[[Markdown]] -deps = ["Base64"] -uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" - -[[MathOptInterface]] -deps = ["Compat", "Unicode"] -git-tree-sha1 = "5d3de69c9220610d0336ab45d3eb8b6ac7a7c807" -uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" -version = "0.8.4" - 
-[[Mmap]] -uuid = "a63ad114-7e13-5084-954f-fe012c677804" - -[[NaNMath]] -deps = ["Compat"] -git-tree-sha1 = "ce3b85e484a5d4c71dd5316215069311135fa9f2" -uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" -version = "0.3.2" - -[[OrderedCollections]] -deps = ["Random", "Serialization", "Test"] -git-tree-sha1 = "c4c13474d23c60d20a67b217f1d7f22a40edf8f1" -uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" -version = "1.1.0" - -[[Pkg]] -deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] -uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" - -[[Printf]] -deps = ["Unicode"] -uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" - -[[REPL]] -deps = ["InteractiveUtils", "Markdown", "Sockets"] -uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" - -[[Random]] -deps = ["Serialization"] -uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" - -[[SHA]] -uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" - -[[Serialization]] -uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" - -[[SharedArrays]] -deps = ["Distributed", "Mmap", "Random", "Serialization"] -uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" - -[[Sockets]] -uuid = "6462fe0b-24de-5631-8697-dd941f90decc" - -[[SparseArrays]] -deps = ["LinearAlgebra", "Random"] -uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" - -[[SpecialFunctions]] -deps = ["BinDeps", "BinaryProvider", "Libdl", "Test"] -git-tree-sha1 = "0b45dc2e45ed77f445617b99ff2adf0f5b0f23ea" -uuid = "276daf66-3868-5448-9aa4-cd146d93841b" -version = "0.7.2" - -[[StaticArrays]] -deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "db23bbf50064c582b6f2b9b043c8e7e98ea8c0c6" -uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "0.11.0" - -[[Statistics]] -deps = ["LinearAlgebra", "SparseArrays"] -uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" - -[[Test]] -deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] -uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" - -[[URIParser]] -deps = ["Test", "Unicode"] -git-tree-sha1 = "6ddf8244220dfda2f17539fa8c9de20d6c575b69" -uuid = 
"30578b45-9adc-5946-b283-645ec420af67" -version = "0.4.0" - -[[UUIDs]] -deps = ["Random", "SHA"] -uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" - -[[Unicode]] -uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" From 4c533aec88cd1d701532d59e72e4d4291421df2f Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 13:58:02 +0100 Subject: [PATCH 2/9] Bump dependencies and use HiGHS as test solver --- Project.toml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/Project.toml b/Project.toml index a65be4e..7e205cf 100644 --- a/Project.toml +++ b/Project.toml @@ -6,19 +6,16 @@ version = "0.3.0" [deps] JuMP = "4076af6c-e467-56ae-b986-b466b2749572" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" -MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" [compat] -Cbc = "≥ 0.6.0" -JuMP = "~0.21" -MathOptInterface = "~0.9" +HiGHS = "1" +JuMP = "1" julia = "1" [extras] -Cbc = "9961bab8-2fa3-5c5a-9d89-47fab24efd76" -Gurobi = "2e9cd046-0924-5485-92f1-d5272153d98b" +HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["Test", "Cbc", "Gurobi"] +test = ["Test", "HiGHS"] From fa64758d24ebc783136a043ad833781c4cce235d Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 13:58:32 +0100 Subject: [PATCH 3/9] Correct type in dictionary --- src/methods/util.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/methods/util.jl b/src/methods/util.jl index f4a0655..e33e620 100644 --- a/src/methods/util.jl +++ b/src/methods/util.jl @@ -85,7 +85,7 @@ function _canonicalize_triangulation(pwl::PWLFunction{D, F, SegmentPointRep{D, F end x_to_i = [Dict(U[j][i] => i for i in 1:size(xs, j)) for j in 1:D] - canonical_input_segments = Vector{NTuple{D, Float64}}[] + canonical_input_segments = Vector{NTuple{D, Int64}}[] for segment in pwl.segments push!(canonical_input_segments, [ntuple(j -> x_to_i[j][v[j]], D) for v in 
segment.input_vals]) end From 3fbcfe6b9864f9810ad0f956d7845c8c28aa5f0c Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 14:00:36 +0100 Subject: [PATCH 4/9] Add field for segment structure and constructors to match old interface --- src/types.jl | 96 ++++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 89 insertions(+), 7 deletions(-) diff --git a/src/types.jl b/src/types.jl index 1edb685..8d6e149 100644 --- a/src/types.jl +++ b/src/types.jl @@ -30,17 +30,99 @@ struct SegmentHyperplaneRep{D,F} <: Segment{D,F} funcs::NTuple{F, AffineFunction{D}} end +abstract type SegmentStructure{D} end + +struct Intervals <: SegmentStructure{1} end + +abstract type GridTriangulation <: SegmentStructure{2} end +struct UnstructuredTriangulation <: GridTriangulation end +struct K1Triangulation <: GridTriangulation end +struct UnionJackTriangulation <: GridTriangulation end + + struct PWLFunction{D, F, T <: Segment{D, F}} segments::Vector{T} - meta::Dict - - function PWLFunction{D, F, T}(segments::Vector{T}) where {D, F, T <: Segment} - return new(segments, Dict()) - end + structure::SegmentStructure{D} end const PWLFunctionPointRep{D, F} = PWLFunction{D, F, SegmentPointRep{D, F}} const PWLFunctionHyperplaneRep{D, F} = PWLFunction{D, F, SegmentHyperplaneRep{D, F}} -const UnivariatePWLFunction{F} = PWLFunctionPointRep{1, F} -const BivariatePWLFunction{F} = PWLFunctionPointRep{2, F} +#const UnivariatePWLFunction{F} = PWLFunctionPointRep{1, F} +#const BivariatePWLFunction{F} = PWLFunctionPointRep{2, F} + +const UnivariatePWLFunction = PWLFunctionPointRep{1, 1} +const BivariatePWLFunction = PWLFunctionPointRep{2, 1} + +function PWLFunctionPointRep{1,1}(x::Vector, z::Vector) + + if length(x) != length(z) + error("Mismatch in the number of points and function values") + end + xs = [convert(Float64, xi) for xi in x] + zs = [convert(Float64, zi) for zi in z] + segments = [PiecewiseLinearOpt.SegmentPointRep{1,1}([(xs[i],), (xs[i+1],)], [(zs[i],), (zs[i+1],)]) 
for i in 1:length(x)-1] + + return UnivariatePWLFunction(segments, Intervals()) +end + +function PWLFunctionPointRep{1,1}(x::Vector, fz::Function) + z = [fz(xi) for xi in x] + return UnivariatePWLFunction(x, z) +end + +function PWLFunctionPointRep{2,1}( + x, + y, + fz::Function; + pattern = :K1, + seed = hash((length(x), length(y))), +) + xs = [convert(Float64, xi) for xi in x] + ys = [convert(Float64, yi) for yi in y] + + segments = SegmentPointRep{2,1}[] + structure = UnstructuredTriangulation() + if pattern == :K1 + structure = K1Triangulation() + elseif pattern == :UnionJack + structure = UnionJackTriangulation() + end + + mt = Random.MersenneTwister(seed) + + # run for each square on [x[i],x[i+1]] × [y[i],y[i+1]] + for i in 1:length(xs)-1, j in 1:length(ys)-1 + xL, xU, yL, yU = xs[i], xs[i+1], ys[j], ys[j+1] + mid1 = 0.5 * (fz(xL, yL) + fz(xU, yU)) + mid2 = 0.5 * (fz(xL, yU) + fz(xU, yL)) + mid3 = fz(0.5 * (xL + xU), 0.5 * (yL + yU)) + diagonal_nw_se = true + if pattern == :Upper + diagonal_nw_se = (mid1 > mid2) + elseif pattern == :Lower + diagonal_nw_se = (mid1 < mid2) + elseif pattern == :BestFit + diagonal_nw_se = (abs(mid1 - mid3) < abs(mid2 - mid3)) + elseif pattern == :K1 + diagonal_nw_se = false + elseif pattern == :UnionJack + diagonal_nw_se = isodd(i + j) + elseif pattern == :Random + diagonal_nw_se = rand(mt, Bool) + end + + if diagonal_nw_se + corners1 = [(xL, yL), (xL, yU), (xU, yL)] # SW, NW, SE + corners2 = [(xU, yL), (xL, yU), (xU, yU)] # SE, NW, NE + else + corners1 = [(xL, yL), (xL, yU), (xU, yL)] # SW, NW, SE + corners2 = [(xU, yL), (xL, yU), (xU, yU)] # SE, NW, NE + end + + push!(segments, SegmentPointRep{2,1}(corners1, [(fz(c...),) for c in corners1])) + push!(segments, SegmentPointRep{2,1}(corners2, [(fz(c...),) for c in corners2])) + end + + return BivariatePWLFunction(segments, structure) +end From 01ea9736a46d62d98a6d17d148c733f4b5da2542 Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 14:01:26 +0100 Subject: [PATCH 
5/9] Specialize piecewiselinear for typical 1D and 2D usage --- src/PiecewiseLinearOpt.jl | 94 ++++++++++++++++++++++++++++----------- 1 file changed, 69 insertions(+), 25 deletions(-) diff --git a/src/PiecewiseLinearOpt.jl b/src/PiecewiseLinearOpt.jl index 2a824b7..c09ac94 100644 --- a/src/PiecewiseLinearOpt.jl +++ b/src/PiecewiseLinearOpt.jl @@ -3,8 +3,7 @@ __precompile__() module PiecewiseLinearOpt import JuMP -import MathOptInterface -const MOI = MathOptInterface +const MOI = JuMP.MOI using LinearAlgebra using Random @@ -26,38 +25,45 @@ end const VarOrAff = Union{JuMP.VariableRef,JuMP.AffExpr} -include(joinpath("methods", "util.jl")) +include("methods/util.jl") export Incremental, LogarithmicEmbedding, LogarithmicIndependentBranching, NativeSOS2, ZigZagBinary, ZigZagInteger -include(joinpath("methods", "univariate", "incremental.jl")) -include(joinpath("methods", "univariate", "logarithmic_embedding.jl")) -include(joinpath("methods", "univariate", "logarithmic_independent_branching.jl")) -include(joinpath("methods", "univariate", "native_sos2.jl")) -include(joinpath("methods", "univariate", "zig_zag_binary.jl")) -include(joinpath("methods", "univariate", "zig_zag_integer.jl")) +include("methods/univariate/incremental.jl") + +include("methods/univariate/logarithmic_embedding.jl") +include("methods/univariate/logarithmic_independent_branching.jl") +include("methods/univariate/native_sos2.jl") +include("methods/univariate/zig_zag_binary.jl") +include("methods/univariate/zig_zag_integer.jl") # ConvexCombination has an SOS2 formulation, so defer this until after the # multivariate formulations are defined -include(joinpath("methods", "univariate", "sos2_formulation_base.jl")) +include("methods/univariate/sos2_formulation_base.jl") # Consider the colloqial "log" to refer to the embedding formulation const Logarithmic = LogarithmicEmbedding export Logarithmic -export K1, NineStencil, OptimalIndendentBranching, OptimalTriangleSelection, SixStencil, UnionJack 
-include(joinpath("methods", "bivariate", "k1.jl")) -include(joinpath("methods", "bivariate", "nine_stencil.jl")) -include(joinpath("methods", "bivariate", "optimal_independent_branching.jl")) -include(joinpath("methods", "bivariate", "optimal_triangle_selection.jl")) -include(joinpath("methods", "bivariate", "six_stencil.jl")) -include(joinpath("methods", "bivariate", "union_jack.jl")) -include(joinpath("methods", "bivariate", "common.jl")) - -export ConvexCombination, DisaggregatedLogarithmic, MultipleChoice, OptimalIndependentBranching, OptimalTriangleSelection -include(joinpath("methods", "multivariate", "convex_combination.jl")) -include(joinpath("methods", "multivariate", "disaggregated_logarithmic.jl")) -include(joinpath("methods", "multivariate", "multiple_choice.jl")) - -function formulate_pwl!(model::JuMP.Model, input_vals::Vector{NTuple{D,VarOrAff}}, output_vals::Vector{NTuple{F,VarOrAff}}, pwl::PWLFunction, method::Method, direction::DIRECTION) where {D,F} +export K1, NineStencil, OptimalIndependentBranching, OptimalTriangleSelection, SixStencil, UnionJack +include("methods/bivariate/k1.jl") +include("methods/bivariate/nine_stencil.jl") +include("methods/bivariate/optimal_independent_branching.jl") +include("methods/bivariate/optimal_triangle_selection.jl") +include("methods/bivariate/six_stencil.jl") +include("methods/bivariate/union_jack.jl") +include("methods/bivariate/common.jl") + +export ConvexCombination, DisaggregatedLogarithmic, MultipleChoice +include("methods/multivariate/convex_combination.jl") +include("methods/multivariate/disaggregated_logarithmic.jl") +include("methods/multivariate/multiple_choice.jl") + +function formulate_pwl!( + model::JuMP.Model, + input_vals::Vector{NTuple{D,VarOrAff}}, + output_vals::Vector{NTuple{F,VarOrAff}}, + pwl::PWLFunction, + method::Method, + direction::DIRECTION) where {D,F} error("No support for a R^$D -> R^$F piecewise linear function using the $method method.") end @@ -93,4 +99,42 @@ function 
piecewiselinear(model::JuMP.Model, return output_vars end +function piecewiselinear( + model::JuMP.Model, + input_var::VarOrAff, + pwl::PWLFunction{1,1,SegmentPointRep{1,1}}; + method::Method = _default_method(Val(1)), + direction::DIRECTION = Graph, + output_var::Union{Nothing, VarOrAff} = nothing +) + return piecewiselinear( + model, + (input_var,), + pwl; + method = method, + direction = direction, + output_vars = isnothing(output_var) ? nothing : (output_var,) + )[1] +end + +function piecewiselinear( + model::JuMP.Model, + input_var_x::VarOrAff, + input_var_y::VarOrAff, + pwl::PWLFunction{2,1,SegmentPointRep{2,1}}; + method::Method = _default_method(Val(2)), + direction::DIRECTION = Graph, + output_var::Union{Nothing, VarOrAff} = nothing +) + return piecewiselinear( + model, + (input_var_x, input_var_y), + pwl; + method = method, + direction = direction, + output_vars = isnothing(output_var) ? nothing : (output_var,) + )[1] +end + + end # module From 537fafb186f6cf15d2a0380b9b9522fa172d6508 Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 14:03:06 +0100 Subject: [PATCH 6/9] Include segment structure in triangle selection --- src/methods/bivariate/common.jl | 8 ++++++-- src/methods/bivariate/k1.jl | 2 +- src/methods/bivariate/nine_stencil.jl | 2 +- src/methods/bivariate/optimal_independent_branching.jl | 10 ++++++---- src/methods/bivariate/optimal_triangle_selection.jl | 2 +- src/methods/bivariate/six_stencil.jl | 2 +- src/methods/bivariate/union_jack.jl | 2 +- src/methods/univariate/incremental.jl | 2 +- src/methods/univariate/sos2_formulation_base.jl | 4 ++-- 9 files changed, 20 insertions(+), 14 deletions(-) diff --git a/src/methods/bivariate/common.jl b/src/methods/bivariate/common.jl index cb93f00..7b29d6a 100644 --- a/src/methods/bivariate/common.jl +++ b/src/methods/bivariate/common.jl @@ -1,6 +1,6 @@ const BivariateSOS2Method = Union{K1, OptimalTriangleSelection, NineStencil, SixStencil, UnionJack} -function 
formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, output_vars::NTuple{F, VarOrAff}, pwl::BivariatePWLFunction{F}, method::BivariateSOS2Method, direction::DIRECTION) where {F} +function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, output_vars::NTuple{F, VarOrAff}, pwl::PWLFunctionPointRep{2, F}, method::BivariateSOS2Method, direction::DIRECTION) where {F} initPWL!(model) counter = model.ext[:PWL].counter counter += 1 @@ -58,5 +58,9 @@ function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, outp end end - formulate_triangle_selection!(model, λ, triangle_direction, method) + formulate_triangle_selection!(model, λ, triangle_direction, method, pwl.structure) +end + +function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::BivariateSOS2Method, structure::GridTriangulation) + error("The triangulation structure $structure is not supported for method $method") +end diff --git a/src/methods/bivariate/k1.jl b/src/methods/bivariate/k1.jl index 991ed52..049127a 100644 --- a/src/methods/bivariate/k1.jl +++ b/src/methods/bivariate/k1.jl @@ -5,7 +5,7 @@ K1() = K1(Logarithmic()) axis_method(method::K1) = method.axis_method -function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::K1) +function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::K1, structure::K1Triangulation) n_1, n_2 = size(λ) @assert size(triangle_direction) == (n_1 - 1, n_2 - 1) counter = model.ext[:PWL].counter diff --git a/src/methods/bivariate/nine_stencil.jl b/src/methods/bivariate/nine_stencil.jl index 2654e11..55ad85a 100644 --- a/src/methods/bivariate/nine_stencil.jl +++ b/src/methods/bivariate/nine_stencil.jl @@ -6,7 +6,7 @@ NineStencil() = NineStencil(Logarithmic()) axis_method(method::NineStencil) = method.axis_method # TODO: Unit tests 
for biclique cover -function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::NineStencil) +function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::NineStencil, _::GridTriangulation) n_1, n_2 = size(λ) @assert size(triangle_direction) == (n_1 - 1, n_2 - 1) counter = model.ext[:PWL].counter diff --git a/src/methods/bivariate/optimal_independent_branching.jl b/src/methods/bivariate/optimal_independent_branching.jl index da5a976..035d391 100644 --- a/src/methods/bivariate/optimal_independent_branching.jl +++ b/src/methods/bivariate/optimal_independent_branching.jl @@ -1,9 +1,9 @@ # TODO: Generalize to multivariate case. -struct OptimalIndendentBranching <: Method +struct OptimalIndependentBranching <: Method sub_solver end -function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, output_vars::NTuple{F, VarOrAff}, pwl::BivariatePWLFunction{F}, method::OptimalIndendentBranching, direction::DIRECTION) where {F} +function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, output_vars::NTuple{F, VarOrAff}, pwl::PWLFunctionPointRep{2, F}, method::OptimalIndependentBranching, direction::DIRECTION) where {F} initPWL!(model) counter = model.ext[:PWL].counter counter += 1 @@ -61,7 +61,7 @@ function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, outp t = ceil(Int, log2(2 * (n_1 - 1) * (n_2 - 1))) while true - @show method.sub_solver + #@show method.sub_solver sub_model = JuMP.Model(method.sub_solver) JuMP.@variable(sub_model, x[1:t, J], Bin) JuMP.@variable(sub_model, y[1:t, J], Bin) @@ -86,7 +86,7 @@ function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, outp end end - @show J + #@show J for r in J, s in J # lexicographic ordering on points on grid if r[1] > s[1] || (r[1] == s[1] && r[2] ≥ s[2]) @@ -113,6 +113,7 @@ function formulate_pwl!(model::JuMP.Model, 
input_vars::NTuple{2, VarOrAff}, outp end JuMP.@objective(sub_model, Min, sum(x) + sum(y)) + JuMP.unset_silent(sub_model) JuMP.optimize!(sub_model) if JuMP.primal_status(sub_model) == MOI.FEASIBLE_POINT x_val = JuMP.value.(x) @@ -120,6 +121,7 @@ function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, outp break else t += 1 + @show t end end z = JuMP.@variable(model, [1:t], Bin, base_name = "z_$counter") diff --git a/src/methods/bivariate/optimal_triangle_selection.jl b/src/methods/bivariate/optimal_triangle_selection.jl index 239f8a9..23ad65f 100644 --- a/src/methods/bivariate/optimal_triangle_selection.jl +++ b/src/methods/bivariate/optimal_triangle_selection.jl @@ -7,7 +7,7 @@ OptimalTriangleSelection(sub_solver) = OptimalTriangleSelection(sub_solver, Loga axis_method(method::OptimalTriangleSelection) = method.axis_method -function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::OptimalTriangleSelection) +function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::OptimalTriangleSelection, _::GridTriangulation) counter = model.ext[:PWL].counter n_1, n_2 = size(λ) J = Set((i, j) for i in 1:n_1, j in 1:n_2) diff --git a/src/methods/bivariate/six_stencil.jl b/src/methods/bivariate/six_stencil.jl index 4742116..8e184f2 100644 --- a/src/methods/bivariate/six_stencil.jl +++ b/src/methods/bivariate/six_stencil.jl @@ -6,7 +6,7 @@ SixStencil() = SixStencil(Logarithmic()) axis_method(method::SixStencil) = method.axis_method # TODO: Unit tests for biclique cover -function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::SixStencil) +function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::SixStencil, _::GridTriangulation) n_1, n_2 = size(λ) @assert size(triangle_direction) == 
(n_1 - 1, n_2 - 1) counter = model.ext[:PWL].counter diff --git a/src/methods/bivariate/union_jack.jl b/src/methods/bivariate/union_jack.jl index b46a9f0..32dc53a 100644 --- a/src/methods/bivariate/union_jack.jl +++ b/src/methods/bivariate/union_jack.jl @@ -5,7 +5,7 @@ UnionJack() = UnionJack(Logarithmic()) axis_method(method::UnionJack) = method.axis_method -function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::UnionJack) +function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.VariableRef}, triangle_direction::Matrix{Bool}, method::UnionJack, _::UnionJackTriangulation) n_1, n_2 = size(λ) @assert size(triangle_direction) == (n_1 - 1, n_2 - 1) counter = model.ext[:PWL].counter diff --git a/src/methods/univariate/incremental.jl b/src/methods/univariate/incremental.jl index 10c1525..9987b4f 100644 --- a/src/methods/univariate/incremental.jl +++ b/src/methods/univariate/incremental.jl @@ -1,7 +1,7 @@ # TODO: Implement bivariate version of the incremental formulation struct Incremental <: Method end -function formulate_pwl!(model::JuMP.Model, input_vars::Tuple{VarOrAff}, output_vars::NTuple{F,VarOrAff}, pwl::UnivariatePWLFunction, method::Incremental, direction::DIRECTION) where {F} +function formulate_pwl!(model::JuMP.Model, input_vars::Tuple{VarOrAff}, output_vars::NTuple{F,VarOrAff}, pwl::PWLFunctionPointRep{1, F}, method::Incremental, direction::DIRECTION) where {F} grid = _continuous_gridpoints_or_die(pwl) xs, ys = grid.input_vals, grid.output_vals diff --git a/src/methods/univariate/sos2_formulation_base.jl b/src/methods/univariate/sos2_formulation_base.jl index c06111b..58f95d7 100644 --- a/src/methods/univariate/sos2_formulation_base.jl +++ b/src/methods/univariate/sos2_formulation_base.jl @@ -73,7 +73,7 @@ end # implementation works just fine. 
const SOS2Method = Union{LogarithmicEmbedding, LogarithmicIndependentBranching, NativeSOS2, ZigZagBinary, ZigZagInteger} -function formulate_pwl!(model::JuMP.Model, input_vars::Tuple{VarOrAff}, output_vars::NTuple{F,VarOrAff}, pwl::UnivariatePWLFunction{F}, method::SOS2Method, direction::DIRECTION) where {F} +function formulate_pwl!(model::JuMP.Model, input_vars::Tuple{VarOrAff}, output_vars::NTuple{F,VarOrAff}, pwl::PWLFunctionPointRep{1, F}, method::SOS2Method, direction::DIRECTION) where {F} grid = _continuous_gridpoints_or_die(pwl) λ = _create_convex_multiplier_vars(model, grid, input_vars, output_vars, direction) formulate_sos2!(model, λ, method) @@ -92,7 +92,7 @@ function formulate_sos2!(model::JuMP.Model, λ::Vector{T}, method::Method) where push!(segments, SegmentPointRep{D, F}([(d[i],), (d[i+1],)], [output_left, output_right])) end dummy_input_var = JuMP.@variable(model, lower_bound = 0, upper_bound = 1) - dummy_pwl = UnivariatePWLFunction{F}(segments) + dummy_pwl = PWLFunctionPointRep{1, F}(segments, Intervals()) formulate_pwl!(model, (dummy_input_var,), tuple(λ...), dummy_pwl, method, Graph) return nothing end From ba7f02e4bd89eadcbafce20d6af4c09e69d4b876 Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 14:04:30 +0100 Subject: [PATCH 7/9] Update tests to reflect changes, use HiGHS and run all bivariate --- test/runtests.jl | 188 ++++++++++++++++++++++++++--------------------- 1 file changed, 106 insertions(+), 82 deletions(-) diff --git a/test/runtests.jl b/test/runtests.jl index 79fd54a..350a6f4 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,114 +1,138 @@ -using Cbc, Gurobi -using Test +using PiecewiseLinearOpt +using HiGHS +using JuMP using LinearAlgebra +using Test -import JuMP -import MathOptInterface -const MOI = MathOptInterface - -using PiecewiseLinearOpt const PLO = PiecewiseLinearOpt -const methods_1D = (ConvexCombination(), DisaggregatedLogarithmic(), Incremental(), LogarithmicEmbedding(), 
LogarithmicIndependentBranching(), NativeSOS2(), ZigZagBinary(), ZigZagInteger()) +optimizer = optimizer_with_attributes(HiGHS.Optimizer, MOI.Silent() => true) + +const methods_1D = [ + ConvexCombination(), + DisaggregatedLogarithmic(), + Incremental(), + LogarithmicEmbedding(), + LogarithmicIndependentBranching(), + NativeSOS2(), + ZigZagBinary(), + ZigZagInteger() +] + @testset "Simple univariate" for method in methods_1D - model = JuMP.Model(Gurobi.Optimizer) - JuMP.@variable(model, x) + model = Model(optimizer) + @variable(model, x) s1 = PLO.SegmentPointRep{1,1}([(1.,), (2.,)], [(2.5,), (3.5,)]) s2 = PLO.SegmentPointRep{1,1}([(2.,), (3.,)], [(3.5,), (1.0,)]) - pwl = PLO.PWLFunction{1,1,PLO.SegmentPointRep{1,1}}([s1, s2]) + pwl = PLO.PWLFunction([s1, s2], PLO.Intervals()) y = piecewiselinear(model, (x,), pwl, method=method) - JuMP.@objective(model, Min, y[1]) + @objective(model, Min, y[1]) - JuMP.optimize!(model) + optimize!(model) - @test JuMP.termination_status(model) == MOI.OPTIMAL - @test JuMP.value(x) ≈ 3.0 rtol=1e-4 - @test JuMP.value(y[1]) ≈ 1.0 rtol=1e-4 + @test termination_status(model) == MOI.OPTIMAL + @test value(x) ≈ 3.0 rtol=1e-4 + @test value(y[1]) ≈ 1.0 rtol=1e-4 end -const sos2_methods = (ConvexCombination(), LogarithmicEmbedding(), LogarithmicIndependentBranching(), NativeSOS2(), ZigZagBinary(), ZigZagInteger()) -const methods_2D = (ConvexCombination(), DisaggregatedLogarithmic(), OptimalIndendentBranching(Gurobi.Optimizer), [K1(sos2_method) for sos2_method in methods_1D]..., [NineStencil(sos2_method) for sos2_method in methods_1D]..., [OptimalTriangleSelection(Gurobi.Optimizer, sos2_method) for sos2_method in methods_1D]..., [SixStencil(sos2_method) for sos2_method in methods_1D]..., [UnionJack(sos2_method) for sos2_method in methods_1D]...) 
-@testset "Simple bivariate" for method in methods_2D - model = JuMP.Model(Gurobi.Optimizer) - JuMP.@variable(model, x[1:2]) +const sos2_methods = [ + ConvexCombination(), + LogarithmicEmbedding(), + LogarithmicIndependentBranching(), + NativeSOS2(), + ZigZagBinary(), + ZigZagInteger() +] + +const methods_2D_gen = [ + ConvexCombination(), + DisaggregatedLogarithmic(), + #OptimalIndependentBranching(optimizer), + [NineStencil(sos2_method) for sos2_method in methods_1D]..., + [OptimalTriangleSelection(optimizer, sos2_method) for sos2_method in methods_1D]..., + [SixStencil(sos2_method) for sos2_method in methods_1D]..., +] + +@testset "Simple bivariate" for method in methods_2D_gen + model = Model(optimizer) + @variable(model, x[1:2]) s1 = PLO.SegmentPointRep{2,1}([(0.0, 0.0), (0.0, 1.0), (1.0, 1.0)], [(0.0,), (1.0,), (2.0,)]) s2 = PLO.SegmentPointRep{2,1}([(0.0, 0.0), (1.0, 0.0), (1.0, 1.0)], [(0.0,), (3.0,), (2.0,)]) - pwl = PLO.PWLFunction{2,1,PLO.SegmentPointRep{2,1}}([s1, s2]) + pwl = PLO.PWLFunction{2,1,PLO.SegmentPointRep{2,1}}([s1, s2], PLO.UnstructuredTriangulation()) y = piecewiselinear(model, (x[1], x[2]), pwl, method = method) - JuMP.@objective(model, Min, y[1]) + @objective(model, Min, y[1]) - JuMP.optimize!(model) + optimize!(model) - @test JuMP.termination_status(model) == MOI.OPTIMAL - @test JuMP.value(x[1]) ≈ 0.0 rtol=1e-4 - @test JuMP.value(x[2]) ≈ 0.0 rtol=1e-4 - @test JuMP.value(y[1]) ≈ 0.0 rtol=1e-4 + @test termination_status(model) == MOI.OPTIMAL + @test value(x[1]) ≈ 0.0 rtol=1e-4 + @test value(x[2]) ≈ 0.0 rtol=1e-4 + @test value(y[1]) ≈ 0.0 rtol=1e-4 end @testset "1D: $method" for method in methods_1D - model = JuMP.Model(Gurobi.Optimizer) - JuMP.@variable(model, x) - segments = PLO.SegmentPointRep{1,1}[] + model = Model(optimizer) + @variable(model, x) d = 7 - xs = range(1,stop=2π, length=(d + 1)) - for i in 1:d - x_l = xs[i] - x_r = xs[i+1] - push!(segments, PLO.SegmentPointRep{1,1}([(x_l,), (x_r,)], [(sin(x_l),), (sin(x_r),)])) - end - pwl 
= PLO.PWLFunction{1,1,PLO.SegmentPointRep{1,1}}(segments) - y = piecewiselinear(model, (x,), pwl, method=method) - JuMP.@objective(model, Max, y[1]) + xs = collect(range(1,stop=2π, length=(d + 1))) + zs = sin.(xs) + pwl = PLO.UnivariatePWLFunction(xs, zs) + y = piecewiselinear(model, x, pwl, method=method) + @objective(model, Max, y) - JuMP.optimize!(model) + optimize!(model) - @test JuMP.termination_status(model) == MOI.OPTIMAL - @test JuMP.value(x) ≈ 1.75474 rtol=1e-4 - @test JuMP.value(y[1]) ≈ 0.98313 rtol=1e-4 - @test JuMP.objective_value(model) ≈ 0.98313 rtol=1e-4 - @test JuMP.objective_value(model) ≈ JuMP.value(y[1]) rtol=1e-4 + @test termination_status(model) == MOI.OPTIMAL + @test value(x) ≈ 1.75474 rtol=1e-4 + @test value(y) ≈ 0.98313 rtol=1e-4 + @test objective_value(model) ≈ 0.98313 rtol=1e-4 + @test objective_value(model) ≈ value(y) rtol=1e-4 - JuMP.@constraint(model, x ≤ 1.5y[1]) + @constraint(model, x ≤ 1.5y) - JuMP.optimize!(model) + optimize!(model) - @test JuMP.termination_status(model) == MOI.OPTIMAL - @test JuMP.value(x) ≈ 1.36495 rtol=1e-4 - @test JuMP.value(y[1]) ≈ 0.90997 rtol=1e-4 - @test JuMP.objective_value(model) ≈ 0.90997 rtol=1e-4 - @test JuMP.objective_value(model) ≈ JuMP.value(y[1]) rtol=1e-4 + @test termination_status(model) == MOI.OPTIMAL + @test value(x) ≈ 1.36495 rtol=1e-4 + @test value(y) ≈ 0.90997 rtol=1e-4 + @test objective_value(model) ≈ 0.90997 rtol=1e-4 + @test objective_value(model) ≈ value(y) rtol=1e-4 end -# println("\nbivariate tests") -# @testset "2D: $method, $pattern" for method in methods_2D, pattern in patterns_2D -# model = JuMP.Model(JuMP.with_optimizer(Cbc.Optimizer)) -# JuMP.@variable(model, x[1:2]) -# d = range(0,stop=1,length=8) -# f = (x1,x2) -> 2*(x1-1/3)^2 + 3*(x2-4/7)^4 -# z = piecewiselinear(model, x[1], x[2], BivariatePWLFunction(d, d, f, pattern=pattern), method=method) -# JuMP.@objective(model, Min, z) -# -# JuMP.optimize!(model) -# -# @test JuMP.termination_status(model) == MOI.OPTIMAL -# @test 
JuMP.value(x[1]) ≈ 0.285714 rtol=1e-4 -# @test JuMP.value(x[2]) ≈ 0.571429 rtol=1e-4 -# @test JuMP.value(z) ≈ 0.004535 rtol=1e-3 -# @test JuMP.objective_value(model) ≈ 0.004535 rtol=1e-3 -# @test JuMP.objective_value(model) ≈ JuMP.value(z) rtol=1e-3 -# -# JuMP.@constraint(model, x[1] ≥ 0.6) -# -# JuMP.optimize!(model) -# -# @test JuMP.termination_status(model) == MOI.OPTIMAL -# @test JuMP.value(x[1]) ≈ 0.6 rtol=1e-4 -# @test JuMP.value(x[2]) ≈ 0.571428 rtol=1e-4 -# @test JuMP.value(z) ≈ 0.148753 rtol=1e-4 -# @test JuMP.objective_value(model) ≈ 0.148753 rtol=1e-3 -# @test JuMP.objective_value(model) ≈ JuMP.value(z) rtol=1e-3 -# end +patterns = [:Upper, :Lower, :BestFit, :K1, :UnionJack, :Random] +method_pattern = vec(collect(Iterators.product(methods_2D_gen, patterns))) +k1_methods = [(method, :K1) for method in [K1(sos2_method) for sos2_method in methods_1D]] +uj_methods = [(method, :UnionJack) for method in [UnionJack(sos2_method) for sos2_method in methods_1D]] +append!(method_pattern, k1_methods) +append!(method_pattern, uj_methods) + +@testset "2D: $method, $pattern" for (method, pattern) in method_pattern + model = Model(optimizer) + @variable(model, x[1:2]) + d = range(0,stop=1,length=8) + f = (x1,x2) -> 2*(x1-1/3)^2 + 3*(x2-4/7)^4 + pwl = PLO.BivariatePWLFunction(d, d, f, pattern=pattern) + z = piecewiselinear(model, x[1], x[2], pwl, method=method) + @objective(model, Min, z) + optimize!(model) + @test termination_status(model) == MOI.OPTIMAL + @test value(x[1]) ≈ 0.285714 rtol=1e-4 + @test value(x[2]) ≈ 0.571429 rtol=1e-4 + @test value(z) ≈ 0.004535 rtol=1e-3 + @test objective_value(model) ≈ 0.004535 rtol=1e-3 + @test objective_value(model) ≈ value(z) rtol=1e-3 + + @constraint(model, x[1] ≥ 0.6) + optimize!(model) + + @test termination_status(model) == MOI.OPTIMAL + @test value(x[1]) ≈ 0.6 rtol=1e-4 + @test value(x[2]) ≈ 0.571428 rtol=1e-4 + @test value(z) ≈ 0.148753 rtol=1e-4 + @test objective_value(model) ≈ 0.148753 rtol=1e-3 + @test 
objective_value(model) ≈ value(z) rtol=1e-3 +end From 2e0f92efdf4cf14d51ad8602c30aa0a838402b22 Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Wed, 8 Jan 2025 14:43:03 +0100 Subject: [PATCH 8/9] Fix triangulation --- src/types.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/types.jl b/src/types.jl index 8d6e149..f12fbdc 100644 --- a/src/types.jl +++ b/src/types.jl @@ -116,8 +116,8 @@ function PWLFunctionPointRep{2,1}( corners1 = [(xL, yL), (xL, yU), (xU, yL)] # SW, NW, SE corners2 = [(xU, yL), (xL, yU), (xU, yU)] # SE, NW, NE else - corners1 = [(xL, yL), (xL, yU), (xU, yL)] # SW, NW, SE - corners2 = [(xU, yL), (xL, yU), (xU, yU)] # SE, NW, NE + corners1 = [(xL, yL), (xU, yU), (xU, yL)] # SW, NE, SE + corners2 = [(xL, yL), (xL, yU), (xU, yU)] # SW, NW, NE end push!(segments, SegmentPointRep{2,1}(corners1, [(fz(c...),) for c in corners1])) From ae2c2c6ebf5dd91ec2da19849b8cd9188e4a510b Mon Sep 17 00:00:00 2001 From: Truls Flatberg Date: Thu, 9 Jan 2025 09:18:51 +0100 Subject: [PATCH 9/9] Avoid explicit use of MOI --- src/PiecewiseLinearOpt.jl | 1 - src/methods/bivariate/optimal_independent_branching.jl | 2 +- src/methods/bivariate/optimal_triangle_selection.jl | 2 +- src/methods/univariate/native_sos2.jl | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/PiecewiseLinearOpt.jl b/src/PiecewiseLinearOpt.jl index c09ac94..2a9f70b 100644 --- a/src/PiecewiseLinearOpt.jl +++ b/src/PiecewiseLinearOpt.jl @@ -3,7 +3,6 @@ __precompile__() module PiecewiseLinearOpt import JuMP -const MOI = JuMP.MOI using LinearAlgebra using Random diff --git a/src/methods/bivariate/optimal_independent_branching.jl b/src/methods/bivariate/optimal_independent_branching.jl index 035d391..35ae69b 100644 --- a/src/methods/bivariate/optimal_independent_branching.jl +++ b/src/methods/bivariate/optimal_independent_branching.jl @@ -115,7 +115,7 @@ function formulate_pwl!(model::JuMP.Model, input_vars::NTuple{2, VarOrAff}, outp 
JuMP.@objective(sub_model, Min, sum(x) + sum(y)) JuMP.unset_silent(sub_model) JuMP.optimize!(sub_model) - if JuMP.primal_status(sub_model) == MOI.FEASIBLE_POINT + if JuMP.primal_status(sub_model) == JuMP.FEASIBLE_POINT x_val = JuMP.value.(x) y_val = JuMP.value.(y) break diff --git a/src/methods/bivariate/optimal_triangle_selection.jl b/src/methods/bivariate/optimal_triangle_selection.jl index 23ad65f..198725a 100644 --- a/src/methods/bivariate/optimal_triangle_selection.jl +++ b/src/methods/bivariate/optimal_triangle_selection.jl @@ -66,7 +66,7 @@ function formulate_triangle_selection!(model::JuMP.Model, λ::Matrix{JuMP.Variab JuMP.@objective(sub_model, Min, sum(x) + sum(y)) JuMP.optimize!(sub_model) - if JuMP.primal_status(sub_model) == MOI.FEASIBLE_POINT + if JuMP.primal_status(sub_model) == JuMP.FEASIBLE_POINT x_val = JuMP.value.(x) y_val = JuMP.value.(y) break diff --git a/src/methods/univariate/native_sos2.jl b/src/methods/univariate/native_sos2.jl index 1de6eeb..d02ecb6 100644 --- a/src/methods/univariate/native_sos2.jl +++ b/src/methods/univariate/native_sos2.jl @@ -1,6 +1,6 @@ struct NativeSOS2 <: Method end function formulate_sos2!(model::JuMP.Model, λ::Vector{T}, method::NativeSOS2) where {T <: VarOrAff} - JuMP.@constraint(model, λ in MOI.SOS2([k for k in 1:length(λ)])) + JuMP.@constraint(model, λ in JuMP.SOS2([k for k in 1:length(λ)])) return nothing end