Commit fc332da

Merge pull request #8 from JuliaOptimizationVariationalAnalysis/auto-juliaformatter-pr

[AUTO] JuliaFormatter.jl run

2 parents: 9b0d87d + 25de582

3 files changed: +31 -22 lines

src/model/api.jl (+5 -1)

@@ -283,7 +283,11 @@ function hprod_residual! end
     Hop = hess_op_residual(model, x, i)
 Computes the Hessian of the i-th residual at x, in linear operator form.
 """
-function NLPModels.hess_op_residual(model::AbstractVIModel{T, S}, x::AbstractVector{T}, i::Int) where {T, S}
+function NLPModels.hess_op_residual(
+  model::AbstractVIModel{T, S},
+  x::AbstractVector{T},
+  i::Int,
+) where {T, S}
   @lencheck model.meta.nvar x
   Hiv = S(undef, model.meta.nvar)
   return hess_op_residual!(model, x, i, Hiv)
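
As a usage note (not part of the diff): the reformatted method only splits the signature over several lines, so calls are unchanged. A minimal sketch of the documented call pattern, where `vi` stands for some concrete `AbstractVIModel` with two variables (a placeholder, not defined in this commit):

```julia
using NLPModels  # hess_op_residual / hess_op_residual! come from the NLPModels API

# `vi` is a hypothetical AbstractVIModel instance with vi.meta.nvar == 2.
x = ones(2)
Hop = hess_op_residual(vi, x, 1)  # Hessian of the 1st residual, as a linear operator
v = [1.0, -1.0]
Hv = Hop * v                      # Hessian-vector product; no dense matrix is formed
```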

src/solvers/projected_reflected_gradient.jl (+25 -20)

@@ -3,30 +3,35 @@
 > SIAM Journal on Optimization, 25(1):502–520, 2015.
 """
 function ProjectedReflectedGradientVI(stp::AbstractStopping; rho0::Float64 = 0.5)
-    xk = stp.current_state.x
-    xkp = similar(xk)
-    yk = copy(xk)
-    rho = rho0
-    Fx = similar(xk)
-
-    OK = update_and_start!(stp)
-    while !OK
-        abcresidual!(stp.pb, xk, rho, yk, Fx)
-        project!(stp.pb, Fx, xk) # possible failure here
-        yk .= 2 .* xkp .- xk
-
-        if norm(xk - xkp, Inf) < stp.meta.atol * rho
-            stp.meta.optimal = true
-        end
-        OK = update_and_stop!(stp, x = xk)
+  xk = stp.current_state.x
+  xkp = similar(xk)
+  yk = copy(xk)
+  rho = rho0
+  Fx = similar(xk)
+
+  OK = update_and_start!(stp)
+  while !OK
+    abcresidual!(stp.pb, xk, rho, yk, Fx)
+    project!(stp.pb, Fx, xk) # possible failure here
+    yk .= 2 .* xkp .- xk
+
+    if norm(xk - xkp, Inf) < stp.meta.atol * rho
+      stp.meta.optimal = true
     end
-
-    return xk
+    OK = update_and_stop!(stp, x = xk)
   end
-
+
+  return xk
+end
+
 export ProjectedReflectedGradientVI
 
-function ProjectedReflectedGradientVI(model::AbstractVIModel, x0::AbstractVector; rho0::Float64 = 0.5, kwargs...)
+function ProjectedReflectedGradientVI(
+  model::AbstractVIModel,
+  x0::AbstractVector;
+  rho0::Float64 = 0.5,
+  kwargs...,
+)
   stp = GenericStopping(model, x0; kwargs...)
   return ProjectedReflectedGradientVI(stp, rho0 = rho0)
 end
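
For context on the code being reformatted (nothing below is part of the diff): the solver implements Malitsky's projected reflected gradient method from the cited SIAM J. Optim. 25(1):502–520 (2015) paper, whose core iteration is x_{k+1} = P_C(x_k - rho * F(2 x_k - x_{k-1})). A self-contained sketch of that iteration on an illustrative box-constrained affine VI follows; the operator `F`, the box, the step `rho`, and the helper `prg` are assumptions for illustration and do not use this package's Stopping-based API:

```julia
using LinearAlgebra

# Projected reflected gradient sketch (Malitsky, 2015):
#   y_k     = 2 x_k - x_{k-1}
#   x_{k+1} = P_C(x_k - rho * F(y_k))
function prg(F, proj, x0; rho = 0.1, atol = 1e-8, maxiter = 10_000)
  xprev = copy(x0)
  x = copy(x0)
  for _ in 1:maxiter
    y = 2 .* x .- xprev            # reflected point
    xnew = proj(x .- rho .* F(y))  # projected step
    norm(xnew .- x, Inf) < atol && return xnew
    xprev, x = x, xnew
  end
  return x
end

# Illustrative data: a strongly monotone affine operator and the box [-5, 5]^2.
A = [2.0 1.0; -1.0 2.0]
b = [1.0, -1.0]
F(x) = A * x + b
proj(x) = clamp.(x, -5.0, 5.0)

xstar = prg(F, proj, zeros(2))  # converges to ≈ [-0.6, 0.2], where F(x*) = 0
```

Through the package's own API, the same solver would instead be reached via the second method in the diff, e.g. `ProjectedReflectedGradientVI(model, x0; rho0 = 0.5)`, which wraps the model in a `GenericStopping` before iterating.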

test/runtests.jl (+1 -1)

@@ -23,7 +23,7 @@ end
 
   xr = ones(T, 2)
   @test residual(vi, xr) == T[0; 0]
-  J = T[ 1 0; -20 10]
+  J = T[1 0; -20 10]
   @test jac_residual(vi, xr) == J
   v = ones(T, 2)
   @test jprod_residual(vi, xr, v) == T[1; -10]
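
The only change here is whitespace inside a typed matrix literal, which does not affect its value. A quick check (a sketch, assuming `T = Float64` as in a typical test instantiation):

```julia
T = Float64
@assert T[ 1 0; -20 10] == T[1 0; -20 10]  # same 2×2 matrix either way
```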
