
Commit beb031f

remove DiffBase dependency and update ForwardDiff dependency (#94)
1 parent 4f97151 · commit beb031f

23 files changed: +466 -446 lines

.travis.yml

Lines changed: 0 additions & 2 deletions
@@ -3,7 +3,6 @@ language: julia
 os:
   - linux
 julia:
-  - 0.5
   - 0.6
   - nightly
 matrix:
@@ -14,7 +13,6 @@ notifications:
 script:
   - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi
   - julia -e 'Pkg.clone(pwd()); Pkg.build("ReverseDiff"); Pkg.test("ReverseDiff"; coverage=VERSION >= v"0.6-")'
-
 after_success:
   # push coverage results to Coveralls
   - julia -e 'cd(Pkg.dir("ReverseDiff")); Pkg.add("Coverage"); using Coverage; Coveralls.submit(Coveralls.process_folder())'

REQUIRE

Lines changed: 7 additions & 3 deletions
@@ -1,5 +1,9 @@
-julia 0.5
-DiffBase 0.0.3 0.4.0
-ForwardDiff 0.3.4 0.5.0
+julia 0.6
+DiffResults 0.0.1
+DiffRules 0.0.1
+NaNMath 0.2.2
+SpecialFunctions 0.1.0
+ForwardDiff 0.6.0
+StaticArrays 0.5.0
 Compat 0.19.0
 FunctionWrappers 0.1
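
The net effect of this REQUIRE change is that the monolithic DiffBase dependency is split into the smaller DiffResults and DiffRules packages, with NaNMath, SpecialFunctions, and StaticArrays now pulled in directly. For downstream users the visible change is the module rename; a minimal sketch (the function `f` here is illustrative, not part of this commit):

    using ReverseDiff, DiffResults

    f(x) = sum(abs2, x)     # illustrative target function
    x = rand(4)

    # what used to be DiffBase.GradientResult(x) is now:
    result = DiffResults.GradientResult(x)
    ReverseDiff.gradient!(result, f, x)

    DiffResults.value(result)     # primal value f(x)
    DiffResults.gradient(result)  # gradient ∇f(x)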

examples/gradient.jl

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@ const compiled_f_tape = compile(f_tape)
 a, b = rand(100, 100), rand(100, 100)
 inputs = (a, b)
 results = (similar(a), similar(b))
-all_results = map(DiffBase.GradientResult, results)
+all_results = map(DiffResults.GradientResult, results)
 cfg = GradientConfig(inputs)

 ####################
@@ -31,7 +31,7 @@ cfg = GradientConfig(inputs)
 gradient!(results, compiled_f_tape, inputs)

 # the same as the above, but in addition to calculating the gradients, the value `f(a, b)`
-# is loaded into the provided `DiffResult` instances (see DiffBase.jl documentation).
+# is loaded into the provided `DiffResult` instances (see DiffResults.jl documentation).
 gradient!(all_results, compiled_f_tape, inputs)

 # this should be the second fastest method, and also non-allocating
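
Condensed, the updated example's pattern looks like the sketch below; the two-argument `f` is a stand-in for the function defined earlier in examples/gradient.jl:

    using ReverseDiff: GradientTape, compile, gradient!
    using DiffResults

    f(a, b) = sum(a * b)  # stand-in; the real example defines its own f

    a, b = rand(100, 100), rand(100, 100)
    inputs = (a, b)
    compiled_f_tape = compile(GradientTape(f, (rand(100, 100), rand(100, 100))))

    # preallocated DiffResults receive both the gradients and the value f(a, b)
    all_results = map(DiffResults.GradientResult, (similar(a), similar(b)))
    gradient!(all_results, compiled_f_tape, inputs)
    DiffResults.value(all_results[1])  # f(a, b)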

src/ReverseDiff.jl

Lines changed: 8 additions & 15 deletions
@@ -7,28 +7,21 @@ using FunctionWrappers: FunctionWrapper

 using Compat

-using DiffBase
-using DiffBase: DiffResult
+using DiffResults
+using DiffResults: DiffResult
+using DiffRules, SpecialFunctions, NaNMath

 using ForwardDiff
 using ForwardDiff: Dual, Partials
-
-if VERSION >= v"0.6.0-dev.1024"
-    const compat_product = Base.Iterators.product
-else
-    const compat_product = Base.product
-end
+using StaticArrays

 # Not all operations will be valid over all of these types, but that's okay; such cases
 # will simply error when they hit the original operation in the overloaded definition.
 const ARRAY_TYPES = (:AbstractArray, :AbstractVector, :AbstractMatrix, :Array, :Vector, :Matrix)
-const REAL_TYPES = (:Bool, :Integer, :Rational, :BigFloat, :BigInt, :AbstractFloat, :Real, :Dual)
+const REAL_TYPES = (:Bool, :Integer, :(Irrational{:e}), :(Irrational{:π}), :Rational, :BigFloat, :BigInt, :AbstractFloat, :Real, :Dual)

-const FORWARD_UNARY_SCALAR_FUNCS = (ForwardDiff.AUTO_DEFINED_UNARY_FUNCS..., :-, :abs, :conj)
-const FORWARD_BINARY_SCALAR_FUNCS = (:*, :/, :+, :-, :^, :atan2)
-const SKIPPED_UNARY_SCALAR_FUNCS = (:isinf, :isnan, :isfinite, :iseven, :isodd, :isreal,
-                                    :isinteger)
-const SKIPPED_BINARY_SCALAR_FUNCS = (:isequal, :isless, :<, :>, :(==), :(!=), :(<=), :(>=))
+const SKIPPED_UNARY_SCALAR_FUNCS = Symbol[:isinf, :isnan, :isfinite, :iseven, :isodd, :isreal, :isinteger]
+const SKIPPED_BINARY_SCALAR_FUNCS = Symbol[:isequal, :isless, :<, :>, :(==), :(!=), :(<=), :(>=)]

 include("tape.jl")
 include("tracked.jl")
@@ -46,6 +39,6 @@ include("api/gradients.jl")
 include("api/jacobians.jl")
 include("api/hessians.jl")

-export DiffBase
+export DiffResults

 end # module
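
Dropping the FORWARD_*_SCALAR_FUNCS lists works because DiffRules now owns the catalogue of scalar derivative definitions (with SpecialFunctions and NaNMath supplying the functions those rules mention). A minimal sketch of the DiffRules query API that replaces them, assuming the `diffrule`/`diffrules` functions as documented in DiffRules.jl:

    using DiffRules

    # enumerate every (module, function, arity) triple with a registered rule
    for (M, f, arity) in DiffRules.diffrules()
        arity == 1 || continue
        # derivative of each unary function, as an expression in :x
        dfdx = DiffRules.diffrule(M, f, :x)
    end

    DiffRules.diffrule(:Base, :sin, :x)    # returns :(cos(x))
    DiffRules.diffrule(:Base, :^, :x, :y)  # tuple of partials w.r.t. x and y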

src/api/Config.jl

Lines changed: 5 additions & 5 deletions
@@ -113,11 +113,11 @@ function JacobianConfig{D,V<:Real}(output::AbstractArray{D}, input::AbstractArra
 end

 """
-    ReverseDiff.JacobianConfig(result::DiffBase.DiffResult, input, tp::InstructionTape = InstructionTape())
+    ReverseDiff.JacobianConfig(result::DiffResults.DiffResult, input, tp::InstructionTape = InstructionTape())

-A convenience method for `JacobianConfig(DiffBase.value(result), input, tp)`.
+A convenience method for `JacobianConfig(DiffResults.value(result), input, tp)`.
 """
-JacobianConfig(result::DiffResult, input, tp::InstructionTape) = JacobianConfig(DiffBase.value(result), input, tp)
+JacobianConfig(result::DiffResult, input, tp::InstructionTape) = JacobianConfig(DiffResults.value(result), input, tp)

 #################
 # HessianConfig #
@@ -156,7 +156,7 @@ function HessianConfig{D}(input::AbstractArray, ::Type{D}, gtp::InstructionTape
 end

 """
-    ReverseDiff.HessianConfig(result::DiffBase.DiffResult, input::AbstractArray, gtp::InstructionTape = InstructionTape(), jtp::InstructionTape = InstructionTape())
+    ReverseDiff.HessianConfig(result::DiffResults.DiffResult, input::AbstractArray, gtp::InstructionTape = InstructionTape(), jtp::InstructionTape = InstructionTape())

 Like `HessianConfig(input, tp)`, but utilize `result` along with `input` to construct work
 buffers.
@@ -165,7 +165,7 @@ Note that `result` and `input` are only used for type and shape information; the
 stored or modified in any way.
 """
 function HessianConfig(result::DiffResult, input::AbstractArray, gtp::InstructionTape = InstructionTape(), jtp::InstructionTape = InstructionTape())
-    jcfg = JacobianConfig(DiffBase.gradient(result), input, jtp)
+    jcfg = JacobianConfig(DiffResults.gradient(result), input, jtp)
     gcfg = GradientConfig(jcfg.input, gtp)
     return HessianConfig(gcfg, jcfg)
 end
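
These convenience constructors let a preallocated DiffResult supply the shape and element-type information for a config's work buffers. A hedged sketch (the sizes and the names `hresult`/`jresult` are illustrative):

    using ReverseDiff: JacobianConfig, HessianConfig
    using DiffResults

    x = rand(5)
    hresult = DiffResults.HessianResult(x)  # holds value, gradient, and Hessian

    # hresult is only inspected for shape/type; it is not stored or modified
    hcfg = HessianConfig(hresult, x)

    y = rand(3)
    jresult = DiffResults.JacobianResult(y, x)
    jcfg = JacobianConfig(jresult, x)  # uses DiffResults.value(jresult) internally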

src/api/gradients.jl

Lines changed: 2 additions & 2 deletions
@@ -33,7 +33,7 @@ Returns `result`. This method is exactly like `ReverseDiff.gradient(f, input, cf
 it stores the resulting gradient(s) in `result` rather than allocating new memory.

 `result` can be an `AbstractArray` or a `Tuple` of `AbstractArray`s. The `result` (or any
-of its elements, if `isa(result, Tuple)`), can also be a `DiffBase.DiffResult`, in which
+of its elements, if `isa(result, Tuple)`), can also be a `DiffResults.DiffResult`, in which
 case the primal value `f(input)` (or `f(input...)`, if `isa(input, Tuple)`) will be stored
 in it as well.
 """
@@ -71,7 +71,7 @@ Returns `result`. This method is exactly like `ReverseDiff.gradient!(tape, input
 stores the resulting gradient(s) in `result` rather than allocating new memory.

 `result` can be an `AbstractArray` or a `Tuple` of `AbstractArray`s. The `result` (or any
-of its elements, if `isa(result, Tuple)`), can also be a `DiffBase.DiffResult`, in which
+of its elements, if `isa(result, Tuple)`), can also be a `DiffResults.DiffResult`, in which
 case the primal value `f(input)` (or `f(input...)`, if `isa(input, Tuple)`) will be stored
 in it as well.
 """

src/api/hessians.jl

Lines changed: 7 additions & 7 deletions
@@ -33,7 +33,7 @@ end
 Returns `result`. This method is exactly like `ReverseDiff.hessian(f, input, cfg)`, except
 it stores the resulting Hessian in `result` rather than allocating new memory.

-If `result` is a `DiffBase.DiffResult`, the primal value `f(input)` and the gradient
+If `result` is a `DiffResults.DiffResult`, the primal value `f(input)` and the gradient
 `∇f(input)` will be stored in it along with the Hessian `H(f)(input)`.
 """
 function hessian!(result, f, input::AbstractArray, cfg::HessianConfig = HessianConfig(input))
@@ -47,11 +47,11 @@ function hessian!(result::DiffResult, f, input::AbstractArray,
     ∇f! = (y, x) -> begin
         gradient_result = DiffResult(zero(eltype(y)), y)
         gradient!(gradient_result, f, x, cfg.gradient_config)
-        result = DiffBase.value!(result, value(DiffBase.value(gradient_result)))
+        result = DiffResults.value!(result, value(DiffResults.value(gradient_result)))
         return y
     end
-    jacobian!(DiffBase.hessian(result), ∇f!,
-              DiffBase.gradient(result), input,
+    jacobian!(DiffResults.hessian(result), ∇f!,
+              DiffResults.gradient(result), input,
               cfg.jacobian_config)
     return result
 end
@@ -80,7 +80,7 @@ end
 Returns `result`. This method is exactly like `ReverseDiff.hessian!(tape, input)`, except
 it stores the resulting Hessian in `result` rather than allocating new memory.

-If `result` is a `DiffBase.DiffResult`, the primal value `f(input)` and the gradient
+If `result` is a `DiffResults.DiffResult`, the primal value `f(input)` and the gradient
 `∇f(input)` will be stored in it along with the Hessian `H(f)(input)`.
 """
 function hessian!(result::AbstractArray, tape::Union{HessianTape,CompiledHessian}, input::AbstractArray)
@@ -91,8 +91,8 @@ end

 function hessian!(result::DiffResult, tape::Union{HessianTape,CompiledHessian}, input::AbstractArray)
     seeded_forward_pass!(tape, input)
-    seeded_reverse_pass!(DiffResult(DiffBase.gradient(result), DiffBase.hessian(result)), tape)
-    result = DiffBase.value!(result, func_hook(tape)(input))
+    seeded_reverse_pass!(DiffResult(DiffResults.gradient(result), DiffResults.hessian(result)), tape)
+    result = DiffResults.value!(result, func_hook(tape)(input))
     return result
 end

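The DiffResult path through hessian! computes the primal value, gradient, and Hessian in a single call, as the docstrings above state. A small sketch with an illustrative quadratic `f`:

    using ReverseDiff, DiffResults

    f(x) = sum(x .* x)
    x = rand(4)

    result = DiffResults.HessianResult(x)
    ReverseDiff.hessian!(result, f, x)

    DiffResults.value(result)     # f(x)
    DiffResults.gradient(result)  # ∇f(x), here 2x
    DiffResults.hessian(result)   # H(f)(x), here 2I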
src/api/jacobians.jl

Lines changed: 2 additions & 2 deletions
@@ -34,7 +34,7 @@ Returns `result`. This method is exactly like `ReverseDiff.jacobian(f, input, cf
 it stores the resulting Jacobian(s) in `result` rather than allocating new memory.

 `result` can be an `AbstractArray` or a `Tuple` of `AbstractArray`s. The `result` (or any
-of its elements, if `isa(result, Tuple)`), can also be a `DiffBase.DiffResult`, in which
+of its elements, if `isa(result, Tuple)`), can also be a `DiffResults.DiffResult`, in which
 case the primal value `f(input)` (or `f(input...)`, if `isa(input, Tuple)`) will be stored
 in it as well.
 """
@@ -114,7 +114,7 @@ Returns `result`. This method is exactly like `ReverseDiff.jacobian!(tape, input
 stores the resulting Jacobian(s) in `result` rather than allocating new memory.

 `result` can be an `AbstractArray` or a `Tuple` of `AbstractArray`s. The `result` (or any
-of its elements, if `isa(result, Tuple)`), can also be a `DiffBase.DiffResult`, in which
+of its elements, if `isa(result, Tuple)`), can also be a `DiffResults.DiffResult`, in which
 case the primal value of the target function will be stored in it as well.
 """
 function jacobian!(result, tape::Union{JacobianTape,CompiledJacobian}, input)
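
For jacobian!, a DiffResult likewise captures the primal output alongside the Jacobian; `g` below is an illustrative vector-valued function:

    using ReverseDiff, DiffResults

    g(x) = [x[1]^2, x[1] * x[2], sin(x[2])]
    x = rand(2)

    result = DiffResults.JacobianResult(g(x), x)
    ReverseDiff.jacobian!(result, g, x)

    DiffResults.value(result)     # g(x)
    DiffResults.jacobian(result)  # 3×2 Jacobian matrix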

src/api/utils.jl

Lines changed: 7 additions & 7 deletions
@@ -58,7 +58,7 @@ function seeded_reverse_pass!(result::AbstractArray, output::AbstractArray, inpu
 end

 function seeded_reverse_pass!(result::DiffResult, output::AbstractArray, input::TrackedArray, tape)
-    seeded_reverse_pass!(DiffBase.jacobian(result), output, input, tape)
+    seeded_reverse_pass!(DiffResults.jacobian(result), output, input, tape)
     extract_result_value!(result, output)
     return result
 end
@@ -94,8 +94,8 @@ function extract_result!(result::AbstractArray, output::TrackedReal, input::Trac
 end

 function extract_result!(result::DiffResult, output::TrackedReal, input::TrackedArray)
-    result = DiffBase.value!(result, value(output))
-    copy!(DiffBase.gradient(result), deriv(input))
+    result = DiffResults.value!(result, value(output))
+    copy!(DiffResults.gradient(result), deriv(input))
     return result
 end

@@ -105,8 +105,8 @@ function extract_result!(result::AbstractArray, output::Number)
 end

 function extract_result!(result::DiffResult, output::Number)
-    result = DiffBase.value!(result, output)
-    fill_zeros!(DiffBase.gradient(result))
+    result = DiffResults.value!(result, output)
+    fill_zeros!(DiffResults.gradient(result))
     return result
 end

@@ -118,12 +118,12 @@ function extract_result_value!(result::Tuple, output)
 end

 function extract_result_value!(result::DiffResult, output::AbstractArray)
-    result = DiffBase.value!(value, result, output)
+    result = DiffResults.value!(value, result, output)
     return result
 end

 function extract_result_value!(result::DiffResult, output::TrackedArray)
-    result = DiffBase.value!(result, value(output))
+    result = DiffResults.value!(result, value(output))
     return result
 end

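These helpers lean on the DiffResults accessor/mutator contract: `value!` returns a (possibly new) result that callers must rebind, while `gradient`/`jacobian`/`hessian` expose the storage that `copy!` and `fill_zeros!` write into. A standalone sketch of that contract (not code from this commit):

    using DiffResults

    r = DiffResults.GradientResult(rand(3))

    # value! returns the updated result, so rebind it, as extract_result! does
    r = DiffResults.value!(r, 42.0)
    DiffResults.value(r)  # 42.0

    # gradient(r) exposes mutable storage (copy! is the Julia 0.6-era spelling)
    copy!(DiffResults.gradient(r), [1.0, 2.0, 3.0])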