# AbstractConfig #
##################

- @compat abstract type AbstractConfig end
+ abstract type AbstractConfig end

Base.show(io::IO, cfg::AbstractConfig) = print(io, typeof(cfg).name)

##################
# GradientConfig #
##################

- @compat immutable GradientConfig{I} <: AbstractConfig
+ struct GradientConfig{I} <: AbstractConfig
    input::I
    tape::InstructionTape
    # disable default outer constructor
-     (::Type{GradientConfig{I}}){I}(input, tape) = new{I}(input, tape)
+     GradientConfig{I}(input, tape) where {I} = new{I}(input, tape)
end

# "private" convenience constructor
- _GradientConfig{I}(input::I, tape::InstructionTape) = GradientConfig{I}(input, tape)
+ _GradientConfig(input::I, tape::InstructionTape) where {I} = GradientConfig{I}(input, tape)

"""
    ReverseDiff.GradientConfig(input, tp::InstructionTape = InstructionTape())
@@ -32,7 +32,7 @@ the target function's output.

See `ReverseDiff.gradient` for a description of acceptable types for `input`.
"""
- GradientConfig{T}(input::AbstractArray{T}, tp::InstructionTape = InstructionTape()) = GradientConfig(input, T, tp)
+ GradientConfig(input::AbstractArray{T}, tp::InstructionTape = InstructionTape()) where {T} = GradientConfig(input, T, tp)

GradientConfig(input::Tuple, tp::InstructionTape = InstructionTape()) = GradientConfig(input, eltype(first(input)), tp)

@@ -42,28 +42,28 @@ GradientConfig(input::Tuple, tp::InstructionTape = InstructionTape()) = Gradient
Like `GradientConfig(input, tp)`, except the provided type `D` is assumed to be the element
type of the target function's output.
"""
- function GradientConfig{D}(input::Tuple, ::Type{D}, tp::InstructionTape = InstructionTape())
+ function GradientConfig(input::Tuple, ::Type{D}, tp::InstructionTape = InstructionTape()) where D
    return _GradientConfig(map(x -> track(similar(x), D, tp), input), tp)
end

- function GradientConfig{D}(input::AbstractArray, ::Type{D}, tp::InstructionTape = InstructionTape())
+ function GradientConfig(input::AbstractArray, ::Type{D}, tp::InstructionTape = InstructionTape()) where D
    return _GradientConfig(track(similar(input), D, tp), tp)
end
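
# Usage sketch (added for illustration; not part of the original file). A
# GradientConfig can be built once and reused across `ReverseDiff.gradient`
# calls so the tape and tracked-input buffers are not re-allocated. `f` below
# is an arbitrary example target:
#
#   using ReverseDiff
#   f(x) = sum(abs2, x)
#   x = rand(3)
#   cfg = ReverseDiff.GradientConfig(x)      # element type taken from eltype(x)
#   ReverseDiff.gradient(f, x, cfg)
#
# For multi-argument targets, pass a tuple of inputs, e.g.
# `ReverseDiff.GradientConfig((rand(3), rand(4)))`.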

##################
# JacobianConfig #
##################

- @compat immutable JacobianConfig{I,O} <: AbstractConfig
+ struct JacobianConfig{I,O} <: AbstractConfig
    input::I
    output::O
    tape::InstructionTape
    # disable default outer constructor
-     (::Type{JacobianConfig{I,O}}){I,O}(input, output, tape) = new{I,O}(input, output, tape)
+     JacobianConfig{I,O}(input, output, tape) where {I,O} = new{I,O}(input, output, tape)
end

# "private" convenience constructor
- _JacobianConfig{I,O}(input::I, output::O, tape::InstructionTape) = JacobianConfig{I,O}(input, output, tape)
+ _JacobianConfig(input::I, output::O, tape::InstructionTape) where {I,O} = JacobianConfig{I,O}(input, output, tape)

"""
    ReverseDiff.JacobianConfig(input, tp::InstructionTape = InstructionTape())
@@ -99,14 +99,14 @@ stored or modified in any way.

See `ReverseDiff.jacobian` for a description of acceptable types for `input`.
"""
- function JacobianConfig{D}(output::AbstractArray{D}, input::Tuple, tp::InstructionTape = InstructionTape())
+ function JacobianConfig(output::AbstractArray{D}, input::Tuple, tp::InstructionTape = InstructionTape()) where D
    cfg_input = map(x -> track(similar(x), D, tp), input)
    cfg_output = track!(similar(output, TrackedReal{D,D,Void}), output, tp)
    return _JacobianConfig(cfg_input, cfg_output, tp)
end
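
# Usage sketch (added for illustration; not part of the original file). For an
# in-place target `f!(y, x)`, the pre-allocated output `y` is passed to the
# config only for its shape and element type (per the docstring above, it is
# not stored or modified):
#
#   using ReverseDiff
#   f!(y, x) = (y .= x .^ 2; y)              # arbitrary example target
#   x, y = rand(4), zeros(4)
#   cfg = ReverseDiff.JacobianConfig(y, x)
#   ReverseDiff.jacobian(f!, y, x, cfg)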

# we dispatch on V<:Real here because InstructionTape is actually also an AbstractArray
- function JacobianConfig{D,V<:Real}(output::AbstractArray{D}, input::AbstractArray{V}, tp::InstructionTape = InstructionTape())
+ function JacobianConfig(output::AbstractArray{D}, input::AbstractArray{V}, tp::InstructionTape = InstructionTape()) where {D,V<:Real}
    cfg_input = track(similar(input), D, tp)
    cfg_output = track!(similar(output, TrackedReal{D,D,Void}), output, tp)
    return _JacobianConfig(cfg_input, cfg_output, tp)
@@ -123,7 +123,7 @@ JacobianConfig(result::DiffResult, input, tp::InstructionTape) = JacobianConfig(
# HessianConfig #
#################

- immutable HessianConfig{G<:GradientConfig,J<:JacobianConfig} <: AbstractConfig
+ struct HessianConfig{G<:GradientConfig,J<:JacobianConfig} <: AbstractConfig
    gradient_config::G
    jacobian_config::J
end
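
# Usage sketch (added for illustration; not part of the original file). A
# HessianConfig simply pairs a GradientConfig with a JacobianConfig, matching
# the Hessian-as-Jacobian-of-the-gradient computation. Assuming a scalar-valued
# target `f`:
#
#   using ReverseDiff
#   f(x) = sum(abs2, x)
#   x = rand(3)
#   cfg = ReverseDiff.HessianConfig(x)
#   ReverseDiff.hessian(f, x, cfg)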

Like `HessianConfig(input, tp)`, except the provided type `D` is assumed to be the element
type of the target function's output.
"""
- function HessianConfig{D}(input::AbstractArray, ::Type{D}, gtp::InstructionTape = InstructionTape(), jtp::InstructionTape = InstructionTape())
+ function HessianConfig(input::AbstractArray, ::Type{D}, gtp::InstructionTape = InstructionTape(), jtp::InstructionTape = InstructionTape()) where D
    jcfg = JacobianConfig(input, D, jtp)
    gcfg = GradientConfig(jcfg.input, gtp)
    return HessianConfig(gcfg, jcfg)