Skip to content

Commit

Permalink
Type assert for safety
Browse files Browse the repository at this point in the history
  • Loading branch information
avik-pal committed Jun 23, 2024
1 parent 2b00266 commit 66a5cda
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 10 deletions.
24 changes: 17 additions & 7 deletions ext/LuxReverseDiffExt/training.jl
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,15 @@ end
# Uncompiled ReverseDiff
# Uncompiled-ReverseDiff entry point: seed the training state with a gradient
# cache, then dispatch to the cached-state method.
@inline function __uncompiled_reverse_diff(obj_fn::F, data, ts::TrainState) where {F}
    # Zeroed shadow of the parameters used as gradient accumulation storage.
    grads = Lux.recursive_make_zero(ts.parameters)
    # The `true` flag in TrainingBackendCache{:ReverseDiff, true} marks the
    # cache as freshly zeroed, so the cached method can use `dparameters`
    # without re-zeroing (see the `FT ? ... : ...` branch in that method).
    ts_new = TrainState(TrainingBackendCache{:ReverseDiff, true}(grads, nothing), obj_fn,
        ts.model, ts.parameters, ts.states, ts.optimizer_state, ts.step)
    return __uncompiled_reverse_diff(obj_fn, data, ts_new)
end

@inline function __uncompiled_reverse_diff(obj_fn::F, data,
ts::TrainState{<:TrainingBackendCache{:ReverseDiff, FT}}) where {F, FT}
tape = ReverseDiff.InstructionTape()
dparams = FT ? ts.cache.dparameters : Lux.recursive_make_zero!!(ts.cache.dparameters)
tape = ReverseDiff.InstructionTape()
ps_tracked = Lux.recursive_map(
Lux.__Fix3(ReverseDiff.TrackedArray, tape), ts.parameters, dparams)

Expand All @@ -44,12 +44,22 @@ end
# Compiled ReverseDiff
# Compiled-ReverseDiff entry point: seed the training state with a gradient
# cache, then dispatch to the cached-state method.
@inline function __compiled_reverse_diff(obj_fn::F, data, ts::TrainState) where {F}
    # Zeroed shadow of the parameters used as gradient accumulation storage.
    grads = Lux.recursive_make_zero(ts.parameters)
    # `true` marks the cache as freshly zeroed; the cached method's FT branch
    # then uses `dparameters` as-is instead of zeroing it again.
    ts_new = TrainState(TrainingBackendCache{:ReverseDiff, true}(grads, nothing), obj_fn,
        ts.model, ts.parameters, ts.states, ts.optimizer_state, ts.step)
    return __compiled_reverse_diff(obj_fn, data, ts_new)
end

## Tape hasn't been compiled yet: the cache's tape slot is `Nothing`.
@inline function __compiled_reverse_diff(obj_fn::F, data,
        ts::TrainState{<:TrainingBackendCache{:ReverseDiff, FT, P, Nothing}}) where {
        F, FT, P}
    # FT == true means `dparameters` was just allocated as zeros by the entry
    # method; otherwise zero it (in place where leaves allow it).
    dparams = FT ? ts.cache.dparameters : Lux.recursive_make_zero!!(ts.cache.dparameters)
    tape = ReverseDiff.InstructionTape()
    # Wrap each parameter leaf as a TrackedArray recording onto `tape`, pairing
    # it with the matching `dparams` leaf as gradient storage.
    ps_tracked = Lux.recursive_map(
        Lux.__Fix3(ReverseDiff.TrackedArray, tape), ts.parameters, dparams)

    loss, st, stats = obj_fn(ts.model, ps_tracked, ts.states, data)

    # NOTE(review): `error(1)` aborts before any reverse pass or tape
    # compilation happens — this looks like an unfinished/placeholder path
    # committed mid-development. Confirm against the follow-up commit before
    # relying on this method.
    error(1)
end
6 changes: 3 additions & 3 deletions src/helpers/recursive_ops.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ common cases.
Any leaves of `x` that are arrays and allow in-place addition will be modified in place.
"""
# Leaf-wise in-place-capable addition over a nested structure (see docstring
# above); `@inline` since this is a trivial forwarding definition.
@inline recursive_add!!(x, y) = recursive_map(__add!!, x, y)

"""
recursive_eltype(x)
Expand Down Expand Up @@ -38,7 +38,7 @@ Recursively create a zero value for a nested structure `x`. This is equivalent t
See also [`Lux.recursive_make_zero!!`](@ref).
"""
# The `::typeof(x)` assertion enforces that the zeroed result has exactly the
# input's structure type — a safety net (and inference aid) in case
# `recursive_map` is not concretely inferred.
@inline recursive_make_zero(x) = recursive_map(__zero, x)::typeof(x)

"""
recursive_make_zero!!(x)
Expand All @@ -48,7 +48,7 @@ in-place zeroing will be modified in place.
See also [`Lux.recursive_make_zero`](@ref) for fully out-of-place version.
"""
# As with `recursive_make_zero`, assert the result keeps the input's exact
# structure type; `__zero!!` zeroes mutable leaves in place.
@inline recursive_make_zero!!(x) = recursive_map(__zero!!, x)::typeof(x)

"""
recursive_map(f, x, args...)
Expand Down

0 comments on commit 66a5cda

Please sign in to comment.