Commit

clean up more rebase mess
mcabbott committed Nov 23, 2024
1 parent 037d6ef · commit 317816c
Showing 2 changed files with 1 addition and 12 deletions.
src/Flux.jl (6 changes: 1 addition & 5 deletions)
@@ -18,7 +18,7 @@ import Optimisers: trainable
 using Random: default_rng
 
 using Zygote, ChainRulesCore
-using Zygote: Params, @adjoint, pullback
+using Zygote: @adjoint, pullback
 using Zygote.ForwardDiff: value
 using EnzymeCore: EnzymeCore
 
@@ -59,10 +59,6 @@ export Chain, Dense, Embedding, EmbeddingBag,
   setup, train!,
   # from Optimsers.jl
   destructure, freeze!, thaw!, adjust!, trainables, update!, trainable,
-  # from Functors.jl
-  functor, @functor, KeyPath, haskeypath, getkeypath,
-  # from Train/Optimisers.jl
-  setup, update!, destructure, freeze!, adjust!, params, trainable, trainables,
   withgradient,
   # init
   glorot_uniform,
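The dropped `Params` import and the duplicated export block both belong to the old implicit-parameter API; the exports that remain cover the explicit style. As a rough illustration (not part of this commit; the toy model, data, and loss below are made up), a training step with the retained API looks roughly like this:

    using Flux, Optimisers

    model = Dense(2 => 1)                                   # toy model, for illustration only
    opt_state = Flux.setup(Optimisers.Adam(0.01), model)    # retained `setup` export

    x, y = rand(Float32, 2, 8), rand(Float32, 1, 8)
    grads = Flux.gradient(m -> Flux.mse(m(x), y), model)    # differentiate w.r.t. the model itself, no Params
    opt_state, model = Optimisers.update!(opt_state, model, grads[1])
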
src/train.jl (7 changes: 0 additions & 7 deletions)
@@ -8,8 +8,6 @@ using ..Flux: Flux
 using ProgressLogging: @progress, @withprogress, @logprogress
 using Zygote: Zygote
 
-# import ..Flux.Optimise: train!, update!, Optimise # during 0.13, we add methods to the old functions
-
 export setup, train!
 
 using ProgressLogging: @progress, @withprogress, @logprogress
@@ -163,11 +161,6 @@ train!(loss, model::Duplicated, data, opt; cb = nothing) = _enzyme_train!(loss,
 # FluxEnzymeExt defines more specific _enzyme_train!(loss, model::Duplicated, data, opt; cb)
 _enzyme_train!(loss, model, data, opt; cb = nothing) = throw(ArgumentError("The method `train!(loss, Duplicated(model), data, opt_state)` is only available when Enzyme.jl is loaded"))
 
-# # Following src/deprecations.jl
-# function train!(loss, model::Duplicated, data, opt::Optimise.AbstractOptimiser; cb=nothing)
-#     train!(loss, model, data, _old_to_new(opt); cb)
-# end
-
 # This method let you use Optimisers.Descent() without setup, when there is no state
 function train!(loss, model::Duplicated, data, rule::Optimisers.AbstractRule; cb=nothing)
     train!(loss, model, data, _rule_to_state(model, rule); cb)
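For orientation, the surviving path is `train!(loss, model::Duplicated, data, rule)`, which builds optimiser state internally via `_rule_to_state` rather than going through the removed `AbstractOptimiser` deprecation shim. A minimal sketch, assuming Enzyme.jl is loaded (so the `Duplicated` wrapper and the Enzyme-specific `_enzyme_train!` method are available) and using a made-up toy model and dataset:

    using Flux, Enzyme, Optimisers

    model = Duplicated(Dense(2 => 1))                        # wrap the toy model for Enzyme's reverse mode
    data  = [(rand(Float32, 2), rand(Float32, 1)) for _ in 1:4]

    # Passing a bare rule (no setup call): train! turns it into state internally.
    Flux.train!((m, x, y) -> Flux.mse(m(x), y), model, data, Optimisers.Descent(0.1))
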
