safer gradients, less piracy (#2098)
mcabbott authored Nov 3, 2022
commit 4662c4c (1 parent: b2b0748)
Showing 3 changed files with 3 additions and 6 deletions.
Project.toml (4 changes: 1 addition & 3 deletions)
@@ -1,10 +1,9 @@
 name = "Flux"
 uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c"
-version = "0.13.7"
+version = "0.13.8"
 
 [deps]
 Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
-ArrayInterface = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
 CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
 ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
 Functors = "d9f16b24-f501-4c13-a1f2-28368ffc5196"
@@ -27,7 +26,6 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
 
 [compat]
 Adapt = "3.0"
-ArrayInterface = "3.1, 4, 5, 6"
 CUDA = "3"
 ChainRulesCore = "1.12"
 Functors = "0.3"
src/optimise/Optimise.jl (1 change: 0 additions & 1 deletion)
@@ -1,7 +1,6 @@
 module Optimise
 
 using LinearAlgebra
-import ArrayInterface
 
 export train!, update!,
   Descent, Adam, Momentum, Nesterov, RMSProp,
src/optimise/train.jl (4 changes: 2 additions & 2 deletions)
@@ -13,8 +13,8 @@ As a result, the parameters are mutated and the optimizer's internal state may change.
 The gradient could be mutated as well.
 """
 function update!(opt::AbstractOptimiser, x, x̄)
-  x̄r = ArrayInterface.restructure(x, x̄) # address some cases where Zygote's
-                                        # output are not mutable, see #1510
+  x̄r = copyto!(similar(x̄), x̄) # Flux.Optimise assumes it can mutate the gradient. This is not
+                              # safe due to aliasing, nor guaranteed to be possible, e.g. Fill.
   x .-= apply!(opt, x, x̄r)
 end
 
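The new line copies the gradient before `apply!` mutates it, guarding against the two failure modes named in the comment: Zygote can return immutable gradients (e.g. a lazy `FillArrays.Fill`), and a gradient that aliases the parameter array would be corrupted by in-place updates. A minimal sketch of both cases (the values and variable names here are made up for illustration):

using FillArrays

# Case 1: immutable gradient. A lazy Fill has no writable storage;
# `similar` allocates a plain mutable Array of the same shape and eltype,
# and `copyto!` materialises the values into it.
x̄ = Fill(0.5, 3)              # immutable AbstractVector{Float64}
x̄r = copyto!(similar(x̄), x̄)   # now a Vector{Float64}: [0.5, 0.5, 0.5]
x̄r .*= 2                      # safe: only the copy is mutated

# Case 2: aliasing. If the gradient shares memory with the parameter,
# mutating it mid-update would silently change the parameter too.
x = [1.0, 2.0, 3.0]
x̄ = x                         # gradient aliases the parameter
x̄r = copyto!(similar(x̄), x̄)   # independent copy; writes to x̄r leave x intact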
