diff --git a/Project.toml b/Project.toml
index 85c0d678f4..4ae397e6ff 100644
--- a/Project.toml
+++ b/Project.toml
@@ -49,7 +49,7 @@
 Preferences = "1"
 ProgressLogging = "0.1"
 Reexport = "1.0"
 SpecialFunctions = "2.1.2"
-Zygote = "0.6.49"
+Zygote = "0.6.67"
 cuDNN = "1"
 julia = "1.9"
diff --git a/test/layers/basic.jl b/test/layers/basic.jl
index f2353378df..95da13f0c9 100644
--- a/test/layers/basic.jl
+++ b/test/layers/basic.jl
@@ -421,7 +421,7 @@ end

 m1v = Chain([m1[1], m1[2]]) # vector of layers
 @test Zygote.hessian_dual(sum∘m1v, [1,2,3]) ≈ Zygote.hessian_dual(sum∘m1, [1,2,3])
-@test_broken Zygote.hessian_dual(sum∘m1v, [1,2,3]) ≈ Zygote.hessian_reverse(sum∘m1v, [1,2,3])
+@test Zygote.hessian_dual(sum∘m1v, [1,2,3]) ≈ Zygote.hessian_reverse(sum∘m1v, [1,2,3])

 # NNlib's softmax gradient writes in-place
 m2 = Chain(Dense(3,4,tanh), Dense(4,2), softmax)