diff --git a/docs/src/introduction/updating_to_v1.md b/docs/src/introduction/updating_to_v1.md
index cc9f2279b..4178ae38b 100644
--- a/docs/src/introduction/updating_to_v1.md
+++ b/docs/src/introduction/updating_to_v1.md
@@ -138,3 +138,5 @@ abstraction.
 - [`InstanceNorm`](@ref) now supports tracking statistics.
 - [`RNNCell`](@ref) and [`LSTMCell`](@ref) add `bias_ih` and `bias_hh` to the parameters
   to align with Pytorch. Both are controlled using `init_bias` and `use_bias`.
+- [`ConvTranspose`](@ref) allows `flipkernel=true` via `cross_correlation=true`. This makes
+  it efficient for MIOpen.
diff --git a/src/layers/conv.jl b/src/layers/conv.jl
index ed8fa404b..e28c78e1a 100644
--- a/src/layers/conv.jl
+++ b/src/layers/conv.jl
@@ -432,7 +432,7 @@ function Base.show(io::IO, m::MaxPool)
     print(io, "MaxPool(", m.k)
     all(==(0), m.pad) || print(io, ", pad=", PrettyPrinting.tuple_string(m.pad))
     m.stride == m.k || print(io, ", stride=", PrettyPrinting.tuple_string(m.stride))
-    return print(io, ")")
+    print(io, ")")
 end
 
 @doc doc"""
@@ -502,7 +502,7 @@ function Base.show(io::IO, m::MeanPool)
     print(io, "MeanPool(", m.k)
     all(==(0), m.pad) || print(io, ", pad=", PrettyPrinting.tuple_string(m.pad))
     m.stride == m.k || print(io, ", stride=", PrettyPrinting.tuple_string(m.stride))
-    return print(io, ")")
+    print(io, ")")
 end
 
 """
diff --git a/src/layers/recurrent.jl b/src/layers/recurrent.jl
index 2ba475606..fe039bfd6 100644
--- a/src/layers/recurrent.jl
+++ b/src/layers/recurrent.jl
@@ -30,7 +30,7 @@ LuxOps.eachslice(x::AbstractMatrix, ::BatchLastIndex) = LuxOps.eachslice(x, Val(
 function init_rnn_weight(rng::AbstractRNG, init_weight, hidden_dims, dims)
     if init_weight === nothing
         bound = inv(sqrt(hidden_dims))
-        y = randn32(rng, Float32, dims...)
+        y = randn32(rng, dims...)
         @. y = (y - 0.5f0) * 2 * bound
         return y
     end