@@ -2,7 +2,7 @@
 _isactive(m, x) = isnothing(m.active) ? NNlib.within_gradient(x) : m.active
 
 """
-    Dropout(p; dims=:, rng = default_rng_value())
+    Dropout(p; dims=:, rng = default_rng())
 
 Layer implementing [dropout](https://arxiv.org/abs/1207.0580) with the given probability.
 This is used as a regularisation, i.e. to reduce overfitting.
@@ -61,9 +61,9 @@ mutable struct Dropout{F<:Real,D,R<:AbstractRNG} |
   active::Union{Bool, Nothing}
   rng::R
 end
-Dropout(p::Real, dims, active) = Dropout(p, dims, active, default_rng_value())
+Dropout(p::Real, dims, active) = Dropout(p, dims, active, default_rng())
 
-function Dropout(p::Real; dims=:, rng = default_rng_value())
+function Dropout(p::Real; dims=:, rng = default_rng())
   0 ≤ p ≤ 1 || throw(ArgumentError("Dropout expects 0 ≤ p ≤ 1, got p = $p"))
   if p isa Integer # Dropout(0)
     return p==0 ? identity : zero
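
For reference, a minimal usage sketch of the renamed default (assuming `default_rng` here resolves to `Random.default_rng`, as the unqualified call suggests; `trainmode!` forces the layer active outside a gradient context):

```julia
using Flux, Random

d = Dropout(0.4)                               # rng now defaults to default_rng()
d2 = Dropout(0.4; rng = Random.default_rng())  # the same default, spelled out

x = randn(Float32, 3, 4)
Flux.trainmode!(d)   # set active = true, so dropout fires outside training
y = d(x)             # ≈40% of entries zeroed, survivors scaled by 1/(1 - p)
```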
@@ -92,7 +92,7 @@ function Base.show(io::IO, d::Dropout) |
 end
 
 """
-    AlphaDropout(p; rng = default_rng_value())
+    AlphaDropout(p; rng = default_rng())
 
 A dropout layer. Used in
 [Self-Normalizing Neural Networks](https://arxiv.org/abs/1706.02515).
@@ -126,8 +126,8 @@ mutable struct AlphaDropout{F,R<:AbstractRNG} |
     new{typeof(p), typeof(rng)}(p, active, rng)
   end
 end
-AlphaDropout(p, active) = AlphaDropout(p, active, default_rng_value())
-AlphaDropout(p; rng = default_rng_value()) = AlphaDropout(p, nothing, rng)
+AlphaDropout(p, active) = AlphaDropout(p, active, default_rng())
+AlphaDropout(p; rng = default_rng()) = AlphaDropout(p, nothing, rng)
 
 @functor AlphaDropout
 trainable(a::AlphaDropout) = (;)
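
A parallel sketch for `AlphaDropout`, passing an explicit RNG to show what the `rng` field buys: re-seeding reproduces the same mask (`MersenneTwister(1)` is a hypothetical choice; any `AbstractRNG` fits the `R<:AbstractRNG` parameter above):

```julia
using Flux, Random

ad = AlphaDropout(0.2; rng = MersenneTwister(1))
Flux.trainmode!(ad)
x = randn(Float32, 5)
y1 = ad(x)

# A fresh layer seeded identically applies the identical mask.
ad2 = AlphaDropout(0.2; rng = MersenneTwister(1))
Flux.trainmode!(ad2)
@assert ad2(x) == y1
```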