@@ -131,20 +131,21 @@ end
131131 Conv(weight::AbstractArray, [bias, activation; stride, pad, dilation])
132132
133133Constructs a convolutional layer with the given weight and bias.
134- Accepts the same keywords and has the same defaults as `Conv((4,4), 3 => 7, relu)`.
134+ Accepts the same keywords and has the same defaults as
135+ [`Conv(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref Conv).
135136
136137```jldoctest
137138julia> weight = rand(3, 4, 5);
138139
139140julia> bias = zeros(5);
140141
141- julia> c1 = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
142+ julia> layer = Conv(weight, bias, sigmoid) # expects 1 spatial dimension
142143Conv((3,), 4 => 5, σ) # 65 parameters
143144
144- julia> c1(randn(100, 4, 64)) |> size
145+ julia> layer(randn(100, 4, 64)) |> size
145146(98, 5, 64)
146147
147- julia> Flux.params(c1) |> length
148+ julia> Flux.params(layer) |> length
1481492
149150```
150151"""
@@ -238,10 +239,10 @@ See also [`Conv`](@ref) for more detailed description of keywords.
238239```jldoctest
239240julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
240241
241- julia> lay = ConvTranspose((5,5), 3 => 7, relu)
242+ julia> layer = ConvTranspose((5,5), 3 => 7, relu)
242243ConvTranspose((5, 5), 3 => 7, relu) # 532 parameters
243244
244- julia> lay(xs) |> size
245+ julia> layer(xs) |> size
245246(104, 104, 7, 50)
246247
247248julia> ConvTranspose((5,5), 3 => 7, stride=2)(xs) |> size
@@ -268,21 +269,22 @@ _channels_out(l::ConvTranspose) = size(l.weight)[end-1]*l.groups
268269 ConvTranspose(weight::AbstractArray, [bias, activation; stride, pad, dilation, groups])
269270
270271Constructs a ConvTranspose layer with the given weight and bias.
271- Accepts the same keywords and has the same defaults as `ConvTranspose((4,4), 3 => 7, relu)`.
272+ Accepts the same keywords and has the same defaults as
273+ [`ConvTranspose(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref ConvTranspose).
272274
273275# Examples
274276```jldoctest
275277julia> weight = rand(3, 4, 5);
276278
277279julia> bias = zeros(4);
278280
279- julia> c1 = ConvTranspose(weight, bias, sigmoid)
281+ julia> layer = ConvTranspose(weight, bias, sigmoid)
280282ConvTranspose((3,), 5 => 4, σ) # 64 parameters
281283
282- julia> c1(randn(100, 5, 64)) |> size  # transposed convolution will increase the dimension size (upsampling)
284+ julia> layer(randn(100, 5, 64)) |> size  # transposed convolution will increase the dimension size (upsampling)
283285(102, 4, 64)
284286
285- julia> Flux.params(c1) |> length
287+ julia> Flux.params(layer) |> length
2862882
287289```
288290"""
@@ -356,10 +358,10 @@ See [`Conv`](@ref) for a description of the arguments.
356358```jldoctest
357359julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
358360
359- julia> lay = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
361+ julia> layer = DepthwiseConv((5,5), 3 => 6, relu; bias=false)
360362Conv((5, 5), 3 => 6, relu, groups=3, bias=false) # 150 parameters
361363
362- julia> lay(xs) |> size
364+ julia> layer(xs) |> size
363365(96, 96, 6, 50)
364366
365367julia> DepthwiseConv((5, 5), 3 => 9, stride=2, pad=2)(xs) |> size
@@ -388,21 +390,17 @@ specifying the size of the convolutional kernel;
388390Parameters are controlled by additional keywords, with defaults
389391`init=glorot_uniform` and `bias=true`.
390392
391- CrossCor layer can also be manually constructed by passing in weights and
392- biases. This constructor accepts the layer accepts the same keywords (and has
393- the same defaults) as the `CrossCor((4,4), 3 => 7, relu)` method.
394-
395393See also [`Conv`](@ref) for more detailed description of keywords.
396394
397395# Examples
398396
399397```jldoctest
400398julia> xs = rand(Float32, 100, 100, 3, 50); # a batch of 50 RGB images
401399
402- julia> lay = CrossCor((5,5), 3 => 6, relu; bias=false)
400+ julia> layer = CrossCor((5,5), 3 => 6, relu; bias=false)
403401CrossCor((5, 5), 3 => 6, relu, bias=false) # 450 parameters
404402
405- julia> lay(xs) |> size
403+ julia> layer(xs) |> size
406404(96, 96, 6, 50)
407405
408406julia> CrossCor((5,5), 3 => 7, stride=3, pad=(2,0))(xs) |> size
@@ -422,20 +420,20 @@ end
422420 CrossCor(weight::AbstractArray, [bias, activation; stride, pad, dilation])
423421
424422Constructs a CrossCor layer with the given weight and bias.
425- Accepts the same keywords and has the same defaults as `CrossCor((5,5), 3 => 6, relu)`.
423+ Accepts the same keywords and has the same defaults as
424+ [`CrossCor(k::NTuple{N,Integer}, ch::Pair{<:Integer,<:Integer}, σ; ...)`](@ref CrossCor).
426425
427426# Examples
428427```jldoctest
429428julia> weight = rand(3, 4, 5);
430429
431430julia> bias = zeros(5);
432431
433- julia> lay = CrossCor(weight, bias, relu)
432+ julia> layer = CrossCor(weight, bias, relu)
434433CrossCor((3,), 4 => 5, relu) # 65 parameters
435434
436- julia> lay(randn(100, 4, 64)) |> size
435+ julia> layer(randn(100, 4, 64)) |> size
437436(98, 5, 64)
438-
439437```
440438"""
441439function CrossCor(w::AbstractArray{T,N}, bias = true, σ = identity;
@@ -667,10 +665,10 @@ julia> m[1](xs) |> size
667665julia> m(xs) |> size
668666(20, 20, 7, 50)
669667
670- julia> lay = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
668+ julia> layer = MaxPool((5,), pad=2, stride=(3,)) # one-dimensional window
671669MaxPool((5,), pad=2, stride=3)
672670
673- julia> lay(rand(Float32, 100, 7, 50)) |> size
671+ julia> layer(rand(Float32, 100, 7, 50)) |> size
674672(34, 7, 50)
675673```
676674"""
0 commit comments