1
1
"""
    conv_norm(kernel_size, inplanes::Int, outplanes::Int, activation = relu;
              norm_layer = BatchNorm, revnorm = false, preact = false, use_bn = true,
              stride = 1, pad = 0, dilation = 1, groups = 1, [bias, weight, init])

Create a convolution + batch normalization pair with activation.

# Arguments

  - `kernel_size`: size of the convolution kernel
  - `inplanes`: number of input feature maps
  - `outplanes`: number of output feature maps
  - `activation`: the activation function for the final layer
  - `norm_layer`: the normalization layer used
  - `revnorm`: set to `true` to place the batch norm before the convolution
  - `preact`: set to `true` to place the activation function before the batch norm
    (only compatible with `revnorm = false`)
  - `use_bn`: set to `false` to disable batch normalization
    (only compatible with `revnorm = false` and `preact = false`)
  - `stride`: stride of the convolution kernel
  - `pad`: padding of the convolution kernel
  - `dilation`: dilation of the convolution kernel
  - `groups`: groups for the convolution kernel
  - `bias`, `weight`, `init`: initialization for the convolution kernel (see [`Flux.Conv`](#))
"""
function conv_norm(kernel_size, inplanes::Int, outplanes::Int, activation = relu;
                   norm_layer = BatchNorm, revnorm = false, preact = false, use_bn = true,
                   kwargs...)
    # Without normalization only a plain Conv is valid; `preact`/`revnorm`
    # both require a norm layer to reorder against.
    if !use_bn
        if preact || revnorm
            throw(ArgumentError("`preact` only supported with `use_bn = true`"))
        else
            return [Conv(kernel_size, inplanes => outplanes, activation; kwargs...)]
        end
    end
    # `revnorm` puts the norm first, so it normalizes the *input* planes and
    # the activation moves onto the convolution instead.
    if revnorm
        activations = (conv = activation, bn = identity)
        bnplanes = inplanes
    else
        activations = (conv = identity, bn = activation)
        bnplanes = outplanes
    end
    if preact
        if revnorm
            throw(ArgumentError("`preact` and `revnorm` cannot be set at the same time"))
        else
            activations = (conv = activation, bn = identity)
        end
    end
    layers = [Conv(kernel_size, inplanes => outplanes, activations.conv; kwargs...),
              norm_layer(bnplanes, activations.bn)]
    # Return a Vector so callers can splat/concatenate into larger Chains.
    return revnorm ? reverse(layers) : layers
end
54
54
55
55
function conv_norm (kernel_size, ch:: Pair{<:Integer, <:Integer} , outplanes,
60
60
61
61
"""
    depthwise_sep_conv_norm(kernel_size, inplanes, outplanes, activation = relu;
                            norm_layer = BatchNorm, revnorm = false, use_norm = (true, true),
                            stride = 1, pad = 0, dilation = 1, [bias, weight, init])

Create a depthwise separable convolution chain as used in MobileNetv1.

See Fig. 3 in [reference](https://arxiv.org/abs/1704.04861v1).

# Arguments

  - `kernel_size`: size of the convolution kernel
  - `inplanes`: number of input feature maps
  - `outplanes`: number of output feature maps
  - `activation`: the activation function for the final layer
  - `norm_layer`: the normalization layer used
  - `revnorm`: set to `true` to place the batch norm before the convolution
  - `use_norm`: a tuple of two booleans to specify whether to use normalization for the
    first and second convolution
  - `stride`: stride of the first convolution kernel
  - `pad`: padding of the first convolution kernel
  - `dilation`: dilation of the first convolution kernel
  - `bias`, `weight`, `init`: initialization for the convolution kernel (see [`Flux.Conv`](#))
"""
function depthwise_sep_conv_norm(kernel_size, inplanes, outplanes, activation = relu;
                                 norm_layer = BatchNorm, revnorm = false,
                                 use_norm = (true, true), stride = 1, kwargs...)
    # Depthwise stage: one conv per input channel (`groups = inplanes`),
    # followed by a 1x1 pointwise conv mixing channels to `outplanes`.
    # `conv_norm` takes its norm toggle as `use_bn`, so map `use_norm` onto it.
    return vcat(conv_norm(kernel_size, inplanes, inplanes, activation;
                          norm_layer, revnorm, use_bn = use_norm[1], stride,
                          groups = inplanes, kwargs...),
                conv_norm((1, 1), inplanes, outplanes, activation;
                          norm_layer, revnorm, use_bn = use_norm[2]))
end
98
98
0 commit comments