Commit c06f0ab

fix according to comments
1 parent 8120ab7 commit c06f0ab

File tree

deploy/python/infer.py
paddleseg/models/backbones/hrnet.py
paddleseg/models/backbones/resnet_vd.py
paddleseg/models/bisenet.py
paddleseg/models/layers/wrap_functions.py

5 files changed: +20 -16 lines

deploy/python/infer.py (+1 -1)
@@ -95,7 +95,7 @@ def __init__(self, args):
             use_static=False,
             use_calib_mode=False)
         min_input_shape = {"x": [1, 3, 100, 100]}
-        max_input_shape = {"x": [1, 3, 2000, 2000]}
+        max_input_shape = {"x": [1, 3, 2000, 3000]}
         opt_input_shape = {"x": [1, 3, 192, 192]}
         pred_cfg.set_trt_dynamic_shape_info(
             min_input_shape, max_input_shape, opt_input_shape)
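
Note: this change widens the TensorRT dynamic-shape upper bound so inputs up to 3000 px wide stay inside the engine's allowed range; any runtime shape of "x" outside [min, max] fails engine selection. For context, a minimal sketch of how these bounds feed into the Paddle Inference API (the model/params paths are placeholders, not from this repo):

import paddle.inference as paddle_infer

# Minimal sketch with placeholder model files; mirrors the pattern
# used in deploy/python/infer.py.
pred_cfg = paddle_infer.Config("model.pdmodel", "model.pdiparams")
pred_cfg.enable_use_gpu(100, 0)  # 100 MB initial pool, GPU id 0
pred_cfg.enable_tensorrt_engine(
    workspace_size=1 << 30,
    max_batch_size=1,
    min_subgraph_size=3,
    precision_mode=paddle_infer.PrecisionType.Float32,
    use_static=False,
    use_calib_mode=False)

# Every runtime shape of input "x" must fall within [min, max];
# "opt" is the shape TensorRT tunes its kernels for.
min_input_shape = {"x": [1, 3, 100, 100]}
max_input_shape = {"x": [1, 3, 2000, 3000]}  # upper bound widened by this commit
opt_input_shape = {"x": [1, 3, 192, 192]}
pred_cfg.set_trt_dynamic_shape_info(
    min_input_shape, max_input_shape, opt_input_shape)

predictor = paddle_infer.create_predictor(pred_cfg)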

paddleseg/models/backbones/hrnet.py (+2 -2)
@@ -360,7 +360,7 @@ def __init__(self,
                 reduction_ratio=16,
                 name=name + '_fc')
 
-        self.add = layers.add()
+        self.add = layers.Add()
         self.relu = layers.Activation("relu")
 
     def forward(self, x):
@@ -422,7 +422,7 @@ def __init__(self,
                 reduction_ratio=16,
                 name=name + '_fc')
 
-        self.add = layers.add()
+        self.add = layers.Add()
         self.relu = layers.Activation("relu")
 
     def forward(self, x):

paddleseg/models/backbones/resnet_vd.py (+2 -2)
@@ -121,7 +121,7 @@ def __init__(self,
 
         self.shortcut = shortcut
         # NOTE: Use the wrap layer for quantization training
-        self.add = layers.add()
+        self.add = layers.Add()
         self.relu = layers.Activation(act="relu")
 
     def forward(self, inputs):
@@ -177,7 +177,7 @@ def __init__(self,
         self.shortcut = shortcut
         self.dilation = dilation
         self.data_format = data_format
-        self.add = layers.add()
+        self.add = layers.Add()
         self.relu = layers.Activation(act="relu")
 
     def forward(self, inputs):
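
Note: the NOTE in this hunk is the motivation for the whole commit. Quantization-aware training tools walk a model's sublayers to insert fake-quant observers, so an elementwise merge written as a bare `+` in forward is invisible to them, while `self.add = layers.Add()` is not. A hedged sketch of how such a residual block's forward would use the wrappers (the forward body is not part of this diff, and the attribute names are illustrative):

def forward(self, inputs):
    y = self.conv1(inputs)                    # illustrative conv stack
    short = inputs if self.shortcut else self.short(inputs)
    y = self.add(short, y)                    # observable layer, not bare `+`
    return self.relu(y)                       # observable layer, not F.relu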

paddleseg/models/bisenet.py (+7 -3)
@@ -131,12 +131,13 @@ def __init__(self, in_dim, out_dim):
         self.bn = layers.SyncBatchNorm(in_dim)
 
         self.conv_1x1 = layers.ConvBNReLU(in_dim, out_dim, 1)
+        self.add = layers.Add()
         self.conv_3x3 = nn.Conv2D(out_dim, out_dim, 3, 1, 1)
 
     def forward(self, x):
         gap = self.gap(x)
         bn = self.bn(gap)
-        conv1 = self.conv_1x1(bn) + x
+        conv1 = self.add(self.conv_1x1(bn), x)
         return self.conv_3x3(conv1)
 
 
@@ -152,9 +153,10 @@ def __init__(self, in_dim, out_dim, expand):
             layers.ConvBNReLU(in_dim, in_dim, 3),
             layers.DepthwiseConvBN(in_dim, expand_dim, 3),
             layers.ConvBN(expand_dim, out_dim, 1))
+        self.relu = layers.Activation("relu")
 
     def forward(self, x):
-        return F.relu(self.conv(x) + x)
+        return self.relu(self.conv(x) + x)
 
 
 class GatherAndExpansionLayer2(nn.Layer):
@@ -175,8 +177,10 @@ def __init__(self, in_dim, out_dim, expand):
             layers.DepthwiseConvBN(in_dim, in_dim, 3, stride=2),
             layers.ConvBN(in_dim, out_dim, 1))
 
+        self.relu = layers.Activation("relu")
+
     def forward(self, x):
-        return F.relu(self.branch_1(x) + self.branch_2(x))
+        return self.relu(self.branch_1(x) + self.branch_2(x))
 
 
 class DetailBranch(nn.Layer):
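
Note: the BiSeNet changes apply the same pattern as the backbones: the free function F.relu and the bare `+` are swapped for layer instances created once in __init__. A condensed sketch of the pattern in isolation (FuseBlock is a made-up name, not a class from this repo; the `layers` import path is assumed):

import paddle.nn as nn
from paddleseg.models import layers  # assumed import for the wrap layers

class FuseBlock(nn.Layer):
    """Hypothetical block showing functional ops rewritten as layers."""

    def __init__(self, dim):
        super().__init__()
        self.conv = nn.Conv2D(dim, dim, 3, padding=1)
        self.add = layers.Add()                # instead of `a + b`
        self.relu = layers.Activation("relu")  # instead of F.relu(...)

    def forward(self, x):
        return self.relu(self.add(self.conv(x), x))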

paddleseg/models/layers/wrap_functions.py (+8 -8)
@@ -19,63 +19,63 @@
 """
 
 
-class add(nn.Layer):
+class Add(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, y, name=None):
         return paddle.add(x, y, name)
 
 
-class subtract(nn.Layer):
+class Subtract(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, y, name=None):
         return paddle.subtract(x, y, name)
 
 
-class multiply(nn.Layer):
+class Multiply(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, y, name=None):
         return paddle.multiply(x, y, name)
 
 
-class divide(nn.Layer):
+class Divide(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, y, name=None):
         return paddle.divide(x, y, name)
 
 
-class reshape(nn.Layer):
+class Reshape(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, shape, name=None):
         return paddle.reshape(x, shape, name)
 
 
-class transpose(nn.Layer):
+class Transpose(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, perm, name=None):
         return paddle.transpose(x, perm, name)
 
 
-class concat(nn.Layer):
+class Concat(nn.Layer):
     def __init__(self):
         super().__init__()
 
     def forward(self, x, axis=0, name=None):
         return paddle.concat(x, axis, name)
 
 
-class flatten(nn.Layer):
+class Flatten(nn.Layer):
     def __init__(self):
         super().__init__()
