Commit 5bbb1db

Fix batchnorm typo
1 parent 20564f2 commit 5bbb1db

2 files changed (+8 −8)

segmentation_models_pytorch/decoders/pspnet/decoder.py (+6 −6)

@@ -12,17 +12,17 @@ def __init__(
         in_channels: int,
         out_channels: int,
         pool_size: int,
-        use_bathcnorm: bool = True,
+        use_batchnorm: bool = True,
     ):
         super().__init__()

         if pool_size == 1:
-            use_bathcnorm = False  # PyTorch does not support BatchNorm for 1x1 shape
+            use_batchnorm = False  # PyTorch does not support BatchNorm for 1x1 shape

         self.pool = nn.Sequential(
             nn.AdaptiveAvgPool2d(output_size=(pool_size, pool_size)),
             modules.Conv2dReLU(
-                in_channels, out_channels, (1, 1), use_batchnorm=use_bathcnorm
+                in_channels, out_channels, (1, 1), use_batchnorm=use_batchnorm
             ),
         )

@@ -38,7 +38,7 @@ def __init__(
         self,
         in_channels: int,
         sizes: Tuple[int, ...] = (1, 2, 3, 6),
-        use_bathcnorm: bool = True,
+        use_batchnorm: bool = True,
     ):
         super().__init__()

@@ -48,7 +48,7 @@ def __init__(
                     in_channels,
                     in_channels // len(sizes),
                     size,
-                    use_bathcnorm=use_bathcnorm,
+                    use_batchnorm=use_batchnorm,
                 )
                 for size in sizes
             ]
@@ -73,7 +73,7 @@ def __init__(
         self.psp = PSPModule(
             in_channels=encoder_channels[-1],
             sizes=(1, 2, 3, 6),
-            use_bathcnorm=use_batchnorm,
+            use_batchnorm=use_batchnorm,
         )

         self.conv = modules.Conv2dReLU(
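
For context on the guard at line 19-20 of the diff: the in-code comment says PyTorch does not support BatchNorm for a 1x1 shape. A minimal sketch of one failure mode that comment plausibly refers to, assuming it means the train-time statistics check (the shapes below are illustrative, not from this repo):

import torch
import torch.nn as nn

# In training mode, BatchNorm2d needs more than one value per channel to
# estimate batch statistics. A pool_size of 1 yields a 1x1 spatial map, so
# with batch size 1 each channel contributes exactly one value and the
# forward pass raises a ValueError.
bn = nn.BatchNorm2d(8)  # fresh modules are in training mode by default

out = bn(torch.randn(4, 8, 1, 1))  # OK: 4 values per channel

try:
    bn(torch.randn(1, 8, 1, 1))  # fails: only 1 value per channel
except ValueError as err:
    print(err)  # "Expected more than 1 value per channel when training, ..."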

segmentation_models_pytorch/decoders/upernet/decoder.py (+2 −2)

@@ -48,14 +48,14 @@ def forward(self, x):


 class FPNBlock(nn.Module):
-    def __init__(self, skip_channels, pyramid_channels, use_bathcnorm=True):
+    def __init__(self, skip_channels, pyramid_channels, use_batchnorm=True):
         super().__init__()
         self.skip_conv = (
             md.Conv2dReLU(
                 skip_channels,
                 pyramid_channels,
                 kernel_size=1,
-                use_batchnorm=use_bathcnorm,
+                use_batchnorm=use_batchnorm,
             )
             if skip_channels != 0
             else nn.Identity()
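
The misspelling was more than cosmetic where it appeared as a parameter name: any caller constructing these blocks with the standard spelling would hit a TypeError rather than a silently ignored option. A hypothetical repro (make_block is a stand-in, not the library's API):

# Stand-in for the pre-fix signature; the real FPNBlock needs more setup.
def make_block(skip_channels, pyramid_channels, use_bathcnorm=True):
    return (skip_channels, pyramid_channels, use_bathcnorm)

try:
    make_block(256, 128, use_batchnorm=False)  # standard spelling
except TypeError as err:
    print(err)  # unexpected keyword argument 'use_batchnorm'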
