@@ -509,20 +509,15 @@ def __init__(
509
509
stages .append (stage )
510
510
self .feature_info += [dict (num_chs = self .embed_dims [stage_id ], reduction = 2 , module = f'stages.{ stage_id } ' )]
511
511
512
-
513
512
self .stages = nn .Sequential (* stages )
514
513
515
- #self.norm = norm_layer(self.num_features)
516
- #self.head = ClassifierHead(self.num_features, num_classes, pool_type=global_pool, drop_rate=drop_rate)
517
-
518
514
# if head_norm_first == true, norm -> global pool -> fc ordering, like most other nets
519
515
# otherwise pool -> norm -> fc, the default DaViT order, similar to ConvNeXt
520
516
# FIXME generalize this structure to ClassifierHead
521
517
self .norm_pre = norm_layer (self .num_features ) if head_norm_first else nn .Identity ()
522
518
self .head = nn .Sequential (OrderedDict ([
523
519
('global_pool' , SelectAdaptivePool2d (pool_type = global_pool )),
524
520
('norm' , nn .Identity () if head_norm_first else norm_layer (self .num_features )),
525
- #('flatten', nn.Flatten(1) if global_pool else nn.Identity()),
526
521
('drop' , nn .Dropout (self .drop_rate )),
527
522
('fc' , nn .Linear (self .num_features , num_classes ) if num_classes > 0 else nn .Identity ())]))
528
523
@@ -550,7 +545,6 @@ def get_classifier(self):
550
545
def reset_classifier(self, num_classes, global_pool=None):
    """Re-target the classification head for a new number of classes.

    Args:
        num_classes: output class count; 0 (or negative) installs an
            Identity so the model yields pooled features instead of logits.
        global_pool: optional pool type; when given, the head's pooling
            module is rebuilt with it, otherwise pooling is left untouched.
    """
    if global_pool is not None:
        # Swap the pooling module only when the caller asks for a new type.
        self.head.global_pool = SelectAdaptivePool2d(pool_type=global_pool)
    if num_classes > 0:
        new_fc = nn.Linear(self.num_features, num_classes)
    else:
        new_fc = nn.Identity()
    self.head.fc = new_fc
555
549
556
550
def forward_features (self , x ):
@@ -563,11 +557,6 @@ def forward_features(self, x):
563
557
return x
564
558
565
559
def forward_head (self , x , pre_logits : bool = False ):
566
- #return self.head(x, pre_logits=pre_logits)
567
- #x = self.head.global_pool(x)
568
- #x = self.norms(x)
569
- #x = self.head.fc(x)
570
- #return self.head.flatten(x)
571
560
x = self .head .global_pool (x )
572
561
x = self .head .norm (x .permute (0 , 2 , 3 , 1 )).permute (0 , 3 , 1 , 2 )
573
562
x = x .squeeze ()
@@ -624,7 +613,7 @@ def _cfg(url='', **kwargs):
624
613
return {
625
614
'url' : url ,
626
615
'num_classes' : 1000 , 'input_size' : (3 , 224 , 224 ), 'pool_size' : (7 , 7 ),
627
- 'crop_pct' : 0.875 , 'interpolation' : 'bilinear ' ,
616
+ 'crop_pct' : 0.850 , 'interpolation' : 'bicubic ' ,
628
617
'mean' : IMAGENET_DEFAULT_MEAN , 'std' : IMAGENET_DEFAULT_STD ,
629
618
'first_conv' : 'patch_embed.proj' , 'classifier' : 'head.fc' ,
630
619
** kwargs
0 commit comments