Skip to content

Commit

Permalink
add scale selection for hrformer
Browse files Browse the repository at this point in the history
  • Loading branch information
taylormordan committed Nov 2, 2023
1 parent e6a695e commit d42356a
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 21 deletions.
9 changes: 7 additions & 2 deletions src/openpifpaf/network/basenetworks.py
Original file line number Diff line number Diff line change
Expand Up @@ -768,12 +768,13 @@ def configure(cls, args: argparse.Namespace):


class HRFormer(BaseNetwork):
scale_level = 0
pretrained = True
unused_parameters = True # For DDP initialization

def __init__(self, name, hrformer_net):
    """Wrap an HRFormer backbone factory as a BaseNetwork.

    Args:
        name: network name, forwarded to ``BaseNetwork``.
        hrformer_net: factory called as
            ``hrformer_net(scale_level, pretrained)`` and returning a
            ``(backbone, out_features)`` pair.
    """
    # Base stride is 4 at pyramid level 0; each level down halves the
    # spatial resolution, so the effective stride doubles per level.
    stride = 4 * (2 ** self.scale_level)
    hrformer_backbone, out_features = hrformer_net(self.scale_level, self.pretrained)
    super().__init__(name, stride=stride, out_features=out_features)
    self.backbone = hrformer_backbone

Expand All @@ -783,11 +784,15 @@ def forward(self, x):
@classmethod
def cli(cls, parser: argparse.ArgumentParser):
    """Register HRFormer command-line options on *parser*."""
    hrformer_group = parser.add_argument_group('HRFormer')
    hrformer_group.add_argument(
        '--hrformer-scale-level',
        type=int, default=cls.scale_level,
        help='level of the HRFormer pyramid')
    # The store_false flag below hard-codes default=True; guard that the
    # class default still agrees with it.
    assert cls.pretrained
    hrformer_group.add_argument(
        '--hrformer-no-pretrain', dest='hrformer_pretrained',
        default=True, action='store_false',
        help='use randomly initialized models')

@classmethod
def configure(cls, args: argparse.Namespace):
    """Copy parsed CLI options onto the class-level defaults."""
    cls.pretrained = args.hrformer_pretrained
    cls.scale_level = args.hrformer_scale_level
54 changes: 35 additions & 19 deletions src/openpifpaf/network/hrformer.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,38 +153,54 @@ def adapt_hrformer(module):
return module_output


def hrformer(hrformer_config_fn=None,
             scale_level=0,
             concat_feature_maps=False,
             pretrained=True):
    """Build an HRFormer backbone followed by a feature-map processor.

    Args:
        hrformer_config_fn: callable producing an mmpose backbone config
            dict; invoked with ``multiscale_output=...``.
        scale_level: index of the HRFormer pyramid level to select
            (0 is the highest-resolution branch).
        concat_feature_maps: if True, concatenate all four pyramid
            levels, with the selected level moved to the front, instead
            of selecting a single level.
        pretrained: if True, initialize weights via ``init_weights()``.

    Returns:
        Tuple ``(backbone, out_features)`` where ``backbone`` is a
        ``torch.nn.Sequential`` and ``out_features`` is its channel count.
    """
    # Multi-scale output is required whenever we either pick a level
    # other than 0 or concatenate all levels.
    multiscale_output = (scale_level != 0) or concat_feature_maps
    hrformer_config_dict = hrformer_config_fn(multiscale_output=multiscale_output)
    hrformer_backbone = mmpose.models.build_backbone(hrformer_config_dict)
    if pretrained:
        hrformer_backbone.init_weights()
    hrformer_backbone = adapt_hrformer(hrformer_backbone)
    if concat_feature_maps:
        # Swap index 0 and scale_level so the selected level leads the
        # concatenated feature map.
        fmp_index = [scale_level, 1, 2, 3]
        fmp_index[scale_level] = 0
        fmp = mmpose.models.FeatureMapProcessor(select_index=fmp_index, concat=True)
        out_features = sum(hrformer_config_dict['extra']['stage4']['num_channels'])
    else:
        fmp = mmpose.models.FeatureMapProcessor(select_index=scale_level)
        out_features = hrformer_config_dict['extra']['stage4']['num_channels'][scale_level]
    return torch.nn.Sequential(hrformer_backbone, fmp), out_features


def hrformersmall(scale_level=0, pretrained=True):
    """HRFormer-Small backbone selecting a single pyramid level.

    Returns the ``(backbone, out_features)`` pair built by ``hrformer``.
    """
    return hrformer(hrformer_small_config,
                    scale_level=scale_level,
                    concat_feature_maps=False,
                    pretrained=pretrained)


def hrformersmallcat(pretrained=True):
out_features = 480
hrformer_backbone = hrformer(hrformer_small_config, concat_feature_maps=True, pretrained=pretrained)
def hrformersmallcat(scale_level=0, pretrained=True):
    """HRFormer-Small backbone concatenating all pyramid levels.

    Returns the ``(backbone, out_features)`` pair built by ``hrformer``.
    """
    return hrformer(hrformer_small_config,
                    scale_level=scale_level,
                    concat_feature_maps=True,
                    pretrained=pretrained)


def hrformerbase(pretrained=True):
out_features = 78
hrformer_backbone = hrformer(hrformer_base_config, concat_feature_maps=False, pretrained=pretrained)
def hrformerbase(scale_level=0, pretrained=True):
    """HRFormer-Base backbone selecting a single pyramid level.

    Returns the ``(backbone, out_features)`` pair built by ``hrformer``.
    """
    return hrformer(hrformer_base_config,
                    scale_level=scale_level,
                    concat_feature_maps=False,
                    pretrained=pretrained)


def hrformerbasecat(pretrained=True):
out_features = 1170
hrformer_backbone = hrformer(hrformer_base_config, concat_feature_maps=True, pretrained=pretrained)
def hrformerbasecat(scale_level=0, pretrained=True):
    """HRFormer-Base backbone concatenating all pyramid levels.

    Returns the ``(backbone, out_features)`` pair built by ``hrformer``.
    """
    return hrformer(hrformer_base_config,
                    scale_level=scale_level,
                    concat_feature_maps=True,
                    pretrained=pretrained)

0 comments on commit d42356a

Please sign in to comment.