Skip to content

Commit

Permalink
Update lambda_resnet26rpt weights to 78.9, add better halonet26t weights at 79.1 with tweak to attention dim
Browse files Browse the repository at this point in the history
  • Loading branch information
rwightman committed Oct 9, 2021
1 parent 359d317 commit 4276af0
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions timm/models/byoanet.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@ def _cfg(url='', **kwargs):

'halonet_h1': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
'halonet26t': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_256-9b4bf0b3.pth',
input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_a1h_256-3083328c.pth',
input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
'sehalonet33ts': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/sehalonet33ts_256-87e053f9.pth',
input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
Expand All @@ -64,8 +64,8 @@ def _cfg(url='', **kwargs):
url='',
min_input_size=(3, 128, 128), input_size=(3, 256, 256), pool_size=(8, 8)),
'lambda_resnet26rpt_256': _cfg(
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_a2h_256-482adad8.pth',
fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/lambda_resnet26rpt_c_256-ab00292d.pth',
fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8), crop_pct=0.94),

'haloregnetz_b': _cfg(
url='',
Expand Down Expand Up @@ -149,7 +149,7 @@ def _cfg(url='', **kwargs):
stem_type='tiered',
stem_pool='maxpool',
self_attn_layer='halo',
self_attn_kwargs=dict(block_size=8, halo_size=2, dim_head=16)
self_attn_kwargs=dict(block_size=8, halo_size=2)
),
sehalonet33ts=ByoModelCfg(
blocks=(
Expand Down

0 comments on commit 4276af0

Please sign in to comment.