not resuming scheduler
johnzhang1999 committed Jul 10, 2019
1 parent e3a3284 commit 89a73fb
Showing 21 changed files with 5 additions and 2 deletions.
scripts/main.py (5 changes: 4 additions & 1 deletion)

@@ -151,13 +151,16 @@ def main():

    if args.resume and check_isfile(args.resume):
        args.start_epoch = resume_from_checkpoint(args.resume, model, scheduler=scheduler, optimizer=optimizer)

        # lr changing is BUG-gy!
        if args.lr != optimizer.param_groups[0]['lr']:
            old_lr = optimizer.param_groups[0]['lr']
            change_lr_to(optimizer, args.lr)
            print('Changed optimizer lr from {} to {}.'.format(old_lr, args.lr))

        # NOTE: not resuming scheduler
        scheduler = torchreid.optim.build_lr_scheduler(optimizer, **lr_scheduler_kwargs(args))

    print('Building {}-engine for {}-reid'.format(args.loss, args.app))
    engine = build_engine(args, datamanager, model, optimizer, scheduler, experiment)
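
The change_lr_to helper is not shown in this diff. Below is a minimal sketch of the idea, assuming the helper simply overwrites the lr of every optimizer param group, and using a plain-PyTorch StepLR in place of torchreid's build_lr_scheduler; it is an illustration, not the repo's actual implementation. The key point of the commit is that after resuming, the scheduler is rebuilt from scratch rather than restored from the checkpoint, so LR decay restarts at the (possibly overridden) learning rate.

    import torch

    def change_lr_to(optimizer, new_lr):
        # Overwrite the learning rate of every param group in place
        # (assumed behavior of the helper referenced in the diff).
        for param_group in optimizer.param_groups:
            param_group['lr'] = new_lr

    # Toy setup standing in for the resumed model/optimizer.
    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

    # Emulate "resume, then force the lr requested on the command line".
    change_lr_to(optimizer, 0.01)

    # The scheduler is built fresh instead of being restored from the checkpoint,
    # so its step count and decay schedule start over from the new lr.
    scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=20, gamma=0.1)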

13 binary files changed (contents not shown).
torchreid/models/osnet.py (2 changes: 1 addition & 1 deletion)

@@ -316,7 +316,7 @@ def forward(self, x):
 def osnet_custom(num_classes=1000, loss='softmax', **kwargs):
     # custom osnet
     return OSNet(num_classes, blocks=[OSBlock, OSBlock, OSBlock], layers=[2, 2, 2],
-                 channels=[64, 256, 384, 512], feature_dim=1024, loss=loss, **kwargs)
+                 channels=[64, 256, 384, 512], feature_dim=2048, loss=loss, **kwargs)

 def osnet_x1_0(num_classes=1000, loss='softmax', **kwargs):
     # standard size (width x1.0)
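
For context, feature_dim sets the size of the embedding produced by the fully connected head before the classifier, so this change enlarges the custom model's re-id feature vector from 1024 to 2048 dimensions. Below is a rough, simplified sketch of how such a head typically wires feature_dim in (global pooling, then Linear + BN + ReLU up to feature_dim, then a classifier); OSNetHeadSketch and its defaults are illustrative assumptions, not the actual torchreid code.

    import torch
    import torch.nn as nn

    class OSNetHeadSketch(nn.Module):
        def __init__(self, last_channels=512, feature_dim=2048, num_classes=1000):
            super().__init__()
            self.global_pool = nn.AdaptiveAvgPool2d(1)
            # feature_dim controls the size of the embedding used for re-id matching.
            self.fc = nn.Sequential(
                nn.Linear(last_channels, feature_dim),
                nn.BatchNorm1d(feature_dim),
                nn.ReLU(inplace=True),
            )
            self.classifier = nn.Linear(feature_dim, num_classes)

        def forward(self, feat_map):
            v = self.global_pool(feat_map).flatten(1)   # (N, last_channels)
            v = self.fc(v)                              # (N, feature_dim) embedding
            return self.classifier(v)                   # (N, num_classes) logits

    head = OSNetHeadSketch()
    logits = head(torch.randn(4, 512, 16, 8))           # e.g. a 16x8 feature map

Doubling feature_dim also doubles the input size of the classifier layer, so checkpoints saved with feature_dim=1024 will not load into this configuration without re-initializing the head.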
