                    default='', type=str)
parser.add_argument('--filename_list', dest='filename_list', help='Path to text file containing relative paths for every example.',
                    default='', type=str)
parser.add_argument('--snapshot', dest='snapshot', help='Path of model snapshot.',
                    default='', type=str)
parser.add_argument('--batch_size', dest='batch_size', help='Batch size.',
                    default=1, type=int)
parser.add_argument('--save_viz', dest='save_viz', help='Save images with pose cube.',
                    default=False, type=bool)
parser.add_argument('--iter_ref', dest='iter_ref', help='Number of iterative refinement passes.', default=1, type=int)
parser.add_argument('--dataset', dest='dataset', help='Dataset type.', default='AFLW2000', type=str)

args = parser.parse_args()
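
# Example invocation (the script name and the --gpu/--data_dir flag spellings
# are assumptions inferred from the dests used below; paths are placeholders):
#   python test_hopenet.py --gpu 0 --data_dir AFLW2000 --filename_list AFLW2000_filelist.txt \
#       --snapshot output/snapshots/hopenet.pkl --dataset AFLW2000 --batch_size 1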

cudnn.enabled = True
gpu = args.gpu_id
snapshot_path = args.snapshot

# ResNet101 with 3 outputs:
# model = hopenet.Hopenet(torchvision.models.resnet.Bottleneck, [3, 4, 23, 3], 66)
# ResNet50 backbone, 66 yaw/pitch/roll bins, with iterative refinement.
model = hopenet.Hopenet(torchvision.models.resnet.Bottleneck, [3, 4, 6, 3], 66, args.iter_ref)
# ResNet18:
# model = hopenet.Hopenet(torchvision.models.resnet.BasicBlock, [2, 2, 2, 2], 66)
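
# Load the trained weights and move the network to the GPU before testing.
# This is a minimal sketch, assuming the snapshot .pkl holds a state dict
# saved with torch.save(model.state_dict(), ...):
saved_state_dict = torch.load(snapshot_path)
model.load_state_dict(saved_state_dict)
model.cuda(gpu)
model.eval()  # switch batch-norm/dropout layers to inference behavior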

print 'Loading data.'

# transformations = transforms.Compose([transforms.Scale(224),
#                                       transforms.RandomCrop(224), transforms.ToTensor()])

transformations = transforms.Compose([transforms.Scale(224),
                                      transforms.CenterCrop(224), transforms.ToTensor(),
                                      transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])])
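# The mean/std above are the standard ImageNet normalization statistics used
# by torchvision's pretrained ResNets; note that transforms.Scale was renamed
# transforms.Resize in later torchvision releases.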

if args.dataset == 'AFLW2000':
    pose_dataset = datasets.AFLW2000(args.data_dir, args.filename_list,
                                     transformations)
elif args.dataset == 'BIWI':
    pose_dataset = datasets.BIWI(args.data_dir, args.filename_list, transformations)
elif args.dataset == 'AFLW':
    pose_dataset = datasets.AFLW(args.data_dir, args.filename_list, transformations)
elif args.dataset == 'AFW':
    pose_dataset = datasets.AFW(args.data_dir, args.filename_list, transformations)
else:
    print 'Error: not a valid dataset name'
    sys.exit()

test_loader = torch.utils.data.DataLoader(dataset=pose_dataset,
                                          batch_size=args.batch_size,
                                          num_workers=2)
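
# Per-batch evaluation: columns 0, 1 and 2 of `labels` hold the yaw, pitch
# and roll ground truth (in bin units); label_roll below is taken from
# column 2, and label_yaw / label_pitch are obtained the same way.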

label_roll = labels[:,2].float()

pre_yaw, pre_pitch, pre_roll, angles = model(images)
yaw = angles[args.iter_ref][:,0].cpu().data
pitch = angles[args.iter_ref][:,1].cpu().data
roll = angles[args.iter_ref][:,2].cpu().data
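
# Predictions and labels here are in 3-degree bin units (66 bins per angle),
# so the factor of 3 below converts the accumulated error to degrees.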
# Mean absolute error
print yaw.numpy(), label_yaw.numpy()
yaw_error += torch.sum(torch.abs(yaw - label_yaw) * 3)
pitch_error += torch.sum(torch.abs(pitch - label_pitch) * 3)
roll_error += torch.sum(torch.abs(roll - label_roll) * 3)
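
# After the whole test set is processed, the accumulated sums would typically
# be divided by the number of test images to report per-angle MAE in degrees,
# e.g. (assuming a counter `total` of processed images):
#   print('MAE (degrees) - yaw: %.4f, pitch: %.4f, roll: %.4f'
#         % (yaw_error / total, pitch_error / total, roll_error / total))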