From c495a0f6b13b794bab9f6e3423d5038ce645d816 Mon Sep 17 00:00:00 2001
From: natanielruiz <nataniel777@hotmail.com>
Date: Wed, 13 Sep 2017 21:12:59 +0800
Subject: [PATCH] Batch testing and hopenet training complete

---
 code/hopenet.py |    6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/code/hopenet.py b/code/hopenet.py
index 5bac804..81e645c 100644
--- a/code/hopenet.py
+++ b/code/hopenet.py
@@ -41,7 +41,7 @@
 
 class Hopenet(nn.Module):
     # This is just Hopenet with 3 output layers for yaw, pitch and roll.
-    def __init__(self, block, layers, num_bins):
+    def __init__(self, block, layers, num_bins, iter_ref):
         self.inplanes = 64
         super(Hopenet, self).__init__()
         self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
@@ -62,6 +62,8 @@
         self.fc_finetune = nn.Linear(512 * block.expansion + 3, 3)
 
         self.idx_tensor = Variable(torch.FloatTensor(range(66))).cuda()
+
+        self.iter_ref = iter_ref
 
         for m in self.modules():
             if isinstance(m, nn.Conv2d):
@@ -117,7 +119,7 @@
         angles = []
         angles.append(torch.cat([yaw, pitch, roll], 1))
 
-        for idx in xrange(1):
+        for idx in xrange(self.iter_ref):
             angles.append(self.fc_finetune(torch.cat((angles[-1], x), 1)))
 
         return pre_yaw, pre_pitch, pre_roll, angles

--
Gitblit v1.8.0