From 7f02504ed46be294945d6ed764aeac0b469db801 Mon Sep 17 00:00:00 2001
From: natanielruiz <nataniel777@hotmail.com>
Date: Sat, 12 Aug 2017 10:30:55 +0800
Subject: [PATCH] Added AFLW dataset

---
 code/datasets.py | 40 ++++++++++++++++++++++++++++++++++++++++
 1 files changed, 40 insertions(+), 0 deletions(-)

diff --git a/code/datasets.py b/code/datasets.py
index 29800fe..f6fcc45 100644
--- a/code/datasets.py
+++ b/code/datasets.py
@@ -179,6 +179,46 @@
         # 2,000
         return self.length
 
+class AFLW(Dataset):
+    # Head-pose dataset: each sample pairs an image with its (yaw, pitch, roll)
+    # annotation stored in a same-named .txt file under data_dir.
+    def __init__(self, data_dir, filename_path, transform, img_ext='.jpg', annot_ext='.txt', image_mode='RGB'):
+        self.data_dir = data_dir
+        self.transform = transform
+        self.img_ext = img_ext
+        self.annot_ext = annot_ext
+
+        filename_list = get_list_from_filenames(filename_path)
+
+        self.X_train = filename_list
+        self.y_train = filename_list
+        self.image_mode = image_mode
+        self.length = len(filename_list)
+
+    def __getitem__(self, index):
+        img = Image.open(os.path.join(self.data_dir, self.X_train[index] + self.img_ext))
+        img = img.convert(self.image_mode)
+        txt_path = os.path.join(self.data_dir, self.y_train[index] + self.annot_ext)
+
+        # The annotation's first line holds the pose in radians at fields 1-3;
+        # use a context manager so the file handle is closed after reading.
+        with open(txt_path, 'r') as annot:
+            line = annot.readline().split(' ')
+        pose = [float(line[1]), float(line[2]), float(line[3])]
+        # Convert radians to degrees.
+        yaw = pose[0] * 180 / np.pi
+        pitch = pose[1] * 180 / np.pi
+        roll = pose[2] * 180 / np.pi
+        # Discretize each angle into 3-degree bins over [-99, 99].
+        bins = np.array(range(-99, 102, 3))
+        labels = torch.LongTensor(np.digitize([yaw, pitch, roll], bins) - 1)
+
+        if self.transform is not None:
+            img = self.transform(img)
+
+        return img, labels, self.X_train[index]
+
+    def __len__(self):
+        # Total number of samples.
+        return self.length
+
 def get_list_from_filenames(file_path):
     # input: relative path to .txt file with file names
     # output: list of relative path names
--
Gitblit v1.8.0