| | |
| | | "cells": [ |
| | | { |
| | | "cell_type": "code", |
| | | "execution_count": 7,
| | | "metadata": { |
| | | "collapsed": true |
| | | }, |
| | |
| | | }, |
| | | { |
| | | "cell_type": "code", |
| | | "execution_count": 8,
| | | "metadata": { |
| | | "collapsed": true |
| | | }, |
| | |
| | | "source": [ |
| | | "#Change this paths according to your directories\n", |
| | | "images_path = \"/Data/nruiz9/data/facial_landmarks/AFLW/aflw/data/flickr/\"\n", |
| | | "storing_path = \"/Data/nruiz9/data/facial_landmarks/AFLW/aflw_cropped_loose/\""
| | | ] |
| | | }, |
| | | { |
| | | "cell_type": "code", |
| | | "execution_count": 9,
| | | "metadata": { |
| | | "collapsed": false |
| | | }, |
| | | "outputs": [
| | | { |
| | | "name": "stdout", |
| | | "output_type": "stream", |
| | | "text": [ |
| | | "Done\n" |
| | | ] |
| | | } |
| | | ], |
| | | "source": [ |
| | | "#Image counter\n", |
| | | "counter = 1\n", |
| | |
| | | " face_h = row[8]\n", |
| | | "\n", |
| | | " #Error correction\n", |
| | | " if(face_x < 0): face_x = 0\n", |
| | | " if(face_y < 0): face_y = 0\n", |
| | | " k = 0.15\n", |
| | | " x_min = face_x - image_w * k\n", |
| | | " x_max = face_x + image_w * (k+1)\n", |
| | | " y_min = face_y - image_h * k\n", |
| | | " y_max = face_y + image_h * (k+1)\n", |
| | | " \n", |
| | | " x_min = int(max(0, x_min))\n", |
| | | " x_max = int(min(image_w, x_max))\n", |
| | | " y_min = int(max(0, y_min))\n", |
| | | " y_max = int(min(image_h, y_max))\n", |
| | | "\n", |
| | | " if(face_w > image_w): \n", |
| | | " face_w = image_w\n", |
| | | " face_h = image_w\n", |
| | | " if(face_h > image_h): \n", |
| | | " face_h = image_h\n", |
| | | " face_w = image_h\n", |
| | | "\n", |
| | | " \n", |
| | | " #Crop the face from the image\n", |
| | | "        image_cropped = np.copy(image[y_min:y_max, x_min:x_max])\n",
| | | " #Uncomment the lines below if you want to rescale the image to a particular size\n", |
| | | "        to_size = 260\n",
| | | " image_cropped = cv2.resize(image_cropped, (to_size,to_size), interpolation = cv2.INTER_AREA)\n", |
| | | " #Uncomment the line below if you want to use adaptive histogram normalisation\n", |
| | | " #clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(5,5))\n", |