All Samples(19) | Call(19) | Derive(0) | Import(0)
src/e/s/estimate.gender-0.4/estimate/gender/estimateGender.py (estimate.gender)
def create_feature_set(db, dbname, indir, basedir, ext, ftdir, objects, r, iod, w, lbp, nb, lt, ls, f, no):
  """Creates a full dataset matrix out of all the specified files"""
  dataset = []
  face_eyes_norm = bob.ip.FaceEyesNorm(eyes_distance = iod, crop_height = int(w*r), crop_width = w,
                                       crop_eyecenter_offset_h = w*r*0.4, crop_eyecenter_offset_w = w*0.5)
  cropped_image = numpy.ndarray((w*r, w), dtype = numpy.float64)
  if dbname == 'banca':
    eyes = db.annotations(obj.id)
    face_eyes_norm(img, cropped_image, re_y = eyes['reye'][0], re_x = eyes['reye'][1],
                   le_y = eyes['leye'][0], le_x = eyes['leye'][1])
  elif dbname == 'mobio':
    posdir = '/'.join(indir.split('/')[:-1]+['IMAGE_ANNOTATIONS'])
    eyes = numpy.loadtxt(str(obj.make_path(posdir,'.pos')))
    face_eyes_norm(img, cropped_image, re_y = eyes[1], re_x = eyes[0], le_y = eyes[3], le_x = eyes[2])
    pos_path = str(obj.make_path(indir,'.pos'))
    eyes = numpy.loadtxt(pos_path)
    face_eyes_norm(img, cropped_image, re_y = eyes[1], re_x = eyes[0], le_y = eyes[3], le_x = eyes[2])
  elif dbname == 'lfw':
    pos_path = str(obj.make_path(basedir+'/databases/lfw_eyec','.pos'))
    eyes = numpy.genfromtxt(pos_path,dtype='double')[:,1:]
    face_eyes_norm(img, cropped_image, re_y = eyes[0][1], re_x = eyes[0][0],
                   le_y = eyes[1][1], le_x = eyes[1][0])
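Across these excerpts the pattern is the same: construct bob.ip.FaceEyesNorm once with the crop geometry, pre-allocate a float64 destination of the crop size, then call the cropper per image with the right/left eye coordinates. A minimal, self-contained sketch of that pattern, assuming the bob 1.x API used above; the image and eye positions below are synthetic placeholders, not values from any of the databases:

import numpy
import bob

# Placeholder crop geometry mirroring the call above: width w, height w*r,
# inter-ocular distance iod, eye line at 40% of the height, centred horizontally.
iod, w, r = 33.0, 64, 1.25

face_eyes_norm = bob.ip.FaceEyesNorm(
    eyes_distance = iod,
    crop_height = int(w*r),
    crop_width = w,
    crop_eyecenter_offset_h = w*r*0.4,
    crop_eyecenter_offset_w = w*0.5)
cropped_image = numpy.ndarray((int(w*r), w), dtype = numpy.float64)

# Synthetic grayscale image and eye annotations (y, x per eye), standing in
# for the database-loaded image and .pos/annotation data used above.
img = (numpy.random.rand(240, 320) * 255).astype('uint8')
reye_y, reye_x = 120.0, 140.0
leye_y, leye_x = 120.0, 180.0

face_eyes_norm(img, cropped_image,
               re_y = reye_y, re_x = reye_x,
               le_y = leye_y, le_x = leye_x)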
src/m/a/maskattack.lbp-1.0.3/maskattack/lbp/script/calclbp.py (maskattack.lbp)
color_image, depth_image, eye_pos = obj.load(data_folder)
eye_pos = eye_pos.astype(numpy.double)
face_eyes_norm = bob.ip.FaceEyesNorm(eyes_distance = sz/2, crop_height = sz, crop_width = sz,
                                     crop_eyecenter_offset_h = sz/4, crop_eyecenter_offset_w = sz/2)
cropped_color = numpy.ndarray((sz, sz), dtype = numpy.float64)
cropped_depth = numpy.ndarray((sz, sz), dtype = numpy.float64)
# crop color image
frame = bob.ip.rgb_to_gray(color_image[k,:,:,:])
face_eyes_norm(frame, cropped_color, re_y = eye_pos[k][1], re_x = eye_pos[k][0],
               le_y = eye_pos[k][3], le_x = eye_pos[k][2])
cropped_color_all[k,0,:,:] = cropped_color
if(sum(eye_pos_d)==0):
  print 'ERROR: Eyes couldn\'t be registered!!!'
face_eyes_norm(frame, cropped_depth, re_y = eye_pos_d[1], re_x = eye_pos_d[0],
               le_y = eye_pos_d[3], le_x = eye_pos_d[2])
cropped_depth_all[k,0,:,:] = cropped_depth
# to save the depth map as an image after intensity normalization
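For the color stream, each RGB video frame is first converted to grayscale with bob.ip.rgb_to_gray before it is cropped. A compact per-frame sketch of that step, again with synthetic data; it assumes bob's planes-first color layout (3 x height x width), and the annotation order re_x, re_y, le_x, le_y is taken from the indexing in the excerpt:

import numpy
import bob

sz = 64  # placeholder crop size; the original comes from the script arguments
face_eyes_norm = bob.ip.FaceEyesNorm(
    eyes_distance = sz/2, crop_height = sz, crop_width = sz,
    crop_eyecenter_offset_h = sz/4, crop_eyecenter_offset_w = sz/2)
cropped_color = numpy.ndarray((sz, sz), dtype = numpy.float64)

# One synthetic RGB frame (3 x height x width) plus one annotation row
# ordered as in the excerpt: re_x, re_y, le_x, le_y.
color_frame = numpy.random.randint(0, 256, (3, 240, 320)).astype('uint8')
eye_pos = numpy.array([140.0, 120.0, 180.0, 120.0])

frame = bob.ip.rgb_to_gray(color_frame)
face_eyes_norm(frame, cropped_color,
               re_y = eye_pos[1], re_x = eye_pos[0],
               le_y = eye_pos[3], le_x = eye_pos[2])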
src/m/a/maskattack.study-1.0.0/maskattack/study/accumulate/hdf5_to_grayscale.py (maskattack.study)
hdf5_files = sorted([f for f in os.listdir(datadir) if f.endswith('.hdf5')])
face_eyes_norm = bob.ip.FaceEyesNorm(eyes_distance = 65, crop_height = 128, crop_width = 128,
                                     crop_eyecenter_offset_h = 32, crop_eyecenter_offset_w = 63.5)
cropped_image = numpy.ndarray((128, 128), dtype = numpy.float64)
img = color[frame,:,:,:]
eye = eyes[frame,:].astype('float')
face_eyes_norm(bob.ip.rgb_to_gray(img), cropped_image, re_y = eye[1], re_x = eye[0],
               le_y = eye[3], le_x = eye[2])
file_name = os.path.join(args.outputdir, f.split('.')[0]+'_%s.png') % str(frame/30+1).zfill(2)
bob.io.save(cropped_image.astype('uint8'), file_name)
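The last two lines above write the normalized crop to disk: the float64 buffer is cast back to uint8 and handed to bob.io.save, which chooses the image codec from the file extension. A short sketch of just that save step, with a placeholder crop and output path (the real path is built from the program arguments and the frame index):

import os
import numpy
import bob

# Stand-in for a crop produced by FaceEyesNorm as in the excerpt above.
cropped_image = (numpy.random.rand(128, 128) * 255).astype(numpy.float64)

out_dir = '/tmp/grayscale_crops'   # placeholder for args.outputdir
if not os.path.exists(out_dir):
  os.makedirs(out_dir)
file_name = os.path.join(out_dir, 'subject_01.png')
bob.io.save(cropped_image.astype('uint8'), file_name)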
src/a/n/antispoofing.verification.gmm-1.0.2/antispoofing/verification/features/dct.py (antispoofing.verification.gmm)
# Initializes cropper and destination array
FEN = bob.ip.FaceEyesNorm(CROP_EYES_D, CROP_H, CROP_W, CROP_OH, CROP_OW)
cropped_img = numpy.ndarray((CROP_H, CROP_W), 'float64')
# Extracts and crops a face
FEN(img, cropped_img, LH, LW, RH, RW)
# Preprocesses a face using Tan and Triggs
TT(cropped_img, preprocessed_img)
# Initializes cropper and destination array
FEN = bob.ip.FaceEyesNorm(CROP_EYES_D, CROP_H, CROP_W, CROP_OH, CROP_OW)
cropped_img = numpy.ndarray((CROP_H, CROP_W), 'float64')
# Extracts and crops a face
FEN(img, cropped_img, int(LH), int(LW), int(RH), int(RW))
# Preprocesses a face using Tan and Triggs
TT(cropped_img, preprocessed_img)
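Unlike the earlier excerpts, these call sites pass the eye coordinates positionally rather than by keyword; in the keyword form used by the other packages the order is re_y, re_x, le_y, le_x. A hedged sketch of the positional form, with placeholder values for the CROP_* constants (in the original they come from a configuration module that is not shown here):

import numpy
import bob

# Placeholder crop configuration standing in for the package's settings.
CROP_EYES_D, CROP_H, CROP_W, CROP_OH, CROP_OW = 33.0, 80, 64, 16.0, 32.0

FEN = bob.ip.FaceEyesNorm(CROP_EYES_D, CROP_H, CROP_W, CROP_OH, CROP_OW)
cropped_img = numpy.ndarray((CROP_H, CROP_W), 'float64')

img = (numpy.random.rand(240, 320) * 255).astype('uint8')  # synthetic input image
# Positional eye coordinates, matching the re_y, re_x, le_y, le_x keyword
# order used elsewhere in these examples.
FEN(img, cropped_img, 120.0, 140.0, 120.0, 180.0)
# The excerpt then applies Tan & Triggs preprocessing (TT); its construction
# is not shown above, so it is omitted here.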
src/x/b/xbob.paper.tpami2013-1.0.0/xbob/paper/tpami2013/features.py (xbob.paper.tpami2013)
# Initializes cropper and destination array
fen = bob.ip.FaceEyesNorm(crop_eyes_d, crop_h, crop_w, crop_oh, crop_ow)
cropped_img = numpy.ndarray(shape=(crop_h, crop_w), dtype=numpy.float64)
# Extracts and crops a face
fen(img, cropped_img, rh, rw, lh, lw)
# Preprocesses a face using Tan and Triggs
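Here too the cropper and the destination buffer are created once and reused for every image. A short sketch of that reuse in a loop, assuming a hypothetical batch of (image, right-eye, left-eye) tuples; the geometry values are placeholders for the crop_* parameters of the original function:

import numpy
import bob

crop_eyes_d, crop_h, crop_w, crop_oh, crop_ow = 33.0, 80, 64, 16.0, 32.0
fen = bob.ip.FaceEyesNorm(crop_eyes_d, crop_h, crop_w, crop_oh, crop_ow)
cropped_img = numpy.ndarray(shape=(crop_h, crop_w), dtype=numpy.float64)

# Synthetic batch: each entry is (grayscale image, (rh, rw), (lh, lw)).
batch = [((numpy.random.rand(240, 320) * 255).astype('uint8'),
          (120.0, 140.0), (120.0, 180.0)) for _ in range(3)]

crops = []
for img, (rh, rw), (lh, lw) in batch:
  fen(img, cropped_img, rh, rw, lh, lw)   # right eye first, then left eye
  crops.append(cropped_img.copy())        # copy: the buffer is overwritten next iteration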
src/f/a/facereclib-1.2.1/facereclib/preprocessing/FaceCrop.py (facereclib)
# generate cropper on the fly
cropper = bob.ip.FaceEyesNorm(
    self.m_cropped_image_size[0] + 2 * self.m_offset,       # cropped image height
    self.m_cropped_image_size[1] + 2 * self.m_offset,       # cropped image width
    self.m_cropped_positions[pair[0]][0] + self.m_offset,   # Y of first position (usually: right eye)
src/f/a/facereclib-HEAD/facereclib/preprocessing/FaceCrop.py (facereclib)
# generate cropper on the fly
cropper = bob.ip.FaceEyesNorm(
    self.m_cropped_image_size[0] + 2 * self.m_offset,       # cropped image height
    self.m_cropped_image_size[1] + 2 * self.m_offset,       # cropped image width
    self.m_cropped_positions[pair[0]][0] + self.m_offset,   # Y of first position (usually: right eye)
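Both facereclib excerpts are cut off in the middle of the constructor call, so the remaining arguments are not reproduced here. The idea they illustrate, building a cropper on the fly whose output is enlarged by a fixed border of m_offset pixels on every side, can be sketched with the eyes_distance-based constructor used by the other packages; the offset and geometry values below are hypothetical:

import numpy
import bob

offset = 2                        # hypothetical extra border, in pixels
eyes_distance = 33.0
crop_h, crop_w = 80, 64
eye_center_y, eye_center_x = 16.0, 32.0

# Enlarge the crop by the border and shift the eye-centre target accordingly,
# so the face lands in the same place inside the padded crop.
cropper = bob.ip.FaceEyesNorm(
    eyes_distance,
    crop_h + 2 * offset,          # cropped image height
    crop_w + 2 * offset,          # cropped image width
    eye_center_y + offset,        # Y of the eye centre inside the crop
    eye_center_x + offset)        # X of the eye centre inside the crop

cropped = numpy.ndarray((crop_h + 2 * offset, crop_w + 2 * offset), dtype=numpy.float64)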