I receive an image from the UIImagePickerController delegate like this:
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info
{
    NSString *mediaType = info[UIImagePickerControllerMediaType];
    [self dismissViewControllerAnimated:YES completion:nil];

    if ([mediaType isEqualToString:(NSString *)kUTTypeImage]) {
        UIImage *image = info[UIImagePickerControllerOriginalImage];
        //imgvprofileImage.image = image;
        //[self detectForFacesInUIImage:[UIImage imageNamed:@"image00.jpg"]];
        [self detectForFacesInUIImage:image];
    }
    else if ([mediaType isEqualToString:(NSString *)kUTTypeMovie]) {
        // Code here to support video if enabled
    }
}
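One difference I noticed is that an image coming back from the camera carries orientation metadata, while a bundled JPEG is usually already "up". A minimal check, just logging inside the delegate above (nothing beyond what is already there), would be something like:

// Hypothetical diagnostic: portrait camera captures often report
// UIImageOrientationRight rather than UIImageOrientationUp (== 0).
NSLog(@"Picked image orientation: %ld", (long)image.imageOrientation);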
When I pass a photo from the app bundle like this
[self detectForFacesInUIImage:[UIImage imageNamed:@"image00.jpg"]];
the detection works well and finds a face, but when I use the image returned from the camera it doesn't work:
[self detectForFacesInUIImage:image];
This is the method I use to detect the faces:
- (void)detectForFacesInUIImage:(UIImage *)facePicture
{
    CIImage *image = [CIImage imageWithCGImage:facePicture.CGImage];
    CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                              context:nil
                                              options:@{CIDetectorAccuracy : CIDetectorAccuracyLow}];
    NSArray *features = [detector featuresInImage:image];

    if (features.count == 0) {
        NSLog(@"There are no faces in the captured image");
    }

    for (CIFaceFeature *faceObject in features) {
        // Core Image uses a bottom-left origin, so flip the y coordinate into UIKit space
        CGRect modifiedFaceBounds = faceObject.bounds;
        modifiedFaceBounds.origin.y = facePicture.size.height - faceObject.bounds.size.height - faceObject.bounds.origin.y;
        [self addSubViewWithFrame:facePicture toRect:modifiedFaceBounds];
    }
}
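Since [CIImage imageWithCGImage:] ignores the UIImage's imageOrientation, I suspect the detector may be looking at a rotated bitmap for camera photos. A sketch of what I am considering, passing the orientation via the CIDetectorImageOrientation option (the exifOrientationForImage: helper below is my own, not part of the project yet):

// Hypothetical helper: map UIImageOrientation to the EXIF orientation value (1-8)
// that CIDetectorImageOrientation expects.
- (int)exifOrientationForImage:(UIImage *)image
{
    switch (image.imageOrientation) {
        case UIImageOrientationUp:            return 1;
        case UIImageOrientationDown:          return 3;
        case UIImageOrientationLeft:          return 8;
        case UIImageOrientationRight:         return 6;
        case UIImageOrientationUpMirrored:    return 2;
        case UIImageOrientationDownMirrored:  return 4;
        case UIImageOrientationLeftMirrored:  return 5;
        case UIImageOrientationRightMirrored: return 7;
    }
}

// The detection call inside detectForFacesInUIImage: would then become:
NSArray *features = [detector featuresInImage:image
                                      options:@{CIDetectorImageOrientation : @([self exifOrientationForImage:facePicture])}];

Would that explain why the bundled image works but the camera image doesn't, or is something else wrong in the code above?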