
I want to build a demo project in which one part is based on image editing. In this project I want to know how, using the iPhone SDK, I can count the number of people present in a particular picture, whether it was taken with the camera or chosen from the iPhone photo library. Does anyone know how this is possible? I have no idea where to start. Any help would be appreciated.

Answers


Check out this code. You must import the following: CoreImage/CoreImage.h. After importing CoreImage/CoreImage.h, use this code:
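For reference, the import goes at the top of the implementation file that runs the detection (AppDelegate.m here is just an assumption about where you put the code), and the target needs to link against CoreImage.framework; on older SDKs without modules you may also need QuartzCore for the CALayer properties used below:

    // e.g. at the top of AppDelegate.m 
    #import <CoreImage/CoreImage.h>   // CIDetector, CIImage, CIFaceFeature 
    #import <QuartzCore/QuartzCore.h> // CALayer borderWidth/cornerRadius on older SDKs 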

    -(void)markFaces:(UIImageView *)facePicture 
    { 
    // draw a CI image with the previously loaded face detection picture 
    CIImage* image = [CIImage imageWithCGImage:facePicture.image.CGImage]; 

    // create a face detector - since speed is not an issue we'll use a high accuracy 
    // detector 
    CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace 
               context:nil options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]]; 

    // create an array containing all the detected faces from the detector  
    NSArray* features = [detector featuresInImage:image]; 

    // we'll iterate through every detected face. CIFaceFeature provides us 
    // with the width for the entire face, and the coordinates of each eye 
     // and the mouth if detected. Also provided are BOOLs for the eyes and 
     // mouth so we can check whether they were detected. 
    for(CIFaceFeature* faceFeature in features) 
    { 
     // get the width of the face 
     CGFloat faceWidth = faceFeature.bounds.size.width; 

     // create a UIView using the bounds of the face 
     UIView* faceView = [[UIView alloc] initWithFrame:faceFeature.bounds]; 

     // add a border around the newly created UIView 
     faceView.layer.borderWidth = 1; 
     faceView.layer.borderColor = [[UIColor redColor] CGColor]; 

     // add the new view to create a box around the face 
     [self.window addSubview:faceView]; 

     if(faceFeature.hasLeftEyePosition) 
     { 
      // create a UIView with a size based on the width of the face 
      UIView* leftEyeView = [[UIView alloc] initWithFrame:CGRectMake(faceFeature.leftEyePosition.x-faceWidth*0.15, faceFeature.leftEyePosition.y-faceWidth*0.15, faceWidth*0.3, faceWidth*0.3)]; 
      // change the background color of the eye view 
      [leftEyeView setBackgroundColor:[[UIColor blueColor] colorWithAlphaComponent:0.3]]; 
      // set the position of the leftEyeView based on the face 
      [leftEyeView setCenter:faceFeature.leftEyePosition]; 
      // round the corners 
      leftEyeView.layer.cornerRadius = faceWidth*0.15; 
      // add the view to the window 
      [self.window addSubview:leftEyeView]; 
     } 

     if(faceFeature.hasRightEyePosition) 
     { 
      // create a UIView with a size based on the width of the face 
      UIView* rightEyeView = [[UIView alloc] initWithFrame:CGRectMake(faceFeature.rightEyePosition.x-faceWidth*0.15, faceFeature.rightEyePosition.y-faceWidth*0.15, faceWidth*0.3, faceWidth*0.3)]; 
      // change the background color of the eye view 
      [rightEyeView setBackgroundColor:[[UIColor blueColor] colorWithAlphaComponent:0.3]]; 
      // set the position of the rightEyeView based on the face 
      [rightEyeView setCenter:faceFeature.rightEyePosition]; 
      // round the corners 
      rightEyeView.layer.cornerRadius = faceWidth*0.15; 
      // add the new view to the window 
      [self.window addSubview:rightEyeView]; 
     } 

     if(faceFeature.hasMouthPosition) 
     { 
      // create a UIView with a size based on the width of the face 
      UIView* mouth = [[UIView alloc] initWithFrame:CGRectMake(faceFeature.mouthPosition.x-faceWidth*0.2, faceFeature.mouthPosition.y-faceWidth*0.2, faceWidth*0.4, faceWidth*0.4)]; 
      // change the background color for the mouth to green 
      [mouth setBackgroundColor:[[UIColor greenColor] colorWithAlphaComponent:0.3]]; 
      // set the position of the mouthView based on the face 
      [mouth setCenter:faceFeature.mouthPosition]; 
      // round the corners 
      mouth.layer.cornerRadius = faceWidth*0.2; 
      // add the new view to the window 
      [self.window addSubview:mouth]; 
     } 
    } 
    } 

    -(void)faceDetector 
    { 
    // Load the picture for face detection 
    UIImageView* image = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"facedetectionpic.jpg"]]; 

    // Draw the face detection image 
    [self.window addSubview:image]; 

     // Run the face-marking code. UIKit views must be created and added on 
     // the main thread, so call markFaces: directly instead of in the background 
     [self markFaces:image]; 

    // flip image on y-axis to match coordinate system used by core image 
    [image setTransform:CGAffineTransformMakeScale(1, -1)]; 

    // flip the entire window to make everything right side up 
    [self.window setTransform:CGAffineTransformMakeScale(1, -1)]; 
    } 


    - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 
    { 
    self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 
    // Override point for customization after application launch. 
    self.viewController = [[ViewController alloc] initWithNibName:@"ViewController" bundle:nil]; 
    self.window.rootViewController = self.viewController; 
    [self.window makeKeyAndVisible]; 
    [self faceDetector]; // execute the faceDetector code 

    return YES; 
    } 
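Since the original question is about counting the people in a picture, the same CIDetector API can simply return the number of detected faces. Below is a minimal sketch, assuming ARC and a picture delivered by UIImagePickerController; the helper name countFacesInImage: is illustrative and not part of the answer above. For photos taken straight from the camera you may also want to pass CIDetectorImageOrientation in featuresInImage:options: so the detector knows how the image is rotated.

    // Returns the number of faces Core Image finds in a UIImage 
    - (NSUInteger)countFacesInImage:(UIImage *)picture 
    { 
     CIImage* ciImage = [CIImage imageWithCGImage:picture.CGImage]; 

     CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace 
                context:nil options:[NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]]; 

     // each CIFaceFeature corresponds to one detected face 
     NSArray* features = [detector featuresInImage:ciImage]; 
     return [features count]; 
    } 

    // Example: counting faces in a picture chosen with the camera or photo library 
    - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info 
    { 
     UIImage* picked = [info objectForKey:UIImagePickerControllerOriginalImage]; 
     NSUInteger peopleCount = [self countFacesInImage:picked]; 
     NSLog(@"Detected %lu face(s)", (unsigned long)peopleCount); 
     [picker dismissViewControllerAnimated:YES completion:nil]; 
    } 

You still have to present the UIImagePickerController yourself (sourceType UIImagePickerControllerSourceTypeCamera or UIImagePickerControllerSourceTypePhotoLibrary) and set its delegate; the snippet only shows the counting step, and it can only count people whose faces are visible to the detector.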

For this you will need to implement a few classes... Face Wrapper

Also, this falls under face detection, so you should do your research in that area. Hope this little tidbit helps!