2012-08-31 113 views

回答

0

如链接提到请在你的Mac系统中打开位于

/Applications/Utilities/Terminal.app

终端应用程序,并执行前面提到的命令。

+0

我卡在“重新构建 OpenCV 框架”这一步。我已经从他提供的两个链接下载了文件,但拿到 OpenCV 源码之后该怎么做? – Nims

18

如果你想在 iOS 上使用 OpenCV,应该使用 OpenCV 官方提供的框架(例如 2.4.2 版本)。

在这里获取最新版本:OpenCV for iOS。将它拖放到你的项目中,并在项目的前缀头文件(Prefix Header)中包含它:

ExampleApp-Prefix.pch:

// Guard the C++ header so this prefix header remains valid for plain
// Objective-C (.m) translation units as well as Objective-C++ (.mm) ones.
#ifdef __cplusplus 
    #import <opencv2/opencv.hpp> 
#endif 

你还需要把 UIImage 转换为 cv::Mat,才能在 OpenCV 中使用它。

UIImageCVMatConverter.h:

// 
// UIImageCVMatConverter.h 
// 

#import <Foundation/Foundation.h> 

/// Collection of class methods for converting between UIImage and cv::Mat,
/// plus helpers that downscale camera images and bake the EXIF orientation
/// into the pixel data.
@interface UIImageCVMatConverter : NSObject { 

} 

/// Converts a cv::Mat (grayscale or RGB/RGBA) into a UIImage.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat; 
/// Variant that renders at the given UIImage's dimensions.
/// NOTE(review): see the implementation — the cvMat argument is not used.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage*)image; 
/// Converts a UIImage into a 4-channel (RGBA, 8-bit) cv::Mat.
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image; 
/// Converts a UIImage into a single-channel grayscale cv::Mat.
+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image; 
/// Downscales (max side 640) and orientation-corrects a front-camera image.
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image; 
/// Downscales (max side 640) and orientation-corrects a back-camera image.
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image; 

@end 

UIImageCVMatConverter.mm:

// 
// UIImageCVMatConverter.mm 
// 

#import "UIImageCVMatConverter.h" 

@implementation UIImageCVMatConverter

#pragma mark - cv::Mat <-> UIImage conversion

// Renders `image` into a fresh RGBA bitmap and returns it as a UIImage.
// NOTE(review): the cvMat parameter is never read (it was unused in the
// original as well) — confirm the intended contract with callers.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat withUIImage:(UIImage *)image
{
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    // Fix: the original used CGImageGetColorSpace(image.CGImage) — a "Get"
    // accessor that does NOT transfer ownership — and then released it,
    // over-releasing a color space it never owned. Create an owned device
    // RGB space instead; it also always matches the RGBA layout below.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef contextRef = CGBitmapContextCreate(NULL, cols, rows, 8,
                                                    cols * 4, colorSpace,
                                                    kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    // (Removed a stray CGContextSetRGBStrokeColor call: a stroke color was
    // set but nothing was ever stroked, so it had no observable effect.)
    CGImageRef cgImage = CGBitmapContextCreateImage(contextRef);
    UIImage *result = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    return result;
}

// Wraps the pixel data of `cvMat` in a CGImage and returns it as a UIImage.
// Handles single-channel (gray) and multi-channel (RGB/RGBA) mats.
+ (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data
                                  length:cvMat.elemSize() * cvMat.total()];
    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    }
    else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    CGImageRef imageRef = CGImageCreate(cvMat.cols, cvMat.rows,
                                        8,                    // bits per component
                                        8 * cvMat.elemSize(), // bits per pixel
                                        cvMat.step[0],        // bytes per row
                                        colorSpace,
                                        kCGImageAlphaNone | kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return finalImage;
}

// Draws `image` into the backing store of a 4-channel (RGBA) cv::Mat.
+ (cv::Mat)cvMatFromUIImage:(UIImage *)image
{
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per channel, RGBA
    // Fix: same over-release as above — the original released a color space
    // obtained from CGImageGetColorSpace(). An owned RGB space also
    // guarantees the context matches CV_8UC4 even for grayscale sources.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, cols, rows, 8,
                                                    cvMat.step[0], colorSpace,
                                                    kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    return cvMat;
}

// Converts `image` to a single-channel grayscale cv::Mat.
+ (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image
{
    cv::Mat cvMat = [UIImageCVMatConverter cvMatFromUIImage:image];
    cv::Mat grayMat;
    if (cvMat.channels() == 1) {
        grayMat = cvMat;
    }
    else {
        // Fix: cvMatFromUIImage: produces RGBA data, so convert with
        // CV_RGBA2GRAY. The original's CV_BGR2GRAY treated red and blue as
        // swapped, skewing the luminance weights. cvtColor allocates the
        // output itself, so no manual preallocation is needed.
        cv::cvtColor(cvMat, grayMat, CV_RGBA2GRAY);
    }
    return grayMat;
}

#pragma mark - Orientation-correcting downscale

// Downscales `image` so its longest side is at most kMaxResolution and bakes
// the EXIF orientation into the pixels. Back-camera variant: Right and
// RightMirrored orientations get distinct transforms.
+ (UIImage *)scaleAndRotateImageBackCamera:(UIImage *)image
{
    static const int kMaxResolution = 640; // longest output side, in pixels
    CGImageRef imgRef = image.CGImage;
    CGFloat width = CGImageGetWidth(imgRef);
    CGFloat height = CGImageGetHeight(imgRef);
    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake(0, 0, width, height);
    // Shrink (preserving aspect ratio) only when the image exceeds the cap.
    if (width > kMaxResolution || height > kMaxResolution) {
        CGFloat ratio = width / height;
        if (ratio > 1) {
            bounds.size.width = kMaxResolution;
            bounds.size.height = bounds.size.width / ratio;
        }
        else {
            bounds.size.height = kMaxResolution;
            bounds.size.width = bounds.size.height * ratio;
        }
    }
    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    // Build the affine transform that maps the stored pixels upright;
    // the 90°-rotated orientations also swap the output bounds.
    switch (orient) {
        case UIImageOrientationUp:
            transform = CGAffineTransformIdentity;
            break;
        case UIImageOrientationUpMirrored:
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;
        case UIImageOrientationDown:
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;
        case UIImageOrientationLeftMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationLeft:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationRightMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        case UIImageOrientationRight:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, 0.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }
    UIGraphicsBeginImageContext(bounds.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Flip/scale the context first (CG draws with a bottom-left origin),
    // then apply the orientation transform and draw.
    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    }
    else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }
    CGContextConcatCTM(context, transform);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
    UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return returnImage;
}

// Downscales `image` so its longest side is at most kMaxResolution and bakes
// the EXIF orientation into the pixels. Front-camera variant: Right and
// RightMirrored share one (mirrored) transform because the front camera
// image is already mirrored.
+ (UIImage *)scaleAndRotateImageFrontCamera:(UIImage *)image
{
    static const int kMaxResolution = 640; // longest output side, in pixels
    CGImageRef imgRef = image.CGImage;
    CGFloat width = CGImageGetWidth(imgRef);
    CGFloat height = CGImageGetHeight(imgRef);
    CGAffineTransform transform = CGAffineTransformIdentity;
    CGRect bounds = CGRectMake(0, 0, width, height);
    // Shrink (preserving aspect ratio) only when the image exceeds the cap.
    if (width > kMaxResolution || height > kMaxResolution) {
        CGFloat ratio = width / height;
        if (ratio > 1) {
            bounds.size.width = kMaxResolution;
            bounds.size.height = bounds.size.width / ratio;
        } else {
            bounds.size.height = kMaxResolution;
            bounds.size.width = bounds.size.height * ratio;
        }
    }

    CGFloat scaleRatio = bounds.size.width / width;
    CGSize imageSize = CGSizeMake(CGImageGetWidth(imgRef), CGImageGetHeight(imgRef));
    CGFloat boundHeight;
    UIImageOrientation orient = image.imageOrientation;
    // Build the affine transform that maps the stored pixels upright;
    // the 90°-rotated orientations also swap the output bounds.
    switch (orient) {
        case UIImageOrientationUp:
            transform = CGAffineTransformIdentity;
            break;
        case UIImageOrientationUpMirrored:
            transform = CGAffineTransformMakeTranslation(imageSize.width, 0.0);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            break;
        case UIImageOrientationDown:
            transform = CGAffineTransformMakeTranslation(imageSize.width, imageSize.height);
            transform = CGAffineTransformRotate(transform, M_PI);
            break;
        case UIImageOrientationDownMirrored:
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.height);
            transform = CGAffineTransformScale(transform, 1.0, -1.0);
            break;
        case UIImageOrientationLeftMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(imageSize.height, imageSize.width);
            transform = CGAffineTransformScale(transform, -1.0, 1.0);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationLeft:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeTranslation(0.0, imageSize.width);
            transform = CGAffineTransformRotate(transform, 3.0 * M_PI / 2.0);
            break;
        case UIImageOrientationRight:
        case UIImageOrientationRightMirrored:
            boundHeight = bounds.size.height;
            bounds.size.height = bounds.size.width;
            bounds.size.width = boundHeight;
            transform = CGAffineTransformMakeScale(-1.0, 1.0);
            transform = CGAffineTransformRotate(transform, M_PI / 2.0);
            break;
        default:
            [NSException raise:NSInternalInconsistencyException format:@"Invalid image orientation"];
    }
    UIGraphicsBeginImageContext(bounds.size);
    CGContextRef context = UIGraphicsGetCurrentContext();
    // Flip/scale the context first (CG draws with a bottom-left origin),
    // then apply the orientation transform and draw.
    if (orient == UIImageOrientationRight || orient == UIImageOrientationLeft) {
        CGContextScaleCTM(context, -scaleRatio, scaleRatio);
        CGContextTranslateCTM(context, -height, 0);
    }
    else {
        CGContextScaleCTM(context, scaleRatio, -scaleRatio);
        CGContextTranslateCTM(context, 0, -height);
    }
    CGContextConcatCTM(context, transform);
    CGContextDrawImage(UIGraphicsGetCurrentContext(), CGRectMake(0, 0, width, height), imgRef);
    UIImage *returnImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return returnImage;
}

@end

把你的视图控制器实现文件重命名为 *.mm:

MyViewController.m -> MyViewController.mm 

然后在你的视图控制器中导入 UIImageCVMatConverter:

#import "UIImageCVMatConverter.h" 

现在你就可以在视图控制器中混合使用 Objective-C 和 OpenCV 的 C++ 代码了:

cv::Mat img = [UIImageCVMatConverter cvMatFromUIImage:[UIImage imageNamed:@"my_image.png"]]; 
... 

玩得开心!

+0

嘿,这个 zip 文件无法解压,解压时报错 – Nims

+0

检查了两次。它的工作;) – dom

+0

非常有用:)感谢分享 – Prerna

2

@Nims,如@moosgummi说,它的工作原理,但我也做了以下步骤:

  • 添加 libc++.dylib 库
  • 在 “Build Settings” → “Apple LLVM Compiler XX - Language” → “Compile Sources As” 中选择 Objective-C++
+0

随着moosgummi和你的信息,它的工作非常好。感谢分享 :) – Prerna

1

你可以自己编写所有这些类方法,也可以直接包含 ios.h 文件,其中已经写好了两个用于图像转换的函数。

这是我的代码。

对不起所有的评论,我包括他们来显示我的研究进展。

#import "JmBViewController.h" 

// Private class extension (no additional private API at present).
@interface JmBViewController() 

@end 

@implementation JmBViewController

// Loads a test image, detects circles with a Hough transform, draws the
// detected centers and outlines onto the image, and shows the result.
// (The original's commented-out experiments have been removed for clarity.)
- (void)viewDidLoad { 
    [super viewDidLoad]; 
    _imgtest = [UIImage imageNamed:@"IMG_0424.PNG"]; 

    cv::Mat cvImage; 
    UIImageToMat(_imgtest, cvImage); 
    if (!cvImage.empty()) { 
        cv::Mat gray; 
        cv::cvtColor(cvImage, gray, CV_BGRA2GRAY); 
        // Smooth before the Hough transform to suppress spurious circles.
        cv::GaussianBlur(gray, gray, cv::Size(9, 9), 1.5, 1.5); 

        // NOTE(review): this explicit Canny result is never consumed —
        // HoughCircles performs its own internal edge detection. Kept from
        // the original; safe to delete if unwanted.
        cv::Mat edges; 
        cv::Canny(gray, edges, 0, 50); 

        // Fix: use std::vector instead of the deprecated cv::vector alias.
        std::vector<cv::Vec3f> circles; // each entry: (x, y, radius)
        cv::HoughCircles(gray, circles, CV_HOUGH_GRADIENT, 1, 30, 50, 20, 10, 25); 
        for (size_t i = 0; i < circles.size(); i++) { 
            cv::Point center(cvRound(circles[i][0]), cvRound(circles[i][1])); 
            int radius = cvRound(circles[i][2]); 
            cv::circle(cvImage, center, 5, cv::Scalar::all(200), -1, 8, 0);     // center dot
            cv::circle(cvImage, center, radius, cv::Scalar::all(255), 3, 8, 0); // outline
            // Fix: size_t is unsigned long on LP64; the original's %ld was a
            // signed/unsigned format mismatch.
            NSLog(@"Circles: %lu", (unsigned long)(i + 1)); 
        } 

        _imgView.image = MatToUIImage(cvImage); 
    } 
} 

- (void)didReceiveMemoryWarning 
{ 
    [super didReceiveMemoryWarning]; 
    // Dispose of any resources that can be recreated. 
} 

@end 

我希望这有助于!

下面是我在视图控制器头文件中的 #import 内容。

#import <UIKit/UIKit.h> 
// #import "UIImageCVMatConverter.h" 
#import <opencv2/highgui/highgui_c.h> 
#import <opencv2/highgui/highgui.hpp> 
#import <opencv2/imgproc/imgproc_c.h> 
#import <opencv2/imgproc/imgproc.hpp> 
#import <opencv2/highgui/ios.h> 
#import <opencv2/core/core_c.h> 
#import <opencv2/core/core.hpp> 

@interface JmBViewController : UIViewController 

/// Displays the processed (circle-annotated) image.
@property (weak, nonatomic) IBOutlet UIImageView *imgView; 

/// Source image being processed.
/// Fix: `strong` instead of the original `weak` — a weak reference to an
/// image nothing else retains would be released immediately; the original
/// only appeared to work because +imageNamed: caches its result.
@property (strong, nonatomic) UIImage *imgtest; 

@end 

无需自己编译或制作框架:只需从 OpenCV 官网下载所需版本,把它拖到项目的 Frameworks 分组下;当 Xcode 询问时,确保勾选“把所有文件复制到目标(Copy items into destination)”。这是我发现的把框架加入项目最简单的方法——完全不需要那些终端命令和 CMake 之类的麻烦。

0

不要忘记把你所有的 .m 文件改名为 .mm 文件,否则什么都不会正常工作。