2015-04-18 41 views
0

我是 iOS 编程新手，但对编程本身并不陌生。我正在研究 Jonathan Wight 几年前写的一个有趣的 AVFoundation 示例，想弄清楚这些示例方法应该如何使用。

于是我把代码复制粘贴到独立的 NSObject 类文件 ccamera.h 和 ccamera.m 中，并把它声明为 sharedInstance（单例）。我可以调用 startRunning 方法……但接下来该做什么就有点不知所措了……

ccamera *newcamera = [ccamera sharedInstance]; 
[newcamera startRunning]; 

有人能帮我理解接下来需要调用什么，才能用这个类拍摄一张静态照片吗？

感谢

// 
// CCamera.h 
// CCamera 
// 
// Created by Jonathan Wight on 7/12/12. 
// Copyright (c) 2012 Jonathan Wight. All rights reserved. 
// 

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

@interface CCamera : NSObject 

@property (readwrite, nonatomic, assign) AVCaptureDevicePosition captureDevicePosition; 
@property (readwrite, nonatomic, strong) NSString *preset; 
@property (readonly, nonatomic, strong) AVCaptureDevice *captureDevice; 
@property (readonly, nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer; 

+ (CCamera *)sharedInstance; 
- (void)startRunning; 
- (void)stopRunning; 

- (CGSize)size; 

- (void)captureStillCMSampleBuffer:(void (^)(CMSampleBufferRef sampleBuffer, NSError *error))inCompletionBlock; 
- (void)captureStillCVImageBuffer:(void (^)(CVImageBufferRef imageBuffer, NSError *error))inCompletionBlock; 
- (void)captureStillCIImage:(void (^)(CIImage *image, NSError *error))inCompletionBlock; 
- (void)captureStillCGImage:(void (^)(CGImageRef image, NSError *error))inCompletionBlock; 
- (void)captureStillUIImage:(void (^)(UIImage *image, NSError *error))inCompletionBlock; 

@end 



// 
// CCamera.m 
// Camera 
// 
// Created by Jonathan Wight on 7/12/12. 
// Copyright (c) 2012 Jonathan Wight. All rights reserved. 
// 

#import "CCamera.h" 

#import <AVFoundation/AVFoundation.h> 
#import <QuartzCore/QuartzCore.h> 

@interface CCamera() 
@property (readwrite, nonatomic, strong) AVCaptureSession *captureSession; 
@property (readwrite, nonatomic, strong) AVCaptureDevice *captureDevice; 
@property (readwrite, nonatomic, strong) AVCaptureVideoPreviewLayer *previewLayer; 
@property (readwrite, nonatomic, strong) AVCaptureStillImageOutput *imageOutput; 
@end 

#pragma mark - 

@implementation CCamera 

static CCamera *gSharedInstance = NULL; 

+ (CCamera *)sharedInstance 
{ 
static dispatch_once_t sOnceToken = 0; 
dispatch_once(&sOnceToken, ^{ 
    gSharedInstance = [[CCamera alloc] init]; 
    }); 
return(gSharedInstance); 
} 

- (id)init 
{ 
if ((self = [super init]) != NULL) 
    { 
    _captureDevicePosition = AVCaptureDevicePositionUnspecified; 
    _preset = AVCaptureSessionPresetPhoto; 
    } 
return(self); 
} 

- (void)dealloc 
{ 
[_captureSession stopRunning]; 
} 

- (AVCaptureDevice *)captureDevice 
{ 
if (_captureDevice == NULL) 
    { 
    if (self.captureDevicePosition == AVCaptureDevicePositionUnspecified) 
     { 
     _captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
     } 
    else 
     { 
     for (AVCaptureDevice *theDevice in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) 
      { 
      if (theDevice.position == self.captureDevicePosition) 
       { 
       _captureDevice = theDevice; 
       break; 
       } 
      } 
     } 
    } 
return(_captureDevice); 
} 

- (AVCaptureVideoPreviewLayer *)previewLayer 
{ 
if (_previewLayer == NULL) 
    { 
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:self.captureSession]; 
    } 
return(_previewLayer); 
} 

- (void)startRunning 
{ 
NSError *theError = NULL; 

self.captureSession = [[AVCaptureSession alloc] init]; 
self.captureSession.sessionPreset = self.preset; 

AVCaptureDeviceInput *theCaptureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:self.captureDevice error:&theError]; 
[self.captureSession addInput:theCaptureDeviceInput]; 

self.imageOutput = [[AVCaptureStillImageOutput alloc] init]; 
self.imageOutput.outputSettings = @{ 
    (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) 
    }; 
[self.captureSession addOutput:self.imageOutput]; 

[self.captureSession startRunning]; 
} 

- (void)stopRunning 
{ 
[self.captureSession stopRunning]; 

self.captureDevice = NULL; 
self.captureSession = NULL; 
self.imageOutput = NULL; 
self.previewLayer = NULL; 
} 

- (CGSize)size 
{ 
AVCaptureConnection *theConnection = [self.imageOutput.connections objectAtIndex:0]; 

__block BOOL theFinishedFlag = NO; 
__block CGSize theSize; 

[self.imageOutput captureStillImageAsynchronouslyFromConnection:theConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { 

    CVImageBufferRef theImageBuffer = CMSampleBufferGetImageBuffer(imageDataSampleBuffer); 
    theSize = CVImageBufferGetEncodedSize(theImageBuffer); 
    theFinishedFlag = YES; 
    }]; 

while (theFinishedFlag == NO) 
    { 
    [[NSRunLoop mainRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]]; 
    } 

return(theSize); 
} 

#pragma mark - 

- (void)captureStillCMSampleBuffer:(void (^)(CMSampleBufferRef sampleBuffer, NSError *error))inCompletionBlock 
{ 
NSParameterAssert(inCompletionBlock != NULL); 

AVCaptureConnection *theConnection = [self.imageOutput.connections objectAtIndex:0]; 

[self.imageOutput captureStillImageAsynchronouslyFromConnection:theConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { 
    inCompletionBlock(imageDataSampleBuffer, error); 
    }]; 
} 

- (void)captureStillCVImageBuffer:(void (^)(CVImageBufferRef imageBuffer, NSError *error))inCompletionBlock 
{ 
NSParameterAssert(inCompletionBlock != NULL); 

[self captureStillCMSampleBuffer:^(CMSampleBufferRef sampleBuffer, NSError *error) { 
    CVImageBufferRef theImageBuffer = NULL; 
    if (sampleBuffer != NULL) 
     { 
     theImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
     } 

    inCompletionBlock(theImageBuffer, error); 
    }]; 
} 

- (void)captureStillCIImage:(void (^)(CIImage *image, NSError *error))inCompletionBlock 
{ 
NSParameterAssert(inCompletionBlock != NULL); 

[self captureStillCVImageBuffer:^(CVImageBufferRef imageBuffer, NSError *error) { 
    CIImage *theImage = NULL; 
    if (imageBuffer != NULL) 
     { 
     theImage = [CIImage imageWithCVPixelBuffer:imageBuffer]; 
     } 
    inCompletionBlock(theImage, error); 
    }]; 
} 

- (void)captureStillCGImage:(void (^)(CGImageRef image, NSError *error))inCompletionBlock 
{ 
NSParameterAssert(inCompletionBlock != NULL); 

[self captureStillCIImage:^(CIImage *image, NSError *error) { 

    CGImageRef theCGImage = NULL; 
    if (image != NULL) 
     { 
     NSDictionary *theOptions = @{ 
      // TODO 
      }; 
     CIContext *theCIContext = [CIContext contextWithOptions:theOptions]; 
     theCGImage = [theCIContext createCGImage:image fromRect:image.extent]; 
     } 

    inCompletionBlock(theCGImage, error); 

    CGImageRelease(theCGImage); 
    }]; 
} 

- (void)captureStillUIImage:(void (^)(UIImage *image, NSError *error))inCompletionBlock 
{ 
NSParameterAssert(inCompletionBlock != NULL); 

[self captureStillCIImage:^(CIImage *image, NSError *error) { 

    UIImage *theUIImage = NULL; 
    if (image != NULL) 
     { 
     theUIImage = [UIImage imageWithCIImage:image]; 
     } 

    inCompletionBlock(theUIImage, error); 
    }]; 
} 

@end

回答