2
A
回答
4
您可以通过使用 AVFoundation 框架来实现,示例代码如下:
// Wires the default camera to an AVCaptureVideoDataOutput and fans the
// captured frames out to four CALayer preview tiles arranged in a 2x2 grid.
// Frame delivery happens in the sample-buffer delegate callback below.
- (void)initCapture {
NSError *deviceError = nil;
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:&deviceError];
if (!captureInput) {
// No camera available (e.g. the simulator) or the device failed to open.
NSLog(@"initCapture: could not create camera input: %@", deviceError);
return;
}
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
// Drop frames we are too slow to process rather than queueing them up.
captureOutput.alwaysDiscardsLateVideoFrames = YES;
// Deliver sample buffers on a private serial queue, off the main thread.
dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue); // the output retains the queue (MRC / pre-iOS 6 code)
// Request 32BGRA so the delegate can feed the pixels straight into CoreGraphics.
NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey;
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
// Guard the add* calls: adding an incompatible input/output raises an exception.
if ([self.captureSession canAddInput:captureInput]) {
[self.captureSession addInput:captureInput];
}
if ([self.captureSession canAddOutput:captureOutput]) {
[self.captureSession addOutput:captureOutput];
}
[self.captureSession startRunning];
// 2x2 grid of preview tiles; all four receive the same CGImage from the
// sample-buffer delegate. The first tile keeps the original insertion index.
self.customLayer = [self gridTileLayerWithFrame:CGRectMake(5 - 25, 25, 200, 150)];
[self.view.layer insertSublayer:self.customLayer atIndex:4];
self.customLayer1 = [self gridTileLayerWithFrame:CGRectMake(165 - 25, 25, 200, 150)];
[self.view.layer addSublayer:self.customLayer1];
self.customLayer2 = [self gridTileLayerWithFrame:CGRectMake(5 - 25, 210 + 25, 200, 150)];
[self.view.layer addSublayer:self.customLayer2];
self.customLayer3 = [self gridTileLayerWithFrame:CGRectMake(165 - 25, 210 + 25, 200, 150)];
[self.view.layer addSublayer:self.customLayer3];
}
// Creates one preview tile. The 90-degree rotation compensates for the
// camera's native landscape orientation, matching the original code.
- (CALayer *)gridTileLayerWithFrame:(CGRect)frame {
CALayer *layer = [CALayer layer];
layer.frame = frame;
layer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
return layer;
}
#pragma mark -
#pragma mark AVCaptureSession delegate
// AVCaptureVideoDataOutput delegate callback, invoked on the private
// "cameraQueue" serial queue for every captured frame. Converts the BGRA
// pixel buffer into a CGImage and pushes it to all four preview layers on
// the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// MRC-era code: the pool drains autoreleased temporaries created on this
// background thread each frame.
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/* Lock the pixel buffer; it must stay locked for as long as baseAddress
   is referenced (i.e. until after CGBitmapContextCreateImage below). */
CVPixelBufferLockBaseAddress(imageBuffer,0);
/* Get information about the image. */
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
/* Create a CGImageRef from the CVImageBufferRef. The byte-order/alpha
   flags match the kCVPixelFormatType_32BGRA format requested in initCapture. */
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage2 = CGBitmapContextCreateImage(newContext);
/* Release the intermediate CoreGraphics objects; the CGImage owns its
   own copy of the pixel data from here on. */
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
// Fan the same CGImage out to all four tiles. waitUntilDone:YES is
// load-bearing: it guarantees setContents: has run on the main thread
// before newImage2 is released below.
[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
[self.customLayer1 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
[self.customLayer2 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
[self.customLayer3 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
// UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
/* Release the CGImageRef (safe: the layers retained it via setContents:). */
CGImageRelease(newImage2);
// [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
/* Unlock the pixel buffer now that baseAddress is no longer referenced. */
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
它可以很好地做到这一点。
http://crayoncoding.blogspot.com/2011/04/iphone-4-camera-views-at-once.html
看到上面的链接查看详细代码
0
你可以尝试拥有4个UIImagePickerControllers。不知道它是否会奏效,但值得一试。
相关问题
- 1. 用iPhone SDK访问摄像头
- 2. 访问摄像头
- 3. iPhone SDK 3.0摄像机访问
- 4. iphone摄像头访问移动网页
- 5. 通过iPhone Web App访问摄像头
- 6. iphone如何从UIWebView访问摄像头?
- 7. 从uiwebview访问摄像头?
- 8. HTML5:摄像头访问
- 9. 访问摄像头失败
- 10. HTML5 - 访问摄像头
- 11. 访问IP摄像头流
- 12. iPhone摄像头流
- 13. 管理摄像头 - 无法访问摄像头
- 14. 作为网络摄像头访问手机摄像头C++
- 15. 实时访问iPhone的摄像头图像
- 16. 如何使用AS3访问iPhone摄像头
- 17. 在iphone上的远程位置访问摄像头
- 18. 用于IOS Iphone 3G的JavaScript API用于摄像头访问
- 19. 使用Xcode的访问后置摄像头,iPhone的iOS5
- 20. 访问iPhone的摄像头,在一个应用程序
- 21. iPhone 5s摄像头LED灯的单独访问
- 22. iOS 7从移动Safari访问iPhone摄像头?
- 23. 如何从服务器端网页访问iphone摄像头?
- 24. 您可以从Mobile Safari访问iPhone摄像头吗?
- 25. iPhone:从Web应用程序访问摄像头和相册
- 26. 只能通过API访问iphone摄像头的数据
- 27. iPhone网络摄像头Feed
- 28. iPhone摄像头和OpenCV
- 29. 在androidemulator上访问摄像头
- 30. 无法访问摄像头(Javascript)?
我假设你想对每一个画面应用效果,就像在 Mac 上那样。由于需要使用 GPU 来提供此功能,因此不确定这在 iPhone 4 上是否还能起作用。 – 2011-03-03 07:44:38