2016-03-30 44 views
1

我试图用 Swift 在 iPhone 上创建一个使用相机的应用。思路是读取所有像素的强度分量,然后取平均值,得到一个单一的数值。我不需要显示相机预览。我参考了几篇教程把代码拼凑起来,目前写出了下面的代码:camDeviceSetup() 在 viewDidLoad 中运行,cameraSetup() 在按下按钮时运行。采样缓冲区部分参考了"Swift 2 实时视频滤波器"教程。

我在调用 videoDeviceOutput!.setSampleBufferDelegate 这一行遇到了编译错误,提示不能将类型为 FirstViewController(视图控制器)的值转换为期望的参数类型。

// Shared capture session that coordinates the camera input and the video-frame output.
let captureSession = AVCaptureSession() 
// If we find a device we'll store it here for later use 
var captureDevice : AVCaptureDevice? 
// Output that delivers raw frames to the sample-buffer delegate for per-pixel intensity analysis.
var videoDeviceOutput: AVCaptureVideoDataOutput? 
// AVCaptureVideoPreviewLayer is a subclass of CALayer that you use to display video as it is being captured by an input device. 
var previewLayer = AVCaptureVideoPreviewLayer() 

/// Finds the back-facing video camera and attaches it to `captureSession`.
/// Called from viewDidLoad. On failure to open the device (e.g. the user
/// denied camera permission) the error is logged instead of crashing.
func camDeviceSetup() { 
    // 640x480 is plenty for averaging pixel intensity and keeps frames cheap.
    captureSession.sessionPreset = AVCaptureSessionPreset640x480 
    let devices = AVCaptureDevice.devices() 
    for device in devices { 
        // Make sure this particular device supports video 
        if (device.hasMediaType(AVMediaTypeVideo)) { 
            // Finally check the position and confirm we've got the back camera 
            if (device.position == AVCaptureDevicePosition.Back) { 
                captureDevice = device as? AVCaptureDevice 
            } 
        } 
    } 
    if let captureDevice = captureDevice { 
        // BUG FIX: the original declared `let err : NSError? = nil`, never
        // assigned it, and then checked `if err != nil` — dead code. Meanwhile
        // `try!` would crash on any real failure. Use do/catch so the error
        // path actually runs and the app doesn't crash.
        do { 
            let input = try AVCaptureDeviceInput(device: captureDevice) 
            if captureSession.canAddInput(input) { 
                captureSession.addInput(input) 
            } 
        } catch let error as NSError { 
            print("error: \(error.localizedDescription)") 
        } 
    } 
} 

/// Builds the preview layer and the video-data output, then starts the session.
/// Called when the button is pressed.
///
/// NOTE (the compile error asked about): `setSampleBufferDelegate(self, ...)`
/// only accepts `self` when the view controller declares conformance, e.g.
///   class FirstViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate
/// Without that, the compiler reports "cannot convert value of type
/// 'FirstViewController' to expected argument type".
func cameraSetup() { 
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
    previewLayer.frame = view.bounds 
    view.layer.addSublayer(previewLayer) 

    // Build the output locally; this removes the four `videoDeviceOutput!`
    // force-unwraps the original had, then publish it to the property once.
    let output = AVCaptureVideoDataOutput() 
    // Bi-planar YCbCr exposes the luma (Y) plane directly — exactly the
    // per-pixel intensity this app wants to average.
    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 
    // Drop frames we can't process in time rather than queueing them up.
    output.alwaysDiscardsLateVideoFrames = true 

    // Deliver frames on a private serial queue so the main thread stays free.
    output.setSampleBufferDelegate(self, queue: dispatch_queue_create("VideoBuffer", DISPATCH_QUEUE_SERIAL)) 

    if captureSession.canAddOutput(output) { 
        captureSession.addOutput(output) 
    } 
    videoDeviceOutput = output 

    captureSession.startRunning() 
} 

/// AVCaptureVideoDataOutputSampleBufferDelegate callback — invoked once per
/// captured video frame on the queue passed to setSampleBufferDelegate.
/// Currently empty; the average-intensity algorithm is intended to go here
/// once the delegate wiring compiles.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
    // Think once the delegate is correctly set my algorithm for finding light intensity goes here 

} 
+0

该行的问题在于我没有在 ViewController 顶部的类声明中遵循 AVCaptureVideoDataOutputSampleBufferDelegate 协议。 – rmaspero

回答

0

该行报错的原因是我没有在 ViewController 顶部的类声明中遵循 AVCaptureVideoDataOutputSampleBufferDelegate 协议。

+0

是否将AVCaptureVideoDataOutputSampleBufferDelegate分配给vc? –