2017-08-23 57 views
-1

我新快速投注也溢出。先进的感谢关注。 基本上我试图建立一个自定义相机,将录制音频视频。这意味着当我播放此视频时,视频将播放声音。几天我试图建立这个定制相机。我已经按照我的教程,但仍然从我的相机丢失东西。我试图按照我的自定义相机只是录制视频。也许它不录音。我不明白。我正在寻找这个答案,没有找到适当的答案。自定义摄像头,视频不在快速播放中音频

这里是我做过什么

import UIKit 
import AVFoundation 
import SVProgressHUD 
import MediaPlayer 
import MobileCoreServices 
import AVKit 
var videoUrl = [AnyObject]() 


class TestViewController: UIViewController { 

@IBOutlet var viewVidioPlayer: UIView! 
@IBOutlet weak var myView: UIView! 

var session: AVCaptureSession? 
var userreponsevideoData = NSData() 
var userreponsethumbimageData = NSData() 

override func viewDidLoad() { 
    super.viewDidLoad() 

} 

override func viewDidAppear(_ animated: Bool) { 
    super.viewDidAppear(animated) 
} 

//这里我创建会话 FUNC了createSession(){

var input: AVCaptureDeviceInput? 
    let movieFileOutput = AVCaptureMovieFileOutput() 
    var prevLayer: AVCaptureVideoPreviewLayer? 
    prevLayer?.frame.size = myView.frame.size 
    session = AVCaptureSession() 
    let error: NSError? = nil 
    do { 
     input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!) } catch {return} 
    if error == nil { 
     session?.addInput(input) 
    } else { 
     print("camera input error: \(String(describing: error))") 
    } 
    prevLayer = AVCaptureVideoPreviewLayer(session: session) 
    prevLayer?.frame.size = myView.frame.size 
    prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill 
    prevLayer?.connection.videoOrientation = .portrait 
    myView.layer.addSublayer(prevLayer!) 
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4")) 


    let maxDuration: CMTime = CMTimeMake(600, 10) 
    movieFileOutput.maxRecordedDuration = maxDuration 
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024 
    if self.session!.canAddOutput(movieFileOutput) { 
     self.session!.addOutput(movieFileOutput) 
    } 
    session?.startRunning() 
    movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self) 

} 
func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? { 
    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) 
    for device in devices! { 
     if (device as AnyObject).position == position { 
      return device as? AVCaptureDevice 
     } 
    } 
    return nil 
} 
@IBAction func pressbackbutton(sender: AnyObject) { 
    session?.stopRunning() 

} 

@IBAction func Record(_ sender: Any) { 
    createSession() 
} 
@IBAction func play(_ sender: Any) { 
    self.videoPlay() 
} 
func videoPlay() 
{ 

    let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! 

    do { 
     // Get the directory contents urls (including subfolders urls) 
     let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: []) 
     print(directoryContents) 

     // if you want to filter the directory contents you can do like this: 
     videoUrl = directoryContents.filter{ $0.pathExtension == "mp4" } as [AnyObject] 
     print("mp3 urls:",videoUrl) 


     let playerController = AVPlayerViewController() 
     playerController.delegate = self as? AVPlayerViewControllerDelegate 
     let movieURL = videoUrl[0] 


     print(movieURL) 

     let player = AVPlayer(url: movieURL as! URL) 
     playerController.player = player 
     self.addChildViewController(playerController) 
     self.view.addSubview(playerController.view) 
     playerController.view.frame = self.view.frame 

     player.play() 
     player.volume = 1.0 
     player.rate = 1.0 



    } catch let error as NSError { 
     print(error.localizedDescription) 
    } 




    } 
    } 

扩展TestViewController:AVCaptureFileOutputRecordingDelegate {

@available(iOS 4.0, *) 
private func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: URL!, fromConnections connections: [AnyObject]!) { 
    print(fileURL) 
} 
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 
    let filemainurl = outputFileURL 

    do 
    { 
     let asset = AVURLAsset(url: filemainurl! as URL, options: nil) 
     //AVURLAsset(URL: filemainurl as! URL, options: nil) 
     print(asset) 
     let imgGenerator = AVAssetImageGenerator(asset: asset) 
     imgGenerator.appliesPreferredTrackTransform = true 
     let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil) 

     let uiImage = UIImage(cgImage: cgImage) 

     userreponsethumbimageData = try NSData(contentsOf: filemainurl! as URL) 

     print(userreponsethumbimageData.length) 
     print(uiImage) 
     // imageData = UIImageJPEGRepresentation(uiImage, 0.1) 
    } 
    catch let error as NSError 
    { 
     print(error) 
     return 
    } 

    SVProgressHUD.show(with: SVProgressHUDMaskType.clear) 
    let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d")!.appendingPathExtension("mp4").absoluteString 
    if FileManager.default.fileExists(atPath: VideoFilePath) 

    { 
     do 

     { 
      try FileManager.default.removeItem(atPath: VideoFilePath) 
     } 
     catch { } 

    } 
    let tempfilemainurl = NSURL(string: VideoFilePath)! 
    let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil) 
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)! 
    assetExport.outputFileType = AVFileTypeQuickTimeMovie 
    assetExport.outputURL = tempfilemainurl as URL 

    assetExport.exportAsynchronously {() -> Void in 
     switch assetExport.status 
     { 
     case AVAssetExportSessionStatus.completed: 
      DispatchQueue.main.async(execute: { 
       do 
       { 
        SVProgressHUD .dismiss() 
        self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions()) 
        print("MB - \(self.userreponsevideoData.length) byte") 


       } 
       catch 
       { 
        SVProgressHUD .dismiss() 
        print(error) 
       } 
      }) 
     case AVAssetExportSessionStatus.failed: 
      print("failed \(assetExport.error)") 
     case AVAssetExportSessionStatus.cancelled: 
      print("cancelled \(assetExport.error)") 
     default: 
      print("complete") 
      SVProgressHUD .dismiss() 
     } 

    } 



} 

}

我已经完成了所有工作。所以我不明白这段代码中缺少的是什么。为什么音频不与视频一起播放或者为什么不用视频重新编码音频。

回答

0

使用此Cocopods为您的项目。它使你的工作变得安静。 它具有关于做什么的所有说明,并且还包含一个演示项目,以按照您的预期测试它。

SwiftyCam