
I am making a video-creation app.
I need to record a video in the first view and then show that video in a second view.
For recording the video I followed this tutorial, and I made some changes in the didFinishRecordingToOutputFileAtURL method according to my needs.

Here is my updated method.

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error 
{ 
    NSLog(@"didFinishRecordingToOutputFileAtURL - enter"); 

    BOOL RecordedSuccessfully = YES; 
    if ([error code] != noErr) 
    { 
     // A problem occurred: log it and check whether the recording still finished successfully. 
     NSLog(@"didFinishRecordingToOutputFileAtURL error:%@",error); 
     id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey]; 
     if (value) 
     { 
      RecordedSuccessfully = [value boolValue]; 
     } 
    } 
    if (RecordedSuccessfully) 
    { 
     //----- RECORDED SUCESSFULLY ----- 
     NSLog(@"didFinishRecordingToOutputFileAtURL - success"); 
     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; 
     if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL]) 
     { 
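      // Build a one-track composition so the clip can be re-exported 
      // with a custom render size and transform. 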
      AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; 
      AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 

      AVAsset *asset = [AVAsset assetWithURL:outputFileURL]; 

      [track insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:CMTimeMake(0, 1) error:nil]; 

      NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
      NSString *documentsDirectory = [paths objectAtIndex:0]; 
      NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent: 
            [NSString stringWithFormat:@"%@%d.mov",NSBundle.mainBundle.infoDictionary[@"CFBundleExecutable"],++videoCounter]]; 
      [[NSFileManager defaultManager] removeItemAtPath:myPathDocs error:nil]; 

      NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

      AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
      instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration); 

      AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track]; 
      AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
      UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; 
      BOOL isVideoAssetPortrait_ = NO; 
      CGAffineTransform videoTransform = videoAssetTrack.preferredTransform; 
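
      // Infer the clip's recorded orientation from the track's preferredTransform 
      // (capture stores rotation as a transform instead of rotating the pixels). 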

      if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { 
       videoAssetOrientation_ = UIImageOrientationRight; 
       isVideoAssetPortrait_ = YES; 
       if ([[[NSUserDefaults standardUserDefaults] stringForKey:@"orientation"] isEqualToString:@"landscape"]) { 
        videoAssetOrientation_ = UIImageOrientationUp; 
       } 
      } 
      if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { 
       videoAssetOrientation_ = UIImageOrientationLeft; 
       isVideoAssetPortrait_ = YES; 
      } 
      if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { 
       videoAssetOrientation_ = UIImageOrientationUp; 
      } 
      if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) { 
       videoAssetOrientation_ = UIImageOrientationDown; 
      } 

      CGSize naturalSize; 
      if(isVideoAssetPortrait_){ 
       naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width); 
      } else { 
       naturalSize = videoAssetTrack.naturalSize; 
      } 

      float renderWidth, renderHeight; 
      if (![self.ratioLabel.text isEqualToString:@"16:9"]) { 
       // Any ratio other than 16:9 is rendered as a 1:1 square. 
       renderWidth = naturalSize.width; 
       renderHeight = naturalSize.width; 
      } 
      else { 
       renderWidth = naturalSize.width; 
       renderHeight = naturalSize.height; 
      } 
      NSLog(@"Video:: width=%f height=%f",naturalSize.width,naturalSize.height); 
      if (![self.ratioLabel.text isEqualToString:@"16:9"]) 
      { 
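       // 1:1 case: rotate the buffer upright, then shift it so its centered 
       // square section fills the square render area. 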
       CGAffineTransform t1 = CGAffineTransformMakeTranslation(videoAssetTrack.naturalSize.height, -(videoAssetTrack.naturalSize.width - videoAssetTrack.naturalSize.height) /2); 
       CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2); 
       [layerInstruction setTransform:t2 atTime:kCMTimeZero]; 
      } 
      else 
      { 
       CGAffineTransform t2 = CGAffineTransformMakeRotation(M_PI_2); 
       [layerInstruction setTransform:t2 atTime:kCMTimeZero]; 
      } 

      AVCaptureDevicePosition position = [[VideoInputDevice device] position]; 
      if (position == AVCaptureDevicePositionFront) 
      { 
       /* For front camera only */ 
       CGAffineTransform t = CGAffineTransformMakeScale(-1.0f, 1.0f); 
       t = CGAffineTransformTranslate(t, -videoAssetTrack.naturalSize.width, 0); 
       t = CGAffineTransformRotate(t, (DEGREES_TO_RADIANS(90.0))); 
       t = CGAffineTransformTranslate(t, 0.0f, -videoAssetTrack.naturalSize.width); 
       [layerInstruction setTransform:t atTime:kCMTimeZero]; 
       /* For front camera only */ 
      } 

      // Hide this track once the clip ends so it doesn't linger when clips are stitched together. 
      [layerInstruction setOpacity:0.0 atTime:asset.duration]; 

      instruction.layerInstructions = [NSArray arrayWithObjects:layerInstruction,nil]; 

      AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition]; 


      mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight); 
      mainCompositionInst.instructions = [NSArray arrayWithObject:instruction]; 
      mainCompositionInst.frameDuration = CMTimeMake(1, 30); 

      // Export at the 1280x720 preset; mainCompositionInst supplies the rotation/crop transforms. 
      AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1280x720]; 
      exporter.videoComposition = mainCompositionInst; 
      exporter.outputURL=url; 
      exporter.outputFileType = AVFileTypeQuickTimeMovie; 
      exporter.shouldOptimizeForNetworkUse = YES; 

      [exporter exportAsynchronouslyWithCompletionHandler:^{ 
       dispatch_async(dispatch_get_main_queue(), ^{ 
        self.doneButton.userInteractionEnabled = YES; 
        if(videoAddr==nil) 
        { 
         videoAddr = [[NSMutableArray alloc] init]; 
        } 
        [videoAddr addObject:exporter.outputURL]; 
        [[PreviewLayer connection] setEnabled:YES]; 
        AVAsset *asset = [AVAsset assetWithURL:exporter.outputURL]; 
        NSLog(@"remaining seconds before:%f",lastSecond); 
        double assetDuration = CMTimeGetSeconds(asset.duration); 
        if (assetDuration>3.0) 
         assetDuration = 3.0; 
        lastSecond = lastSecond- assetDuration; 
        NSLog(@"remaining seconds after:%f",lastSecond); 
        self.secondsLabel.text = [NSString stringWithFormat:@"%0.1fs",lastSecond]; 
        self.secondsLabel.hidden = NO; 
        NSData *data = [NSKeyedArchiver archivedDataWithRootObject:videoAddr]; 

        [[NSUserDefaults standardUserDefaults] setObject:data forKey:@"videoAddr"]; 
        [[NSUserDefaults standardUserDefaults] synchronize]; 
        videoURL = outputFileURL; 
        flagAutorotate = NO; 
        self.cancelButton.hidden = self.doneButton.hidden = NO; 
        imgCancel.hidden = imgDone.hidden = NO; 
        if ([[NSUserDefaults standardUserDefaults] boolForKey:@"Vibration"]) 
         AudioServicesPlayAlertSound(kSystemSoundID_Vibrate); 
        [[UIApplication sharedApplication] endIgnoringInteractionEvents]; 
       }); 
      }]; 
     } 
     else { 
      UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video cannot be saved\nPlease free some storage space" delegate:self cancelButtonTitle:nil otherButtonTitles:nil]; 
      [alert show]; 
      dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{ 
       [alert dismissWithClickedButtonIndex:0 animated:YES]; 
      }); 
     } 

    } 
} 

But here is the problem.
The video is not recorded exactly as it appears in the preview. See these two screenshots.

Video recording preview

Video Playing View

+1

Your video recording view seems to be bigger than your screen size; I think that is why some portion on each side is hidden during recording! – Lion

+0

That could be the reason, because I am testing on an iPad (4:3) and the video resolution I set is 1280*720 (16:9)... –

+0

Tested on a 5s and did not face this issue, so it could be an iPad-specific problem, as Vladimir K suggested. –

Answer


The reason is that your iPad screen's aspect ratio is not the same as the camera's aspect ratio. For example, a 4:3 iPad screen cannot show a 1280×720 (16:9) feed full-screen without either letterboxing it or scaling it up and cropping the overflow.

You can modify the camera preview size by setting the videoGravity property of AVCaptureVideoPreviewLayer, which controls how the content is displayed relative to the layer's bounds:

layer.videoGravity = AVLayerVideoGravityResizeAspect; 

But in that case the preview won't be fullscreen.
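
Here is a minimal sketch of that setup (a sketch only; session and previewView are placeholder names for your capture session and container view):

AVCaptureVideoPreviewLayer *previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:session]; 
previewLayer.frame = previewView.bounds; 
// ResizeAspect letterboxes the feed inside the layer's bounds; 
// AVLayerVideoGravityResizeAspectFill fills the bounds but crops the overflow. 
previewLayer.videoGravity = AVLayerVideoGravityResizeAspect; 
[previewView.layer addSublayer:previewLayer]; 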

If you want the video to have the same aspect ratio as the fullscreen preview, you will have to crop it. The cropping process is explained here:

Exporting AVCaptureSession video in a size that matches the preview layer

Video capture with 1:1 aspect ratio in iOS
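
For reference, here is a rough sketch of the cropping idea from those links (not their exact code; videoComposition, videoAssetTrack, and layerInstruction stand for the objects already built in your method, and the 90-degree rotation handling is left out for brevity):

CGSize trackSize = videoAssetTrack.naturalSize;        // e.g. 1280 x 720 
CGFloat side = MIN(trackSize.width, trackSize.height); // 720 for a 1:1 crop 
videoComposition.renderSize = CGSizeMake(side, side); 
// Shift the track left so its centered square lands inside the render rect. 
CGAffineTransform crop = CGAffineTransformMakeTranslation(-(trackSize.width - side) / 2.0, 0); 
[layerInstruction setTransform:crop atTime:kCMTimeZero]; 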

+0

Thanks, buddy... I will try it. –

+0

No problem. I hope it helps. –

+0

I needed to set AVLayerVideoGravityResizeAspectFill, because if I set AVLayerVideoGravityResizeAspect then my 1:1 preview is also shown as 16:9. By the way, thanks for your kind help... –