2013-05-02 61 views
15

I create a video using AVMutableComposition and export it with AVAssetExportSession, using the AVAssetExportPresetHighestQuality preset.

When the video dimensions are small, the video is created fine. However, when the dimensions are large (1280×1920), the video is not created correctly: a green overlay appears over the video, as shown in the image below:

[Screenshot of the exported video showing the green overlay]

However, when I use AVAssetExportPresetPassthrough instead of AVAssetExportPresetHighestQuality, the video is created fine.
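
For reference, the export step described above boils down to something like the following sketch; `composition` and `outputURL` are placeholders for the actual composition and destination file, which are not shown in the question:

    // Sketch of the export described in the question; `composition` and
    // `outputURL` are placeholders, not code from the question.
    AVAssetExportSession *export = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                    presetName:AVAssetExportPresetHighestQuality];
    // Using AVAssetExportPresetPassthrough here instead avoids the green overlay.
    export.outputURL = outputURL;
    export.outputFileType = AVFileTypeQuickTimeMovie;
    [export exportAsynchronouslyWithCompletionHandler:^{
        if (export.status == AVAssetExportSessionStatusFailed) {
            NSLog(@"Export failed: %@", export.error);
        }
    }];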

Any idea why this is happening?

Thanks in advance!

+3

Could you give some more details, please? About the assets: are you merging assets, or something else? – Maverick 2014-10-30 09:16:18

+0

Is this issue specific to the Retina iPad? – uchiha 2015-03-03 14:33:05

+1

To add to @Maverick's question: are the assets created programmatically? If so, are they created from within your app, or imported from somewhere else? – 2015-03-10 01:40:18

Answer

1

Try this code:

-(void)convertVideo:(NSString *)videoPath {
    // Load the source movie. videoPath is assumed to be a plain local file
    // path, so build a file URL from it.
    AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // VIDEO TRACK
    AVMutableCompositionTrack *firstTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:kCMTimeZero error:nil];

    AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];

    // AUDIO TRACK
    // OS_VERSION is assumed to be a project-defined macro holding the major iOS version.
    if (OS_VERSION >= 7) {
        if (firstAsset != nil) {
            AVMutableCompositionTrack *compositionCommentaryTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
            [compositionCommentaryTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
        }
    }
    else {
        AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        // Insert at kCMTimeZero; kCMTimeIndefinite is not a valid insertion time.
        [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:kCMTimeZero error:nil];
    }

    // FIXING ORIENTATION
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];

    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
    BOOL isFirstAssetPortrait_ = NO;
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;

    // Work out the recorded orientation from the track's preferred transform.
    if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
        FirstAssetOrientation_ = UIImageOrientationRight;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
        FirstAssetOrientation_ = UIImageOrientationLeft;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
        FirstAssetOrientation_ = UIImageOrientationUp;
    }
    if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
        FirstAssetOrientation_ = UIImageOrientationDown;
    }

    // Make the layer transparent once the clip has finished playing.
    [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];

    CGFloat FirstAssetScaleToFitRatio = 0;

    MainInstruction.layerInstructions = [NSArray arrayWithObjects:FirstlayerInstruction, nil];

    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30);


    // Compute the render size. For portrait clips the natural size is reported
    // in landscape, so swap width and height and scale to fit.
    CGSize naturalSizeFirst;
    if (isFirstAssetPortrait_) {
        FirstAssetScaleToFitRatio = FirstAssetTrack.naturalSize.width / FirstAssetTrack.naturalSize.height;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
        naturalSizeFirst = CGSizeMake(FirstAssetTrack.naturalSize.height, FirstAssetTrack.naturalSize.width);
    } else {
        naturalSizeFirst = FirstAssetTrack.naturalSize;
    }

    // Note: some encoders pad odd render dimensions, which can show up as green
    // edges, so keeping width and height even is safer.
    MainCompositionInst.renderSize = CGSizeMake(naturalSizeFirst.width, naturalSizeFirst.height);

    // Export to a uniquely named file in the temporary directory.
    NSString *tmpDirectory = NSTemporaryDirectory();
    NSString *fname = [NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000];
    NSString *tmpFile = [tmpDirectory stringByAppendingPathComponent:fname];
    NSURL *url = [NSURL fileURLWithPath:tmpFile];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = MainCompositionInst;
    exporter.shouldOptimizeForNetworkUse = YES;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // The completion handler is called on a background queue, so hop back to
        // the main queue before touching UI. progressStatus and selectedVideo are
        // properties of the answerer's own class.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (AVAssetExportSessionStatusCompleted == exporter.status) {
                [self.progressStatus setText:@"Converted..."];
                selectedVideo = selectedVideo + 1;
            } else if (AVAssetExportSessionStatusFailed == exporter.status) {
                // A failure may be caused by an event out of your control, for
                // example an interruption such as an incoming phone call;
                // make sure to handle this case appropriately.
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exporter.error);
            } else {
                NSLog(@"Export session status: %ld", (long)exporter.status);
            }
        });
    }];
}
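
For what it's worth, a hypothetical call site might look like the sketch below; the bundled "source.mov" resource name is made up for illustration, and `self` is assumed to be the view controller that owns `progressStatus` and `selectedVideo`:

    // Hypothetical call site: "source.mov" is a made-up bundled resource name,
    // and convertVideo: expects a plain local file path.
    NSString *videoPath = [[NSBundle mainBundle] pathForResource:@"source" ofType:@"mov"];
    if (videoPath) {
        [self convertVideo:videoPath];
    }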