
I am trying to combine a video with a GIF image. For this I am using:

MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

On the overlay layer I set the GIF image, but unfortunately it does not animate. So my question is: can this be done at all? Is it possible to show a GIF image over a video? Please advise.
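For reference, the full setup looks roughly like this (a stripped-down sketch of my code; MainCompositionInst is my AVMutableVideoComposition, and videoSize and gifImage are placeholders):

CGSize videoSize = CGSizeMake(640, 480);

CALayer *parentLayer = [CALayer layer];
CALayer *videoLayer = [CALayer layer];
parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height);
videoLayer.frame = parentLayer.frame;
[parentLayer addSublayer:videoLayer];

// Layer that carries the GIF; assigning a single image here shows only one frame.
CALayer *gifLayer = [CALayer layer];
gifLayer.frame = CGRectMake(0, 0, 200, 200);
gifLayer.contents = (__bridge id)gifImage.CGImage; // gifImage: a UIImage of the GIF
[parentLayer addSublayer:gifLayer];

MainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool
    videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                            inLayer:parentLayer];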

Thanks in advance.

Answer


Apple's support for GIF is rather limited.

You can use the code below to convert a GIF into a video. (With the current code the GIF is cropped to 480x480. At some resolutions the colors of the output image get distorted, so stick to a fixed frame size that you know works.)

Usage:

#import "SCGIFConverter.h" 

// NSData containing the GIF (gifPath is a placeholder for wherever your GIF lives)
NSData *data = [NSData dataWithContentsOfFile:gifPath];

// Create an NSURL to a temporary file for the output movie
NSString *tempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
NSURL *tempFileURL = [NSURL fileURLWithPath:tempPath];

[SCGIFConverter processGIFData:data toFilePath:tempFileURL completed:^(NSString *outputFilePath, NSError *error) 
{ 
    //Now you can access your tempFileURL to read the movie 
    //outputFilePath can be 'nil' if there was a problem 
}]; 
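Note that the completion handler may run on a background queue (it is invoked from AVAssetWriter's finishWritingWithCompletionHandler:), so hop back to the main queue before touching any UI.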

SCGIFConverter.h

#import <Foundation/Foundation.h>

FOUNDATION_EXTERN NSString * const kGIF2MP4ConversionErrorDomain; 

typedef enum { 
    kGIF2MP4ConversionErrorInvalidGIFImage = 0, 
    kGIF2MP4ConversionErrorAlreadyProcessing, 
    kGIF2MP4ConversionErrorBufferingFailed, 
    kGIF2MP4ConversionErrorInvalidResolution, 
    kGIF2MP4ConversionErrorTimedOut, 
} kGIF2MP4ConversionError; 


typedef void (^kGIF2MP4ConversionCompleted) (NSString* outputFilePath, NSError* error); 

@interface SCGIFConverter : NSObject 

+ (BOOL) processGIFData: (NSData*) data 
      toFilePath: (NSURL*) outFilePath 
       completed: (kGIF2MP4ConversionCompleted)handler; 

@end 

SCGIFConverter.m

#import <AVFoundation/AVFoundation.h> 
#import <ImageIO/ImageIO.h> 
#import <MobileCoreServices/MobileCoreServices.h> 

#import "SCGIFConverter.h" 

// Timescale for CMTime values; frame timestamps are rounded to 1/30 s steps.
#define FPS 30 

NSString * const kGIF2MP4ConversionErrorDomain = @"GIF2MP4ConversionError"; 

@implementation SCGIFConverter 

+ (BOOL) processGIFData: (NSData*) data 
      toFilePath: (NSURL*) outFilePath 
       completed: (kGIF2MP4ConversionCompleted) completionHandler { 

    [[NSFileManager defaultManager] removeItemAtURL:outFilePath error:nil]; 

    CGImageSourceRef source = CGImageSourceCreateWithData((__bridge CFDataRef)data, NULL); 
    NSError* error = nil; 

    // Bail out early if the data could not be parsed as a complete image.
    if(source == NULL || CGImageSourceGetStatus(source) != kCGImageStatusComplete) { 
     error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain 
            code: kGIF2MP4ConversionErrorInvalidGIFImage 
           userInfo: nil]; 
     if(source) CFRelease(source); 
     completionHandler(nil, error); 
     return NO; 
    } 

    CGImageMetadataRef meta = CGImageSourceCopyMetadataAtIndex(source, 0, NULL); 
    NSLog(@"%@", meta); 
    if(meta) CFRelease(meta); // release the copied metadata to avoid a leak 

    unsigned char *bytes = (unsigned char*)data.bytes; 

    // The GIF logical screen size is stored little-endian at bytes 6-9 of the header.
    size_t sourceWidth = bytes[6] + (bytes[7]<<8), sourceHeight = bytes[8] + (bytes[9]<<8); 
    // Force a fixed 480x480 output; some native GIF resolutions distort the colors.
    sourceWidth = 480; 
    sourceHeight = 480; 
    __block size_t currentFrameNumber = 0; 
    __block Float64 totalFrameDelay = 0.f; // running presentation time, in seconds 

    AVAssetWriter* videoWriter = [[AVAssetWriter alloc] initWithURL: outFilePath 
                  fileType: AVFileTypeQuickTimeMovie 
                   error: &error]; 
    if(error) { 
     CFRelease(source); 
     completionHandler(nil, error); 
     return NO; 
    } 

    if(sourceWidth > 6400 || sourceWidth == 0) { 
     CFRelease(source); 
     error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain 
            code: kGIF2MP4ConversionErrorInvalidResolution 
           userInfo: nil]; 
     completionHandler(nil, error); 
     return NO; 
    } 

    if(sourceHeight > 4800 || sourceHeight == 0) { 
     CFRelease(source); 
     error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain 
            code: kGIF2MP4ConversionErrorInvalidResolution 
           userInfo: nil]; 
     completionHandler(nil, error); 
     return NO; 
    } 

    size_t totalFrameCount = CGImageSourceGetCount(source); 

    if(totalFrameCount == 0) { 
     CFRelease(source); 
     error = [NSError errorWithDomain: kGIF2MP4ConversionErrorDomain 
            code: kGIF2MP4ConversionErrorInvalidGIFImage 
           userInfo: nil]; 
     completionHandler(nil, error); 
     return NO; 
    } 

    NSDictionary *videoSettings = @{ 
            AVVideoCodecKey : AVVideoCodecH264, 
            AVVideoWidthKey : @(sourceWidth), 
            AVVideoHeightKey : @(sourceHeight) 
            }; 

    AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType: AVMediaTypeVideo 
                       outputSettings: videoSettings]; 
    videoWriterInput.expectsMediaDataInRealTime = YES; 

    NSAssert([videoWriter canAddInput: videoWriterInput], @"Video writer can not add video writer input"); 
    [videoWriter addInput: videoWriterInput]; 

    NSDictionary* attributes = @{ 
           (NSString*)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB), 
           (NSString*)kCVPixelBufferWidthKey : @(sourceWidth), 
           (NSString*)kCVPixelBufferHeightKey : @(sourceHeight), 
           (NSString*)kCVPixelBufferCGImageCompatibilityKey : @YES, 
           (NSString*)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES 
           }; 

    AVAssetWriterInputPixelBufferAdaptor* adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: videoWriterInput 
                                sourcePixelBufferAttributes: attributes]; 

    [videoWriter startWriting]; 
    [videoWriter startSessionAtSourceTime: CMTimeMakeWithSeconds(0, FPS)]; 

    // Pull frames from the GIF one at a time; the loop ends when
    // CGImageSourceCreateImageAtIndex returns NULL past the last frame.
    while(YES) { 
     if(videoWriterInput.isReadyForMoreMediaData) { 
#if DEBUG 
      //NSLog(@"Drawing frame %lu/%lu", currentFrameNumber, totalFrameCount); 
#endif 

      NSDictionary* options = @{(NSString*)kCGImageSourceTypeIdentifierHint : (id)kUTTypeGIF}; 
      CGImageRef imgRef = CGImageSourceCreateImageAtIndex(source, currentFrameNumber, (__bridge CFDictionaryRef)options); 
      if(imgRef) { 
       CFDictionaryRef properties = CGImageSourceCopyPropertiesAtIndex(source, currentFrameNumber, NULL); 
       CFDictionaryRef gifProperties = properties ? CFDictionaryGetValue(properties, kCGImagePropertyGIFDictionary) : NULL; 

       if(gifProperties) { 

        CVPixelBufferRef pxBuffer = [self newBufferFrom: imgRef 
               withPixelBufferPool: adaptor.pixelBufferPool 
                 andAttributes: adaptor.sourcePixelBufferAttributes]; 
        if(pxBuffer) { 
         // Stamp each frame at the running sum of the GIF per-frame delays;
         // the first frame goes at t = 0.
         NSNumber* delayTime = CFDictionaryGetValue(gifProperties, kCGImagePropertyGIFDelayTime); 
         if (currentFrameNumber != 0) { 
          totalFrameDelay += delayTime.floatValue; 
         } 
         CMTime time = CMTimeMakeWithSeconds(totalFrameDelay, FPS); 

         if(![adaptor appendPixelBuffer: pxBuffer withPresentationTime: time]) { 
          NSLog(@"Could not save pixel buffer!: %@", videoWriter.error); 
          CFRelease(properties); 
          CGImageRelease(imgRef); 
          CVBufferRelease(pxBuffer); 
          break; 
         } 

         CVBufferRelease(pxBuffer); 
        } 
       } 

       if(properties) CFRelease(properties); 
       CGImageRelease(imgRef); 

       currentFrameNumber++; 
      } 
      else { 
       //was no image returned -> end of file? 
       [videoWriterInput markAsFinished]; 

       void (^videoSaveFinished)(void) = ^{ 
        AVAssetWriter * retainedVideoWriter = videoWriter; 
        completionHandler(outFilePath.absoluteString, nil); 
        retainedVideoWriter = nil; 
       }; 

       if([videoWriter respondsToSelector: @selector(finishWritingWithCompletionHandler:)]) { 
        [videoWriter finishWritingWithCompletionHandler: videoSaveFinished]; 
       } 
       else { 
        [videoWriter finishWriting]; 
        videoSaveFinished(); 
       } 
       break; 
      } 
     } 
     else { 
      //NSLog(@"Was not ready..."); 
      [NSThread sleepForTimeInterval: 0.1]; 
     } 
    } 

    CFRelease(source); 

    return YES; 
} 


+ (CVPixelBufferRef) newBufferFrom: (CGImageRef) frame 
       withPixelBufferPool: (CVPixelBufferPoolRef) pixelBufferPool 
        andAttributes: (NSDictionary*) attributes { 
    NSParameterAssert(frame); 

    // Output buffer is fixed at 480x480; the frame is scaled to fill it,
    // allowing up to `relax` (12%) of the image to be cropped away.
    size_t width = 480; //CGImageGetWidth(frame); 
    size_t height = 480; //CGImageGetHeight(frame); 

    size_t frameHeight = height; 
    size_t frameWidth = CGImageGetWidth(frame)*height/CGImageGetHeight(frame); 
    if (frameWidth < width) { 
     frameWidth = width; 
     frameHeight = CGImageGetHeight(frame)*width/CGImageGetWidth(frame); 
    } 
    CGFloat relax = 0.12; 
    if (frameWidth > width) { 
     // Cast before dividing: size_t division would truncate the ratio to 0.
     CGFloat factor = MAX((CGFloat)width/frameWidth, 1-relax); 
     frameWidth *= factor; 
    } 
    if (frameHeight > height) { 
     CGFloat factor = MAX((CGFloat)height/frameHeight, 1-relax); 
     frameHeight *= factor; 
    } 

    size_t bpc = 8; 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

    CVPixelBufferRef pxBuffer = NULL; 
    CVReturn status = kCVReturnSuccess; 

    if(pixelBufferPool) 
     status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pxBuffer); 
    else { 
     status = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)attributes, &pxBuffer); 
    } 

    NSAssert(status == kCVReturnSuccess, @"Could not create a pixel buffer"); 

    CVPixelBufferLockBaseAddress(pxBuffer, 0); 
    void *pxData = CVPixelBufferGetBaseAddress(pxBuffer); 

    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxBuffer); 


    CGContextRef context = CGBitmapContextCreate(pxData, 
               width, 
               height, 
               bpc, 
               bytesPerRow, 
               colorSpace, 
               kCGImageAlphaNoneSkipFirst); 
    NSAssert(context, @"Could not create a context"); 

    // Center the scaled frame in the buffer; whatever falls outside 480x480 is cropped.
    CGContextDrawImage(context, 
         CGRectMake(-(frameWidth-(CGFloat)width)/2, -(frameHeight-(CGFloat)height)/2, frameWidth, frameHeight), frame); 

    CVPixelBufferUnlockBaseAddress(pxBuffer, 0); 

    CGContextRelease(context); 
    CGColorSpaceRelease(colorSpace); 

    return pxBuffer; 
} 

@end 
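Once the GIF is a movie file, you can overlay it on your original footage with a second composition track. A rough sketch (assuming videoURL points at your base video and tempFileURL at the converted GIF movie; error handling omitted):

AVURLAsset *baseAsset = [AVURLAsset URLAssetWithURL: videoURL options: nil];
AVURLAsset *gifAsset = [AVURLAsset URLAssetWithURL: tempFileURL options: nil];

AVMutableComposition *comp = [AVMutableComposition composition];
AVMutableCompositionTrack *baseTrack = [comp addMutableTrackWithMediaType: AVMediaTypeVideo
                                                         preferredTrackID: kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *gifTrack = [comp addMutableTrackWithMediaType: AVMediaTypeVideo
                                                        preferredTrackID: kCMPersistentTrackID_Invalid];

CMTimeRange baseRange = CMTimeRangeMake(kCMTimeZero, baseAsset.duration);
[baseTrack insertTimeRange: baseRange
                   ofTrack: [baseAsset tracksWithMediaType: AVMediaTypeVideo][0]
                    atTime: kCMTimeZero error: nil];
[gifTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, gifAsset.duration)
                  ofTrack: [gifAsset tracksWithMediaType: AVMediaTypeVideo][0]
                   atTime: kCMTimeZero error: nil];

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = baseRange;

// The first layer instruction in the array is rendered frontmost.
AVMutableVideoCompositionLayerInstruction *gifInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack: gifTrack];
[gifInstruction setTransform: CGAffineTransformMakeTranslation(50, 50) atTime: kCMTimeZero]; // position the overlay
AVMutableVideoCompositionLayerInstruction *baseInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack: baseTrack];
instruction.layerInstructions = @[gifInstruction, baseInstruction];

AVMutableVideoComposition *videoComp = [AVMutableVideoComposition videoComposition];
videoComp.instructions = @[instruction];
videoComp.frameDuration = CMTimeMake(1, 30);
videoComp.renderSize = baseTrack.naturalSize;
// Export with an AVAssetExportSession whose videoComposition is set to videoComp.

Keep in mind the overlay track is opaque: H.264 carries no alpha channel, so the converted GIF covers the base video inside its own rectangle.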

Thanks Nils. Would it be possible to produce a transparent video? I want to put it on top of another video, like snowflakes falling over the footage. –


Hi Nils, I need your help. I exported a video from an array of images and it works fine, but the output is garbled at 1080*1920. It seems the video settings width must be divisible by 16, otherwise the frame is messed up. Any suggestions? –
