
I'm building an app that makes a video from a small number of images. My problem is that a video made of 20 or 30 images ends up only one second long. I create the encoder in the onCreate method along with a timer, and to set the image duration with jcodec on Android I use:

encoder.encodeNativeFrame(pic); 

The timer runs every second.
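
Roughly, my setup looks like this (a simplified sketch; nextPicture() is a hypothetical stand-in for my real image source):

// Simplified sketch of the setup described above; nextPicture() is a
// hypothetical stand-in for the real image source.
private final Handler handler = new Handler(Looper.getMainLooper());
private final Runnable tick = new Runnable() {
    @Override
    public void run() {
        try {
            encoder.encodeNativeFrame(nextPicture());
        } catch (IOException e) {
            Log.e("Encoder", "encode failed", e);
        }
        handler.postDelayed(this, 1000); // fire again in one second
    }
};
// started once from onCreate(): handler.post(tick);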

When I press the finish button, I call:

encoder.finish(); 

But when I watch the resulting video, all the images go by within a single second.

Is there a way to set the duration, for example showing one image per second? Thanks in advance.

You have to use frame animation to do this – 2014-09-29 09:13:26

No, I don't need an animation – Javier 2014-09-29 09:57:06

Answers

1

You just call it like this:

SequenceEncoder encoder = new SequenceEncoder(Outputfile, Constants.VIDEO_WIDTH, Constants.VIDEO_HEIGHT, durationInSeconds); 

This works for me.
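
Note that stock jcodec has no such four-argument SequenceEncoder constructor, so this presumably refers to a locally modified copy of the class (like the one in the answer below). Assuming the jcodec 0.1.5 calls from that class, the duration parameter has to reach two places; in this sketch, durationInSeconds and frameNo come from the surrounding class:

// Track timescale: ticks per second; 1 keeps frame timing in whole seconds
outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, 1);

// Each packet then lasts durationInSeconds ticks, and the pts advances
// by the same amount per frame
outTrack.addFrame(new MP4Packet(result, frameNo * durationInSeconds, 1,
        durationInSeconds, frameNo, true, null, frameNo * durationInSeconds, 0));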

Could you help me with how to add jcodec so I can access this class? – 2017-04-28 11:10:47

I added compile 'org.jcodec:jcodec-android:0.1.9' to the app's build.gradle, but I still can't access some of the classes – 2017-04-28 11:11:20

0
// Imports as of jcodec 0.1.5, the version this sample targets;
// newer versions moved or renamed some of these classes.
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;

import org.jcodec.codecs.h264.H264Encoder;
import org.jcodec.codecs.h264.H264Utils;
import org.jcodec.common.NIOUtils;
import org.jcodec.common.SeekableByteChannel;
import org.jcodec.common.model.ColorSpace;
import org.jcodec.common.model.Picture;
import org.jcodec.containers.mp4.Brand;
import org.jcodec.containers.mp4.MP4Packet;
import org.jcodec.containers.mp4.TrackType;
import org.jcodec.containers.mp4.muxer.FramesMP4MuxerTrack;
import org.jcodec.containers.mp4.muxer.MP4Muxer;
import org.jcodec.scale.RgbToYuv420;

import android.graphics.Bitmap;

public class SequenceEncoder { 
    private SeekableByteChannel ch; 
    private Picture toEncode; 
    private RgbToYuv420 transform; 
    private H264Encoder encoder; 
    private ArrayList<ByteBuffer> spsList; 
    private ArrayList<ByteBuffer> ppsList; 
    private FramesMP4MuxerTrack outTrack; 
    private ByteBuffer _out; 
    private int frameNo; 
    private MP4Muxer muxer; 
    public SequenceEncoder(File out) throws IOException { 
     this.ch = NIOUtils.writableFileChannel(out); 

     // Transform to convert between RGB and YUV 
     transform = new RgbToYuv420(0, 0); 

     // Muxer that will store the encoded frames 
     muxer = new MP4Muxer(ch, Brand.MP4); 

     // Add video track to muxer; the second argument is the track
     // timescale in ticks per second, so 1 tick = 1 second here
     outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, 1); 

     // Allocate a buffer big enough to hold output frames 
     _out = ByteBuffer.allocate(1920 * 1080 * 6); 

     // Create an instance of encoder 
     encoder = new H264Encoder(); 

     // Encoder extra data (SPS, PPS) to be stored in a special place of 
     // MP4 
     spsList = new ArrayList<ByteBuffer>(); 
     ppsList = new ArrayList<ByteBuffer>(); 

    } 

    public void encodeImage(Bitmap bi) throws IOException { 
     encodeNativeFrame(fromBitmap(bi)); 
    } 

    public void encodeNativeFrame(Picture pic) throws IOException { 
     if (toEncode == null) { 
      toEncode = Picture.create(pic.getWidth(), pic.getHeight(), 
        ColorSpace.YUV420); 
     } 

     // Perform conversion 
     transform.transform(pic, toEncode); 

     // Encode image into H.264 frame, the result is stored in '_out' buffer 
     _out.clear(); 
     ByteBuffer result = encoder.encodeFrame(_out, toEncode); 

     // Convert the encoded frame above into a correct MP4 (AVCC) packet,
     // collecting SPS/PPS along the way
     spsList.clear(); 
     ppsList.clear(); 
     H264Utils.encodeMOVPacket(result, spsList, ppsList); 

     // Add packet to video track; with the track timescale of 1 set in
     // the constructor, a duration of 1 shows each image for exactly one
     // second, and the pts advances by one tick per frame
     outTrack.addFrame(new MP4Packet(result, frameNo, 1, 1, frameNo, true, 
       null, frameNo, 0)); 
     frameNo++; 
    } 

    public void finish() throws IOException { 
     // Push saved SPS/PPS to a special storage in MP4 
     outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList)); 

     // Write MP4 header and finalize recording 
     muxer.writeHeader(); 
     NIOUtils.closeQuietly(ch); 
    } 

    public static Picture fromBitmap(Bitmap src) { 
     Picture dst = Picture.create(src.getWidth(), src.getHeight(), 
       ColorSpace.RGB); 
     fromBitmap(src, dst); 
     return dst; 
    } 

    public static void fromBitmap(Bitmap src, Picture dst) { 
     int[] dstData = dst.getPlaneData(0); 
     int[] packed = new int[src.getWidth() * src.getHeight()]; 

     src.getPixels(packed, 0, src.getWidth(), 0, 0, src.getWidth(), 
       src.getHeight()); 

      // Unpack each packed ARGB pixel into separate R, G, B samples
      // (alpha is dropped)
      for (int i = 0, srcOff = 0, dstOff = 0; i < src.getHeight(); i++) { 
      for (int j = 0; j < src.getWidth(); j++, srcOff++, dstOff += 3) { 
       int rgb = packed[srcOff]; 
       dstData[dstOff] = (rgb >> 16) & 0xff; 
       dstData[dstOff + 1] = (rgb >> 8) & 0xff; 
       dstData[dstOff + 2] = rgb & 0xff; 
      } 
     } 
    } 

} 
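
A usage sketch for the class above, run off the UI thread from a method that can throw IOException; the output path and the frames list are illustrative:

// Usage sketch; the output path and frames list are illustrative
File out = new File(getExternalFilesDir(null), "slideshow.mp4");
SequenceEncoder encoder = new SequenceEncoder(out);
for (Bitmap frame : frames) {
    encoder.encodeImage(frame); // one call per image = one second of video
}
encoder.finish(); // writes the MP4 header and closes the file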