在android中用jcodec设置图像持续时间

问题描述 投票:0回答:2

我正在开发一个应用,用少量图片合成一段视频。我的问题是:由20或30张图像生成的视频总时长只有1秒。我在 onCreate 方法中创建编码器,并用一个 Timer 定时调用

 encoder.encodeNativeFrame(pic);

定时器每秒运行一次。

当我按完成按钮时,我添加此代码

 encoder.finish();

但是播放视频时,所有图像在一秒钟内就全部闪过了。

我可以设定持续时间吗?例如,每秒添加一个图像?提前致谢

java android jcodec
2个回答
1
投票

你必须像这样调用:

SequenceEncoder encoder = new SequenceEncoder(Outputfile, Constants.VIDEO_WIDTH, Constants.VIDEO_HEIGHT, durationInSeconds);

这个对我有用。


0
投票
public class SequenceEncoder {
    private SeekableByteChannel ch;
    private Picture toEncode;
    private RgbToYuv420 transform;
    private H264Encoder encoder;
    private ArrayList<ByteBuffer> spsList;
    private ArrayList<ByteBuffer> ppsList;
    private FramesMP4MuxerTrack outTrack;
    private ByteBuffer _out;
    private int frameNo;
    private MP4Muxer muxer;
    // Frames per second of the output track; each added image lasts 1/fps seconds.
    private final int fps;

    /**
     * Creates an encoder that shows each added image for one second (fps = 1).
     *
     * @param out the MP4 output file
     * @throws IOException if the output channel cannot be opened
     */
    public SequenceEncoder(File out) throws IOException {
        this(out, 1);
    }

    /**
     * Creates an encoder with an explicit frame rate. Each image passed to
     * {@link #encodeImage(Bitmap)} is displayed for 1/fps seconds.
     *
     * @param out the MP4 output file
     * @param fps frames per second of the resulting video; must be positive
     * @throws IOException if the output channel cannot be opened
     * @throws IllegalArgumentException if {@code fps <= 0}
     */
    public SequenceEncoder(File out, int fps) throws IOException {
        if (fps <= 0) {
            throw new IllegalArgumentException("fps must be positive: " + fps);
        }
        this.fps = fps;
        this.ch = NIOUtils.writableFileChannel(out);

        // Transform to convert between RGB and YUV
        transform = new RgbToYuv420(0, 0);

        // Muxer that will store the encoded frames
        muxer = new MP4Muxer(ch, Brand.MP4);

        // Add video track to muxer. The track timescale MUST match the
        // per-packet timescale used in encodeNativeFrame(); the original code
        // registered the track with timescale 1 but wrote packets with
        // duration 5, producing inconsistent (far too short) frame durations.
        outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, fps);

        // Allocate a buffer big enough to hold one encoded 1080p output frame
        _out = ByteBuffer.allocate(1920 * 1080 * 6);

        // Create an instance of encoder
        encoder = new H264Encoder();

        // Encoder extra data (SPS, PPS) to be stored in a special place of
        // the MP4 container
        spsList = new ArrayList<ByteBuffer>();
        ppsList = new ArrayList<ByteBuffer>();
    }

    /**
     * Converts an Android Bitmap to a jcodec Picture and encodes it as the
     * next video frame.
     *
     * @param bi the bitmap to encode
     * @throws IOException if writing the frame fails
     */
    public void encodeImage(Bitmap bi) throws IOException {
        encodeNativeFrame(fromBitmap(bi));
    }

    /**
     * Encodes one RGB Picture as an H.264 frame and appends it to the track.
     * The frame's presentation time is frameNo (in 1/fps units) and its
     * duration is exactly one timescale tick, i.e. 1/fps seconds.
     *
     * @param pic RGB picture; all frames must share the same dimensions
     * @throws IOException if writing the frame fails
     */
    public void encodeNativeFrame(Picture pic) throws IOException {
        if (toEncode == null) {
            // Lazily allocate the YUV buffer sized to the first frame
            toEncode = Picture.create(pic.getWidth(), pic.getHeight(),
                    ColorSpace.YUV420);
        }

        // Perform RGB -> YUV420 conversion
        transform.transform(pic, toEncode);

        // Encode image into H.264 frame, the result is stored in '_out' buffer
        _out.clear();
        ByteBuffer result = encoder.encodeFrame(_out, toEncode);

        // Based on the frame above form a correct MP4 packet
        spsList.clear();
        ppsList.clear();
        H264Utils.encodeMOVPacket(result, spsList, ppsList);

        // Add packet to video track: pts = frameNo, timescale = fps
        // (matching the track), duration = 1 tick = 1/fps seconds.
        outTrack.addFrame(new MP4Packet(result, frameNo, fps, 1, frameNo, true,
                null, frameNo, 0));
        frameNo++;
    }

    /**
     * Finalizes the recording: stores SPS/PPS, writes the MP4 header and
     * closes the output channel. Must be called exactly once after the last
     * frame, or the file will be unplayable.
     *
     * @throws IOException if finalizing the container fails
     */
    public void finish() throws IOException {
        // Push saved SPS/PPS to a special storage in MP4
        outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList));

        // Write MP4 header and finalize recording
        muxer.writeHeader();
        NIOUtils.closeQuietly(ch);
    }

    /**
     * Converts an Android Bitmap into a newly allocated RGB jcodec Picture.
     *
     * @param src source bitmap
     * @return a Picture holding the bitmap's pixels in planar RGB order
     */
    public static Picture fromBitmap(Bitmap src) {
        // Bitmap.getWidth()/getHeight() already return int; no casts needed
        Picture dst = Picture.create(src.getWidth(), src.getHeight(),
                ColorSpace.RGB);
        fromBitmap(src, dst);
        return dst;
    }

    /**
     * Copies a Bitmap's packed ARGB pixels into an existing RGB Picture,
     * unpacking each pixel into separate R, G, B components (alpha dropped).
     *
     * @param src source bitmap
     * @param dst destination picture; must be at least src-sized, RGB
     */
    public static void fromBitmap(Bitmap src, Picture dst) {
        int[] dstData = dst.getPlaneData(0);
        int[] packed = new int[src.getWidth() * src.getHeight()];

        src.getPixels(packed, 0, src.getWidth(), 0, 0, src.getWidth(),
                src.getHeight());

        for (int i = 0, srcOff = 0, dstOff = 0; i < src.getHeight(); i++) {
            for (int j = 0; j < src.getWidth(); j++, srcOff++, dstOff += 3) {
                int rgb = packed[srcOff];
                dstData[dstOff] = (rgb >> 16) & 0xff;
                dstData[dstOff + 1] = (rgb >> 8) & 0xff;
                dstData[dstOff + 2] = rgb & 0xff;
            }
        }
    }

}
© www.soinside.com 2019 - 2024. All rights reserved.