BebopVideoView to Mat

Problem description

I have run into a big problem.

I'm trying to convert BebopVideoView output to a Mat. (BebopVideoView is Parrot drone source code.)

But I have been failing for several days.

Here is the code.

package com.hyeonjung.dronecontroll.view;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM;

import com.parrot.arsdk.arcontroller.ARControllerCodec;
import com.parrot.arsdk.arcontroller.ARFrame;

import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {

    private static final String TAG = "BebopVideoView";
    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

    private MediaCodec mMediaCodec;
    private Lock mReadyLock;

    private boolean mIsCodecConfigured = false;

    private ByteBuffer mSpsBuffer;
    private ByteBuffer mPpsBuffer;

    private ByteBuffer[] mBuffers;

    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;

    public byte[] a;
    public Mat k;


    public BebopVideoView(Context context) {
        super(context);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        customInit();
    }

    private void customInit() {
        mReadyLock = new ReentrantLock();
        getHolder().addCallback(this);
    }

    public void displayFrame(ARFrame frame) {
        mReadyLock.lock();

        if ((mMediaCodec != null)) {
            if (mIsCodecConfigured) {
                // Here we have either a good PFrame, or an IFrame
                int index = -1;

                try {
                    index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Error while dequeue input buffer");
                }
                if (index >= 0) {
                    ByteBuffer b;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        b = mMediaCodec.getInputBuffer(index); // fill inputBuffer with valid data
                    }
                    else {
                        b = mBuffers[index]; // fill inputBuffer with valid data
                        b.clear();
                    }

                    if (b != null) {
                        b.put(frame.getByteData(), 0, frame.getDataSize()); //write to b.
                        getMat(frame);
                        saveMat(k);
                    }

                    try {
                        mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0); //end of stream
                    } catch (IllegalStateException e) {
                        Log.e(TAG, "Error while queue input buffer");
                    }
                }
            }

            // Try to display previous frame
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outIndex;
            try {
                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

                while (outIndex >= 0) {
                    mMediaCodec.releaseOutputBuffer(outIndex, true);
                    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
                }
            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer (outIndex)");
            }
        }


        mReadyLock.unlock();
    }

    public void configureDecoder(ARControllerCodec codec) {
        mReadyLock.lock();

        if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
            ARControllerCodec.H264 codecH264 = codec.getAsH264();

            mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
            mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }

        mReadyLock.unlock();
    }

    private void configureMediaCodec() {
        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
        format.setByteBuffer("csd-0", mSpsBuffer);
        format.setByteBuffer("csd-1", mPpsBuffer);

        mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
        mMediaCodec.start();

        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            mBuffers = mMediaCodec.getInputBuffers();
        }

        mIsCodecConfigured = true;
    }

    private void initMediaCodec(String type) {
        try {
            mMediaCodec = MediaCodec.createDecoderByType(type);
        } catch (IOException e) {
            Log.e(TAG, "Exception", e);
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }
    }

    private void releaseMediaCodec() {
        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
            mIsCodecConfigured = false;
            mMediaCodec = null;
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mReadyLock.lock();
        initMediaCodec(VIDEO_MIME_TYPE);
        mReadyLock.unlock();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mReadyLock.lock();
        releaseMediaCodec();
        mReadyLock.unlock();
    }

    public void getMat(ARFrame frame) {
        k = new Mat();

        k.get(150, 150, frame.getByteData());
        k.put(150, 150, frame.getByteData());

        //or
        //byte[] a = new byte[b.remaining()];
        //b.get(a);
        //k.get(150, 150, a);
        //k.put(150, 150, a);

    }

    public void saveMat (Mat mat) {
        Mat mIntermediateMat = new Mat(150, 150, CvType.CV_8UC1);
        Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);

        File path = new File(Environment.getExternalStorageDirectory() + "/data");
        path.mkdirs();
        File file = new File(path, "image.png");
        String filename = file.toString();
        Boolean bool = Imgcodecs.imwrite(filename, mIntermediateMat);

        if (bool)
            Log.i(TAG, "SUCCESS writing image to external storage");
        else
            Log.i(TAG, "Fail writing image to external storage");
    }

}

I think I can get image-related data from ByteBuffer b or frame.getByteData().

I checked the contents of ByteBuffer b and frame.getByteData().

The data type was char, with a range of -128 to 127.

So I checked the results of getMat and saveMat, and the result was NULL (Mat k).

What is the problem?

Please help me. T.T

Answer

If you use a TextureView you can simply grab a bitmap of it and convert it to a Mat. You need to use the TextureView's provided surface rather than the typical SurfaceView holder. This will require some additional refactoring of the mediaCodec lifecycle, but it is a fairly trivial change.

public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener {

    // Class-level surface handed to the MediaCodec instead of the SurfaceView holder
    private Surface surface;
    private boolean surfaceCreated = false;

    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        this.surface = new Surface(surfaceTexture);
        surfaceCreated = true;
    }

...

}
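The remaining TextureView.SurfaceTextureListener callbacks can mirror the SurfaceHolder.Callback lifecycle from the question's class; onSurfaceTextureAvailable would presumably also take over the initMediaCodec(VIDEO_MIME_TYPE) call that surfaceCreated() made. A minimal sketch of the other callbacks, assuming the releaseMediaCodec() helper from the question is kept:

@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture, int width, int height) {}

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
    surfaceCreated = false;
    releaseMediaCodec();   // same teardown as surfaceDestroyed() did in the SurfaceView version
    return true;           // let the TextureView release the SurfaceTexture
}

@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}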

And inside of configureMediaCodec, use the class-level surface captured in onSurfaceTextureAvailable instead:

mediaCodec.configure(format, surface, null, 0);
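For reference, a minimal sketch of how configureMediaCodec might look after the change, assuming the fields and constants from the question's class (mMediaCodec, mSpsBuffer, mPpsBuffer, mIsCodecConfigured, VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT) are kept, and that surface/surfaceCreated are set in onSurfaceTextureAvailable as shown above:

private void configureMediaCodec() {
    // Wait until both the TextureView's Surface and the SPS/PPS buffers are available
    if (!surfaceCreated || mSpsBuffer == null || mPpsBuffer == null) {
        return;
    }

    MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
    format.setByteBuffer("csd-0", mSpsBuffer);
    format.setByteBuffer("csd-1", mPpsBuffer);

    mMediaCodec.configure(format, surface, null, 0); // TextureView's Surface, not getHolder().getSurface()
    mMediaCodec.start();

    // (pre-Lollipop getInputBuffers() handling from the original code omitted here)
    mIsCodecConfigured = true;
}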

With a couple of other minor tweaks you now have a lot more control over the view. You can do things like setTransform() and, more importantly in your case, getBitmap():

Mat mat = new Mat();
Utils.bitmapToMat(getBitmap(), mat);
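Wrapped into a small helper, this could look like the sketch below. The method name getFrameMat is hypothetical; the sketch assumes OpenCV's Android SDK (org.opencv.android.Utils) is on the classpath and converts to BGR so the result can be written with Imgcodecs.imwrite as in the question's saveMat:

public Mat getFrameMat() {
    Bitmap bitmap = getBitmap();   // snapshot of the last frame rendered onto the TextureView
    if (bitmap == null) {
        return null;               // nothing rendered yet, or the view is not laid out
    }
    Mat mat = new Mat();
    Utils.bitmapToMat(bitmap, mat);                     // RGBA Mat (CV_8UC4)
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2BGR); // BGR order expected by Imgcodecs.imwrite()
    return mat;
}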
