MediaCodec 从摄像头采集的视频流方向和颜色错误 [英] MediaCodec Video Streaming From Camera wrong orientation & color

查看:738
本文介绍了"MediaCodec 从摄像头采集的视频流方向和颜色错误"问题的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我想直接从 Android 设备的摄像头捕获并推送视频流。到目前为止,我已经能够在 onPreviewFrame(byte[] data, Camera camera) 回调中拿到相机的每一帧,对数据进行编码,然后成功解码并显示到 Surface 上。我使用 Android 的 MediaCodec 进行编码和解码。但是视频的颜色和方向不正确(旋转了 90 度)。搜索一段时间后,我找到了 YV12toYUV420PackedSemiPlanar 函数——如果在把原始相机数据传给编码器之前先用这个函数处理,颜色就正确了,但画面仍然旋转了 90 度。

 公共静态的byte [] YV12toYUV420PackedSemiPlanar(最后一个字节[]输入,最终诠释宽度,最终诠释高度){

    最终诠释框架尺寸=宽*高;
    最终诠释qFrameSize =框架尺寸/ 4;
    byte []的输出=新的字节[input.length]


    System.arraycopy(输入,0,输出0,框架尺寸);
    的for(int i = 0;我≤(qFrameSize);我++)
    {
        字节B =(输入[框架尺寸+ qFrameSize + I  -  32  -  320]);
        输出[框架尺寸+ I * 2 = B;
        输出[框架尺寸+ I * 2 + 1] =(输入[框架尺寸+ I  -  32  -  320]);
    }
    System.arraycopy(输入,0,输出0,框架尺寸); //ÿ
    的for(int i = 0; I< qFrameSize;我++){
        输出[框架尺寸+ I * 2 =输入[框架尺寸+ 1 + qFrameSize]。 // CB(U)
        输出[框架尺寸+ I * 2 + 1] =输入[框架尺寸+ I]。 //铬(Ⅴ)
    }
    返回输出;
}
 

然后我用这个功能,rotateYUV420Degree90 调用YV12toYUV420PackedSemiPlanar功能。这似乎在取向和颜色确定,但输出的视频很失真

 专用字节[] rotateYUV420Degree90(byte []的数据,INT ImageWidth等,诠释imageHeight)
{
    byte []的YUV =新的字节[ImageWidth等* imageHeight * 3/2]。
    //旋转在Y亮度
    INT I = 0;
    为(中间体X = 0 X  - 其中; ImageWidth等; X ++)
    {
        为(中间体Y = imageHeight-1 y与其所连接; = 0; y--)
        {
            YUV [i] =数据[Y * ImageWidth等+ X]
            我++;
        }
    }
    //旋转U和V彩色分量
    I = ImageWidth等* imageHeight * 3 / 2-1;
    为(中间体X = ImageWidth等-1; X大于0; X = X-2)的
    {
        对于(INT Y = 0; Y< imageHeight / 2; Y ++)
        {
            YUV [i] =数据[(ImageWidth等* imageHeight)+(Y * ImageWidth等)+ X]
            一世 - ;
            YUV [I] =数据[(ImageWidth等* imageHeight)+(γ* ImageWidth等)+(X-1)];
            一世 - ;
        }
    }
    返回YUV;
}
 

因为我的颜色规格和相机数据知之甚少的

我不明白我在做什么错。这里是我的总code - 请看看,帮我找到我的错误

在此先感谢。

 公共类MainActivity扩展活动实现SurfaceHolder.Callback {

    相机mCamera;
    FileOutputStream中FOS;
    文件mVideoFile;
    媒体codeC MMEDIA codeC;
    ByteBuffer的[] inputBuffers;
    ByteBuffer的[] outputBuffers;
    MySurfaceView cameraSurfaceView;
    SurfaceView德codedSurfaceView;
    的LinearLayout LL;
    RelativeLayout的RL;
    按钮BTN;
    布尔米previewRunning = FALSE;
    布尔firstTime = TRUE;
    布尔isRunning = FALSE;
    公共静态最终字符串编码=H264;

    私人PlayerThread MPLAYER = NULL;
    处理程序处理程序= NULL;
    公共静态byte []的SPS = NULL;
    公共静态byte []的PPS = NULL;
    公共静态INT frameID = 0;
    的BlockingQueue<框架>队列=新ArrayBlockingQueue<框架>(100);

    私有静态类框架
    {
        公众诠释ID;
        公共字节[] frameData;

        公共帧(INT ID)
        {
            this.id = ID;
        }
    }

    @覆盖
    保护无效的onCreate(包savedInstanceState){
        super.onCreate(savedInstanceState);

        LL =新的LinearLayout(getApplicationContext());
        ll.setOrientation(LinearLayout.VERTICAL);

        cameraSurfaceView =新MySurfaceView(getApplicationContext());
        如果(ENCODING.equalsIgnoreCase(H264))
        {
            cameraSurfaceView.setLayoutParams(新android.widget.FrameLayout.LayoutParams(320,240));
        }
        否则,如果(ENCODING.equalsIgnoreCase(H263))
        {
            cameraSurfaceView.setLayoutParams(新android.widget.FrameLayout.LayoutParams(352,288));
        }
        ll.addView(cameraSurfaceView);

        初始化codeC();
        的setContentView(Ⅱ);

    }

    @覆盖
    保护无效的onPause(){

        super.onPause();
        米previewRunning = FALSE;

        如果(cameraSurfaceView = NULL和放大器;!&安培; cameraSurfaceView.isEnabled())
            cameraSurfaceView.setEnabled(假);
        cameraSurfaceView = NULL;

        如果(mCamera!= NULL)
        {
            mCamera.stop preVIEW();
            mCamera.release();
        }

        System.exit(0);

        MMEDIA codec.stop();
        MMEDIA codec.release();
        MMEDIA codeC = NULL;

    };


    私人无效的init codeC(){

        MediaFormat mediaFormat = NULL;

        如果(MMEDIA codeC!= NULL)
        {
            MMEDIA codec.stop();
            MMEDIA codec.release();
            MMEDIA codeC = NULL;
        }

        如果(ENCODING.equalsIgnoreCase(H264))
        {
            MMEDIA codeC =媒体codec.createEn coderByType(视频/ AVC);
            mediaFormat = MediaFormat.createVideoFormat(视频/ AVC
                    320,
                    240);
        }
        否则,如果(ENCODING.equalsIgnoreCase(H263))
        {
            MMEDIA codeC =媒体codec.createEn coderByType(视频/ 3GPP);
            mediaFormat = MediaFormat.createVideoFormat(视频/ 3GPP
                    352,
                    288);
        }

        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE,125000);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE,15);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,5);
        mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE,8000);
        mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT,1);

        尝试
        {
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    媒体codecInfo codecCapabilities.COLOR_FormatYUV420SemiPlanar)。

            MMEDIA codec.configure(mediaFormat,
                    空值,
                    空值,
                    媒体codec.CONFIGURE_FLAG_EN code);
            frameID = 0;
            MMEDIA codec.start();
        }
        赶上(例外五)
        {
            Toast.makeText(getApplicationContext(),mediaformat错误,Toast.LENGTH_LONG).show();
            e.printStackTrace();
        }

    }

    / ** =============================================== ========================= * /
    / **此函数获取比赛阵列的源阵列第一次出现的起始索引。该功能将在将从startIndex位置源阵列搜索。* /
    公共静态INT发现(byte []的来源,byte []的比赛,诠释了startIndex)
    {
        如果(来源== NULL ||比赛== NULL)
        {
            Log.d(恩codeDE code,错误的发现:空);
            返回-1;
        }
        如果(source.length == 0 || match.length == 0)
        {
            Log.d(恩codeDE code,错误的发现:长度为0);
            返回-1;
        }

        INT RET = -1;
        INT SPOS =在startIndex;
        INT MPOS = 0;
        字节M =匹配[MPOS]
        对于(; SPOS&L​​T; source.length; SPOS ++)
        {
            如果(M ==源[SPOS])
            {
                //开始比赛
                如果(MPOS == 0)
                    RET = SPOS;
                //完成比赛
                否则,如果(MPOS == match.length  -  1)
                    返回RET;

                MPOS ++;
                M =匹配[MPOS]
            }
            其他
            {
                RET = -1;
                MPOS = 0;
                M =匹配[MPOS]
            }
        }
        返回RET;
    }


    / ** =============================================== ========================= * /
    / **为H264编码,此功能将检索SPS和放大器; PPS从给定的数据,并将插入的SPS&安培; PPS全局数组。 * /
    公共静态无效getSPS_PPS(byte []的数据,INT startingIndex)
    {
        byte []的spsHeader = {0×00,0×00,0×00,0×01,0x67};
        byte []的ppsHeader = {0×00,0×00,0×00,0×01,0x68};
        byte []的frameHeader = {0×00,0×00,0×00,0×01};

        INT spsStartingIndex = -1;
        INT nextFrameStartingIndex = -1;
        INT ppsStartingIndex = -1;

        spsStartingIndex =发现(数据,spsHeader,startingIndex);
        Log.d(恩codeDE code,spsStartingIndex:+ spsStartingIndex);
        如果(spsStartingIndex> = 0)
        {
            nextFrameStartingIndex =发现(数据,frameHeader,spsStartingIndex + 1);
            INT spsLength = 0;
            如果(nextFrameStartingIndex> = 0)
                spsLength = nextFrameStartingIndex  -  spsStartingIndex;
            其他
                spsLength = data.length  -  spsStartingIndex;
            如果(spsLength大于0)
            {
                SPS =新的字节[spsLength]
                System.arraycopy(数据,spsStartingIndex,SPS,0,spsLength);
            }
        }

        ppsStartingIndex =查找(资料,ppsHeader,startingIndex);
        Log.d(恩codeDE code,ppsStartingIndex:+ ppsStartingIndex);
        如果(ppsStartingIndex> = 0)
        {
            nextFrameStartingIndex =发现(数据,frameHeader,ppsStartingIndex + 1);
            INT ppsLength = 0;
            如果(nextFrameStartingIndex> = 0)
                ppsLength = nextFrameStartingIndex  -  ppsStartingIndex;
            其他
                ppsLength = data.length  -  ppsStartingIndex;
            如果(ppsLength大于0)
            {
                PPS =新的字节[ppsLength]
                System.arraycopy(数据,ppsStartingIndex,聚苯硫醚,0,ppsLength);
            }
        }
    }


    / ** =============================================== ========================= * /
    / **打印的字节数组十六进制* /
    私人无效printByteArray(byte []数组)
    {
        StringBuilder的SB1 =新的StringBuilder();
        对于(字节B:数组)
        {
            sb1.append(的String.Format(%02X,B));
        }
        Log.d(恩codeDE code,sb1.toString());
    }

    公共静态的byte [] YV12toYUV420PackedSemiPlanar(最后一个字节[]输入,最终诠释宽度,最终诠释高度){
        / *
         * COLOR_TI_FormatYUV420PackedSemiPlanar是NV12
         *我们转换通过将相应的U和V字节一起(交叉)。
         * /
        最终诠释框架尺寸=宽*高;
        最终诠释qFrameSize =框架尺寸/ 4;
        byte []的输出=新的字节[input.length]


        System.arraycopy(输入,0,输出0,框架尺寸);
        的for(int i = 0;我≤(qFrameSize);我++)
        {
            字节B =(输入[框架尺寸+ qFrameSize + I  -  32  -  320]);
            输出[框架尺寸+ I * 2 = B;
            输出[框架尺寸+ I * 2 + 1] =(输入[框架尺寸+ I  -  32  -  320]);
        }



        System.arraycopy(输入,0,输出0,框架尺寸); //ÿ

        的for(int i = 0; I< qFrameSize;我++){
            输出[框架尺寸+ I * 2 =输入[框架尺寸+ 1 + qFrameSize]。 // CB(U)
            输出[框架尺寸+ I * 2 + 1] =输入[框架尺寸+ I]。 //铬(Ⅴ)
        }
        返回输出;
    }

    私人字节[] rotateYUV420Degree90(byte []的数据,INT ImageWidth等,诠释imageHeight)
    {
        byte []的YUV =新的字节[ImageWidth等* imageHeight * 3/2]。
        //旋转在Y亮度
        INT I = 0;
        为(中间体X = 0 X  - 其中; ImageWidth等; X ++)
        {
            为(中间体Y = imageHeight-1 y与其所连接; = 0; y--)
            {
                YUV [i] =数据[Y * ImageWidth等+ X]
                我++;
            }
        }
        //旋转U和V彩色分量
        I = ImageWidth等* imageHeight * 3 / 2-1;
        为(中间体X = ImageWidth等-1; X大于0; X = X-2)的
        {
            对于(INT Y = 0; Y< imageHeight / 2; Y ++)
            {
                YUV [i] =数据[(ImageWidth等* imageHeight)+(Y * ImageWidth等)+ X]
                一世 - ;
                YUV [I] =数据[(ImageWidth等* imageHeight)+(γ* ImageWidth等)+(X-1)];
                一世 - ;
            }
        }
        返回YUV;
    }

    / ** =============================================== ========================= * /
    / **当相机接收帧调用该函数的帧数据作为参数。它连接codeS给定的数据,然后存储在frameQueue。 * /
    私人无效连接code(byte []的数据)
    {
        Log.d(恩codeDE code,EN code函数调用);
        inputBuffers = MMEDIA codec.getInputBuffers();
        outputBuffers = MMEDIA codec.getOutputBuffers();

        INT inputBufferIndex = MMEDIA codec.dequeueInputBuffer(0);
        如果(inputBufferIndex> = 0)
        {
            ByteBuffer的INPUTBUFFER = inputBuffers [inputBufferIndex]
            inputBuffer.clear();

            INT大小= inputBuffer.limit();
            //inputBuffer.put(data);

            //颜色的权利,但旋转
            byte []的输出= YV12toYUV420PackedSemiPlanar(资料,320240);
            inputBuffer.put(输出);

            //颜色差不多吧,方向确定,但扭曲
            / * byte []的输出= YV12toYUV420PackedSemiPlanar(资料,320240);
            输出= rotateYUV420Degree90(输出,320240);
            inputBuffer.put(输出); * /

            MMEDIA codec.queueInputBuffer(inputBufferIndex,0 / *偏移* /,大小,0 / * timeUs * /,0);
            Log.d(恩codeDE code,INPUTBUFFER排队);
        }
        其他
        {
            Log.d(恩codeDE code,inputBufferIndex℃下,返回空);
            返回 ;
        }

        媒体codec.BufferInfo bufferInfo =新媒体codec.BufferInfo();
        INT outputBufferIndex = MMEDIA codec.dequeueOutputBuffer(bufferInfo,0);
        Log.d(恩codeDE code,outputBufferIndex =+ outputBufferIndex);
        做
        {
            如果(outputBufferIndex> = 0)
            {
                帧帧=新帧(frameID);
                ByteBuffer的outBuffer = outputBuffers [outputBufferIndex]
                byte []的outData =新的字节[bufferInfo.size]
                字节idrFrameType = 0x65;
                INT数据长度= 0;

                outBuffer.get(outData)以;

                //如果SPS和放大器; PPS还没有准备好,然后
                如果(ENCODING.equalsIgnoreCase(H264)及及((SPS == NULL || SPS.length == 0)||(PPS == NULL || PPS.length == 0)))
                    getSPS_PPS(outData,0);

                数据长度= outData.length;

                //如果帧是IDR帧然后加入SPS&安培;在实际的帧数据前面的PPS
                如果(ENCODING.equalsIgnoreCase(H264)&安培;&安培; outData [4] == idrFrameType)
                {
                    INT totalDataLength =数据长度+ SPS.length + PPS.length;

                    frame.frameData =新的字节[totalDataLength]

                    System.arraycopy(SPS,0,frame.frameData,0,SPS.length);
                    System.arraycopy(PPS,0,frame.frameData,SPS.length,PPS.length);
                    System.arraycopy(outData,0,frame.frameData,SPS.length + PPS.length,DATALENGTH);
                }
                其他
                {
                    frame.frameData =新的字节[数据长度]
                    System.arraycopy(outData,0,frame.frameData,0,数据长度);
                }

                //测试
                Log.d(恩codeDE code,图幅号::+ frameID +::框架尺寸::+ frame.frameData.length +::);
                printByteArray(frame.frameData);

                //如果编码类型是H264和SPS和放大器; PPS是准备好以后,将排入队列帧中的队列
                //如果编码类型是H263然后,排入队列在队列中的帧
                如果((ENCODING.equalsIgnoreCase(H264)&安培;&安培; SPS =空&安培;!&安培; PPS =空&安培;!&安培;!SPS.length = 0&安培;&安培;!PPS.length = 0)|| ENCODING.equalsIgnoreCase(H263))
                {
                    Log.d(恩codeDE code,框架排入队列没有:+(frameID));

                    尝试
                    {
                        queue.put(架);
                    }
                    赶上(InterruptedException的E)
                    {
                        Log.e(恩codeDE code,等待时发生中断);
                        e.printStackTrace();
                    }
                    赶上(NullPointerException异常E)
                    {
                        Log.e(恩codeDE code,框为空);
                        e.printStackTrace();
                    }
                    赶上(抛出:IllegalArgumentException E)
                    {
                        Log.e(恩codeDE code,队列中的问题,插入);
                        e.printStackTrace();
                    }

                    Log.d(恩codeDE code,帧入队队列大小现价:+ queue.size());

                    如果(firstTime)
                    {
                        Log.d(恩codeDE code,增加了表面布局去codeR);
                        SurfaceView SV =新SurfaceView(getApplicationContext());
                        处理程序=新的处理程序();
                        。sv.getHolder()的addCallback(MainActivity.this);
                        sv.setLayoutParams(新android.widget.FrameLayout.LayoutParams(320,240));
                        ll.addView(SV,1);
                        MainActivity.this.setContentView(Ⅱ);
                        firstTime = FALSE;
                    }
                }

                frameID ++;
                MMEDIA codec.releaseOutputBuffer(outputBufferIndex,假);
                outputBufferIndex = MMEDIA codec.dequeueOutputBuffer(bufferInfo,0);

            }
            否则,如果(outputBufferIndex ==媒体codec.INFO_OUTPUT_BUFFERS_CHANGED)
            {
                outputBuffers = MMEDIA codec.getOutputBuffers();
                Log.e(恩codeDE code,的EN codeR输出缓冲区:信息化);
            }
            否则,如果(outputBufferIndex ==媒体codec.INFO_OUTPUT_FORMAT_CHANGED)
            {
                Log.e(恩codeDE code,的EN codeR输出缓冲区:格式改变);
            }
            其他
            {
                Log.e(恩codeDE code,outputBufferIndex的未知值:+ outputBufferIndex);
                // printByteArray(数据);
            }
        }而(outputBufferIndex> = 0);
    }

    私有类MySurfaceView扩展了SurfaceView实现SurfaceHolder.Callback
    {
        SurfaceHolder持有人;
        公共MySurfaceView(上下文的背景下){
            超(上下文);
            支架= this.getHolder();
            holder.addCallback(本);
        }

        公共MySurfaceView(上下文的背景下,ATTRS的AttributeSet){
            超(背景下,ATTRS);
            支架= this.getHolder();
            holder.addCallback(本);
        }

        公共无效surfaceCreated(SurfaceHolder持有者){
            尝试
            {
                尝试
                {
                    如果(mCamera == NULL)
                        mCamera = Camera.open();
                    mCamera.setDisplayOrientation(90);
                    Log.d(恩codeDE code,摄像头打开);
                }
                赶上(例外五)
                {
                    Log.d(恩codeDE code,摄像头打开失败);
                    e.printStackTrace();
                }

                Camera.Parameters p值= mCamera.getParameters();

                如果(ENCODING.equalsIgnoreCase(H264))
                    P.SET previewSize(320,240);
                否则,如果(ENCODING.equalsIgnoreCase(H263))
                    P.SET previewSize(352,288);

                mCamera.setParameters(对);
                mCamera.set previewDisplay(保持器);

                mCamera.set previewCallback(新previewCallback()
                {
                    @覆盖
                    在previewFrame公共无效(byte []的数据,摄像头摄像头)
                    {
                        Log.d(关于previewFrame,调用连接code函数恩codeDE code);
                        EN code(数据);
                    }
                });
                mCamera.start preVIEW();
                米previewRunning = TRUE;
            }
            赶上(IOException异常E)
            {
                Log.e(恩codeDE code,surfaceCreated()::在集合previewDisplay(持有人)函数);
                e.printStackTrace();
            }
            赶上(NullPointerException异常E)
            {
                Log.e(恩codeDE code,surfaceCreated空指针);
                e.printStackTrace();
            }
        }

        公共无效surfaceChanged(SurfaceHolder持有人,INT格式,诠释的宽度,高度INT)
        {
            如果(M previewRunning)
            {
                mCamera.stop preVIEW();
                Log.e(恩codeDE code,preVIEW停);
            }
            尝试
            {
                如果(mCamera == NULL)
                {
                    mCamera = Camera.open();
                    mCamera.setDisplayOrientation(90);
                }

                Camera.Parameters p值= mCamera.getParameters();
                如果(ENCODING.equalsIgnoreCase(H264))
                    P.SET previewSize(320,240);
                否则,如果(ENCODING.equalsIgnoreCase(H263))
                    P.SET previewSize(352,288);

                P.SET previewFormat(ImageFormat.YV12);
                mCamera.setParameters(对);
                mCamera.set previewDisplay(保持器);
                mCamera.unlock();
                mCamera.reconnect();
                mCamera.set previewCallback(新previewCallback()
                {
                    @覆盖
                    在previewFrame公共无效(byte []的数据,摄像头摄像头)
                    {
                        Log.d(关于previewFrame,调用连接code函数恩codeDE code);
                        EN code(数据);
                    }
                });
                Log.d(恩codeDE code,previewCallBack集);
                mCamera.start preVIEW();
                米previewRunning = TRUE;
            }
            赶上(例外五)
            {
                Log.e(恩codeDE code,表面变了:一套preVIEW显示失败);
                e.printStackTrace();
            }

        }

        公共无效surfaceDestroyed(SurfaceHolder持有人)
        {

        }
    }


    @覆盖
    公共无效surfaceCreated(SurfaceHolder持有人)
    {
        Log.d(恩codeDE code,mainActivity surfaceCreated);
    }

    @覆盖
    公共无效surfaceChanged(SurfaceHolder持有人,INT格式,诠释的宽度,高度INT)
    {
        Log.d(恩codeDE code,mainActivity surfaceChanged。);
        如果(MPLAYER == NULL)
        {
            MPLAYER =新PlayerThread(holder.getSurface());
            mPlayer.start();
            Log.d(恩codeDE code,PlayerThread开始);
        }
    }

    @覆盖
    公共无效surfaceDestroyed(SurfaceHolder持有人)
    {
        如果(MPLAYER!= NULL)
        {
            mPlayer.interrupt();
        }
    }

    私有类PlayerThread继承Thread
    {
        //私人MediaExtractor提取;
        私营媒体codeC德codeR;
        私人表面的表面;

        公共PlayerThread(表面表面)
        {
            this.surface =表面;
        }

        @覆盖
        公共无效的run()
        {
            而(SPS == NULL || PPS == NULL || SPS.length == 0 || PPS.length == 0)
            {
                尝试
                {
                    Log.d(恩codeDE code,DE codeR_THREAD :: SPS,PPS还没有准备好);
                    睡眠(1000);
                }赶上(InterruptedException异常E){
                    e.printStackTrace();

                }
            }

            Log.d(恩codeDE code,DE codeR_THREAD :: SPS,PPS READY);

            如果(ENCODING.equalsIgnoreCase(H264))
            {
                德codeR =媒体codec.createDe coderByType(视频/ AVC);
                MediaFormat mediaFormat = MediaFormat.createVideoFormat(视频/ AVC,320,240);
                mediaFormat.setByteBuffer(CSD-0,ByteBuffer.wrap(SPS));
                mediaFormat.setByteBuffer(CSD-1,ByteBuffer.wrap(PPS));
                德coder.configure(mediaFormat,表面/ *表面* /空/ *加密* /,0 / *标志* /);
            }
            否则,如果(ENCODING.equalsIgnoreCase(H263))
            {
                德codeR =媒体codec.createDe coderByType(视频/ 3GPP);
                MediaFormat mediaFormat = MediaFormat.createVideoFormat(视频/ 3GPP,352,288);
                德coder.configure(mediaFormat,表面/ *表面* /空/ *加密* /,0 / *标志* /);
            }

            如果(德codeR == NULL)
            {
                Log.e(德codeActivity,DE codeR_THREAD ::无法找到视频信息!);
                返回;
            }

            德coder.start();
            Log.d(恩codeDE code,DE codeR_THREAD ::德coder.start()被称为);

            ByteBuffer的[] inputBuffers =去coder.getInputBuffers();
            ByteBuffer的[] outputBuffers =去coder.getOutputBuffers();


            INT I = 0;
            而(!Thread.interrupted())
            {
                帧currentFrame = NULL;
                尝试
                {
                    Log.d(恩codeDE code,DE codeR_THREAD ::调用queue.take(),如果在队列中,将等待无边框);
                    currentFrame = queue.take();
                }
                赶上(InterruptedException的E)
                {
                    Log.e(恩codeDE code,DE codeR_THREAD ::而PlayerThread在等待下一帧中断);
                    e.printStackTrace();
                }

                如果(currentFrame == NULL)
                    Log.e(恩codeDE code,DE codeR_THREAD ::空帧出列);
                其他
                    Log.d(恩codeDE code,DE codeR_THREAD ::+ currentFrame.id +无边框出列);

                如果(currentFrame = NULL和放大器;!&安培; currentFrame.frameData = NULL和放大器;!&安培;!currentFrame.frameData.length = 0)
                {
                    Log.d(恩codeDE code,DE codeR_THREAD ::译码帧中没有+ I +,数据长度=+ currentFrame.frameData.length);

                    INT inIndex = 0;
                    而((inIndex =去coder.dequeueInputBuffer(1))℃下)
                        ;

                    如果(inIndex> = 0)
                    {
                        Log.d(恩codeDE code,DE codeR_THREAD ::试样尺寸:+ currentFrame.frameData.length);

                        ByteBuffer的缓冲= inputBuffers [inIndex]
                        buffer.clear();
                        buffer.put(currentFrame.frameData);
                        德coder.queueInputBuffer(inIndex,0,currentFrame.frameData.length,0,0);

                        BufferInfo信息=新BufferInfo();
                        INT outIndex =去coder.dequeueOutputBuffer(资讯,100000);

                        开关(outIndex)
                        {
                        案例媒体codec.INFO_OUTPUT_BUFFERS_CHANGED:
                            Log.e(恩codeDE code,DE codeR_THREAD :: INFO_OUTPUT_BUFFERS_CHANGED);
                            outputBuffers =去coder.getOutputBuffers();
                            打破;
                        案例媒体codec.INFO_OUTPUT_FORMAT_CHANGED:
                            Log.e(恩codeDE code,DE codeR_THREAD ::新格式+得coder.getOutputFormat());

                            打破;
                        案例媒体codec.INFO_TRY_AGAIN_LATER:
                            Log.e(恩codeDE code,DE codeR_THREAD :: dequeueOutputBuffer超时!);
                            打破;
                        默认:
                            Log.d(恩codeDE code,DE codeR_THREAD ::德codeD成功每一步!);
                            ByteBuffer的outbuffer = outputBuffers [outIndex]
                            德coder.releaseOutputBuffer(outIndex,真正的);
                            打破;
                        }
                        我++;
                    }
                }
            }

            德coder.stop();
            德coder.release();

        }
    }
}
 

解决方案

当你把图像旋转 90 度后,newWidth = oldHeight、newHeight = oldWidth,因为源图像不是正方形。所以你有两个选择:要么在旋转时分别做裁剪和缩放,要么换一个不同的预览尺寸来显示——否则帧的行间距(stride)与用于显示的 SurfaceView(或其他组件)的 stride 不一致,画面就会出现错位/扭曲。

I'm trying to stream video capturing directly from camera for android devices. So far I have been able to capture each frame from android camera's onPreviewFrame(byte[] data, Camera camera) function, encode the data & then successfully decode the data and show to the surface. I used android's MediaCodec for the encoding & decoding. But the color & the orientation of the video is not correct [ 90 degree rotated ]. After searching a while I have found this YV12toYUV420PackedSemiPlanar function - if I use this function on the raw camera data before passing it to the encoder the color comes out correct but it is still 90 degree rotated.

public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) {
    /*
     * Converts a YV12 camera frame (planar: Y plane, then V plane, then U plane,
     * each chroma plane being width/2 x height/2) into YUV420 semi-planar / NV12
     * (Y plane followed by interleaved U,V byte pairs), which is the layout an
     * encoder configured with COLOR_FormatYUV420SemiPlanar expects.
     *
     * NOTE(review): assumes the YV12 buffer is tightly packed, i.e. the row
     * stride equals `width`. The Camera YV12 preview format guarantees a
     * 16-aligned stride, which equals the width for the 320/352 sizes used in
     * this file — confirm before using other resolutions.
     *
     * The original implementation had an extra chroma loop using hard-coded
     * "- 32 - 320" offsets; it indexed out of bounds for widths other than 320
     * and its output was completely overwritten by the copy/loop below, so it
     * has been removed.
     */
    final int frameSize = width * height;  // size of the luma (Y) plane
    final int qFrameSize = frameSize / 4;  // size of each chroma (U or V) plane
    final byte[] output = new byte[input.length];

    // The luma plane is identical in both layouts.
    System.arraycopy(input, 0, output, 0, frameSize); // Y

    // Interleave chroma. YV12 stores the V (Cr) plane first, then U (Cb);
    // NV12 wants U,V pairs.
    for (int i = 0; i < qFrameSize; i++) {
        output[frameSize + i * 2] = input[frameSize + i + qFrameSize]; // Cb (U)
        output[frameSize + i * 2 + 1] = input[frameSize + i];          // Cr (V)
    }
    return output;
}

Then I used this function, rotateYUV420Degree90 after calling YV12toYUV420PackedSemiPlanar function. It seems the orientation and the color is ok but the output video is very distorted.

private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight) {
    /*
     * Rotates a YUV420 frame (Y plane followed by an interleaved chroma
     * region) 90 degrees. The rotated image is imageHeight x imageWidth —
     * callers must swap the dimensions they report downstream, otherwise the
     * picture appears distorted.
     */
    final int frameSize = imageWidth * imageHeight;
    final byte[] rotated = new byte[frameSize * 3 / 2];

    // Luma: emit each source column, bottom row first, as consecutive
    // output bytes.
    int out = 0;
    for (int x = 0; x < imageWidth; x++) {
        for (int y = imageHeight - 1; y >= 0; y--) {
            rotated[out++] = data[y * imageWidth + x];
        }
    }

    // Chroma: walk the interleaved byte pairs right-to-left, filling the
    // output chroma region from its end backwards.
    out = frameSize * 3 / 2 - 1;
    for (int x = imageWidth - 1; x > 0; x -= 2) {
        for (int y = 0; y < imageHeight / 2; y++) {
            final int rowBase = frameSize + y * imageWidth;
            rotated[out--] = data[rowBase + x];
            rotated[out--] = data[rowBase + x - 1];
        }
    }
    return rotated;
}

Because I have very little knowledge of color formats and raw camera data, I cannot figure out what I am doing wrong. Here is my complete code — please have a look and help me find my mistake.

Thanks in advance.

/**
 * Demo activity that captures camera preview frames, encodes them with
 * MediaCodec (H.264 "video/avc" at 320x240 or H.263 "video/3gpp" at 352x288),
 * queues the encoded frames in a BlockingQueue, and decodes them back onto a
 * second SurfaceView for display.
 *
 * Uses the deprecated android.hardware.Camera preview-callback path and the
 * pre-API-21 MediaCodec ByteBuffer-array API (getInputBuffers/getOutputBuffers).
 */
public class MainActivity extends Activity implements SurfaceHolder.Callback  {

    Camera mCamera;                    // camera supplying raw preview frames
    FileOutputStream fos;              // unused in the visible code
    File mVideoFile;                   // unused in the visible code
    MediaCodec mMediaCodec;            // the video encoder
    ByteBuffer[] inputBuffers;         // encoder input buffers (pre-API-21 API)
    ByteBuffer[] outputBuffers;        // encoder output buffers
    MySurfaceView cameraSurfaceView ;  // shows the live camera preview
    SurfaceView decodedSurfaceView ;   // unused; the decoder surface is created in encode()
    LinearLayout ll;                   // root layout holding both surfaces
    RelativeLayout rl;                 // unused in the visible code
    Button btn;                        // unused in the visible code
    boolean mPreviewRunning = false;   // true while camera preview is active
    boolean firstTime = true;          // guards one-time creation of the decoder SurfaceView
    boolean isRunning = false;         // unused in the visible code
    public static final String ENCODING = "h264";  // selects "h264" or "h263" end to end

    private PlayerThread mPlayer = null;  // decoder thread, started when its surface is ready
    Handler handler = null;
    public static byte[] SPS = null;      // cached H.264 sequence parameter set (with start code)
    public static byte[] PPS = null;      // cached H.264 picture parameter set (with start code)
    public static int frameID = 0;        // running sequence number for encoded frames
    // Hand-off between encoder (camera callback thread) and decoder thread.
    BlockingQueue<Frame> queue = new ArrayBlockingQueue<Frame>(100);

    /** Container pairing an encoded frame's payload with its sequence id. */
    private static class Frame
    {
        public int id;            // sequence number assigned at encode time
        public byte[] frameData;  // encoded bitstream; SPS+PPS are prepended for IDR frames

        public Frame(int id)
        {
            this.id = id;
        }
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        ll = new LinearLayout(getApplicationContext());
        ll.setOrientation(LinearLayout.VERTICAL);

        // The preview surface is sized to match the encoder's input resolution:
        // QVGA (320x240) for H.264, CIF (352x288) for H.263.
        cameraSurfaceView = new MySurfaceView(getApplicationContext());
        if(ENCODING.equalsIgnoreCase("h264"))
        {
            cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240));
        }
        else if(ENCODING.equalsIgnoreCase("h263"))
        {
            cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(352, 288));
        }
        ll.addView(cameraSurfaceView);

        initCodec();
        setContentView(ll);

    }

    @Override
    protected void onPause() {

        super.onPause();
        mPreviewRunning = false;

        if(cameraSurfaceView !=null && cameraSurfaceView.isEnabled())
            cameraSurfaceView.setEnabled(false);
        cameraSurfaceView = null;

        if(mCamera != null)
        {
            mCamera.stopPreview();
            mCamera.release();
        }

        // NOTE(review): System.exit(0) terminates the process here, so the
        // MediaCodec cleanup below is unreachable dead code.
        System.exit(0);

        mMediaCodec.stop();
        mMediaCodec.release();
        mMediaCodec = null;

    };


    /** (Re)creates, configures and starts the MediaCodec encoder for ENCODING. */
    private void initCodec() {

        MediaFormat mediaFormat = null;

        // Tear down any previous encoder instance before creating a new one.
        if(mMediaCodec != null)
        {
            mMediaCodec.stop();
            mMediaCodec.release();
            mMediaCodec = null;
        }

        if(ENCODING.equalsIgnoreCase("h264"))
        {
            mMediaCodec = MediaCodec.createEncoderByType("video/avc");
            mediaFormat = MediaFormat.createVideoFormat("video/avc",
                    320,
                    240);
        }
        else if(ENCODING.equalsIgnoreCase("h263"))
        {
            mMediaCodec = MediaCodec.createEncoderByType("video/3gpp");
            mediaFormat = MediaFormat.createVideoFormat("video/3gpp",
                    352,
                    288);
        }

        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
        // NOTE(review): KEY_SAMPLE_RATE and KEY_CHANNEL_COUNT are audio keys;
        // on a video format they should be ignored — confirm and remove.
        mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000);
        mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);

        try
        {
            // Encoder input is declared as YUV420 semi-planar; encode() converts
            // the camera's YV12 frames to this layout before queueing them.
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, 
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);

            mMediaCodec.configure(mediaFormat,
                    null,
                    null,
                    MediaCodec.CONFIGURE_FLAG_ENCODE);
            frameID = 0;
            mMediaCodec.start();
        }
        catch(Exception e)
        {
            Toast.makeText(getApplicationContext(), "mediaformat error", Toast.LENGTH_LONG).show();
            e.printStackTrace();
        }

    }

    /**========================================================================*/
    /** This function gets the starting index of the first appearance of match array in source array. The function will search in source array from startIndex position. Returns -1 if no complete match is found. */
    // NOTE(review): after a partial match fails, scanning resumes at the current
    // position rather than just after the failed match's start, so overlapping
    // patterns could in principle be missed — acceptable for NAL start-code
    // searching, but confirm before reusing elsewhere.
    public static int find(byte[] source, byte[] match, int startIndex) 
    {  
        if(source == null || match == null)
        {
            Log.d("EncodeDecode", "ERROR in find : null");
            return -1;
        }
        if(source.length == 0 || match.length == 0)
        {
            Log.d("EncodeDecode", "ERROR in find : length 0");
            return -1;
        }

        int ret = -1;  
        int spos = startIndex;  
        int mpos = 0;  
        byte m = match[mpos];  
        for( ; spos < source.length; spos++ ) 
        {  
            if(m == source[spos]) 
            {  
                // starting match  
                if(mpos == 0)  
                    ret = spos;  
                // finishing match  
                else if(mpos == match.length - 1)  
                    return ret;  

                mpos++;  
                m = match[mpos];  
            }  
            else 
            {  
                // mismatch: reset the partial match and keep scanning
                ret = -1;  
                mpos = 0;  
                m = match[mpos];  
            }  
        }  
        return ret;  
    }


    /**========================================================================*/
    /** For H264 encoding, this function will retrieve SPS & PPS from the given data and will insert into SPS & PPS global arrays. The stored arrays include the Annex-B start code. */
    public static void getSPS_PPS(byte[] data, int startingIndex)
    {
        // Annex-B start code (00 00 00 01) followed by the NAL header byte:
        // 0x67 = SPS, 0x68 = PPS (nal_ref_idc=3).
        byte[] spsHeader = {0x00, 0x00, 0x00, 0x01, 0x67};
        byte[] ppsHeader = {0x00, 0x00, 0x00, 0x01, 0x68};
        byte[] frameHeader = {0x00, 0x00, 0x00, 0x01};

        int spsStartingIndex = -1;
        int nextFrameStartingIndex = -1;
        int ppsStartingIndex = -1;

        // SPS runs from its start code to the next start code (or end of data).
        spsStartingIndex = find(data, spsHeader, startingIndex);
        Log.d("EncodeDecode", "spsStartingIndex: " + spsStartingIndex);
        if(spsStartingIndex >= 0)
        {
            nextFrameStartingIndex = find(data, frameHeader, spsStartingIndex+1);
            int spsLength = 0;
            if(nextFrameStartingIndex>=0)
                spsLength = nextFrameStartingIndex - spsStartingIndex;
            else
                spsLength = data.length - spsStartingIndex;
            if(spsLength > 0)
            {
                SPS = new byte[spsLength];
                System.arraycopy(data, spsStartingIndex, SPS, 0, spsLength);
            }
        }

        // Same extraction for PPS.
        ppsStartingIndex = find(data, ppsHeader, startingIndex);
        Log.d("EncodeDecode", "ppsStartingIndex: " + ppsStartingIndex);
        if(ppsStartingIndex >= 0)
        {
            nextFrameStartingIndex = find(data, frameHeader, ppsStartingIndex+1);
            int ppsLength = 0;
            if(nextFrameStartingIndex>=0)
                ppsLength = nextFrameStartingIndex - ppsStartingIndex;
            else
                ppsLength = data.length - ppsStartingIndex;
            if(ppsLength > 0)
            {
                PPS = new byte[ppsLength];
                System.arraycopy(data, ppsStartingIndex, PPS, 0, ppsLength);
            }
        }
    }


    /**========================================================================*/
    /** Prints the byte array in hex */
    private void printByteArray(byte[] array)
    {
        StringBuilder sb1 = new StringBuilder();
        for (byte b : array) 
        {
            sb1.append(String.format("%02X ", b));
        }
        Log.d("EncodeDecode", sb1.toString());
    }

    /** Converts a YV12 preview frame to YUV420 semi-planar (NV12-style). */
    public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) {
        /* 
         * COLOR_TI_FormatYUV420PackedSemiPlanar is NV12
         * We convert by putting the corresponding U and V bytes together (interleaved).
         */
        final int frameSize = width * height;
        final int qFrameSize = frameSize/4;
        byte[] output = new byte[input.length];


        System.arraycopy(input, 0, output, 0, frameSize);
        // NOTE(review): this first loop (with the magic -32-320 offsets) is dead
        // code — every byte it writes into the chroma region is overwritten by
        // the loop below, and the arraycopy above is repeated after it.
        for (int i = 0; i < (qFrameSize); i++) 
        {
            byte b = (input[frameSize + qFrameSize + i - 32 - 320]);
            output[frameSize + i*2] =   b;
            output[frameSize + i*2 + 1] = (input[frameSize + i - 32 - 320]);            
        }



        System.arraycopy(input, 0, output, 0, frameSize); // Y

        // YV12 stores Cr (V) first at offset frameSize, then Cb (U) at
        // frameSize + qFrameSize; interleave them as Cb,Cr pairs for NV12.
        for (int i = 0; i < qFrameSize; i++) {
            output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U)
            output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V)
        }
        return output;
    }

    /**
     * Rotates a YUV420 semi-planar (interleaved-chroma) frame by 90 degrees.
     * NOTE(review): the result is an imageHeight x imageWidth frame, but the
     * encoder stays configured for the unrotated size — feeding the rotated
     * buffer without swapping dimensions is the likely source of distortion.
     */
    private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight) 
    {
        byte [] yuv = new byte[imageWidth*imageHeight*3/2];
        // Rotate the Y luma: walk source columns, reading rows bottom-up.
        int i = 0;
        for(int x = 0;x < imageWidth;x++)
        {
            for(int y = imageHeight-1;y >= 0;y--)                               
            {
                yuv[i] = data[y*imageWidth+x];
                i++;
            }
        }
        // Rotate the U and V color components: fill the output chroma region
        // backwards, taking interleaved byte pairs two columns at a time.
        i = imageWidth*imageHeight*3/2-1;
        for(int x = imageWidth-1;x > 0;x=x-2)
        {
            for(int y = 0;y < imageHeight/2;y++)                                
            {
                yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+x];
                i--;
                yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+(x-1)];
                i--;
            }
        }
        return yuv;
    }

    /**========================================================================*/
    /** When camera receives a frame this function is called with the frame data as its parameter. It encodes the given data and then stores in frameQueue. Runs on the camera preview-callback thread; drains encoder output synchronously. */
    private void encode(byte[] data)
    {
        Log.d("EncodeDecode", "ENCODE FUNCTION CALLED");
        inputBuffers = mMediaCodec.getInputBuffers();
        outputBuffers = mMediaCodec.getOutputBuffers();

        int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0)
        {
            ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();

            // NOTE(review): size is taken from the codec buffer's limit, not
            // from the converted frame's length — if the codec buffer is larger
            // than one frame, the wrong size is queued. Also the presentation
            // timestamp is always 0.
            int size = inputBuffer.limit();
            //inputBuffer.put(data);

            // color right, but rotated
            byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240);
            inputBuffer.put(output);

            // color almost right, orientation ok but distorted 
            /*byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240);
            output = rotateYUV420Degree90(output,320,240);
            inputBuffer.put(output);*/

            mMediaCodec.queueInputBuffer(inputBufferIndex, 0 /* offset */, size, 0 /* timeUs */, 0);
            Log.d("EncodeDecode", "InputBuffer queued");
        }
        else
        {
            // No free input buffer right now; this preview frame is dropped.
            Log.d("EncodeDecode", "inputBufferIndex < 0, returning null");
            return ;
        }

        // Drain all currently available encoder output.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        Log.d("EncodeDecode", "outputBufferIndex = " + outputBufferIndex);
        do
        {
            if (outputBufferIndex >= 0)
            {
                Frame frame = new Frame(frameID);
                ByteBuffer outBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                // 0x65 = NAL header of an IDR slice (nal_ref_idc=3, type 5).
                byte idrFrameType = 0x65;
                int dataLength = 0;

                outBuffer.get(outData);

                // If SPS & PPS is not ready then 
                if(ENCODING.equalsIgnoreCase("h264") && ( (SPS == null || SPS.length ==0) || (PPS == null || PPS.length == 0) ) )
                    getSPS_PPS(outData, 0);

                dataLength = outData.length;

                // If the frame is an IDR Frame then adding SPS & PPS in front of the actual frame data 
                // NOTE(review): outData[4] assumes a 4-byte Annex-B start code
                // at the buffer start — confirm for the encoder in use.
                if(ENCODING.equalsIgnoreCase("h264") && outData[4] == idrFrameType)
                {
                    int totalDataLength = dataLength + SPS.length + PPS.length;

                    frame.frameData = new byte[totalDataLength];

                    System.arraycopy(SPS, 0, frame.frameData, 0, SPS.length);
                    System.arraycopy(PPS, 0, frame.frameData, SPS.length, PPS.length);
                    System.arraycopy(outData, 0 , frame.frameData, SPS.length+PPS.length, dataLength);
                }
                else
                {
                    frame.frameData = new byte[dataLength];
                    System.arraycopy(outData, 0 , frame.frameData, 0, dataLength);
                }

                // for testing
                Log.d("EncodeDecode" , "Frame no :: " + frameID + " :: frameSize:: " + frame.frameData.length + " :: ");
                printByteArray(frame.frameData);

                // if encoding type is h264 and sps & pps is ready then, enqueueing the frame in the queue
                // if encoding type is h263 then, enqueueing the frame in the queue
                if( (ENCODING.equalsIgnoreCase("h264") && SPS != null && PPS != null && SPS.length != 0 && PPS.length != 0) || ENCODING.equalsIgnoreCase("h263") )
                {
                    Log.d("EncodeDecode", "enqueueing frame no: " + (frameID));

                    try
                    {
                        // Blocks when the queue is full (capacity 100).
                        queue.put(frame);
                    }
                    catch(InterruptedException e)
                    {
                        Log.e("EncodeDecode", "interrupted while waiting");
                        e.printStackTrace();
                    }
                    catch(NullPointerException e)
                    {
                        Log.e("EncodeDecode", "frame is null");
                        e.printStackTrace();
                    }
                    catch(IllegalArgumentException e)
                    {
                        Log.e("EncodeDecode", "problem inserting in the queue");
                        e.printStackTrace();
                    }

                    Log.d("EncodeDecode", "frame enqueued. queue size now: " + queue.size());

                    // Lazily create the decoder's SurfaceView the first time a
                    // frame is ready; its callback starts PlayerThread.
                    // NOTE(review): this touches the view hierarchy from the
                    // camera callback thread — confirm it runs on the UI thread.
                    if(firstTime)
                    {
                        Log.d("EncodeDecode", "adding a surface to layout for decoder");
                        SurfaceView sv = new SurfaceView(getApplicationContext());
                        handler = new Handler();
                        sv.getHolder().addCallback(MainActivity.this);
                        sv.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240));
                        ll.addView(sv,1);
                        MainActivity.this.setContentView(ll);
                        firstTime = false;
                    }
                }

                frameID++;
                mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            }
            else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED)
            {
                outputBuffers = mMediaCodec.getOutputBuffers();
                Log.e("EncodeDecode","output buffer of encoder : info changed");
            }
            else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED)
            {
                Log.e("EncodeDecode","output buffer of encoder : format changed");
            }
            else
            {
                Log.e("EncodeDecode", "unknown value of outputBufferIndex : " + outputBufferIndex);
                //printByteArray(data);
            }
        } while (outputBufferIndex >= 0);
    }

    /** SurfaceView hosting the camera preview; opens/configures the camera in its surface callbacks. */
    private class MySurfaceView extends SurfaceView implements SurfaceHolder.Callback  
    {
        SurfaceHolder holder;
        public MySurfaceView(Context context) {  
            super(context); 
            holder = this.getHolder();
            holder.addCallback(this); 
        }

        public MySurfaceView(Context context, AttributeSet attrs) {  
            super(context,attrs); 
            holder = this.getHolder();
            holder.addCallback(this); 
        }

        // NOTE(review): this path does NOT call setPreviewFormat(YV12) — only
        // surfaceChanged does. If frames were delivered from here the default
        // preview format (typically NV21) would reach the YV12 converter.
        public void surfaceCreated(SurfaceHolder holder) {  
            try
            {
                try
                {
                    if(mCamera == null)
                        mCamera = Camera.open();
                    // Rotates only the on-screen preview; preview-callback
                    // buffers remain in sensor orientation.
                    mCamera.setDisplayOrientation(90);
                    Log.d("EncodeDecode","Camera opened");
                }
                catch (Exception e)
                {
                    Log.d("EncodeDecode","Camera open failed");
                    e.printStackTrace();
                }

                Camera.Parameters p = mCamera.getParameters();

                if(ENCODING.equalsIgnoreCase("h264"))
                    p.setPreviewSize(320, 240);
                else if(ENCODING.equalsIgnoreCase("h263"))
                    p.setPreviewSize(352, 288);

                mCamera.setParameters(p);
                mCamera.setPreviewDisplay(holder);

                mCamera.setPreviewCallback(new PreviewCallback()
                {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera camera)
                    { 
                        Log.d("EncodeDecode", "onPreviewFrame, calling encode function");
                        encode(data);
                    }
                });
                mCamera.startPreview();
                mPreviewRunning = true;
            } 
            catch (IOException e) 
            {
                Log.e("EncodeDecode","surfaceCreated():: in setPreviewDisplay(holder) function");
                e.printStackTrace();
            }
            catch (NullPointerException e)
            {
                Log.e("EncodeDecode","surfaceCreated Nullpointer");
                e.printStackTrace();
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) 
        {  
            if (mPreviewRunning) 
            {
                mCamera.stopPreview();
                Log.e("EncodeDecode","preview stopped");
            }
            try 
            {
                if(mCamera == null)
                {
                    mCamera = Camera.open();
                    mCamera.setDisplayOrientation(90);
                }

                Camera.Parameters p = mCamera.getParameters();
                if(ENCODING.equalsIgnoreCase("h264"))
                    p.setPreviewSize(320, 240);
                else if(ENCODING.equalsIgnoreCase("h263"))
                    p.setPreviewSize(352, 288);

                // Request YV12 frames; encode() converts them to semi-planar.
                p.setPreviewFormat(ImageFormat.YV12);
                mCamera.setParameters(p);
                mCamera.setPreviewDisplay(holder);
                // NOTE(review): unlock()/reconnect() is normally used around
                // MediaRecorder hand-off; its purpose here is unclear — confirm.
                mCamera.unlock();
                mCamera.reconnect();
                mCamera.setPreviewCallback(new PreviewCallback()
                {
                    @Override
                    public void onPreviewFrame(byte[] data, Camera camera)
                    {
                        Log.d("EncodeDecode", "onPreviewFrame, calling encode function");
                        encode(data);
                    }
                });
                Log.d("EncodeDecode", "previewCallBack set");
                mCamera.startPreview();
                mPreviewRunning = true;
            }
            catch (Exception e)
            {
                Log.e("EncodeDecode","surface changed:set preview display failed");
                e.printStackTrace();
            }

        }

        public void surfaceDestroyed(SurfaceHolder holder) 
        {

        }  
    }


    // Callbacks below belong to the decoder's SurfaceView (added in encode()).
    @Override
    public void surfaceCreated(SurfaceHolder holder) 
    {
        Log.d("EncodeDecode", "mainActivity surfaceCreated");
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) 
    {
        Log.d("EncodeDecode", "mainActivity surfaceChanged.");
        if (mPlayer == null) 
        {
            // Start the decoder thread once the output surface is available.
            mPlayer = new PlayerThread(holder.getSurface());
            mPlayer.start();
            Log.d("EncodeDecode", "PlayerThread started");
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) 
    {
        if (mPlayer != null) 
        {
            mPlayer.interrupt();    
        }
    }

    /** Thread that drains encoded frames from the queue and decodes them onto the given Surface. */
    private class PlayerThread extends Thread 
    {
        //private MediaExtractor extractor;
        private MediaCodec decoder;
        private Surface surface;

        public PlayerThread(Surface surface) 
        {
            this.surface = surface;
        }

        @Override
        public void run() 
        {
            // Poll until the encoder has produced SPS/PPS (needed as csd-0/csd-1).
            while(SPS == null || PPS == null || SPS.length == 0 || PPS.length == 0)
            {
                try 
                {
                    Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps not ready yet");
                    sleep(1000);
                } catch (InterruptedException e) {
                    e.printStackTrace();

                }
            }

            Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps READY");

            if(ENCODING.equalsIgnoreCase("h264"))
            {
                decoder = MediaCodec.createDecoderByType("video/avc");
                MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
                // Codec-specific data: csd-0 = SPS, csd-1 = PPS.
                mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(SPS));
                mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(PPS));
                decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
            }
            else if(ENCODING.equalsIgnoreCase("h263"))
            { 
                decoder = MediaCodec.createDecoderByType("video/3gpp");
                MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/3gpp", 352, 288);
                decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
            }

            if (decoder == null) 
            {
                Log.e("DecodeActivity", "DECODER_THREAD:: Can't find video info!");
                return;
            }

            decoder.start();
            Log.d("EncodeDecode", "DECODER_THREAD:: decoder.start() called");

            ByteBuffer[] inputBuffers = decoder.getInputBuffers();
            ByteBuffer[] outputBuffers = decoder.getOutputBuffers();


            int i = 0;
            while(!Thread.interrupted())
            {
                Frame currentFrame = null;
                try 
                {
                    Log.d("EncodeDecode", "DECODER_THREAD:: calling queue.take(), if there is no frame in the queue it will wait");
                    currentFrame = queue.take();
                } 
                catch (InterruptedException e) 
                {
                    Log.e("EncodeDecode","DECODER_THREAD:: interrupted while PlayerThread was waiting for the next frame");
                    e.printStackTrace();
                }

                if(currentFrame == null)
                    Log.e("EncodeDecode","DECODER_THREAD:: null frame dequeued");
                else
                    Log.d("EncodeDecode","DECODER_THREAD:: " + currentFrame.id + " no frame dequeued");

                if(currentFrame != null && currentFrame.frameData != null && currentFrame.frameData.length != 0)
                {
                    Log.d("EncodeDecode", "DECODER_THREAD:: decoding frame no: " + i + " , dataLength = " + currentFrame.frameData.length);

                    // NOTE(review): busy-wait with a 1-microsecond timeout until
                    // an input buffer is free; burns CPU under load.
                    int inIndex = 0; 
                    while ((inIndex = decoder.dequeueInputBuffer(1)) < 0)
                        ;

                    if (inIndex >= 0) 
                    {
                        Log.d("EncodeDecode", "DECODER_THREAD:: sample size: " + currentFrame.frameData.length);

                        ByteBuffer buffer = inputBuffers[inIndex];
                        buffer.clear();
                        buffer.put(currentFrame.frameData);
                        decoder.queueInputBuffer(inIndex, 0, currentFrame.frameData.length, 0, 0);

                        BufferInfo info = new BufferInfo();
                        int outIndex = decoder.dequeueOutputBuffer(info, 100000);

                        switch (outIndex) 
                        {
                        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                            Log.e("EncodeDecode", "DECODER_THREAD:: INFO_OUTPUT_BUFFERS_CHANGED");
                            outputBuffers = decoder.getOutputBuffers();
                            break;
                        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                            Log.e("EncodeDecode", "DECODER_THREAD:: New format " + decoder.getOutputFormat());

                            break;
                        case MediaCodec.INFO_TRY_AGAIN_LATER:
                            Log.e("EncodeDecode", "DECODER_THREAD:: dequeueOutputBuffer timed out!");
                            break;
                        default:
                            // render=true releases the buffer to the Surface.
                            Log.d("EncodeDecode", "DECODER_THREAD:: decoded SUCCESSFULLY!!!");
                            ByteBuffer outbuffer = outputBuffers[outIndex];
                            decoder.releaseOutputBuffer(outIndex, true);
                            break;
                        }
                        i++;
                    }
                }
            }

            decoder.stop();
            decoder.release();

        }
    }
}

解决方案

When you rotate the image, it ends up with newWidth = oldHeight and newHeight = oldWidth, since the source isn't a square picture — yet you still configure the encoder for the original 320x240 size. So you have a choice: either crop and adjust the rotation loop accordingly, or configure the codec (and the display surface) with the rotated dimensions. Right now your frame's pitch (row stride) no longer matches the pitch expected by the encoder/SurfaceView or whatever component you use for display, which is what produces the distortion.

这篇关于媒体codeC视频流从摄像机方向错误和放大器;颜色的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆