Android Camera will not work. startPreview fails


Problem Description


I'm getting these errors from LogCat:

10-30 00:31:51.494: D/CameraHal(1205): CameraHal setOverlay/1/00000000/00000000
10-30 00:31:51.494: E/CameraHal(1205): Trying to set overlay, but overlay is null!, line:3472
10-30 00:31:51.494: W/CameraService(1205): Overlay create failed - retrying
...
10-30 00:31:52.526: E/CameraService(1205): Overlay Creation Failed!
...
10-30 00:31:52.588: E/AndroidRuntime(5040): FATAL EXCEPTION: main
10-30 00:31:52.588: E/AndroidRuntime(5040): java.lang.RuntimeException: startPreview failed
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.hardware.Camera.startPreview(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.matthewmitchell.nightcam.CameraSurfaceView.surfaceCreated(CameraSurfaceView.java:47)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.SurfaceView.updateWindow(SurfaceView.java:544)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.SurfaceView.dispatchDraw(SurfaceView.java:341)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1640)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.policy.impl.PhoneWindow$DecorView.draw(PhoneWindow.java:1876)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.draw(ViewRoot.java:1407)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.performTraversals(ViewRoot.java:1163)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.handleMessage(ViewRoot.java:1727)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.os.Handler.dispatchMessage(Handler.java:99)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.os.Looper.loop(Looper.java:123)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.app.ActivityThread.main(ActivityThread.java:4627)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at java.lang.reflect.Method.invokeNative(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at java.lang.reflect.Method.invoke(Method.java:521)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:868)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:626)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at dalvik.system.NativeStart.main(Native Method)

Here is the Activity class:

public class NightCamActivity extends Activity {
    private GLSurfaceView mGLView;
    CameraSurfaceView surface_view;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Create a GLSurfaceView instance and set it
        // as the ContentView for this Activity
        Debug.out("Welcome");
        surface_view = new CameraSurfaceView(this);
        mGLView = new MySurfaceView(this);
        setContentView(mGLView);
        addContentView(surface_view, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    }

    @Override
    protected void onPause() {
        super.onPause();
        // The following call pauses the rendering thread.
        // If your OpenGL application is memory intensive,
        // you should consider de-allocating objects that
        // consume significant memory here.
        mGLView.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // The following call resumes a paused rendering thread.
        // If you de-allocated graphic objects for onPause()
        // this is a good place to re-allocate them.
        mGLView.onResume();
    }
}

MySurfaceView class:

class MySurfaceView extends GLSurfaceView{

    public MySurfaceView(NightCamActivity context){
        super(context);
        // Create an OpenGL ES 2.0 context.
        Debug.out("Mysurfaceview welcome");
        setEGLContextClientVersion(2);
        // Set the Renderer for drawing on the GLSurfaceView
        MyRenderer renderer = new MyRenderer();
        renderer.takeContext(context);
        context.surface_view.renderer = renderer;
        setRenderer(renderer);
    }
}

CameraSurfaceView class:

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback  {

    private Camera camera;
    Camera.Size use_size;
    MyRenderer renderer;

    public CameraSurfaceView(Context context) {
        super(context);
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        Debug.out("Init CSV");
        camera = Camera.open();
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Debug.out("SC");
        try {
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            Debug.out("Could not set preview display for camera.");
        }
        camera.setPreviewCallback(this);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        try {
            if (camera != null) {
                camera.stopPreview();  
                camera.release();
            }
        } catch (Exception e) {
            Debug.out("Camera release failure.");
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        Camera.Parameters parameters = camera.getParameters();
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size optimalPreviewSize = getOptimalPreviewSize(supportedPreviewSizes, w, h);
        if (optimalPreviewSize != null) {
            parameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
            camera.setParameters(parameters);
            camera.startPreview();
        }
    }
    static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.1;
        final double MAX_DOWNSIZE = 1.5;

        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find a size that matches both aspect ratio and size
        for (Camera.Size size : sizes) {
            double ratio = (double) size.width / size.height;
            double downsize = (double) size.width / w;
            if (downsize > MAX_DOWNSIZE) {
                //if the preview is a lot larger than our display surface ignore it
                //reason - on some phones there is not enough heap available to show the larger preview sizes 
                continue;
            }
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Cannot find one that matches the aspect ratio; ignore that requirement
        // but keep the MAX_DOWNSIZE requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                double downsize = (double) size.width / w;
                if (downsize > MAX_DOWNSIZE) {
                    continue;
                }
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        //everything else failed, just take the closest match
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }

        return optimalSize;
    }

    public void onPreviewFrame(byte[] data, Camera arg1) {
        Debug.out("PREVIEW FRAME:");
        byte[] pixels = new byte[use_size.width * use_size.height * 3];
        decodeYUV420SP(pixels, data, use_size.width,  use_size.height); 
        renderer.bindCameraTexture(pixels, use_size.width,  use_size.height);
    }

    void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {  

        final int frameSize = width * height;  

        for (int j = 0, yp = 0; j < height; j++) {       
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;  
            for (int i = 0; i < width; i++, yp++) {  
                int y = (0xff & ((int) yuv420sp[yp])) - 16;  
                if (y < 0){  
                    y = 0; 
                }
                if ((i & 1) == 0) {  
                    v = (0xff & yuv420sp[uvp++]) - 128;  
                    u = (0xff & yuv420sp[uvp++]) - 128;  
                }  

                int y1192 = 1192 * y;  
                int r = (y1192 + 1634 * v);  
                int g = (y1192 - 833 * v - 400 * u);  
                int b = (y1192 + 2066 * u);  

                if (r < 0){
                    r = 0;               
                }else if (r > 262143){  
                    r = 262143; 
                }
                if (g < 0){                  
                    g = 0;               
                }else if (g > 262143){
                    g = 262143; 
                }
                if (b < 0){                  
                    b = 0;               
                }else if (b > 262143){
                    b = 262143; 
                }
                rgb[yp*3] = (byte) (b << 6);
                rgb[yp*3 + 1] = (byte) (b >> 2);
                rgb[yp*3 + 2] = (byte) (b >> 10);
            }  
        }  
    }  

}

Finally, the MyRenderer class:

public class MyRenderer implements GLSurfaceView.Renderer{
    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out("Hello init.");
        //Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out("The shaders could not be found.");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL program executables
        // get handles
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        camera_texture = null;
    }


    private void initShapes(){
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
             -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        }; 
        float texcoordf[] = {
            // X, Y, Z
            -1.0f,-1.0f,
            1.0f,-1.0f,
            -1.0f,1.0f,
            1.0f,1.0f,
        };

        // initialize vertex Buffer for vertices
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);    // add the coordinates to the FloatBuffer
        vertices.position(0);            // set the buffer to read the first coordinate
        // initialize vertex Buffer for texcoords 
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        texcoords = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        texcoords.put(texcoordf);    // add the coordinates to the FloatBuffer
        texcoords.position(0);            // set the buffer to read the first coordinate
    }

    private static String readFile(String path) throws IOException {
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\\A").next();
        }
        finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode){
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type); 
        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if(camera_texture == null){
            return;
        }
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the triangle data
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context ocontext) {
        Debug.out("Take context");
        context = ocontext;
    }

    void bindCameraTexture(byte[] data,int w,int h) {
        byte[] pixels = new byte[256*256*3];
        for(int x = 0;x < 256;x++){
            for(int y = 0;y < 256;y++){  // fixed: inner loop must iterate y, not x
                pixels[x*256+y] = data[x*w+y];
            }
        }
        if (camera_texture==null){
            camera_texture=new int[1];
        }else{
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }   
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGB, 256, 256, 0, GL10.GL_RGB, GL10.GL_UNSIGNED_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    }
}

Solution

I took your code and got the same error as you. However, on debugging it appears to me that the preview might be failing because the width and height dimensions are round the wrong way. It's not just a case of switching them around, though, as I think the orientation also plays a part.
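For example, a quick experiment (an untested guess on my part, reusing the variables from your own surfaceChanged()) would be to swap the arguments when the view is portrait, since supported preview sizes are normally reported landscape:

// Untested sketch: supported preview sizes are usually landscape
// (width > height), so in portrait pass the view dimensions swapped.
Camera.Size optimalPreviewSize = (w < h)
        ? getOptimalPreviewSize(supportedPreviewSizes, h, w)
        : getOptimalPreviewSize(supportedPreviewSizes, w, h);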

Anyway, I've substituted your CameraSurfaceView with my own (see below) and I think it works now. There is no exception, but the screen is completely bright green (I think this might be because I don't have the vertex.vsh or fragment.fsh files).

package stackOverflow.test;

import java.io.IOException;
import java.util.List;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;

public class CameraSurfaceView extends ViewGroup implements SurfaceHolder.Callback
{

private Size mPreviewSize;
private List<Size> mSupportedPreviewSizes;        
private Context mContext;
private SurfaceView mSurfaceView;
private SurfaceHolder mHolder;
private final String TAG = "CameraSurfaceView";
private Camera mCamera;
private List<String> mSupportedFlashModes;

public CameraSurfaceView(Context context)
{
    super(context);
    mContext = context;
    mCamera = Camera.open();        
    setCamera(mCamera);

    mSurfaceView = new SurfaceView(context);
    addView(mSurfaceView, 0);        
    mHolder = mSurfaceView.getHolder();
    mHolder.addCallback(this);
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    mHolder.setKeepScreenOn(true);
}

public CameraSurfaceView(Context context, AttributeSet attrs)
{
    super(context, attrs);
    mContext = context;            
}

public void setSupportedPreviewSizes(List<Size> supportedPreviewSizes)
{
    mSupportedPreviewSizes = supportedPreviewSizes;
}

public Size getPreviewSize()
{
    return mPreviewSize;
}

public void setCamera(Camera camera)
{
    mCamera = camera;
    if (mCamera != null)
    {
        mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();                
        mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();
        // Set the camera to Auto Flash mode.
        // (getSupportedFlashModes() can return null on devices without a flash.)
        if (mSupportedFlashModes != null && mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO))
        {
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);             
            mCamera.setParameters(parameters);
        }                   
    }
    requestLayout();
}

@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
    // Surface will be destroyed when we return, so stop the preview.
    if (mCamera != null)
    {
        mCamera.stopPreview();
    }
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
    // Now that the size is known, set up the camera parameters and begin
    // the preview.
    if (mCamera != null)
    {
        Camera.Parameters parameters = mCamera.getParameters();        
        Size previewSize = getPreviewSize();
        parameters.setPreviewSize(previewSize.width, previewSize.height);                

        mCamera.setParameters(parameters);
        mCamera.startPreview();
    }

}

@Override
public void surfaceCreated(SurfaceHolder holder)
{
    // The Surface has been created, acquire the camera and tell it where
    // to draw.
    try
    {
        if (mCamera != null)
        {
            mCamera.setPreviewDisplay(holder);
        }
    }
    catch (IOException exception)
    {
        Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
    }
}

@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
{        
    final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
    final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
    setMeasuredDimension(width, height);

    if (mSupportedPreviewSizes != null)
    {
        mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
    }
}

@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom)
{
    if (changed)
    {                            
        final View cameraView = getChildAt(0);          

        final int width = right - left;
        final int height = bottom - top;

        int previewWidth = width;
        int previewHeight = height;
        if (mPreviewSize != null)
        {
            Display display = ((WindowManager)mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();

            switch (display.getRotation())
            {
                case Surface.ROTATION_0:
                    previewWidth = mPreviewSize.height;
                    previewHeight = mPreviewSize.width;
                    mCamera.setDisplayOrientation(90);
                    break;
                case Surface.ROTATION_90:
                    previewWidth = mPreviewSize.width;
                    previewHeight = mPreviewSize.height;
                    break;
                case Surface.ROTATION_180:
                    previewWidth = mPreviewSize.height;
                    previewHeight = mPreviewSize.width;
                    break;
                case Surface.ROTATION_270:
                    previewWidth = mPreviewSize.width;
                    previewHeight = mPreviewSize.height;
                    mCamera.setDisplayOrientation(180);
                    break;
            }                                    
        }

        final int scaledChildHeight = previewHeight * width / previewWidth;

        cameraView.layout(0, height - scaledChildHeight, width, height);

    }
}


private Size getOptimalPreviewSize(List<Size> sizes, int width, int height)
{           
    Size optimalSize = null;                                

    final double ASPECT_TOLERANCE = 0.1;
    double targetRatio = (double) height / width;

    // Try to find a size match which suits the whole screen minus the menu on the left.
    for (Size size : sizes)
    {
        if (size.height != width) continue;
        double ratio = (double) size.width / size.height;
        if (ratio <= targetRatio + ASPECT_TOLERANCE && ratio >= targetRatio - ASPECT_TOLERANCE)
        {
            optimalSize = size;
        }               
    }

    // If we cannot find the one that matches the aspect ratio, ignore the requirement.
    if (optimalSize == null)
    {
        // TODO : Backup in case we don't get a size.
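        // Untested fallback sketch: take the supported size whose height is
        // closest to the view width, so surfaceChanged() never sees a null
        // preview size.
        int minDiff = Integer.MAX_VALUE;
        for (Size size : sizes)
        {
            int diff = Math.abs(size.height - width);
            if (diff < minDiff)
            {
                optimalSize = size;
                minDiff = diff;
            }
        }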
    }

    return optimalSize;
}

public void previewCamera()
{        
    try 
    {           
        mCamera.setPreviewDisplay(mHolder);         
        mCamera.startPreview();                 
    }
    catch(Exception e)
    {
        Log.d(TAG, "Cannot start preview.", e);    
    }
}


/*public void onPreviewFrame(byte[] data, Camera arg1) { 
    Log.d("CameraSurfaceView", "PREVIEW FRAME:"); 
    byte[] pixels = new byte[use_size.width * use_size.height * 3]; ; 
    decodeYUV420SP(pixels, data, use_size.width,  use_size.height);  
    renderer.bindCameraTexture(pixels, use_size.width,  use_size.height); 
}*/ 

void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {   

    final int frameSize = width * height;   

    for (int j = 0, yp = 0; j < height; j++) {        
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;   
        for (int i = 0; i < width; i++, yp++) {   
            int y = (0xff & ((int) yuv420sp[yp])) - 16;   
            if (y < 0){   
                y = 0;  
            } 
            if ((i & 1) == 0) {   
                v = (0xff & yuv420sp[uvp++]) - 128;   
                u = (0xff & yuv420sp[uvp++]) - 128;   
            }   

            int y1192 = 1192 * y;   
            int r = (y1192 + 1634 * v);   
            int g = (y1192 - 833 * v - 400 * u);   
            int b = (y1192 + 2066 * u);   

            if (r < 0){ 
                r = 0;                
            }else if (r > 262143){   
                r = 262143;  
            } 
            if (g < 0){                   
                g = 0;                
            }else if (g > 262143){ 
                g = 262143;  
            } 
            if (b < 0){                   
                b = 0;                
            }else if (b > 262143){ 
                b = 262143;  
            } 
            rgb[yp*3] = (byte) (b << 6); 
            rgb[yp*3 + 1] = (byte) (b >> 2); 
            rgb[yp*3 + 2] = (byte) (b >> 10); 
        }   
    }   
  }   
}

You'll notice I commented out your onPreviewFrame() method, just to get it running, and also the line context.surface_view.renderer = renderer.
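If you want the frames back once the preview itself is running, a minimal sketch (assuming your decodeYUV420SP() and renderer wiring are restored) would be to register the callback after startPreview(), for example inside previewCamera():

// Untested sketch: ask for frames only after startPreview() has succeeded.
mCamera.setPreviewCallback(new Camera.PreviewCallback()
{
    public void onPreviewFrame(byte[] data, Camera camera)
    {
        Size size = camera.getParameters().getPreviewSize();
        byte[] pixels = new byte[size.width * size.height * 3];
        decodeYUV420SP(pixels, data, size.width, size.height);
        // hand pixels to your renderer here
    }
});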

I'm not familiar with the OpenGL library but perhaps this is enough to get you going again.
