Android 相机将无法使用.开始预览失败 [英] Android Camera will not work. startPreview fails

查看:75
本文介绍了Android 相机将无法使用.开始预览失败的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我从 LogCat 收到这些错误:

10-30 00:31:51.494: D/CameraHal(1205): CameraHal setOverlay/1/00000000/0000000010-30 00:31:51.494: E/CameraHal(1205): 试图设置叠加层,但叠加层为空!,行:347210-30 00:31:51.494:W/CameraService(1205):叠加创建失败-重试...10-30 00:31:52.526:E/CameraService(1205):叠加创建失败!...10-30 00:31:52.588:E/AndroidRuntime(5040):致命异常:主要10-30 00:31:52.588: E/AndroidRuntime(5040): java.lang.RuntimeException: startPreview 失败10-30 00:31:52.588:E/AndroidRuntime(5040):在 android.hardware.Camera.startPreview(本地方法)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 com.matthewmitchell.nightcam.CameraSurfaceView.surfaceCreated(CameraSurfaceView.java:47)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.SurfaceView.updateWindow(SurfaceView.java:544)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.SurfaceView.dispatchDraw(SurfaceView.java:341)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewGroup.drawChild(ViewGroup.java:1638)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.View.draw(View.java:6743)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.widget.FrameLayout.draw(FrameLayout.java:352)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewGroup.drawChild(ViewGroup.java:1640)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewGroup.drawChild(ViewGroup.java:1638)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.View.draw(View.java:6743)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.widget.FrameLayout.draw(FrameLayout.java:352)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 com.android.internal.policy.impl.PhoneWindow$DecorView.draw(PhoneWindow.java:1876)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 
android.view.ViewRoot.draw(ViewRoot.java:1407)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewRoot.performTraversals(ViewRoot.java:1163)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.view.ViewRoot.handleMessage(ViewRoot.java:1727)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.os.Handler.dispatchMessage(Handler.java:99)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.os.Looper.loop(Looper.java:123)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 android.app.ActivityThread.main(ActivityThread.java:4627)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 java.lang.reflect.Method.invokeNative(Native Method)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 java.lang.reflect.Method.invoke(Method.java:521)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:868)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 com.android.internal.os.ZygoteInit.main(ZygoteInit.java:626)10-30 00:31:52.588: E/AndroidRuntime(5040): 在 dalvik.system.NativeStart.main(Native Method)

这是活动类:

公共类 NightCamActivity 扩展 Activity {私人 GLSurfaceView mGLView;CameraSurfaceView 表面视图;@覆盖public void onCreate(Bundle savedInstanceState) {super.onCreate(savedInstanceState);//创建一个 GLSurfaceView 实例并设置它//作为此活动的 ContentViewDebug.out("欢迎");surface_view = new CameraSurfaceView(this);mGLView = new MySurfaceView(this);设置内容视图(mGLView);addContentView(surface_view, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));}@覆盖受保护的无效 onPause() {super.onPause();//以下调用暂停渲染线程.//如果您的 OpenGL 应用程序是内存密集型的,//你应该考虑取消分配那些//这里消耗大量内存.mGLView.onPause();}@覆盖受保护的无效 onResume() {super.onResume();//以下调用恢复暂停的渲染线程.//如果你为 onPause() 取消分配图形对象//这是重新分配它们的好地方.mGLView.onResume();}}

MySurfaceView 类:

class MySurfaceView 扩展了 GLSurfaceView{公共 MySurfaceView(NightCamActivity 上下文){超级(上下文);//创建一个 OpenGL ES 2.0 上下文.Debug.out("欢迎使用Mysurfaceview");setEGLContextClientVersion(2);//设置渲染器以在 GLSurfaceView 上绘图MyRenderer 渲染器 = new MyRenderer();renderer.takeContext(context);context.surface_view.renderer = 渲染器;设置渲染器(渲染器);}}

CameraSurfaceView 类:

public class CameraSurfaceView extends SurfaceView 实现 SurfaceHolder.Callback, PreviewCallback {私人相机;Camera.Size use_size;MyRenderer 渲染器;公共相机表面视图(上下文上下文){超级(上下文);SurfaceHolder 持有人 = getHolder();holder.addCallback(this);Debug.out("初始化 CSV");相机 = Camera.open();}公共无效表面创建(SurfaceHolder 持有人){Debug.out("SC");尝试 {camera.setPreviewDisplay(holder);} catch (IOException e) {Debug.out("无法为相机设置预览显示.");}camera.setPreviewCallback(this);}公共无效surfaceDestroyed(SurfaceHolder持有人){//返回时Surface会被销毁,所以停止预览.//因为CameraDevice对象不是共享资源,所以很//当活动暂停时释放它很重要.尝试 {如果(相机!= null){相机.停止预览();相机.释放();}} 捕获(异常 e){Debug.out("相机释放失败.");}}public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {Camera.Parameters 参数 = camera.getParameters();列表<Camera.Size>supportedPreviewSizes = parameters.getSupportedPreviewSizes();Camera.Size optimizationPreviewSize = getOptimalPreviewSize(supportedPreviewSizes, w, h);if (optimalPreviewSize != null) {parameters.setPreviewSize(optimalPreviewSize.width,optimalPreviewSize.height);相机.setParameters(参数);相机.开始预览();}}静态 Camera.Size getOptimalPreviewSize(Listsizes, int w, int h) {最终双 ASPECT_TOLERANCE = 0.1;最后双 MAX_DOWNSIZE = 1.5;double targetRatio = (double) w/h;if (sizes == null) 返回 null;Camera.Size optimizationSize = null;double minDiff = Double.MAX_VALUE;int targetHeight = h;//尝试找到一个尺寸匹配纵横比和尺寸for (Camera.Size size : 尺寸) {双倍比率 = (double) size.width/size.height;double downsize = (double) size.width/w;如果(缩小> MAX_DOWNSIZE){//如果预览比我们的显示面大很多,忽略它//原因 - 在某些手机上没有足够的可用堆来显示更大的预览尺寸继续;}如果 (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) 继续;if (Math.abs(size.height - targetHeight)  262143){g = 262143;}如果 (b <0){乙 = 0;}否则如果 (b > 262143){b = 262143;}rgb[yp*3] = (byte) (b << 6);rgb[yp*3 + 1] = (byte) (b > > 2);rgb[yp*3 + 2] = (byte) (b > > 10);}}}}

最后是 MyRender 类:

公共类 MyRenderer 实现 GLSurfaceView.Renderer{私有 FloatBuffer 顶点;私有 FloatBuffer texcoords;私人国际mProgram;私人 int maPositionHandle;私人 int gvTexCoordHandle;私人 int gvSamplerHandle;私有静态上下文上下文;int[] 相机纹理;公共无效 onSurfaceCreated(GL10 未使用,EGLConfig 配置){initShapes();GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);Debug.out("Hello init.");//着色器int vertexShader = 0;int fragmentShader = 0;尝试 {vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));} catch (IOException e) {Debug.out("找不到着色器.");e.printStackTrace();}mProgram = GLES20.glCreateProgram();//创建空的 OpenGL 程序GLES20.glAttachShader(mProgram, vertexShader);//将顶点着色器添加到程序中GLES20.glAttachShader(mProgram, fragmentShader);//将片段着色器添加到程序中GLES20.glLinkProgram(mProgram);//创建 OpenGL 程序可执行文件//获取句柄maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);相机纹理 = 空;}私人无效initShapes(){浮动三角形坐标[] = {//X, Y, Z-1.0f, -1.0f, 0.0f,1.0f, -1.0f, 0.0f,-1.0f, 1.0f, 0.0f,1.0f, 1.0f, 0.0f,};浮动 texcoordf[] = {//X, Y, Z-1.0f,-1.0f,1.0f,-1.0f,-1.0f,1.0f,1.0f,1.0f,};//为顶点初始化顶点缓冲区ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4);vbb.order(ByteOrder.nativeOrder());//使用设备硬件的原生字节序顶点 = vbb.asFloatBuffer();//从 ByteBuffer 创建一个浮点缓冲区vertices.put(triangleCoords);//将坐标添加到 FloatBuffer顶点位置(0);//设置缓冲区读取第一个坐标//为 texcoords 初始化顶点缓冲区vbb = ByteBuffer.allocateDirect(texcoordf.length * 4);vbb.order(ByteOrder.nativeOrder());//使用设备硬件的原生字节序texcoords = vbb.asFloatBuffer();//从 ByteBuffer 创建一个浮点缓冲区texcoords.put(texcoordf);//将坐标添加到 FloatBuffertexcoords.position(0);//设置缓冲区读取第一个坐标}私有静态字符串 readFile(String path) 抛出 IOException {AssetManager assetManager = context.getAssets();InputStream 流 = assetManager.open(path);尝试 {return new Scanner(stream).useDelimiter("\A").next();}最后 
{流.关闭();}}private int loadShader(int type, String shaderCode){//创建一个顶点着色器类型 (GLES20.GL_VERTEX_SHADER)//或片段着色器类型 (GLES20.GL_FRAGMENT_SHADER)int shader = GLES20.glCreateShader(类型);//将源代码添加到着色器并编译它GLES20.glShaderSource(shader, shaderCode);GLES20.glCompileShader(着色器);返回着色器;}公共无效 onDrawFrame(GL10 未使用){GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);if(camera_texture == null){返回;}//添加程序到OpenGL环境GLES20.glUseProgram(mProgram);//准备三角形数据GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);GLES20.glEnableVertexAttribArray(maPositionHandle);GLES20.glEnableVertexAttribArray(gvTexCoordHandle);GLES20.glActiveTexture(GLES20.GL_TEXTURE0);GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,camera_texture[0]);GLES20.glUniform1i(gvSamplerHandle, 0);//绘制三角形GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);GLES20.glDisableVertexAttribArray(maPositionHandle);GLES20.glDisableVertexAttribArray(gvTexCoordHandle);}public void onSurfaceChanged(GL10 未使用,整数宽度,整数高度){GLES20.glViewport(0, 0, width, height);}公共无效takeContext(上下文ocontext){Debug.out("获取上下文");上下文 = ocontext;}void bindCameraTexture(byte[] data,int w,int h) {字节[]像素=新字节[256*256*3];for(int x = 0;x <256;x++){for(int y = 0;x <256;x++){像素[x*256+y] = 数据[x*w+y];}}如果(相机纹理==空){camera_texture=new int[1];}别的{GLES20.glDeleteTextures(1, camera_texture, 0);}GLES20.glGenTextures(1, camera_texture, 0);int tex = camera_texture[0];GLES20.glBindTexture(GL10.GL_TEXTURE_2D, tex);GLES20.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGB, 256, 256, 0, GL10.GL_RGB, GL10.GL_UNSIGNED_BYTE, ByteBuffer.wrap(pixels));GLES20.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);}}

解决方案

我拿了你的代码,遇到和你一样的错误.但是,在调试时,在我看来,预览可能会失败,因为宽度和高度尺寸似乎是错误的,但这不仅仅是切换它们的情况,因为我认为方向也起作用.

无论如何,我已经用我自己的(见下文)替换了你的 CameraSurfaceView,我认为它现在可以工作了.没有抛出异常,但屏幕完全是亮绿色(我想这可能是因为我没有 vertex.vsh 或 fragment.vsh 文件).

package stackOverflow.test;导入 java.io.IOException;导入 java.util.List;导入 android.content.Context;导入 android.hardware.Camera;导入 android.hardware.Camera.Size;导入 android.util.AttributeSet;导入 android.util.Log;导入 android.view.Display;导入 android.view.Surface;导入 android.view.SurfaceHolder;导入 android.view.SurfaceView;导入 android.view.View;导入 android.view.ViewGroup;导入 android.view.WindowManager;公共类 CameraSurfaceView 扩展了 ViewGroup 实现 SurfaceHolder.Callback{私人大小 mPreviewSize;私人列表<尺寸>mSupportedPreviewSizes;私有上下文 mContext;私人 SurfaceView mSurfaceView;私人 SurfaceHolder mHolder;私人最终字符串标签 = "CameraSurfaceView";私人相机 mCamera;私人列表<字符串>mSupportedFlashModes;公共 CameraSurfaceView(上下文上下文){超级(上下文);mContext = 上下文;mCamera = Camera.open();设置相机(mCamera);mSurfaceView = new SurfaceView(context);添加视图(mSurfaceView,0);mHolder = mSurfaceView.getHolder();mHolder.addCallback(this);mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);mHolder.setKeepScreenOn(true);}public CameraSurfaceView(上下文上下文,AttributeSet attrs){超级(上下文,属性);mContext = 上下文;}public void setSupportedPreviewSizes(List supportedPreviewSizes){mSupportedPreviewSizes = supportedPreviewSizes;}公共尺寸 getPreviewSize(){返回 mPreviewSize;}public void setCamera(相机相机){mCamera = 相机;如果(mCamera != null){mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();//将相机设置为自动闪光模式.if (mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO)){Camera.Parameters 参数 = mCamera.getParameters();parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);mCamera.setParameters(参数);}}请求布局();}@覆盖public void surfaceDestroyed(SurfaceHolder holder){//返回时Surface会被销毁,所以停止预览.如果(mCamera != null){mCamera.stopPreview();}}@覆盖public void SurfaceChanged(SurfaceHolder 持有人,int 格式,int 宽度,int 高度){//现在尺寸已知,设置相机参数并开始//预览.如果(mCamera != null){Camera.Parameters 参数 = mCamera.getParameters();尺寸预览尺寸 = getPreviewSize();parameters.setPreviewSize(previewSize.width, 
previewSize.height);mCamera.setParameters(参数);mCamera.startPreview();}}@覆盖public void surfaceCreated(SurfaceHolder holder){//Surface 已经创建,获取相机并告诉它在哪里//绘制.尝试{如果(mCamera != null){mCamera.setPreviewDisplay(holder);}}捕获(IOException 异常){Log.e(TAG, "由 setPreviewDisplay() 引起的 IOException", 异常);}}@覆盖protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec){final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);setMeasuredDimension(宽度,高度);如果(mSupportedPreviewSizes != null){mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);}}@覆盖protected void onLayout(boolean 改变,int left,int top,int right,int bottom){如果(改变){最终视图 cameraView = getChildAt(0);最终 int 宽度 = 右 - 左;最终 int 高度 = 底部 - 顶部;int previewWidth = 宽度;int previewHeight = 高度;如果(mPreviewSize != null){Display display = ((WindowManager)mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();开关(display.getRotation()){案例 Surface.ROTATION_0:previewWidth = mPreviewSize.height;previewHeight = mPreviewSize.width;mCamera.setDisplayOrientation(90);休息;案例 Surface.ROTATION_90:previewWidth = mPreviewSize.width;previewHeight = mPreviewSize.height;休息;案例 Surface.ROTATION_180:previewWidth = mPreviewSize.height;previewHeight = mPreviewSize.width;休息;案例 Surface.ROTATION_270:previewWidth = mPreviewSize.width;previewHeight = mPreviewSize.height;mCamera.setDisplayOrientation(180);休息;}}final int scaledChildHeight = previewHeight * width/previewWidth;cameraView.layout(0, height - scaledChildHeight, width, height);}}私人尺寸 getOptimalPreviewSize(列表<尺寸>尺寸,整数宽度,整数高度){尺寸优化尺寸 = null;最终双 ASPECT_TOLERANCE = 0.1;双目标比率=(双)高度/宽度;//尝试找到适合整个屏幕减去左侧菜单的大小匹配.for (尺码 : 尺码){if (size.height != width) 继续;双倍比率 = (double) size.width/size.height;if (ratio <= targetRatio + ASPECT_TOLERANCE && ratio >= targetRatio - ASPECT_TOLERANCE){最佳尺寸 = 尺寸;}}//如果我们找不到与纵横比匹配的那个,则忽略该要求.if (optimalSize == null){//TODO : 备份以防我们没有得到大小.}返回最优大小;}public 
void previewCamera(){尝试{mCamera.setPreviewDisplay(mHolder);mCamera.startPreview();}捕获(例外 e){Log.d(TAG, "无法开始预览.", e);}}/*public void onPreviewFrame(byte[] data, Camera arg1) {Log.d("CameraSurfaceView", "预览画面:");字节[]像素=新字节[use_size.width * use_size.height * 3];;decodeYUV420SP(像素,数据,use_size.width,use_size.height);renderer.bindCameraTexture(像素,use_size.width,use_size.height);}*/void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {最终 int frameSize = 宽度 * 高度;for (int j = 0, yp = 0; j <高度; j++) {int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;for (int i = 0; i < width; i++, yp++) {int y = (0xff & ((int) yuv420sp[yp])) - 16;如果(y <0){y = 0;}如果 ((i & 1) == 0) {v = (0xff & yuv420sp[uvp++]) - 128;u = (0xff & yuv420sp[uvp++]) - 128;}int y1192 = 1192 * y;int r = (y1192 + 1634 * v);int g = (y1192 - 833 * v - 400 * u);int b = (y1192 + 2066 * u);如果 (r <0){r = 0;}否则如果(r> 262143){r = 262143;}如果 (g <0){克 = 0;}否则如果 (g > 262143){g = 262143;}如果 (b <0){乙 = 0;}否则如果 (b > 262143){b = 262143;}rgb[yp*3] = (byte) (b << 6);rgb[yp*3 + 1] = (byte) (b > > 2);rgb[yp*3 + 2] = (byte) (b > > 10);}}}}

你会注意到我注释掉了你的 onPreviewFrame() 方法只是为了让它运行,还有行 context.surface_view.renderer = renderer.

我不熟悉 OpenGL 库,但也许这足以让您重新开始.

I'm getting these errors from LogCat:

10-30 00:31:51.494: D/CameraHal(1205): CameraHal setOverlay/1/00000000/00000000
10-30 00:31:51.494: E/CameraHal(1205): Trying to set overlay, but overlay is null!, line:3472
10-30 00:31:51.494: W/CameraService(1205): Overlay create failed - retrying
...
10-30 00:31:52.526: E/CameraService(1205): Overlay Creation Failed!
...
10-30 00:31:52.588: E/AndroidRuntime(5040): FATAL EXCEPTION: main
10-30 00:31:52.588: E/AndroidRuntime(5040): java.lang.RuntimeException: startPreview failed
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.hardware.Camera.startPreview(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.matthewmitchell.nightcam.CameraSurfaceView.surfaceCreated(CameraSurfaceView.java:47)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.SurfaceView.updateWindow(SurfaceView.java:544)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.SurfaceView.dispatchDraw(SurfaceView.java:341)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1640)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.policy.impl.PhoneWindow$DecorView.draw(PhoneWindow.java:1876)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.draw(ViewRoot.java:1407)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.performTraversals(ViewRoot.java:1163)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.handleMessage(ViewRoot.java:1727)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.os.Handler.dispatchMessage(Handler.java:99)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.os.Looper.loop(Looper.java:123)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.app.ActivityThread.main(ActivityThread.java:4627)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at java.lang.reflect.Method.invokeNative(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at java.lang.reflect.Method.invoke(Method.java:521)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:868)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:626)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at dalvik.system.NativeStart.main(Native Method)

Here is the Activity class:

public class NightCamActivity extends Activity {
    // GL view that renders the processed camera frames.
    private GLSurfaceView glView;
    // Package-visible: MySurfaceView wires its renderer into this view.
    CameraSurfaceView surface_view;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        Debug.out("Welcome");
        // Build the camera preview surface first, then the GL surface that
        // consumes its frames, and stack both views in the window.
        surface_view = new CameraSurfaceView(this);
        glView = new MySurfaceView(this);
        setContentView(glView);
        addContentView(surface_view,
                new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Pause the GL rendering thread while the activity is not visible.
        // If this app becomes memory intensive, heavy objects could be
        // de-allocated here as well.
        glView.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Resume the paused GL rendering thread (and re-allocate anything
        // released in onPause()).
        glView.onResume();
    }
}

MySurfaceView class:

class MySurfaceView extends GLSurfaceView {

    public MySurfaceView(NightCamActivity context) {
        super(context);
        Debug.out("Mysurfaceview welcome");
        // Request an OpenGL ES 2.0 rendering context.
        setEGLContextClientVersion(2);
        // Create the renderer, hand it the activity context (for asset access),
        // and share it with the camera preview view so incoming frames can be
        // uploaded as textures. Then install it on this GLSurfaceView.
        final MyRenderer glRenderer = new MyRenderer();
        glRenderer.takeContext(context);
        context.surface_view.renderer = glRenderer;
        setRenderer(glRenderer);
    }
}

CameraSurfaceView class:

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback  {

    // Camera opened in the constructor; released in surfaceDestroyed().
    private Camera camera;
    // Preview size actually configured on the camera; read by onPreviewFrame().
    Camera.Size use_size;
    // Renderer that receives decoded RGB frames; assigned by MySurfaceView.
    MyRenderer renderer;

    public CameraSurfaceView(Context context) {
        super(context);
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        Debug.out("Init CSV");
        camera = Camera.open();
    }

    // Surface exists: point the camera preview at it and register for frames.
    // startPreview() itself is deferred to surfaceChanged(), once a preview
    // size has been negotiated.
    public void surfaceCreated(SurfaceHolder holder) {
        Debug.out("SC");
        try {
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            Debug.out("Could not set preview display for camera.");
        }
        camera.setPreviewCallback(this);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        try {
            if (camera != null) {
                camera.setPreviewCallback(null); // no callbacks after release
                camera.stopPreview();
                camera.release();
                camera = null; // guard against double release
            }
        } catch (Exception e) {
            Debug.out("Camera release failure.");
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        Camera.Parameters parameters = camera.getParameters();
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size optimalPreviewSize = getOptimalPreviewSize(supportedPreviewSizes, w, h);
        if (optimalPreviewSize != null) {
            parameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
            camera.setParameters(parameters);
            // Bug fix: remember the configured size — onPreviewFrame() needs it
            // to size and decode the YUV buffer. It was previously never
            // assigned, causing an NPE on the first preview frame.
            use_size = optimalPreviewSize;
            camera.startPreview();
        }
    }

    /**
     * Picks the supported preview size closest in height to the surface,
     * preferring sizes that match the surface's aspect ratio (within
     * ASPECT_TOLERANCE) and are no more than MAX_DOWNSIZE wider than the
     * surface. Progressively relaxes the aspect and then the downsize
     * requirement; returns null only when {@code sizes} is null.
     */
    static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.1;
        final double MAX_DOWNSIZE = 1.5;

        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Pass 1: match both aspect ratio and size.
        for (Camera.Size size : sizes) {
            double ratio = (double) size.width / size.height;
            double downsize = (double) size.width / w;
            if (downsize > MAX_DOWNSIZE) {
                // If the preview is a lot larger than our display surface ignore it;
                // on some phones there is not enough heap available to show the
                // larger preview sizes.
                continue;
            }
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Pass 2: no aspect match found — drop that requirement but keep the
        // max-downsize requirement.
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                double downsize = (double) size.width / w;
                if (downsize > MAX_DOWNSIZE) {
                    continue;
                }
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }

        // Pass 3: everything else failed, just take the closest height match.
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }

        return optimalSize;
    }

    public void onPreviewFrame(byte[] data, Camera arg1) {
        Debug.out("PREVIEW FRAME:");
        if (use_size == null || renderer == null) {
            // Preview size not negotiated yet, or the renderer has not been
            // wired up by MySurfaceView — nothing useful to do with the frame.
            return;
        }
        byte[] pixels = new byte[use_size.width * use_size.height * 3];
        decodeYUV420SP(pixels, data, use_size.width, use_size.height);
        renderer.bindCameraTexture(pixels, use_size.width, use_size.height);
    }

    /**
     * Converts an NV21 (YUV420SP) camera frame into packed 3-byte-per-pixel
     * RGB. Uses the standard integer fixed-point conversion; intermediate
     * channel values are scaled by 1024, hence the {@code >> 10} when writing
     * the output bytes.
     *
     * @param rgb       output buffer, at least width*height*3 bytes
     * @param yuv420sp  NV21 input frame, width*height*3/2 bytes
     */
    void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {

        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
            // One interleaved VU row is shared by each pair of luma rows.
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0) {
                    y = 0;
                }
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }

                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);

                // Clamp each channel to the 18-bit fixed-point range.
                if (r < 0) {
                    r = 0;
                } else if (r > 262143) {
                    r = 262143;
                }
                if (g < 0) {
                    g = 0;
                } else if (g > 262143) {
                    g = 262143;
                }
                if (b < 0) {
                    b = 0;
                } else if (b > 262143) {
                    b = 262143;
                }

                // Bug fix: the original wrote the blue value into all three
                // output channels (with inconsistent shifts: b<<6, b>>2,
                // b>>10), discarding the computed r and g. Write R, G, B.
                rgb[yp * 3] = (byte) (r >> 10);
                rgb[yp * 3 + 1] = (byte) (g >> 10);
                rgb[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }

}

Finally the MyRender class:

public class MyRenderer implements GLSurfaceView.Renderer{
    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out("Hello init.");
        //Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out("The shaders could not be found.");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL program executables
        // get handles
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        camera_texture = null;
    }


    private void initShapes(){
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
             -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        }; 
        float texcoordf[] = {
            // X, Y, Z
            -1.0f,-1.0f,
            1.0f,-1.0f,
            -1.0f,1.0f,
            1.0f,1.0f,
        };

        // initialize vertex Buffer for vertices
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);    // add the coordinates to the FloatBuffer
        vertices.position(0);            // set the buffer to read the first coordinate
        // initialize vertex Buffer for texcoords 
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        texcoords = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        texcoords.put(texcoordf);    // add the coordinates to the FloatBuffer
        texcoords.position(0);            // set the buffer to read the first coordinate
    }

    private static String readFile(String path) throws IOException {
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\A").next();
        }
        finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode){
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type); 
        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if(camera_texture == null){
            return;
        }
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the triangle data
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context ocontext) {
        Debug.out("Take context");
        context = ocontext;
    }

    void bindCameraTexture(byte[] data,int w,int h) {
        byte[] pixels = new byte[256*256*3];
        for(int x = 0;x < 256;x++){
            for(int y = 0;x < 256;x++){
                pixels[x*256+y] = data[x*w+y];
            }
        }
        if (camera_texture==null){
            camera_texture=new int[1];
        }else{
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }   
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGB, 256, 256, 0, GL10.GL_RGB, GL10.GL_UNSIGNED_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    }
}

解决方案

I took your code and got the same error as you. However, on debugging it appears to me that the preview might be failing because it seemed like the width and height dimensions were round the wrong way but its not just a case of switching them around as I think the orientation also plays a part.

Anyway, I've substituted your CameraSurfaceView with my own (see below) and I think it works now. There is no exception but the screen is completely bright green (I think this might be because I don't have the vertex.vsh or the fragment.vsh files).

package stackOverflow.test;

import java.io.IOException;
import java.util.List;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;

/**
 * A ViewGroup hosting a SurfaceView that displays the camera preview.
 * Handles surface lifecycle callbacks, picks an optimal preview size,
 * and lays the preview out according to the display rotation.
 */
public class CameraSurfaceView extends ViewGroup implements SurfaceHolder.Callback
{

    // Preview size chosen in onMeasure(); null until the first measure pass.
    private Size mPreviewSize;
    private List<Size> mSupportedPreviewSizes;
    private Context mContext;
    private SurfaceView mSurfaceView;
    private SurfaceHolder mHolder;
    private final String TAG = "CameraSurfaceView";
    private Camera mCamera;
    // May be null: Camera.Parameters.getSupportedFlashModes() returns null
    // on devices without a flash.
    private List<String> mSupportedFlashModes;

    /** Opens the default camera and wires up the preview surface. */
    public CameraSurfaceView(Context context)
    {
        super(context);
        mContext = context;
        mCamera = Camera.open();
        setCamera(mCamera);

        mSurfaceView = new SurfaceView(context);
        addView(mSurfaceView, 0);
        mHolder = mSurfaceView.getHolder();
        mHolder.addCallback(this);
        // Required on pre-3.0 devices so the camera can push frames directly.
        mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        mHolder.setKeepScreenOn(true);
    }

    /** XML-inflation constructor; the camera must be supplied via setCamera(). */
    public CameraSurfaceView(Context context, AttributeSet attrs)
    {
        super(context, attrs);
        mContext = context;
    }

    public void setSupportedPreviewSizes(List<Size> supportedPreviewSizes)
    {
        mSupportedPreviewSizes = supportedPreviewSizes;
    }

    /** Returns the preview size chosen in onMeasure(), or null if not yet measured. */
    public Size getPreviewSize()
    {
        return mPreviewSize;
    }

    /**
     * Associates a camera with this view, caches its supported preview sizes,
     * and enables auto-flash when the device supports it.
     */
    public void setCamera(Camera camera)
    {
        mCamera = camera;
        if (mCamera != null)
        {
            mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();
            mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();
            // BUG FIX: getSupportedFlashModes() returns null on devices with no
            // flash; guard before calling contains() to avoid an NPE.
            if (mSupportedFlashModes != null
                    && mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO))
            {
                Camera.Parameters parameters = mCamera.getParameters();
                parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);
                mCamera.setParameters(parameters);
            }
        }
        requestLayout();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder)
    {
        // Surface will be destroyed when we return, so stop the preview.
        if (mCamera != null)
        {
            mCamera.stopPreview();
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
    {
        // Now that the size is known, set up the camera parameters and begin
        // the preview.
        if (mCamera != null)
        {
            Camera.Parameters parameters = mCamera.getParameters();
            Size previewSize = getPreviewSize();
            // BUG FIX: previewSize can be null if onMeasure() has not run or
            // no optimal size was found; only override the preview size when
            // one has actually been chosen.
            if (previewSize != null)
            {
                parameters.setPreviewSize(previewSize.width, previewSize.height);
            }

            mCamera.setParameters(parameters);
            mCamera.startPreview();
        }

    }

    @Override
    public void surfaceCreated(SurfaceHolder holder)
    {
        // The Surface has been created, acquire the camera and tell it where
        // to draw.
        try
        {
            if (mCamera != null)
            {
                mCamera.setPreviewDisplay(holder);
            }
        }
        catch (IOException exception)
        {
            Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
    {
        final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
        final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
        setMeasuredDimension(width, height);

        if (mSupportedPreviewSizes != null)
        {
            mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
        }
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom)
    {
        if (changed)
        {
            final View cameraView = getChildAt(0);

            final int width = right - left;
            final int height = bottom - top;

            int previewWidth = width;
            int previewHeight = height;
            if (mPreviewSize != null)
            {
                Display display = ((WindowManager) mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();

                // Camera preview sizes are reported in landscape; swap the
                // dimensions and rotate the display for portrait orientations.
                switch (display.getRotation())
                {
                    case Surface.ROTATION_0:
                        previewWidth = mPreviewSize.height;
                        previewHeight = mPreviewSize.width;
                        mCamera.setDisplayOrientation(90);
                        break;
                    case Surface.ROTATION_90:
                        previewWidth = mPreviewSize.width;
                        previewHeight = mPreviewSize.height;
                        break;
                    case Surface.ROTATION_180:
                        previewWidth = mPreviewSize.height;
                        previewHeight = mPreviewSize.width;
                        break;
                    case Surface.ROTATION_270:
                        previewWidth = mPreviewSize.width;
                        previewHeight = mPreviewSize.height;
                        mCamera.setDisplayOrientation(180);
                        break;
                }
            }

            // Scale the preview child to span the full width, preserving the
            // preview aspect ratio, anchored to the bottom of this view.
            final int scaledChildHeight = previewHeight * width / previewWidth;

            cameraView.layout(0, height - scaledChildHeight, width, height);

        }
    }


    /**
     * Picks a supported preview size whose height equals the view width (the
     * preview is rotated 90 degrees in portrait) and whose aspect ratio is
     * within tolerance of the view's. Falls back to the first supported size
     * rather than returning null when nothing matches.
     */
    private Size getOptimalPreviewSize(List<Size> sizes, int width, int height)
    {
        Size optimalSize = null;

        final double ASPECT_TOLERANCE = 0.1;
        double targetRatio = (double) height / width;

        // Try to find a size match which suits the whole screen minus the menu on the left.
        for (Size size : sizes)
        {
            if (size.height != width) continue;
            double ratio = (double) size.width / size.height;
            if (ratio <= targetRatio + ASPECT_TOLERANCE && ratio >= targetRatio - ASPECT_TOLERANCE)
            {
                optimalSize = size;
            }
        }

        // BUG FIX: the original left a TODO here and could return null, which
        // NPE'd in surfaceChanged(). If no size matches the aspect-ratio
        // requirement, fall back to the first supported size.
        if (optimalSize == null && !sizes.isEmpty())
        {
            optimalSize = sizes.get(0);
        }

        return optimalSize;
    }

    /** Attaches the preview surface and starts the preview, logging any failure. */
    public void previewCamera()
    {
        try
        {
            mCamera.setPreviewDisplay(mHolder);
            mCamera.startPreview();
        }
        catch (Exception e)
        {
            Log.d(TAG, "Cannot start preview.", e);
        }
    }


    /*public void onPreviewFrame(byte[] data, Camera arg1) { 
        Log.d("CameraSurfaceView", "PREVIEW FRAME:"); 
        byte[] pixels = new byte[use_size.width * use_size.height * 3]; ; 
        decodeYUV420SP(pixels, data, use_size.width,  use_size.height);  
        renderer.bindCameraTexture(pixels, use_size.width,  use_size.height); 
    }*/

    /**
     * Converts an NV21 (YUV420SP) preview frame into packed 3-bytes-per-pixel
     * RGB. The intermediate r/g/b values are fixed-point, clamped to 18 bits
     * (0..262143), so each output byte is the value shifted right by 10.
     */
    void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {

        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
            // The interleaved VU plane follows the Y plane; one VU pair is
            // shared by each 2x2 block of pixels.
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0){
                    y = 0;
                }
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }

                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);

                if (r < 0){
                    r = 0;
                }else if (r > 262143){
                    r = 262143;
                }
                if (g < 0){
                    g = 0;
                }else if (g > 262143){
                    g = 262143;
                }
                if (b < 0){
                    b = 0;
                }else if (b > 262143){
                    b = 262143;
                }
                // BUG FIX: the original stored the blue value into all three
                // channels with inconsistent shifts (b<<6, b>>2, b>>10).
                // Write each channel's own value, scaled from 18 bits to 8.
                rgb[yp*3] = (byte) (r >> 10);
                rgb[yp*3 + 1] = (byte) (g >> 10);
                rgb[yp*3 + 2] = (byte) (b >> 10);
            }
        }
    }
}

You'll notice I commented out your onPreviewFrame() method just to get it running, and also the line `context.surface_view.renderer = renderer`.

I'm not familiar with the OpenGL library but perhaps this is enough to get you going again.

这篇关于Android 相机将无法使用.开始预览失败的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆