Need to play Video in ARCore


Problem Description

As we know, in ARCore we can place a 3D object by tapping on a detected horizontal plane surface. Instead of a 3D object, I need to show a video when the user taps a plane surface. The look and feel should be the same as when a 3D object is displayed; the video should be shown in preview mode in place of the 3D object.

In ARCore, they currently use a RelativeLayout with a SurfaceView. So for displaying the video, I am using the SurfaceView and attaching a MediaPlayer to it.

// Called on the GL thread: compiles the video shaders and sets up the
// external OES texture and the MediaPlayer that feeds it.
public void onsurfacecreatedvideo() {
    mProgram = createProgram(mVertexShader, mFragmentShader);
    if (mProgram == 0) {
        return;
    }
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aTextureCoord");
    }

    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uMVPMatrix");
    }

    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uSTMatrix");
    }


    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");

    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);

        /*
         * Create the SurfaceTexture that will feed this textureID,
         * and pass it to the MediaPlayer
         */
    mSurface = new SurfaceTexture(mTextureID);
    mSurface.setOnFrameAvailableListener(this);



    Surface surface = new Surface(mSurface);
    mMediaPlayer.setSurface(surface);
    mMediaPlayer.setScreenOnWhilePlaying(true);

    surface.release();

    mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            Log.i(TAG,"ONPREPArED abhilash");
            setVideoSize();
            mp.start();
        }
    });
    try {
        mMediaPlayer.prepare();
    } catch (IOException t) {
        Log.e(TAG, "media player prepare failed");
    }

    synchronized(this) {
        updateSurface = false;
    }

    mMediaPlayer.start();

}

// Called from the GL render loop: latches the latest video frame into
// the external texture and draws the textured quad.
void ondrawvideo() {
    synchronized(this) {
        if (updateSurface) {
            mSurface.updateTexImage();
            mSurface.getTransformMatrix(mSTMatrix);
            updateSurface = false;
        }
    }

    /////////////
    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);


    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);



    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    // Texture coordinates are 2 floats per vertex (u, v).
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    GLES20.glFinish();

}


The following fragment is from the tail of onDrawFrame() in the activity; ondrawvideo() is called for each plane attachment created by touch:

        // Visualize planes.
        mPlaneRenderer.drawPlanes(mSession.getAllPlanes(), frame.getPose(), projmtx);


        // Visualize anchors created by touch.
        float scaleFactor = 1.0f;
        for (PlaneAttachment planeAttachment : mTouches) {
            ondrawvideo();
            if (!planeAttachment.isTracking()) {
                continue;
            }


            // Get the current combined pose of an Anchor and Plane in world space. The Anchor
            // and Plane poses are updated during calls to session.update() as ARCore refines
            // its estimate of the world.
            planeAttachment.getPose().toMatrix(mAnchorMatrix, 0);

            // Update and draw the model and its shadow.
            mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObject.draw(viewmtx, projmtx, lightIntensity);
            mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
        }

    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}

Currently my output is coming out like this. When I tap on a plane surface, it shows this:

[Image: current output, a 3D bugdroid model rendered on the detected plane]

As you can see in the image below, this is what I need to achieve. I have marked where the video should play in place of this particular bugdroid model; the video should not expand to full screen, it should be shown at just the bugdroid model's size:

[Image: desired output, with the spot marked where the video should play at the bugdroid's size]

Recommended Answer

I did this by creating a new class called MovieClipRenderer, modeled after the ObjectRenderer class in the HelloAR sample. It creates a quad geometry and renders the media player's texture onto the quad. The quad is anchored to a plane, so it does not move as the user looks around.

To test with, I used a stock movie from https://www.videvo.net/video/chicken-on-green-screen/3435/ and added it to src/main/assets.
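Files placed under src/main/assets are bundled into the APK and opened at runtime through AssetManager.openFd(), which is exactly how the play() method of the renderer below loads the clip.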

Then I added a member variable for the renderer to HelloArActivity:

  private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer();

In onSurfaceCreated() I initialized the renderer along with the others:

 mMovieClipRenderer.createOnGlThread();
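For context, here is a minimal sketch of where that call could sit in the sample's onSurfaceCreated(); the surrounding renderer names (mBackgroundRenderer, mPlaneRenderer, "trigrid.png") are assumed from the HelloAR sample of that era and may differ in your version:

@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
    // Existing HelloAR renderer setup (member names vary by sample version).
    mBackgroundRenderer.createOnGlThread(/*context=*/ this);
    mPlaneRenderer.createOnGlThread(/*context=*/ this, "trigrid.png");
    // New: the movie renderer must also be created on the GL thread,
    // since it compiles shaders and creates the OES texture.
    mMovieClipRenderer.createOnGlThread();
}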

To try it out, I made the first tap on a plane create the movie anchor, by changing the hit-test code slightly to be:

if (mMovieAnchor == null) {
    mMovieAnchor = hit.createAnchor();
} else {
    mAnchors.add(hit.createAnchor());
}
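Note that the snippet assumes a new field on HelloArActivity holding the movie's anchor; it is not shown in the original answer, so declare something like this (hypothetical, named to match the snippet):

// Anchor the movie quad is pinned to; null until the first tap on a plane.
private Anchor mMovieAnchor;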

Then at the bottom of onDrawFrame() I checked for the anchor and started playing it:

    if (mMovieAnchor != null) {
        // Draw chickens!
        if (!mMovieClipRenderer.isStarted()) {
            mMovieClipRenderer.play("chicken.mp4", this);
        }
        mMovieAnchor.getPose().toMatrix(mAnchorMatrix,0);
        mMovieClipRenderer.update(mAnchorMatrix, 0.25f);
        mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx);
    }
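A note on the scale: the quad's vertex coordinates span -1 to +1, i.e. two units across, so the 0.25f passed to update() makes the quad roughly half a meter wide in world space. Adjust that value to size the video surface.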

The rendering class is pretty long, but it is fairly standard GLES code for creating the OES texture, initializing the video player, creating the vertices of a quad, and loading a fragment shader that draws an OES texture.

// Imports needed by this class (package declaration omitted); ShaderUtil is
// the GL-error-checking helper from the ARCore HelloAR sample.
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.SurfaceTexture;
import android.media.MediaPlayer;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;

import com.google.ar.core.Pose;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Renders a movie clip with a green screen aware shader.
 * <p>
 * Copyright 2018 Google LLC
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
public class MovieClipRenderer implements SurfaceTexture.OnFrameAvailableListener {
  private static final String TAG = MovieClipRenderer.class.getSimpleName();

  // Quad geometry
  private static final int COORDS_PER_VERTEX = 3;
  private static final int TEXCOORDS_PER_VERTEX = 2;
  private static final int FLOAT_SIZE = 4;
  private static final float[] QUAD_COORDS = new float[]{
          -1.0f, -1.0f, 0.0f,
          -1.0f, +1.0f, 0.0f,
          +1.0f, -1.0f, 0.0f,
          +1.0f, +1.0f, 0.0f,
  };

  private static final float[] QUAD_TEXCOORDS = new float[]{
          0.0f, 1.0f,
          0.0f, 0.0f,
          1.0f, 1.0f,
          1.0f, 0.0f,
  };

  // Shader for a flat quad.
  private static final String VERTEX_SHADER =
      "uniform mat4 u_ModelViewProjection;\n\n" +
      "attribute vec4 a_Position;\n" +
      "attribute vec2 a_TexCoord;\n\n" +
      "varying vec2 v_TexCoord;\n\n" +
      "void main() {\n" +
      "   gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
     "   v_TexCoord = a_TexCoord;\n" +
     "}";

  // The fragment shader samples the video texture, blending to
  //  transparent for the green screen
  //  color.  The color was determined by sampling a screenshot
  //  of the video in an image editor.
  private static final String FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n" +
      "\n" +
      "precision mediump float;\n" +
      "varying vec2 v_TexCoord;\n" +
      "uniform samplerExternalOES sTexture;\n" +
      "\n" +
      "void main() {\n" +
      "    //TODO make this a uniform variable - " +
      " but this is the color of the background. 17ad2b\n" +
      "  vec3 keying_color = vec3(23.0f/255.0f, 173.0f/255.0f, 43.0f/255.0f);\n" +
      "  float thresh = 0.4f; // 0 - 1.732\n" +
      "  float slope = 0.2;\n" +
      "  vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
      "  float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
      "  float edge0 = thresh * (1.0f - slope);\n" +
      "  float alpha = smoothstep(edge0,thresh,d);\n" +
      "  gl_FragColor = vec4(input_color, alpha);\n" +
      "}";

  // Geometry data in GLES friendly data structure.
  private FloatBuffer mQuadVertices;
  private FloatBuffer mQuadTexCoord;

  // Shader program id and parameters.
  private int mQuadProgram;
  private int mQuadPositionParam;
  private int mQuadTexCoordParam;
  private int mModelViewProjectionUniform;
  private int mTextureId = -1;

  // Matrix for the location and perspective of the quad.
  private float[] mModelMatrix = new float[16];

  // Media player,  texture and other bookkeeping.
  private MediaPlayer player;
  private SurfaceTexture videoTexture;
  private boolean frameAvailable = false;
  private boolean started = false;
  private boolean done;
  private boolean prepared;
  private static Handler handler;


  // Lock used for waiting if the player was not yet created.
  private final Object lock = new Object();

  /**
   * Update the model matrix based on the location and scale to draw the quad.
   */
  public void update(float[] modelMatrix, float scaleFactor) {
    float[] scaleMatrix = new float[16];
    Matrix.setIdentityM(scaleMatrix, 0);
    scaleMatrix[0] = scaleFactor;
    scaleMatrix[5] = scaleFactor;
    scaleMatrix[10] = scaleFactor;
    Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
  }

  /**
   * Initialize the GLES objects.  
   * This is called from the GL render thread to make sure
   * it has access to the EGLContext.
   */
  public void createOnGlThread() {

    // 1 texture to hold the video frame.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    mTextureId = textures[0];
    int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
    GLES20.glBindTexture(mTextureTarget, mTextureId);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
       GLES20.GL_NEAREST);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
       GLES20.GL_NEAREST);

    videoTexture = new SurfaceTexture(mTextureId);
    videoTexture.setOnFrameAvailableListener(this);

    // Make a quad to hold the movie
    ByteBuffer bbVertices = ByteBuffer.allocateDirect(
         QUAD_COORDS.length * FLOAT_SIZE);
    bbVertices.order(ByteOrder.nativeOrder());
    mQuadVertices = bbVertices.asFloatBuffer();
    mQuadVertices.put(QUAD_COORDS);
    mQuadVertices.position(0);

    int numVertices = 4;
    ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
            numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
    bbTexCoords.order(ByteOrder.nativeOrder());
    mQuadTexCoord = bbTexCoords.asFloatBuffer();
    mQuadTexCoord.put(QUAD_TEXCOORDS);
    mQuadTexCoord.position(0);

    int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
    int fragmentShader = loadGLShader(TAG,
         GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

    mQuadProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mQuadProgram, vertexShader);
    GLES20.glAttachShader(mQuadProgram, fragmentShader);
    GLES20.glLinkProgram(mQuadProgram);
    GLES20.glUseProgram(mQuadProgram);

    ShaderUtil.checkGLError(TAG, "Program creation");

    mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
    mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
    mModelViewProjectionUniform = GLES20.glGetUniformLocation(
            mQuadProgram, "u_ModelViewProjection");

    ShaderUtil.checkGLError(TAG, "Program parameters");

    Matrix.setIdentityM(mModelMatrix, 0);

    initializeMediaPlayer();
  }

  public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
    if (done || !prepared) {
      return;
    }
    synchronized (this) {
      if (frameAvailable) {
        videoTexture.updateTexImage();
        frameAvailable = false;
      }
    }

    // The quad's model matrix comes from update(); the pose parameter is
    // unused here and kept only to mirror the sample's renderer API.

    float[] modelView = new float[16];
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
    Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);

    ShaderUtil.checkGLError(TAG, "Before draw");

    GLES20.glEnable(GLES20.GL_BLEND);
    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, 0, mQuadVertices);
    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoord);

    // Enable vertex arrays
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
    GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
                              modelViewProjection, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    ShaderUtil.checkGLError(TAG, "Draw");
  }

  private void initializeMediaPlayer() {
    if (handler == null)
      handler = new Handler(Looper.getMainLooper());

    handler.post(new Runnable() {
      @Override
      public void run() {
        synchronized (lock) {
          player = new MediaPlayer();
          lock.notify();
        }
      }
    });
  }

  @Override
  public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    synchronized (this) {
      frameAvailable = true;
    }
  }

  public boolean play(final String filename, Context context) {
    // Wait for the player to be created.
    if (player == null) {
      synchronized (lock) {
        while (player == null) {
          try {
            lock.wait();
          } catch (InterruptedException e) {
            return false;
          }
        }
      }
    }

    player.reset();
    done = false;

    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
      @Override
      public void onPrepared(MediaPlayer mp) {
        prepared = true;
        mp.start();
      }
    });
    player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
      @Override
      public boolean onError(MediaPlayer mp, int what, int extra) {
        done = true;
        Log.e("VideoPlayer",
            String.format("Error occured: %d, %d\n", what, extra));
        return false;
      }
    });

    player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
      @Override
      public void onCompletion(MediaPlayer mp) {
        done = true;
      }
    });

    player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
      @Override
      public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
        return false;
      }
    });

    try {
      AssetManager assets = context.getAssets();
      AssetFileDescriptor descriptor = assets.openFd(filename);
      player.setDataSource(descriptor.getFileDescriptor(),
                           descriptor.getStartOffset(),
                           descriptor.getLength());
      player.setSurface(new Surface(videoTexture));
      player.prepareAsync();
      synchronized (this) {
        started = true;
      }
    } catch (IOException e) {
      Log.e(TAG, "Exception preparing movie", e);
      return false;
    }

    return true;
  }

  public synchronized boolean isStarted() {
    return started;
  }

  static int loadGLShader(String tag, int type, String code) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);

    // Get the compilation status.
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

    // If the compilation failed, delete the shader.
    if (compileStatus[0] == 0) {
      Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
      GLES20.glDeleteShader(shader);
      shader = 0;
    }

    if (shader == 0) {
      throw new RuntimeException("Error creating shader.");
    }

    return shader;
  }
}
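Two details of the class are worth spelling out. First, the chroma key: the fragment shader computes the Euclidean RGB distance d between each video pixel and the key color 17ad2b, then maps it through smoothstep(edge0, thresh, d). With thresh = 0.4 and slope = 0.2, edge0 = 0.4 * (1 - 0.2) = 0.32, so pixels within 0.32 of the key color become fully transparent, pixels farther than 0.4 stay fully opaque, and the band in between fades smoothly. Second, the threading: the MediaPlayer is created on the main looper in initializeMediaPlayer(), while the GL objects are created on the render thread in createOnGlThread(); play() blocks on the lock until the player exists, which is why it can be called safely from the GL thread as the onDrawFrame() snippet above does.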
