FFmpegFrameRecorder videoBroadcasting audio comes faster than video frame in 3G Network


Question

I'm using FFmpegFrameRecorder for video broadcasting. The problem is that the audio comes faster than the video frames over a 3G network. With the following code I'm unable to produce a complete video; there is a problem with the audio/video timestamps.

Java Code:

import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U;

import java.io.IOException;
import java.nio.ShortBuffer;

import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.PowerManager;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup.LayoutParams;
import android.widget.Button;
import android.widget.LinearLayout;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

public class MainActivity extends Activity implements OnClickListener {

    private final static String LOG_TAG = "MainActivity";

    private PowerManager.WakeLock mWakeLock;

    private String ffmpeg_link = "";

    private volatile FFmpegFrameRecorder recorder;
    boolean recording = false;
    long startTime = 0;

    private int sampleAudioRateInHz = 16000;
    private int imageWidth = 320;
    private int imageHeight = 240;
    private int frameRate = 24;

    private Thread audioThread;
    volatile boolean runAudioThread = true;
    private AudioRecord audioRecord;
    private AudioRecordRunnable audioRecordRunnable;

    private CameraView cameraView;
    private IplImage yuvIplimage = null;

    private Button recordButton;
    private LinearLayout mainLayout;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
        setContentView(R.layout.activity_main);

        initLayout();
        initRecorder();
    }

    @Override
    protected void onResume() {
        super.onResume();

        if (mWakeLock == null) {
            PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
            mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK,
                    LOG_TAG);
            mWakeLock.acquire();
        }
    }

    @Override
    protected void onPause() {
        super.onPause();

        if (mWakeLock != null) {
            mWakeLock.release();
            mWakeLock = null;
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();

        recording = false;
    }

    private void initLayout() {

        mainLayout = (LinearLayout) this.findViewById(R.id.record_layout);

        recordButton = (Button) findViewById(R.id.recorder_control);
        recordButton.setText("Start");
        recordButton.setOnClickListener(this);

        cameraView = new CameraView(this);

        LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(
                LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        mainLayout.addView(cameraView, layoutParam);
        Log.v(LOG_TAG, "added cameraView to mainLayout");
    }

    private void initRecorder() {
        Log.w(LOG_TAG, "initRecorder");

        if (yuvIplimage == null) {
            // Recreated after frame size is set in surface change method
            yuvIplimage = IplImage.create(imageWidth, imageHeight,
                    IPL_DEPTH_8U, 2);
            // yuvIplimage = IplImage.create(imageWidth, imageHeight,
            // IPL_DEPTH_32S, 2);

            Log.v(LOG_TAG, "IplImage.create");
        }

        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth,
                imageHeight, 1);
        Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: "
                + imageWidth + " imageHeight " + imageHeight);

        recorder.setFormat("flv");
        Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");

        recorder.setSampleRate(sampleAudioRateInHz);
        Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");

        // re-set in the surface changed method as well
        recorder.setFrameRate(frameRate);
        Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

        // Create audio recording thread
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
    }

    // Start the capture
    public void startRecording() {
        try {
            recorder.start();
            startTime = System.currentTimeMillis();
            recording = true;
            audioThread.start();
        } catch (FFmpegFrameRecorder.Exception e) {
            e.printStackTrace();
        }
    }

    public void stopRecording() {
        // This should stop the audio thread from running
        runAudioThread = false;

        if (recorder != null && recording) {
            recording = false;
            Log.v(LOG_TAG,
                    "Finishing recording, calling stop and release on recorder");
            try {
                recorder.stop();
                recorder.release();
            } catch (FFmpegFrameRecorder.Exception e) {
                e.printStackTrace();
            }
            recorder = null;
        }
    }

    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) {
        // Quit when back button is pushed
        if (keyCode == KeyEvent.KEYCODE_BACK) {
            if (recording) {
                stopRecording();
            }
            finish();
            return true;
        }
        return super.onKeyDown(keyCode, event);
    }

    @Override
    public void onClick(View v) {
        if (!recording) {
            startRecording();
            Log.w(LOG_TAG, "Start Button Pushed");
            recordButton.setText("Stop");
        } else {
            stopRecording();
            Log.w(LOG_TAG, "Stop Button Pushed");
            recordButton.setText("Start");
        }
    }

    // ---------------------------------------------
    // audio thread, gets and encodes audio data
    // ---------------------------------------------
    class AudioRecordRunnable implements Runnable {

        @Override
        public void run() {
            // Set the thread priority
            android.os.Process
                    .setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

            // Audio
            int bufferSize;
            short[] audioData;
            int bufferReadResult;

            bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
                    sampleAudioRateInHz,
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, bufferSize);

            audioData = new short[bufferSize];

            Log.d(LOG_TAG, "audioRecord.startRecording()");
            audioRecord.startRecording();

            // Audio Capture/Encoding Loop
            while (runAudioThread) {
                // Read from audioRecord
                bufferReadResult = audioRecord.read(audioData, 0,
                        audioData.length);
                if (bufferReadResult > 0) {
                    // Log.v(LOG_TAG,"audioRecord bufferReadResult: " +
                    // bufferReadResult);

                    // Changes in this variable may not be picked up despite it
                    // being "volatile"
                    if (recording) {
                        try {
                            // Write to FFmpegFrameRecorder
                            recorder.record(ShortBuffer.wrap(audioData, 0,
                                    bufferReadResult));
                        } catch (FFmpegFrameRecorder.Exception e) {
                            Log.v(LOG_TAG, e.getMessage());
                            e.printStackTrace();
                        }
                    }
                }
            }
            Log.v(LOG_TAG, "AudioThread Finished");

            /* Capture/Encoding finished, release recorder */
            if (audioRecord != null) {
                audioRecord.stop();
                audioRecord.release();
                audioRecord = null;
                Log.v(LOG_TAG, "audioRecord released");
            }
        }
    }

    class CameraView extends SurfaceView implements SurfaceHolder.Callback,
            PreviewCallback {

        private boolean previewRunning = false;

        private SurfaceHolder holder;
        private Camera camera;

        private byte[] previewBuffer;

        long videoTimestamp = 0;

        Bitmap bitmap;
        Canvas canvas;

        public CameraView(Context _context) {
            super(_context);

            holder = this.getHolder();
            holder.addCallback(this);
            holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            camera = Camera.open();

            try {
                camera.setPreviewDisplay(holder);
                camera.setPreviewCallback(this);

                Camera.Parameters currentParams = camera.getParameters();
                Log.v(LOG_TAG,
                        "Preview Framerate: "
                                + currentParams.getPreviewFrameRate());
                Log.v(LOG_TAG,
                        "Preview imageWidth: "
                                + currentParams.getPreviewSize().width
                                + " imageHeight: "
                                + currentParams.getPreviewSize().height);

                // Use these values
                imageWidth = currentParams.getPreviewSize().width;
                imageHeight = currentParams.getPreviewSize().height;
                frameRate = currentParams.getPreviewFrameRate();

                bitmap = Bitmap.createBitmap(imageWidth, imageHeight,
                        Bitmap.Config.ALPHA_8);

                /*
                 * Log.v(LOG_TAG,"Creating previewBuffer size: " + imageWidth *
                 * imageHeight *
                 * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat
                 * ())/8); previewBuffer = new byte[imageWidth * imageHeight *
                 * ImageFormat
                 * .getBitsPerPixel(currentParams.getPreviewFormat())/8];
                 * camera.addCallbackBuffer(previewBuffer);
                 * camera.setPreviewCallbackWithBuffer(this);
                 */

                camera.startPreview();
                previewRunning = true;
            } catch (IOException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width,
                int height) {
            Log.v(LOG_TAG, "Surface Changed: width " + width + " height: "
                    + height);

            // We would do this if we want to reset the camera parameters
            /*
             * if (!recording) { if (previewRunning){ camera.stopPreview(); }
             * 
             * try { //Camera.Parameters cameraParameters =
             * camera.getParameters(); //p.setPreviewSize(imageWidth,
             * imageHeight); //p.setPreviewFrameRate(frameRate);
             * //camera.setParameters(cameraParameters);
             * 
             * camera.setPreviewDisplay(holder); camera.startPreview();
             * previewRunning = true; } catch (IOException e) {
             * Log.e(LOG_TAG,e.getMessage()); e.printStackTrace(); } }
             */

            // Get the current parameters
            Camera.Parameters currentParams = camera.getParameters();
            Log.v(LOG_TAG,
                    "Preview Framerate: " + currentParams.getPreviewFrameRate());
            Log.v(LOG_TAG,
                    "Preview imageWidth: "
                            + currentParams.getPreviewSize().width
                            + " imageHeight: "
                            + currentParams.getPreviewSize().height);

            // Use these values
            imageWidth = currentParams.getPreviewSize().width;
            imageHeight = currentParams.getPreviewSize().height;
            frameRate = currentParams.getPreviewFrameRate();

            // Create the yuvIplimage if needed
            yuvIplimage = IplImage.create(imageWidth, imageHeight,
                    IPL_DEPTH_8U, 1);
            // yuvIplimage = IplImage.create(imageWidth, imageHeight,
            // IPL_DEPTH_32S, 2);
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            try {
                camera.setPreviewCallback(null);

                previewRunning = false;
                camera.release();

            } catch (RuntimeException e) {
                Log.v(LOG_TAG, e.getMessage());
                e.printStackTrace();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {

            if (yuvIplimage != null && recording) {
                videoTimestamp = 1000 * (System.currentTimeMillis() - startTime);

                // Put the camera preview frame right into the yuvIplimage
                // object
                System.out.println("value of data=============" + data);
                yuvIplimage.getByteBuffer().put(data);

                // FAQ about IplImage:
                // - For custom raw processing of data, getByteBuffer() returns
                // an NIO direct
                // buffer wrapped around the memory pointed by imageData, and
                // under Android we can
                // also use that Buffer with Bitmap.copyPixelsFromBuffer() and
                // copyPixelsToBuffer().
                // - To get a BufferedImage from an IplImage, we may call
                // getBufferedImage().
                // - The createFrom() factory method can construct an IplImage
                // from a BufferedImage.
                // - There are also a few copy*() methods for
                // BufferedImage<->IplImage data transfers.

                // Let's try it..
                // This works but only on transparency
                // Need to find the right Bitmap and IplImage matching types

                /*
                 * bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer());
                 * //bitmap.setPixel(10,10,Color.MAGENTA);
                 * 
                 * canvas = new Canvas(bitmap); Paint paint = new Paint();
                 * paint.setColor(Color.GREEN); float leftx = 20; float topy =
                 * 20; float rightx = 50; float bottomy = 100; RectF rectangle =
                 * new RectF(leftx,topy,rightx,bottomy);
                 * canvas.drawRect(rectangle, paint);
                 * 
                 * bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer());
                 */
                // Log.v(LOG_TAG,"Writing Frame");

                try {

                    // Get the correct time
                    recorder.setTimestamp(videoTimestamp);

                    // Record the image into FFmpegFrameRecorder
                    recorder.record(yuvIplimage);

                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                }
            }
        }
    }
}

Manifest

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.example.javacv.stream.test2"
    android:versionCode="1"
    android:versionName="1.0" >

    <uses-sdk
        android:minSdkVersion="8"
        android:targetSdkVersion="15" />

    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />
    <uses-permission android:name="android.permission.RECORD_AUDIO" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

    <application
        android:icon="@drawable/ic_launcher"
        android:label="@string/app_name"
        android:theme="@style/AppTheme" >
        <activity
            android:name="com.example.javacv.stream.test2.MainActivity"
            android:label="@string/title_activity_main" >
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />

                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

</manifest>

Solution

This is a known issue we battled with on the iOS side as well. Basically, the video packets get dropped while the audio keeps going, and it all goes to hell. Some hardware under low bandwidth just doesn't play nice and drifts out of sync. I don't believe there is a solid fix; we had to hack around it by building our own buffer on top of the audio/video streams and re-synchronizing using timestamps, frame sizes, and packet counts.

I'm afraid I can't post that code (it's not mine to post), but if you know the protocol, it shouldn't be hard to recreate.
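
For what it's worth, the general idea is simple enough to sketch. Below is a minimal, hypothetical Java illustration of that re-synchronization, not the code referred to above: the class and all of its names (AvSyncWriter, writeAudio(), queueVideo(), MAX_QUEUED_FRAMES) are assumptions of this sketch, and the only javacv calls it relies on (record() and setTimestamp()) are the same ones the question's code already uses. The audio stream drives the clock, derived from the PCM sample count rather than wall-clock time, and video frames are queued and written, or dropped, against that clock.

import java.nio.ShortBuffer;
import java.util.LinkedList;

import com.googlecode.javacv.FFmpegFrameRecorder;
import com.googlecode.javacv.cpp.opencv_core.IplImage;

// Hypothetical re-sync buffer, not the answerer's actual code. Audio drives
// the clock; video frames wait in a queue until the audio clock reaches
// their capture timestamps, and stale frames are dropped when the queue
// backs up, so the two streams cannot drift apart.
public class AvSyncWriter {

    private static class TimedFrame {
        final IplImage image;   // must be a private copy, not the reused preview buffer
        final long timestampUs; // capture time in microseconds

        TimedFrame(IplImage image, long timestampUs) {
            this.image = image;
            this.timestampUs = timestampUs;
        }
    }

    private static final int MAX_QUEUED_FRAMES = 8; // arbitrary cap for this sketch

    private final FFmpegFrameRecorder recorder;
    private final int sampleRateInHz;
    private long audioSamplesWritten = 0;
    private final LinkedList<TimedFrame> videoQueue = new LinkedList<TimedFrame>();

    public AvSyncWriter(FFmpegFrameRecorder recorder, int sampleRateInHz) {
        this.recorder = recorder;
        this.sampleRateInHz = sampleRateInHz;
    }

    // Called from the audio thread with each chunk of PCM samples.
    public synchronized void writeAudio(short[] samples, int count)
            throws FFmpegFrameRecorder.Exception {
        recorder.record(ShortBuffer.wrap(samples, 0, count)); // same call as in the question
        audioSamplesWritten += count; // mono: one sample per audio frame
        drainVideo();
    }

    // Called from onPreviewFrame() with a copy of the frame and its timestamp.
    public synchronized void queueVideo(IplImage frame, long timestampUs)
            throws FFmpegFrameRecorder.Exception {
        videoQueue.add(new TimedFrame(frame, timestampUs));
        drainVideo();
    }

    // The audio clock in microseconds, derived from how much PCM was encoded.
    private long audioClockUs() {
        return audioSamplesWritten * 1000000L / sampleRateInHz;
    }

    private void drainVideo() throws FFmpegFrameRecorder.Exception {
        long clock = audioClockUs();
        // Write every queued frame whose timestamp the audio clock has reached.
        while (!videoQueue.isEmpty() && videoQueue.peek().timestampUs <= clock) {
            TimedFrame f = videoQueue.poll();
            recorder.setTimestamp(f.timestampUs);
            recorder.record(f.image);
        }
        // Under low bandwidth the queue backs up; drop the oldest frames
        // rather than letting the audio run further and further ahead.
        while (videoQueue.size() > MAX_QUEUED_FRAMES) {
            videoQueue.poll();
        }
    }
}

In the question's code this would mean calling writeAudio() from AudioRecordRunnable instead of calling recorder.record(...) directly, and calling queueVideo() from onPreviewFrame() with a copy of yuvIplimage (the preview callback reuses its buffer, so queuing the live image would corrupt pending frames).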
