Encoding AAC Audio using AudioRecord and MediaCodec on Android


Problem Description

I am trying to encode AAC audio using Android's AudioRecord and MediaCodec. I have created an encoder class very similar to the one in Encoding H.264 from camera with Android MediaCodec (http://stackoverflow.com/questions/13458289/encoding-h-264-from-camera-with-android-mediacodec). With this class, I create an instance of AudioRecord and tell it to read off its byte[] data to the AudioEncoder (audioEncoder.offerEncoder(Data)).

 while(isRecording) 
 {
  audioRecord.read(Data, 0, Data.length);
  audioEncoder.offerEncoder(Data);
 }

Here is my setup for my AudioRecord:

    int audioSource = MediaRecorder.AudioSource.MIC;
    int sampleRateInHz = 44100;
    int channelConfig = AudioFormat.CHANNEL_IN_MONO;
    int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
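
For reference, here is a minimal sketch (not from the original post, so treat it as illustrative) of how these values would typically be wired into the AudioRecord that feeds the read loop above:

    AudioRecord audioRecord = new AudioRecord(audioSource, sampleRateInHz,
            channelConfig, audioFormat, bufferSizeInBytes);
    byte[] Data = new byte[bufferSizeInBytes]; // the buffer handed to read() above
    audioRecord.startRecording();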

I successfully collected some byte[] data and wrote it to a local file. Unfortunately, the file is not playable. I did some more searching online and found a related post, How to generate the AAC ADTS elementary stream with Android MediaCodec (http://stackoverflow.com/questions/18862715/how-to-generate-the-aac-adts-elementary-stream-with-android-mediacodec). Others who had a similar problem say the main issue is that "the MediaCodec encoder generates the raw AAC stream. The raw AAC stream needs to be converted into a playable format, such as the ADTS stream." So I tried to add the ADTS header. However, after I added the ADTS header (commented out in the code below), my AudioEncoder wouldn't even write the output audio file. Is there anything I'm missing? Is my setup correct?

Any suggestions, comments, and opinions are welcome and very much appreciated. Thanks, guys!

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Environment;
import android.util.Log;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;

public class AudioEncoder {

    private MediaCodec mediaCodec;
    private BufferedOutputStream outputStream;
    private String mediaType = "audio/mp4a-latm";

    public AudioEncoder() {
        File f = new File(Environment.getExternalStorageDirectory(), "Download/audio_encoded.aac");
        touch(f);
        try {
            outputStream = new BufferedOutputStream(new FileOutputStream(f));
            Log.e("AudioEncoder", "outputStream initialized");
        } catch (Exception e){
            e.printStackTrace();
        }

        mediaCodec = MediaCodec.createEncoderByType(mediaType);
        final int kSampleRates[] = { 8000, 11025, 22050, 44100, 48000 };
        final int kBitRates[] = { 64000, 128000 };
        MediaFormat mediaFormat  = MediaFormat.createAudioFormat(mediaType,kSampleRates[3],1);
        mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);

        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, kBitRates[1]);
        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
            outputStream.flush();
            outputStream.close();
        } catch (Exception e){
            e.printStackTrace();
        }
    }

    // called with each byte[] from AudioRecord's read loop
    public synchronized void offerEncoder(byte[] input) {
        Log.e("AudioEncoder", input.length + " is coming");

        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0) {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();

                inputBuffer.put(input);


                mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo,0);

////trying to add an ADTS header
//            while (outputBufferIndex >= 0) {
//                int outBitsSize   = bufferInfo.size;
//                int outPacketSize = outBitsSize + 7;    // 7 is ADTS size
//                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
//
//                outputBuffer.position(bufferInfo.offset);
//                outputBuffer.limit(bufferInfo.offset + outBitsSize);
//
//                byte[] outData = new byte[outPacketSize];
//                addADTStoPacket(outData, outPacketSize);
//
//                outputBuffer.get(outData, 7, outBitsSize);
//                outputBuffer.position(bufferInfo.offset);
//
////                byte[] outData = new byte[bufferInfo.size];
//                outputStream.write(outData, 0, outData.length);
//                Log.e("AudioEncoder", outData.length + " bytes written");
//
//                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
//                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
//
//            }


//Without ADTS header
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);
                outputStream.write(outData, 0, outData.length);
                Log.e("AudioEncoder", outData.length + " bytes written");

                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            }
        } catch (Throwable t) {
            t.printStackTrace();
        }

    }

    /**
     *  Add ADTS header at the beginning of each and every AAC packet.
     *  This is needed as MediaCodec encoder generates a packet of raw
     *  AAC data.
     *
     *  Note the packetLen must count in the ADTS header itself.
     **/
    private void addADTStoPacket(byte[] packet, int packetLen) {
        int profile = 2;  //AAC LC
        //39=MediaCodecInfo.CodecProfileLevel.AACObjectELD;
        int freqIdx = 4;  //44.1KHz
        int chanCfg = 2;  //CPE

        // fill in ADTS data
        packet[0] = (byte)0xFF;
        packet[1] = (byte)0xF9;
        packet[2] = (byte)(((profile-1)<<6) + (freqIdx<<2) +(chanCfg>>2));
        packet[3] = (byte)(((chanCfg&3)<<6) + (packetLen>>11));
        packet[4] = (byte)((packetLen&0x7FF) >> 3);
        packet[5] = (byte)(((packetLen&7)<<5) + 0x1F);
        packet[6] = (byte)0xFC;
    }

    public void touch(File f)
    {
        try {
            if(!f.exists())
                f.createNewFile();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

Solution

You can use Android's MediaMuxer to package the raw streams created by MediaCodec into a .mp4 file. Bonus: AAC packets contained in a .mp4 don't require the ADTS header.
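
For illustration, here is a minimal sketch of that approach against the code in the question (the output path is hypothetical, and the variable names follow offerEncoder() above; MediaMuxer requires API 18). Two caveats: the codec-config buffer the encoder emits first should not be written as a sample, and MediaMuxer expects increasing presentationTimeUs values, so the zeros passed to queueInputBuffer above would need to become real timestamps:

    // Setup (e.g. in the constructor, replacing the FileOutputStream);
    // import android.media.MediaMuxer; the constructor throws IOException.
    MediaMuxer muxer = new MediaMuxer("/sdcard/Download/audio_encoded.mp4", // hypothetical path
            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    int audioTrackIndex = -1;

    // In the drain loop, replacing outputStream.write(...):
    int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    while (outputBufferIndex >= 0) {
        ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
        if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // The codec config travels inside the track's MediaFormat,
            // so don't write this buffer as sample data.
            bufferInfo.size = 0;
        } else if (bufferInfo.size > 0 && audioTrackIndex >= 0) {
            muxer.writeSampleData(audioTrackIndex, outputBuffer, bufferInfo);
        }
        mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
        outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
    if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // Register the track once the encoder reports its final output format.
        audioTrackIndex = muxer.addTrack(mediaCodec.getOutputFormat());
        muxer.start();
    }

    // In close(), after mediaCodec.stop():
    muxer.stop();
    muxer.release();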

I've got a working example of this technique on Github: https://github.com/OnlyInAmerica/HWEncoderExperiments/blob/audioonly/HWEncoderExperiments/src/main/java/net/openwatch/hwencoderexperiments/AudioEncoder.java
