您的位置:首页 > 编程语言 > PHP开发

Rtp 封包 AAC 和H264

2018-03-29 18:33 1016 查看
参考博客  https://blog.csdn.net/chen495810242/article/details/39207305 ,非常好,感谢!!
了解了RTP包文档,下面我们看如何用代码来实现
首先我们从音频入手,简单点
使用Android 音频采集类AudioRecord
package com.xbcx.media.audio;
import java.util.ArrayList;import java.util.List;
import com.xbcx.media.MediaError;import com.xbcx.media.Recorder;import com.xbcx.media.RecordListener;import com.xbcx.util.XbLog;
import android.media.AudioRecord;import android.os.Process;
/*** 使用AudioRecord 采集音频数据* @author gan*/public class AudioRecorder implements Recorder{
private static final String Tag = "AudioRecoder";AudioRecord mAudioRecord;AudioConfig mAudioConfig;List<RecordListener> mRecoderListeners;AudioFrameListener mAudioFrameListener;List<AudioProcessor> mAudioProcessors;public AudioRecorder setAudioConfig(AudioConfig audioConfig) {this.mAudioConfig = audioConfig;return this;}public AudioRecorder setAudioFrameListener(AudioFrameListener audioFrameListener) {this.mAudioFrameListener = audioFrameListener;return this;}public AudioConfig getAudioConfig() {return mAudioConfig;}@Overridepublic void startRecord() {startAudioThread();onRecordStart();}
@Overridepublic void stopRecord() {stopAudioThread();onRecordEnd();}
@Overridepublic void release() {if(isAudioRecording()) {stopRecord();}if(mRecoderListeners!=null) {mRecoderListeners.clear();}if(mAudioProcessors!=null) {mAudioProcessors.clear();}}public boolean isAudioRecording() {return mReadThread!=null;}private void initAudioRecord() {int bufferSize = AudioRecord.getMinBufferSize(mAudioConfig.sampleRate, mAudioConfig.channelConfig, mAudioConfig.audioFormat);mAudioRecord = new AudioRecord(mAudioConfig.audioSource, mAudioConfig.sampleRate, mAudioConfig.channelConfig, mAudioConfig.audioFormat, bufferSize);}private Thread mReadThread;private int mReadBufferSize = 1920;public AudioRecorder setReadBufferSize(int readBufferSize) {this.mReadBufferSize = readBufferSize;return this;}private void startAudioThread() {mReadThread = new Thread(new Runnable() {@Overridepublic void run() {Process.setThreadPriority(Process.THREAD_PRIORITY_AUDIO);initAudioRecord();mAudioRecord.startRecording();if(mAudioProcessors!=null) {for(AudioProcessor processor:mAudioProcessors) {processor.init(AudioRecorder.this);}}int len = 0;byte[] buffer = new byte[mReadBufferSize];try {while (mReadThread != null) {if(mAudioRecord == null) {XbLog.i(Tag, "audiorecord is no start");continue;}else {len = mAudioRecord.read(buffer, 0, mReadBufferSize);if(len>=0) {byte[] data = new byte[len];System.arraycopy(buffer, 0, data, 0, len);if(mAudioProcessors!=null) {for(AudioProcessor processor:mAudioProcessors) {processor.processData(data);}}if(mAudioFrameListener!=null) {mAudioFrameListener.onAudioFrame(data);}}}}} catch (Exception e) {XbLog.i(Tag, "read e:"+e.getMessage());e.printStackTrace();onRecordError(MediaError.Error_Read, "read thread exception");}finally {if(mAudioRecord!=null) {try {mAudioRecord.stop();} catch (Exception e2) {e2.printStackTrace();}mAudioRecord.release();mAudioRecord = null;}try {buffer = null;if(mAudioProcessors!=null) {for(AudioProcessor processor:mAudioProcessors) {processor.release();}}} catch (Exception e2) {e2.printStackTrace();XbLog.i(Tag, "read 
e2:"+e2.getMessage());}}}}, "audio_read_thread");mReadThread.start();}private void stopAudioThread() {if(mReadThread!=null) {try {Thread t = mReadThread;mReadThread = null;if (t != null) {t.interrupt();t.join();}} catch (InterruptedException e) {e.fillInStackTrace();}}}protected void onRecordStart() {if(mRecoderListeners!=null) {for(RecordListener listener:mRecoderListeners) {listener.onRecoderStart(this);}}}protected void onRecordEnd() {if(mRecoderListeners!=null) {for(RecordListener listener:mRecoderListeners) {listener.onRecoderEnd(this);}}}protected void onRecordError(int error,String message) {if(mRecoderListeners!=null) {for(RecordListener listener:mRecoderListeners) {listener.onRecoderError(this, error, message);}}}
@Overridepublic void addRecoderListener(RecordListener listener) {if(mRecoderListeners == null) {mRecoderListeners = new ArrayList<>();}mRecoderListeners.add(listener);}
@Overridepublic void removeRecoderListener(RecordListener listener) {if(mRecoderListeners!=null) {mRecoderListeners.remove(listener);}}public void addAudioProcessor(AudioProcessor processor) {if(mAudioProcessors==null) {mAudioProcessors = new ArrayList<>();}mAudioProcessors.add(processor);}public void removeAudioProcessor(AudioProcessor processor) {if(mAudioProcessors!=null) {mAudioProcessors.remove(processor);}}public static interface AudioFrameListener{public void onAudioFrame(byte[] data);}public static interface AudioProcessor{public void init(AudioRecorder recoder);public void processData(byte[] data)throws Exception;public void release();}}
使用Android 硬编码 MediaCodec 编码pcm 如何使用MediaCodec 参考 MediaCodec官方文档,非常详细,但是也有很多坑。
http://grepcode.com/file/repository.grepcode.com/java/ext/com.google.android/android/5.1.1_r1/android/media/MediaCodec.java#MediaCodec

package com.xbcx.media.audio;
import android.annotation.SuppressLint;import android.media.MediaCodec;import android.media.MediaCodec.BufferInfo;import android.media.MediaCodecInfo;import android.media.MediaFormat;
import com.xbcx.utils.SDKINT;import com.xbcx.utils.XbLog;
import java.io.IOException;import java.nio.ByteBuffer;
/*** 使用Android MediaCodec 编码PCM* @author gan*/@SuppressLint("NewApi")public class AudioCodec{
final static String Tag = "AudioEncoder";MediaCodec mMediaCodec;int mBufferSize = 1920;ByteBuffer[] mInputBuffers,mOutputBuffers;BufferInfo mBufferInfo = new BufferInfo();AudioCodecFrameListener mAudioCodecFrameListener;public void start(AudioConfig config) throws IOException {start(config.bitRate, config.sampleRate);}public void start(int bitRate,int sampleRate) throws IOException {startMediaCodec(bitRate, sampleRate);}@SuppressWarnings("deprecation")public void encoder(byte[] data, int offset, int length) {int inputIndex = mMediaCodec.dequeueInputBuffer(1000);if(inputIndex>=0) {ByteBuffer inputBuffer = null;if(SDKINT.isMin(android.os.Build.VERSION_CODES.LOLLIPOP)){inputBuffer = mMediaCodec.getInputBuffer(inputIndex);}else {inputBuffer = mInputBuffers[inputIndex];}inputBuffer.clear();inputBuffer.put(data, offset, length);long presentationTimeUs = System.nanoTime() / 1000;mMediaCodec.queueInputBuffer(inputIndex, offset, length, presentationTimeUs, 0);}int index = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10000);if (index >= 0) {if (mBufferInfo.flags == MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {return ;}ByteBuffer outputBuffer = null;if (SDKINT.isMin(android.os.Build.VERSION_CODES.LOLLIPOP)) {outputBuffer = mMediaCodec.getOutputBuffer(index);} else {outputBuffer = mOutputBuffers[index];}// outputBuffer.position(mBufferInfo.offset);// outputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);if(mAudioCodecFrameListener!=null) {mAudioCodecFrameListener.onAudioCodecFrame(outputBuffer, mBufferInfo);}mMediaCodec.releaseOutputBuffer(index, false);} else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {mOutputBuffers = mMediaCodec.getOutputBuffers();} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {XbLog.i(Tag, "output format changed...");MediaFormat newFormat = mMediaCodec.getOutputFormat();if(mAudioCodecFrameListener!=null) {mAudioCodecFrameListener.onAudioCodecFormatChanged(newFormat);}} else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {// Log.v(TAG, "No 
buffer available...");} else {XbLog.i(Tag, "index:" + index);}}@SuppressWarnings("deprecation")public void startMediaCodec(int bitRate,int sampleRate) throws IOException {mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");MediaFormat format = new MediaFormat();format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);format.setInteger(MediaFormat.KEY_SAMPLE_RATE, sampleRate);format.setInteger(MediaFormat.KEY_AAC_PROFILE,MediaCodecInfo.CodecProfileLevel.AACObjectLC);format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, mBufferSize);mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);mMediaCodec.start();if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {mInputBuffers = mMediaCodec.getInputBuffers();mOutputBuffers = mMediaCodec.getOutputBuffers();}}/*** 停止编码并释放编码资源占用*/public void stopMediaCodec() {mMediaCodec.stop();mMediaCodec.release();}public void release() {if(mMediaCodec!=null) {try {mMediaCodec.stop();} catch (Exception e) {e.printStackTrace();}mMediaCodec.release();}}public AudioCodec setAudioCodecFrameListener(AudioCodecFrameListener audioCodecFrameListener) {this.mAudioCodecFrameListener = audioCodecFrameListener;return this;}public static interface AudioCodecFrameListener{public void onAudioCodecFrame(ByteBuffer buffer, BufferInfo info);public void onAudioCodecFormatChanged(MediaFormat format);}}
通过上边的过程,我们就已经通过Android 采集到了音频并成功编码,接下来就是如何把编码后的数据通过RTP包的形式发送出去。关键代码构建一个Rtp包(这里建议使用Android Studio jni编程 CMake 方式非常方便) 

/**
 * Builds one RTP-over-RTSP interleaved packet for a single AAC frame.
 *
 * Buffer layout: [RtpOverRtsp '$'+channel+length][RtpHeader][4-byte AAC AU header][payload].
 * The caller owns the returned RtpPacket and its internal buffer.
 * Returns NULL if the packet buffer cannot be allocated.
 */
RtpPacket* RtpSession::buildPacket(const void *data, int len, uint8_t pt, uint32_t timestampinc) {
    bool isAudio = (pt == 97);  // payload type 97 identifies audio in this session

    RtpPacket *rtpPacket = new RtpPacket();
    rtpPacket->mPlayloadtype = pt;
    rtpPacket->mPlayloadlength = len;

    size_t rtpL = sizeof(RtpOverRtsp);
    size_t headerlen = sizeof(RtpHeader);
    size_t packetlen = rtpL + headerlen + len;
    if (isAudio) {
        packetlen += 4;  // room for the 4-byte AU header written by addAACHeader()
    }
    rtpPacket->mPacketlength = packetlen;

    uint8_t *packet = (uint8_t *) malloc(packetlen);
    if (packet == NULL) {
        // BUGFIX: the original dereferenced an unchecked malloc result and leaked
        // the RtpPacket on allocation failure.
        delete rtpPacket;
        return NULL;
    }
    rtpPacket->mPacket = packet;

    // RTSP interleaved framing: '$', channel id, then the 16-bit RTP payload length.
    RtpOverRtsp *rtpOverRtsp = (RtpOverRtsp *) packet;
    rtpOverRtsp->rtpS = 0x24;
    rtpOverRtsp->channel = isAudio ? 0x02 : 0x00;
    rtpOverRtsp->packetLength = htons(packetlen - rtpL);

    // Fixed RTP header; sequence number and SSRC come from the per-stream config.
    RtpConfig *rtpConfig = getRtpConfig(isAudio);
    RtpHeader *rtpHeader = (RtpHeader *) (packet + rtpL);
    rtpHeader->version = RTP_VERSION;
    rtpHeader->padding = 0;
    rtpHeader->marker = 1;  // one complete AAC frame per packet
    rtpHeader->extension = 0;
    rtpHeader->csrccount = mNumCsrcs;
    rtpHeader->payloadtype = pt;
    rtpHeader->sequencenumber = htons(rtpConfig->mSequenceNumber++);
    rtpHeader->timestamp = htonl(timestampinc);
    rtpHeader->ssrc = htonl(rtpConfig->mSsrc);

    size_t mediaHeaderLen = 0;
    if (isAudio) {
        mediaHeaderLen = addAACHeader(packet + rtpL + headerlen, len);
    }
    uint8_t *payload = packet + rtpL + headerlen + mediaHeaderLen;
    memcpy(payload, data, len);
    return rtpPacket;
}
混包方式 http://www.360doc.com/content/13/0906/18/13084517_312677323.shtml
视频 封包 同上边一样 使用相机采集数据,使用Android MediaCodec 硬编码,然后使用FU-A分包方法,完成Rtp包
/**
 * Builds one RTP-over-RTSP interleaved packet for an AAC frame or an H.264 FU-A fragment.
 *
 * Audio layout: [RtpOverRtsp][RtpHeader][4-byte AAC AU header][payload].
 * Video layout: [RtpOverRtsp][RtpHeader][FU indicator][FU header][NAL fragment].
 * For video, start/end mark the first/last fragment of the NAL unit and the RTP
 * marker bit is set on the last fragment.
 * The caller owns the returned RtpPacket and its internal buffer.
 * Returns NULL if the packet buffer cannot be allocated.
 */
RtpPacket* RtpSession::buildPacket(const void *data, int len, uint8_t pt, uint32_t timestampinc,
                                   uint8_t start, uint8_t end,
                                   uint8_t NAL_F, uint8_t NAL_NRI, uint8_t NAL_Type) {
    bool isAudio = (pt == 97);  // payload type 97 identifies audio in this session

    RtpPacket *rtpPacket = new RtpPacket();
    rtpPacket->mPlayloadtype = pt;
    rtpPacket->mPlayloadlength = len;

    size_t rtpL = sizeof(RtpOverRtsp);
    size_t headerlen = sizeof(RtpHeader);
    size_t packetlen = rtpL + headerlen + len;
    if (isAudio) {
        packetlen += 4;  // 4-byte AU header written by addAACHeader()
    } else {
        packetlen += sizeof(H264FUindicator);
        packetlen += sizeof(H264FUHeader);
    }
    rtpPacket->mPacketlength = packetlen;

    uint8_t *packet = (uint8_t *) malloc(packetlen);
    if (packet == NULL) {
        // BUGFIX: the original dereferenced an unchecked malloc result and leaked
        // the RtpPacket on allocation failure.
        delete rtpPacket;
        return NULL;
    }
    rtpPacket->mPacket = packet;

    // RTSP interleaved framing: '$', channel id, then the 16-bit RTP payload length.
    RtpOverRtsp *rtpOverRtsp = (RtpOverRtsp *) packet;
    rtpOverRtsp->rtpS = 0x24;
    rtpOverRtsp->channel = isAudio ? 0x02 : 0x00;
    rtpOverRtsp->packetLength = htons(packetlen - rtpL);

    RtpConfig *rtpConfig = getRtpConfig(isAudio);
    RtpHeader *rtpHeader = (RtpHeader *) (packet + rtpL);
    rtpHeader->version = RTP_VERSION;
    rtpHeader->padding = 0;
    rtpHeader->marker = end;  // marker set only on the final FU-A fragment
    rtpHeader->extension = 0;
    rtpHeader->csrccount = mNumCsrcs;
    rtpHeader->payloadtype = pt;
    rtpHeader->sequencenumber = htons(rtpConfig->mSequenceNumber++);
    rtpHeader->timestamp = htonl(timestampinc);
    rtpHeader->ssrc = htonl(rtpConfig->mSsrc);

    size_t mediaHeaderLen = 0;
    if (isAudio) {
        mediaHeaderLen = addAACHeader(packet + rtpL + headerlen, len);
    } else {
        // FU indicator (F/NRI + type 28) followed by FU header (S/E bits + NAL type).
        mediaHeaderLen = addH264FUindicator(packet + rtpL + headerlen, NAL_F, NAL_NRI);
        mediaHeaderLen += addH264FUHeader(packet + rtpL + headerlen + mediaHeaderLen,
                                          start, end, NAL_Type);
    }
    uint8_t *payload = packet + rtpL + headerlen + mediaHeaderLen;
    memcpy(payload, data, len);
    return rtpPacket;
}
源码链接
https://gitee.com/gan30/MediaStream/tree/master
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签:  Rtp aac h264