您的位置:首页 > 移动开发 > Android开发

android下MediaCodec硬编码(转)

2014-08-18 18:41 615 查看
转载至http://blog.csdn.net/liuhongxiangm/article/details/17584303

刚学习android,工作中涉及android平台硬编硬解,照着上面文章写了个存文件(保存.h264文件)的,因为过程差不多,就把原版的贴上来

但是保存到文件中的yuv数据是黑白的,还在找原因中

 

硬编过程

package com.encode.androidencode;

import java.nio.ByteBuffer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

public class AvcEncoder
{
    /**
     * Thin wrapper around the hardware H.264 encoder ({@link MediaCodec},
     * "video/avc"). Accepts raw YV12 camera frames, converts them to I420,
     * and produces Annex-B NAL units with SPS/PPS re-prepended to each
     * key frame so every IDR is independently decodable.
     */

    private MediaCodec mediaCodec;
    int m_width;
    int m_height;
    // Cached codec-config NALs (SPS/PPS). The encoder emits them only in its
    // first output buffer; they are saved here and prepended to key frames.
    byte[] m_info = null;

    // Scratch buffer holding the current frame after the YV12 -> I420 plane swap.
    private byte[] yuv420 = null;
    // Frame rate given at construction; used to derive presentation timestamps.
    private int m_framerate;
    // Number of frames queued so far; drives a monotonically increasing PTS.
    private long m_frameIndex = 0;

    /**
     * Creates, configures and starts the AVC encoder.
     *
     * @param width     frame width in pixels
     * @param height    frame height in pixels
     * @param framerate frames per second (also used for timestamp spacing)
     * @param bitrate   target bitrate in bits per second
     */
    @SuppressLint("NewApi")
    public AvcEncoder(int width, int height, int framerate, int bitrate) {

        m_width  = width;
        m_height = height;
        m_framerate = framerate > 0 ? framerate : 1; // guard divide-by-zero in PTS math
        yuv420 = new byte[width * height * 3 / 2];

        try {
            mediaCodec = MediaCodec.createEncoderByType("video/avc");
        } catch (Exception e) {
            // createEncoderByType declares IOException on newer SDKs; fail loudly
            // instead of continuing with a null codec.
            throw new RuntimeException("failed to create video/avc encoder", e);
        }
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", width, height);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, framerate);
        // NOTE(review): many devices only accept semi-planar (NV12/NV21-style) input;
        // a planar/semi-planar mismatch here is the classic cause of wrong-color
        // output (the article author reports exactly that) — confirm per device via
        // CodecCapabilities.colorFormats.
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // key-frame interval, seconds

        mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
    }

    /** Stops and releases the underlying codec; call once encoding is finished. */
    @SuppressLint("NewApi")
    public void close() {
        try {
            mediaCodec.stop();
            mediaCodec.release();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Converts one YV12 frame to I420, queues it into the encoder, and drains
     * all currently available encoded output into {@code output}.
     *
     * @param input  raw YV12 frame, width*height*3/2 bytes
     * @param output destination for Annex-B encoded data; must be large enough
     *               to hold one compressed frame plus cached SPS/PPS
     * @return number of bytes written to {@code output}; 0 if the encoder
     *         produced nothing this call; -1 if the first output buffer did
     *         not begin with an Annex-B start code
     */
    @SuppressLint("NewApi")
    public int offerEncoder(byte[] input, byte[] output)
    {
        int pos = 0;
        swapYV12toI420(input, yuv420, m_width, m_height);
        try {
            ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
            ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
            int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
            if (inputBufferIndex >= 0)
            {
                ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                inputBuffer.put(yuv420);
                // Monotonically increasing PTS in microseconds. The original passed a
                // constant 0, which makes some encoders drop or mis-order frames.
                long pts = m_frameIndex * 1000000L / m_framerate;
                m_frameIndex++;
                mediaCodec.queueInputBuffer(inputBufferIndex, 0, yuv420.length, pts, 0);
            }

            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

            while (outputBufferIndex >= 0)
            {
                ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                byte[] outData = new byte[bufferInfo.size];
                outputBuffer.get(outData);

                if (m_info != null)
                {
                    // Normal frame data: append to the caller's buffer.
                    System.arraycopy(outData, 0, output, pos, outData.length);
                    pos += outData.length;
                }
                else // first output carries SPS/PPS (codec config); cache it for key frames
                {
                    ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
                    if (spsPpsBuffer.getInt() == 0x00000001)
                    {
                        m_info = new byte[outData.length];
                        System.arraycopy(outData, 0, m_info, 0, outData.length);
                    }
                    else
                    {
                        // Release the buffer before bailing out — the original
                        // returned here without releasing, leaking the buffer.
                        mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                        return -1;
                    }
                }

                mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
            }

            // Key frames (NAL type 5, byte 0x65 after the 4-byte start code) arrive
            // without SPS/PPS; prepend the cached config. Guard pos >= 5 and
            // m_info != null — the original indexed output[4] unconditionally,
            // reading stale bytes when nothing was drained this call.
            if (pos >= 5 && m_info != null && output[4] == 0x65)
            {
                // Use a local copy instead of aliasing the yuv420 input buffer
                // as scratch space, as the original did.
                byte[] frame = new byte[pos];
                System.arraycopy(output, 0, frame, 0, pos);
                System.arraycopy(m_info, 0, output, 0, m_info.length);
                System.arraycopy(frame, 0, output, m_info.length, pos);
                pos += m_info.length;
            }

        } catch (Throwable t) {
            t.printStackTrace();
        }

        return pos;
    }

    /**
     * Converts YV12 (Y plane, then V plane, then U plane) to I420/YUV420P
     * (Y, then U, then V) by swapping the two chroma planes.
     */
    private void swapYV12toI420(byte[] yv12bytes, byte[] i420bytes, int width, int height)
    {
        int ySize = width * height;
        int cSize = ySize / 4;
        // Y plane is identical in both layouts.
        System.arraycopy(yv12bytes, 0, i420bytes, 0, ySize);
        // YV12 stores V first; I420 wants U first — swap the chroma planes.
        System.arraycopy(yv12bytes, ySize + cSize, i420bytes, ySize, cSize);
        System.arraycopy(yv12bytes, ySize, i420bytes, ySize + cSize, cSize);
    }

}
 
UI和摄像头设定 
package com.interfaces.androidencode;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.List;

import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Bundle;
import android.os.StrictMode;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.util.Log;
import android.view.Menu;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import com.encode.androidencode.AvcEncoder;

public class MainActivity extends Activity
        implements SurfaceHolder.Callback, PreviewCallback
{
    /**
     * Demo activity: previews the camera in a SurfaceView, pushes each YV12
     * preview frame through {@link AvcEncoder}, and streams the resulting
     * H.264 data over UDP to a hard-coded address.
     */

    // UDP transport for the encoded stream; either may stay null if setup fails.
    DatagramSocket socket;
    InetAddress address;

    AvcEncoder avcCodec;
    public Camera m_camera;
    SurfaceView   m_prevewview;
    SurfaceHolder m_surfaceHolder;
    int width = 1280;
    int height = 720;
    int framerate = 20;
    int bitrate = 2500000;

    // Output buffer for one encoded frame. Sized like a raw frame, which is a
    // safe upper bound for the compressed output plus SPS/PPS.
    byte[] h264 = new byte[width * height * 3 / 2];

    @SuppressLint("NewApi")
    @Override
    protected void onCreate(Bundle savedInstanceState) {

        // StrictMode is enabled to surface main-thread I/O during development.
        StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder()
                .detectDiskReads()
                .detectDiskWrites()
                .detectAll()
                .penaltyLog()
                .build());
        StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder()
                .detectLeakedSqlLiteObjects()
                .detectLeakedClosableObjects()
                .penaltyLog()
                .penaltyDeath()
                .build());

        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        avcCodec = new AvcEncoder(width, height, framerate, bitrate);

        m_prevewview = (SurfaceView) findViewById(R.id.SurfaceViewPlay);
        m_surfaceHolder = m_prevewview.getHolder(); // obtain the SurfaceHolder from the SurfaceView
        m_surfaceHolder.setFixedSize(width, height); // preview size
        m_surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        m_surfaceHolder.addCallback((Callback) this);

        try {
            socket = new DatagramSocket();
            address = InetAddress.getByName("192.168.12.124");
        } catch (SocketException e) {
            e.printStackTrace();
        } catch (UnknownHostException e) {
            // address stays null; onPreviewFrame checks for this before sending.
            e.printStackTrace();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3)
    {
        // No-op: preview size is fixed in onCreate.
    }

    @SuppressLint("NewApi")
    @SuppressWarnings("deprecation")
    @Override
    public void surfaceCreated(SurfaceHolder arg0)
    {
        try
        {
            m_camera = Camera.open();
            m_camera.setPreviewDisplay(m_surfaceHolder);
            Camera.Parameters parameters = m_camera.getParameters();
            parameters.setPreviewSize(width, height);
            parameters.setPictureSize(width, height);
            // YV12 preview format matches what AvcEncoder.offerEncoder expects.
            parameters.setPreviewFormat(ImageFormat.YV12);
            m_camera.setParameters(parameters);
            m_camera.setPreviewCallback((PreviewCallback) this);
            m_camera.startPreview();

        } catch (IOException e)
        {
            e.printStackTrace();
        } catch (RuntimeException e)
        {
            // Camera.open / setParameters throw RuntimeException when the camera
            // is unavailable or rejects the requested size/format; the original
            // caught only IOException and crashed here.
            e.printStackTrace();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder arg0)
    {
        // Guard against a failed Camera.open in surfaceCreated.
        if (m_camera != null)
        {
            m_camera.setPreviewCallback(null); // must come first, or teardown crashes
            m_camera.stopPreview();
            m_camera.release();
            m_camera = null;
        }
        if (avcCodec != null)
        {
            avcCodec.close();
        }
    }

    /**
     * Preview callback: encodes the YV12 frame and sends the H.264 payload
     * over UDP to port 5000. NOTE(review): this runs on the main thread, so
     * the StrictMode policy above will flag the network call — move the send
     * to a worker thread for production use.
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera)
    {
        Log.v("h264", "h264 start");
        int ret = avcCodec.offerEncoder(data, h264);

        // Only send when we have data and the socket/address were set up.
        if (ret > 0 && socket != null && address != null)
        {
            try {
                DatagramPacket packet = new DatagramPacket(h264, ret, address, 5000);
                socket.send(packet);
            } catch (IOException e)
            {
                // Was silently swallowed; log so dropped packets are visible.
                Log.e("h264", "UDP send failed", e);
            }
        }
        Log.v("h264", "h264 end");
    }
}
 
运行时把摄像头、网络、SD卡权限加上就可以了


                                            
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签:  android h264 yuv