您的位置:首页 > 移动开发 > Android开发

Android视频采集

2016-04-22 16:21 465 查看
http://www.rosoo.net/a/201111/15259.html

将之前做过的一个比较实用的程序分享出来:在 Android 上实时采集视频,并在 PC 上显示出采集到的视频,包括 PC 端和 Android 端程序,基于 Android 1.5,在 HTC G3 上测试通过。程序实现视频流的获取,并简单地在第 20 帧到来的时候把该帧写入文件,这样就可以拿到电脑上进行分析。

TAG: Android 视频采集
相关完整源码可以从这里下载到:

http://bbs.rosoo.net/forum.php?mod=viewthread&tid=8669

2010-10-13晚上 更新~ 将之前做过的一个比较实用的在Android实时采集视频,并在PC上显示出采集到的视频的程序,包括PC端和Android端程序,基于Android 1.5
在HTC G3上测试通过。代码在分界线之后。

之前网上找了很多资料,没有找到如何截取Android视频流。后来发现在Android的拍照视频预览时就可以截取视频数据。每获得一帧就调用一下接口函数。

我的开发平台是Android 1.5,这个程序实现视频流的获取,程序简单地在第20帧到来的时候,写入到文件中。这样就可以拿到电脑上进行分析。

具体请大家参考代码

package com.sunshine;

import java.io.File;

import java.io.RandomAccessFile;

import android.app.Activity;

import android.content.res.Configuration;

import android.graphics.PixelFormat;

import android.hardware.Camera;

import android.os.Bundle;

import android.util.Log;

import android.view.SurfaceHolder;

import android.view.SurfaceView;

import android.view.Window;

import android.view.WindowManager;

import android.view.SurfaceHolder.Callback;

/**
 * Full-screen camera-preview activity that also writes a captured JPEG
 * to /sdcard/camera.jpg and installs a {@link StreamIt} preview callback
 * that dumps one raw preview frame for offline analysis.
 */
public class AndroidVideo extends Activity implements Callback,
        Camera.PictureCallback {

    // Surface the camera preview is rendered into.
    private SurfaceView mSurfaceView = null;
    // Holder handed to the camera via setPreviewDisplay().
    private SurfaceHolder mSurfaceHolder = null;
    // Opened in surfaceCreated(), released in surfaceDestroyed().
    private Camera mCamera = null;
    // True while startPreview() is in effect.
    private boolean mPreviewRunning = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Borderless full-screen window so the preview fills the display.
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required on pre-3.0 devices for camera preview surfaces.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    /**
     * Saves the captured JPEG bytes to /sdcard/camera.jpg.
     * FIX: the file is now closed in a finally block so the descriptor is
     * not leaked when write() throws.
     */
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        RandomAccessFile raf = null;
        try {
            Log.v("System.out", "get it!");
            File file = new File("/sdcard/camera.jpg");
            raf = new RandomAccessFile(file, "rw");
            raf.write(data);
        } catch (Exception ex) {
            Log.v("System.out", ex.toString());
        } finally {
            if (raf != null) {
                try {
                    raf.close();
                } catch (Exception ignored) {
                    // Nothing useful to do if close itself fails.
                }
            }
        }
    }

    /**
     * (Re)configures the camera for the new surface size and starts the
     * preview with a StreamIt frame callback attached.
     */
    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        p.setPreviewSize(width, height);
        mCamera.setPreviewCallback(new StreamIt());
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            // FIX: was silently swallowed; at least record the failure.
            Log.v("System.out", ex.toString());
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        // FIX: detach the preview callback before releasing so no frame is
        // delivered into a released camera.
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();
        mPreviewRunning = false;
        mCamera.release();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Orientation-specific handling was stubbed out in the original;
        // both landscape and portrait intentionally fall through.
    }
}

/**
 * Preview-frame callback that dumps exactly one frame (the 20th) of the
 * raw YUV preview stream to /sdcard/pal.pal for offline analysis.
 */
class StreamIt implements Camera.PreviewCallback {

    // Frame counter; the capture fires when it reaches 20.
    private int tick = 1;

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (tick == 20) {
            System.out.println("data len: " + data.length);
            RandomAccessFile raf = null;
            try {
                // "rw" mode creates the file if absent, so the original
                // exists()/createNewFile() dance was redundant.
                File file = new File("/sdcard/pal.pal");
                raf = new RandomAccessFile(file, "rw");
                raf.write(data);
            } catch (Exception ex) {
                Log.v("System.out", ex.toString());
            } finally {
                // FIX: close in finally so the descriptor is not leaked
                // when write() throws.
                if (raf != null) {
                    try {
                        raf.close();
                    } catch (Exception ignored) {
                        // Nothing useful to do if close itself fails.
                    }
                }
            }
        }
        // FIX: the original incremented tick a second time inside the
        // capture branch, skipping a count; one increment per frame.
        tick++;
    }
}

xml 布局文件

<?xml version="1.0" encoding="utf-8"?>

<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"

android:layout_width="fill_parent" android:layout_height="fill_parent"

android:orientation="vertical">

<SurfaceView android:id="@+id/surface_camera"

android:layout_width="fill_parent" android:layout_height="fill_parent">

</SurfaceView>

</LinearLayout>

注意在项目配置文件中还要加上访问权限

<uses-permission android:name="android.permission.CAMERA" />

通过查资料发现,Android每帧的数据流的格式是YUV420

下面附上一个将 YUV420 转成 RGB 的函数:

staticpublicvoiddecodeYUV420SP(byte[]
rgbBuf,byte[] yuv420sp,intwidth,intheight)
{
finalintframeSize
= width * height;
if(rgbBuf
==null)
thrownewNullPointerException("buffer
'rgbBuf' is null");
if(rgbBuf.length
< frameSize *3)
thrownewIllegalArgumentException("buffer
'rgbBuf' size "
+ rgbBuf.length +" < minimum "+
frameSize *3);

if(yuv420sp
==null)
thrownewNullPointerException("buffer
'yuv420sp' is null");

if(yuv420sp.length
< frameSize *3/2)

thrownewIllegalArgumentException("buffer
'yuv420sp' size "+ yuv420sp.length
+" < minimum "+
frameSize *3/2);

inti
=0, y =0;

intuvp
=0, u =0,
v =0;
inty1192
=0, r =0,
g =0, b =0;

for(intj
=0, yp =0;
j < height; j++) {
uvp = frameSize + (j >>1)
* width;
u =0;

v =0;

for(i
=0; i < width; i++, yp++) {
y = (0xff&
((int) yuv420sp[yp])) -16;

if(y
<0) y =0;

if((i
&1) ==0)
{
v = (0xff&
yuv420sp[uvp++]) -128;
u = (0xff&
yuv420sp[uvp++]) -128;
}

y1192 =1192*
y;
r = (y1192 +1634*
v);
g = (y1192 -833*
v -400* u);
b = (y1192 +2066*
u);

if(r
<0) r =0;elseif(r
>262143) r =262143;

if(g
<0) g =0;elseif(g
>262143) g =262143;

if(b
<0) b =0;elseif(b
>262143) b =262143;

rgbBuf[yp *3]
= (byte)(r >>10);

rgbBuf[yp *3+1]
= (byte)(g >>10);

rgbBuf[yp *3+2]
= (byte)(b >>10);

}
}
}

代码来自http://chenweihuacwh.javaeye.com/blog/571223

感谢cwh643

-----------------------------分界线-------------------------------------------

-----------------------------2010-10-13更新-------------------------------

Android 端

package com.sunshine;

import java.io.DataInputStream;

import java.io.DataOutputStream;

import java.net.Socket;

import android.app.Activity;

import android.content.res.Configuration;

import android.graphics.PixelFormat;

import android.hardware.Camera;

import android.os.Bundle;

import android.view.SurfaceHolder;

import android.view.SurfaceView;

import android.view.View;

import android.view.Window;

import android.view.WindowManager;

import android.view.SurfaceHolder.Callback;

import android.view.View.OnClickListener;

import android.widget.Button;

import android.widget.EditText;

/**
 * Camera-preview activity that streams raw preview frames over TCP to a
 * PC viewer. The user enters the PC's IP and taps Connect; a background
 * {@link Kit} thread then pushes each frame to port 8899 and waits for a
 * boolean ack before sending the next one.
 */
public class AndroidVideo extends Activity implements Callback, OnClickListener {

    private SurfaceView mSurfaceView = null;
    private SurfaceHolder mSurfaceHolder = null;
    private Camera mCamera = null;
    private boolean mPreviewRunning = false;

    // Connection UI
    private EditText remoteIP = null;
    private Button connect = null;
    private String remoteIPStr = null;

    // Video data
    private StreamIt streamIt = null;
    public static Kit kit = null;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Borderless full-screen window so the preview fills the display.
        getWindow().setFormat(PixelFormat.TRANSLUCENT);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
                WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.main);
        mSurfaceView = (SurfaceView) this.findViewById(R.id.surface_camera);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        // Required on pre-3.0 devices for camera preview surfaces.
        mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        remoteIP = (EditText) this.findViewById(R.id.remoteIP);
        connect = (Button) this.findViewById(R.id.connect);
        connect.setOnClickListener(this);
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int width,
            int height) {
        if (mPreviewRunning) {
            mCamera.stopPreview();
        }
        Camera.Parameters p = mCamera.getParameters();
        p.setPreviewSize(width, height);
        streamIt = new StreamIt();
        kit = new Kit();
        mCamera.setPreviewCallback(streamIt);
        mCamera.setParameters(p);
        try {
            mCamera.setPreviewDisplay(holder);
        } catch (Exception ex) {
            // FIX: was silently swallowed; at least record the failure.
            ex.printStackTrace();
        }
        mCamera.startPreview();
        mPreviewRunning = true;
    }

    public void surfaceCreated(SurfaceHolder holder) {
        mCamera = Camera.open();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // FIX: detach the preview callback before releasing so no frame is
        // delivered into a released camera.
        mCamera.setPreviewCallback(null);
        mCamera.stopPreview();
        mPreviewRunning = false;
        mCamera.release();
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Orientation-specific handling was stubbed out in the original;
        // both landscape and portrait intentionally fall through.
    }

    /**
     * Sender loop: pushes the latest preview frame to the PC in two
     * half-frame chunks, then blocks on a one-byte ack.
     */
    class Kit implements Runnable {

        // volatile: written from the loop on failure, read each iteration.
        private volatile boolean run = true;
        // Bytes per half-frame chunk (240x160 NV21 frame = 57600 bytes).
        private final int tt = 28800;

        public void run() {
            Socket socket = null;
            try {
                socket = new Socket(remoteIPStr, 8899);
                DataOutputStream dos = new DataOutputStream(
                        socket.getOutputStream());
                DataInputStream dis = new DataInputStream(
                        socket.getInputStream());
                while (run) {
                    // FIX: snapshot the shared field once per iteration; the
                    // original read it twice and could send a torn frame.
                    byte[] frame = streamIt.yuv420sp;
                    if (frame == null) {
                        // FIX: no frame delivered yet — the original NPE'd
                        // here if Connect was tapped before the first frame.
                        Thread.sleep(50);
                        continue;
                    }
                    // FIX: use the tt constant instead of hardcoded 28800.
                    dos.write(frame, 0, tt);
                    dos.write(frame, tt, tt);
                    dis.readBoolean(); // wait for the PC's ack
                    Thread.sleep(155); // crude frame-rate throttle
                }
            } catch (Exception ex) {
                run = false;
                ex.printStackTrace();
            } finally {
                // FIX: the socket was never closed on exit.
                if (socket != null) {
                    try {
                        socket.close();
                    } catch (Exception ignored) {
                        // Nothing useful to do if close itself fails.
                    }
                }
            }
        }
    }

    @Override
    public void onClick(View view) {
        if (view == connect) { // Connect button: start the sender thread
            remoteIPStr = remoteIP.getText().toString();
            // FIX: guard against tapping Connect before surfaceChanged()
            // has created the Kit instance.
            if (kit != null) {
                new Thread(AndroidVideo.kit).start();
            }
        }
    }
}

/**
 * Preview callback that publishes the latest raw frame for the sender
 * thread (AndroidVideo.Kit) to transmit.
 */
class StreamIt implements Camera.PreviewCallback {

    // Latest NV21 preview frame; read concurrently by the sender thread.
    public byte[] yuv420sp = null;

    public void onPreviewFrame(byte[] data, Camera camera) {
        // FIX: copy the buffer instead of keeping a reference — the camera
        // reuses its callback buffer, so holding `data` directly lets the
        // sender thread observe a half-overwritten frame.
        // (Also removed dead commented-out thread-start code and an unused
        // flag field from the original.)
        yuv420sp = data.clone();
    }
}

PC端

import java.awt.Frame;

import java.awt.Graphics;

import java.awt.Point;

import java.awt.Transparency;

import java.awt.color.ColorSpace;

import java.awt.image.BufferedImage;

import java.awt.image.ComponentColorModel;

import java.awt.image.DataBuffer;

import java.awt.image.DataBufferByte;

import java.awt.image.PixelInterleavedSampleModel;

import java.awt.image.Raster;

import java.awt.image.SampleModel;

import java.awt.image.WritableRaster;

import java.io.DataInputStream;

import java.io.DataOutputStream;

import java.net.ServerSocket;

import java.net.Socket;

/**
 * PC-side viewer: accepts one TCP connection on port 8899, repeatedly
 * reads a raw 240x160 NV21 frame, decodes it to RGB, paints it scaled to
 * 480x320, and acks each frame with a boolean.
 */
public class FlushMe extends Frame {

    private static final long serialVersionUID = 1L;

    // Image currently being displayed.
    private BufferedImage im;

    // Frame geometry (must match the Android sender).
    private static final int width = 240;
    private static final int height = 160;
    private static final int numBands = 3; // R, G, B
    // NV21 frame size in bytes: width * height * 3 / 2.
    private static final int dataLen = 57600;
    // Bytes per network chunk (frame is sent in dataLen/tt chunks).
    private static final int tt = 28800;

    // RGB pixel buffer (3 bytes per pixel).
    private byte[] byteArray = new byte[width * height * numBands];
    // Raw YUV frame as received from the phone.
    private byte[] yuv420sp = new byte[dataLen];

    private static final int[] bandOffsets = new int[] { 0, 1, 2 };
    private static final SampleModel sampleModel = new PixelInterleavedSampleModel(
            DataBuffer.TYPE_BYTE, width, height, 3, width * 3, bandOffsets);
    private static final ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
    private static final ComponentColorModel cm = new ComponentColorModel(cs,
            false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);

    public FlushMe() {
        super("Flushing");
        updateIM();
        setSize(480, 320);
        // Close button terminates the process.
        this.addWindowListener(new java.awt.event.WindowAdapter() {
            public void windowClosing(java.awt.event.WindowEvent e) {
                System.exit(0);
            }
        });
        // Center the window on screen.
        this.setLocationRelativeTo(null);
        this.setResizable(false);
        this.setVisible(true);
        this.getData(); // blocks forever serving the phone
    }

    // Skip the default background clear to reduce flicker.
    public void update(Graphics g) {
        paint(g);
    }

    public void paint(Graphics g) {
        g.drawImage(im, 0, 0, 480, 320, this);
    }

    /**
     * Receive loop: reads one full frame, redecodes/repaints, acks.
     * Runs until the connection drops or an error occurs.
     */
    public void getData() {
        ServerSocket server = null;
        Socket socket = null;
        try {
            server = new ServerSocket(8899);
            socket = server.accept();
            DataInputStream dis = new DataInputStream(socket.getInputStream());
            DataOutputStream dos = new DataOutputStream(socket.getOutputStream());
            while (true) {
                // FIX: the original used dis.read(), whose return value was
                // ignored — a short read would desynchronize every later
                // frame. readFully blocks until the whole frame arrives.
                dis.readFully(yuv420sp, 0, dataLen);
                // Got a frame: decode and repaint immediately.
                updateIM();
                im.flush();
                repaint();
                dos.writeBoolean(true); // ack so the phone sends the next frame
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            // FIX: the sockets were never closed on the error path.
            try {
                if (socket != null) socket.close();
            } catch (Exception ignored) {
            }
            try {
                if (server != null) server.close();
            } catch (Exception ignored) {
            }
        }
    }

    /** Rebuilds the BufferedImage from the current YUV frame. */
    private void updateIM() {
        try {
            // Decode YUV into the packed RGB buffer.
            decodeYUV420SP(byteArray, yuv420sp, width, height);
            // FIX: the original passed numBands (3) as the DataBuffer size;
            // the raster needs access to the whole RGB array.
            DataBuffer dataBuffer = new DataBufferByte(byteArray, byteArray.length);
            WritableRaster wr = Raster.createWritableRaster(sampleModel,
                    dataBuffer, new Point(0, 0));
            im = new BufferedImage(cm, wr, false, null);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    /**
     * Decodes an NV21 (YUV420 semi-planar) frame into packed RGB bytes
     * using fixed-point ITU-R BT.601 coefficients (scaled by 1024).
     *
     * @param rgbBuf   output, at least width*height*3 bytes
     * @param yuv420sp input, at least width*height*3/2 bytes
     */
    private static void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp,
            int width, int height) {
        final int frameSize = width * height;
        if (rgbBuf == null)
            throw new NullPointerException("buffer 'rgbBuf' is null");
        if (rgbBuf.length < frameSize * 3)
            throw new IllegalArgumentException("buffer 'rgbBuf' size "
                    + rgbBuf.length + " < minimum " + frameSize * 3);
        if (yuv420sp == null)
            throw new NullPointerException("buffer 'yuv420sp' is null");
        if (yuv420sp.length < frameSize * 3 / 2)
            throw new IllegalArgumentException("buffer 'yuv420sp' size "
                    + yuv420sp.length + " < minimum " + frameSize * 3 / 2);

        int i = 0, y = 0;
        int uvp = 0, u = 0, v = 0;
        int y1192 = 0, r = 0, g = 0, b = 0;

        for (int j = 0, yp = 0; j < height; j++) {
            // Chroma plane: one interleaved V,U pair per 2x2 luma block.
            uvp = frameSize + (j >> 1) * width;
            u = 0;
            v = 0;
            for (i = 0; i < width; i++, yp++) {
                y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0)
                    y = 0;
                // Refresh U/V on even columns; odd columns reuse them.
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }
                y1192 = 1192 * y;
                r = (y1192 + 1634 * v);
                g = (y1192 - 833 * v - 400 * u);
                b = (y1192 + 2066 * u);
                // Clamp to [0, 262143] (= 255 << 10) before descaling.
                if (r < 0)
                    r = 0;
                else if (r > 262143)
                    r = 262143;
                if (g < 0)
                    g = 0;
                else if (g > 262143)
                    g = 262143;
                if (b < 0)
                    b = 0;
                else if (b > 262143)
                    b = 262143;
                rgbBuf[yp * 3] = (byte) (r >> 10);
                rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
                rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
            }
        }
    }

    public static void main(String[] args) {
        // Constructor blocks in getData(); no reference needs to be kept.
        new FlushMe();
    }
}

上个截图



(sundos)
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签: