关于手机采集摄像头视频socket实时传播 (由服务端采集发送数据)
2016-10-11 17:47
681 查看
一、具体流程:
1.通过客户端socket请求,服务端接收到请求后,获取socket的输出流对象outs。
2.服务端通过Camera的setPreviewCallback方法回调采集摄像头视频数据,将一张张图片数据压缩,发送到客户端,因为是图片,需要在图片开头加个标记,然后指定数据长度发送。
3.客户端接收数据,根据数据开头标记,以及数据的长度,接收数据,按一张张图片的形式,采用Handler更新ImageView。
思路很简单,主要代码:
服务端代码:
该代码也只做参考作用,代码中有漏洞,需要理解代码之后才能解决
1.通过客户端socket请求,服务端接收到请求后,获取socket的输出流对象outs。
2.服务端通过Camera的setPreviewCallback方法回调采集摄像头视频数据,将一张张图片数据压缩,发送到客户端,因为是图片,需要在图片开头加个标记,然后指定数据长度发送。
3.客户端接收数据,根据数据开头标记,以及数据的长度,接收数据,按一张张图片的形式,采用Handler更新ImageView。
思路很简单,主要代码:
服务端代码:
<div>public class MainActivity extends Activity { TextView tv; SurfaceView surfv; SurfaceHolder surfaceHolder; int screenWidth=300, screenHeight=300; public Handler mHandler = new Handler(){ public void handleMessage(android.os.Message msg) { switch (msg.what) { case CamConstant.INIT_CAMERA: /*if(CamConstant.hadInit){ return; }</div><div> Camera camera = CameraUtil.initCamera(surfaceHolder); if( null != camera){ CamConstant.hadInit = true; }*/ break; case CamConstant.RECYCLE_CAMERA: CameraUtil.recycleCamera(); CamConstant.hadInit = false; break; default: //tv.setText((CharSequence) msg.obj); tv.append((CharSequence) msg.obj+"\n"); break; } }; }; @Override protected void onCreate(Bundle savedInstanceState) { setContentView(R.layout.activity_main); super.onCreate(savedInstanceState); MainApplication.mHandler = this.mHandler; tv = (TextView) findViewById(R.id.textView); surfv = (SurfaceView) findViewById(R.id.surview); DisplayMetrics dm = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(dm); screenWidth = dm.widthPixels;// 获取屏幕分辨率宽度 screenHeight = dm.heightPixels; surfaceHolder = surfv.getHolder(); surfaceHolder.setFixedSize(screenWidth, screenHeight/4*2); surfaceHolder.addCallback(new Callback() { @Override public void surfaceDestroyed(SurfaceHolder holder) { // TODO Auto-generated method stub } @Override public void surfaceCreated(SurfaceHolder holder) { // TODO Auto-generated method stub CameraUtil.initCamera(surfaceHolder); } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { // TODO Auto-generated method stub } }); //tv.setMovementMethod(ScrollingMovementMethod.getInstance()); startService(); }; public void startService(){ Intent intent = new Intent(MainActivity.this, CamThreadService.class); startService(intent); } @Override protected void onDestroy() { // TODO Auto-generated method stub CameraUtil.recycleCamera(); CamConstant.hadInit = false; if(ServiceUtil.isServiceRunning(this, 
"CamService")){ stopService(new Intent(this, CamService.class)); } super.onDestroy(); } }</div>
public class CamThreadService extends Service { Socket mClintSocket; ServerSocket serverSocket; @Override public IBinder onBind(Intent intent) { return null; } @Override public void onCreate() { L.e("启动11111"); // TODO Auto-generated method stub new Thread(new Runnable() { @Override public void run() { // TODO Auto-generated method stub try { L.e("accept000"); sendMSG("accept1111"); serverSocket = new ServerSocket(CamConstant.Camera_Port); while(true){ L.e("accept0001111"); L.e("accept11111"); sendMSG("accept222"); mClintSocket = serverSocket.accept(); L.e("accept22222"); L.e("accept3333"); new Thread(){ public void run() { if(mClintSocket != null){ sendMSG("accept3333"); //初始化 //MainApplication.mHandler.sendEmptyMessage(CamConstant.INIT_CAMERA); L.e("accept4444"); boolean flag = true; while(flag){ if(CameraUtil.mCamera != null){ try { CameraUtil.mCamera.setPreviewCallback(new StreamIt(mClintSocket)); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } flag = false; mClintSocket = null; } } } }; }.start(); } } catch (IOException e) { // TODO Auto-generated catch block L.e("之行报错 停止"); e.printStackTrace(); } } }).start(); super.onCreate(); } /** * 视频流数据相关回调 */ private class StreamIt implements PreviewCallback { private OutputStream out; private InputStream in; private int times = 0; public StreamIt(Socket s) throws IOException { // TODO Auto-generated constructor stub this.out = s.getOutputStream(); this.in = s.getInputStream(); sendMSG("压缩后的数据11"); int[] textures = new int[1]; L.e("压缩后的数据1"); } @Override public void onPreviewFrame(byte[] data, Camera camera) { // TODO Auto-generated method stub Size size = camera.getParameters().getPreviewSize(); try { L.e("压缩后的数据1.5"); if(times == 0){ YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null); if(image != null){ //这是源数据 ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, 
outputStream); byte[] srcData = outputStream.toByteArray(); int len = srcData.length; Bitmap src = BitmapFactory.decodeByteArray(srcData, 0, len); //压缩后的数据 byte[] outdata = transImage(src, 640, 480); L.e("压缩后的数据2"); sendMSG("压缩后的数据22"); int datalen = outdata.length; out.write((byte) 0xA0); out.write(intTOBytes(datalen)); out.write(outdata, 0, datalen); if(!src.isRecycled()){ src.recycle(); } } } } catch (Exception e) { // TODO: handle exception e.printStackTrace(); } } /** * 数据转换,将bitmap转换为byte */ private byte[] transImage(Bitmap bitmap, int width, int height){ try{ int bitmapWidth = bitmap.getWidth(); int bitmapHeight = bitmap.getHeight(); //缩放图片的尺寸 float scaleWidth = (float) width/bitmapWidth; float scaleHeight = (float) height/bitmapHeight; Matrix matrix = new Matrix(); matrix.postScale(scaleWidth, scaleHeight); //产生缩放后的Bitmap对象 Bitmap resizeBitemp = Bitmap.createBitmap(bitmap, 0, 0, bitmapWidth, bitmapHeight, matrix, false); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); resizeBitemp.compress(CompressFormat.JPEG, 85, outputStream); byte[] byteArray = outputStream.toByteArray(); outputStream.close(); if(!bitmap.isRecycled()){ bitmap.recycle(); } if(!resizeBitemp.isRecycled()){ resizeBitemp.recycle(); } return byteArray; }catch(Exception ex){ ex.printStackTrace(); MainApplication.mHandler.sendEmptyMessage(CamConstant.RECYCLE_CAMERA); } return null; } /** * 将int 数值转换为4个字节 */ private byte[] intTOBytes(int value){ Log.e("cam",value+"d"); byte[] src = new byte[4]; src[3] = (byte) ((value >> 24) & 0xFF); src[2] = (byte)((value >> 16) & 0xFF); src[1] = (byte)((value >> 8) & 0xFF); src[0] = (byte)(value & 0xFF); return src; } } @Override public void onDestroy() { // TODO Auto-generated method stub try { if(mClintSocket !=null){ mClintSocket.close(); } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); } MainApplication.mHandler.sendEmptyMessage(CamConstant.RECYCLE_CAMERA); super.onDestroy(); } public void sendMSG(String 
text){ Message msg = new Message(); msg.what = 908; msg.obj = text; MainApplication.mHandler.sendMessage(msg); } }
</pre>
客户端代码:
public class MainActivity extends Activity { RevImageThread revImageThread; public static ImageView image; private static TextView textView1; private static Bitmap bitmap; private static final int COMPLETED = 0x111; private MyHandler handler; protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); image=(ImageView)findViewById(R.id.imageView1); textView1 = (TextView) findViewById(R.id.textView1); textView1.setMovementMethod(ScrollingMovementMethod.getInstance()); handler = new MyHandler(); revImageThread = new RevImageThread(handler); new Thread(revImageThread).start(); } static class MyHandler extends Handler{ @Override public void handleMessage(Message msg){ if(msg.obj != null){ textView1.append(msg.obj.toString()+"\n"); } if (msg.what == COMPLETED) { bitmap = (Bitmap)msg.obj; image.setImageBitmap(bitmap); super.handleMessage(msg); } } } public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } }
public class RevImageThread implements Runnable { public Socket s; public ServerSocket ss; //向UI线程发送消息 private Handler handler; private Bitmap bitmap; private static final int COMPLETED = 0x111; public RevImageThread(Handler handler){ this.handler = handler; } public void run() { try { ss = new ServerSocket(ScoketIPandPort.SOCKTPORT); } catch (IOException e2) { // TODO Auto-generated catch block e2.printStackTrace(); } while(true){ try { s = ss.accept(); Log.e("strat","ljq"); new Thread(){ public void run() { try { byte [] buffer = new byte[1024]; InputStream ins = null; ins = s.getInputStream(); Log.v("socket", "socketcc1111"); if(s != null){ Log.v("socket", "socketcc2222"); while(true){ int len = 0; int i = 0; int j = 0; boolean flag = true; /*while(flag){ i += ins.read(); Log.e("socket", ":"+i + " : " + j++); } */ if(ins.read() == 0xA0){ byte[] src = new byte[4]; len = ins.read(src); Log.v("sck", "src3:"+src[3]); Log.v("sck", "src2:"+src[2]); Log.v("sck", "src1:"+src[1]); Log.v("sck", "src0:"+src[0]); Log.v("sck", "src:"+src); Log.v("socket", "socketcc55551:"+len); Log.v("socket", "socketcc55552:"+src); len = bytesToInt(src, 0); Log.v("socket", "socketcc55553:"+src); Log.v("socket", "socketcc55554:"+len); //len = 4000000; byte[] srcData = new byte[len]; sendMSG(len+":len"); int readc = 0; ins.read(srcData, readc, len); Log.v("socket", "srcData0:"+srcData[0]); sendMSG("srcData0:"+srcData[0]); Log.v("socket", "srcData0:"+srcData[1]); Log.v("socket", "srcData0:"+srcData[2]); Log.v("socket", "srcData0:"+srcData[3]); Log.v("socket", "srcData0:"+srcData[4]); Log.v("socket", "srcData0:"+srcData[5]); Log.v("socket", "srcData0:"+srcData[6]); Log.v("socket", "srcData0:"+srcData[7]); Log.v("socket", "srcData0:"+srcData[len-1]); sendMSG(srcData[len-1]+"L"); bitmap = BitmapFactory.decodeByteArray(srcData, 0, len); Message msg =handler.obtainMessage(); msg.what = COMPLETED; msg.obj = bitmap; handler.removeMessages(COMPLETED); handler.sendMessage(msg); } //Message msg = new 
Message(); /* ByteArrayOutputStream outStream = new ByteArrayOutputStream(); while( (len=ins.read(buffer)) != -1){ outStream.write(buffer, 0, len); } ins.close();byte data[] = outStream.toByteArray(); bitmap = BitmapFactory.decodeByteArray(data, 0, data.length); */ } } } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); sendMSG(e.toString()+"bao cuo " +e.getMessage()); } }; }.start(); } catch (IOException e1) { // TODO Auto-generated catch block e1.printStackTrace(); } /* if(!s.isClosed()){ s.close(); }*/ //Bitmap bitmap = BitmapFactory.decodeStream(ins); } } public int bytesToInt(byte[] src, int offset){ int value; value = (int)((src[offset] & 0xFF)) | ((src[offset+1] & 0xFF)<<8) | ((src[offset+2] & 0xFF)<<16) | ((src[offset+3] & 0xFF)<<24); return value; } public int bytesToInt2(byte[] src, int offset){ int value; value = (int)((src[offset] & 0xFF<<24) | ((src[offset+1] & 0xFF)<<16) | ((src[offset+2] & 0xFF)<<8) | (src[offset+3] & 0xFF)); return value; } /* public static int bytesToInt2(byte[] src, int offset) { int value; value = (int) ( ((src[offset] & 0xFF)<<24) |((src[offset+1] & 0xFF)<<16) |((src[offset+2] & 0xFF)<<8) |(src[offset+3] & 0xFF)); return value; } */ public void sendMSG(String text){ Message msg = new Message(); msg.what = 908; msg.obj = text; handler.sendMessage(msg); } }
该代码也只做参考作用,代码中有漏洞,需要理解代码之后才能解决
DEMO源码
摄像头MediaCodec 编解码
MediaCodec编解码手机投屏功能的实现
相关文章推荐
- Android摄像头采集的视频数据流如何通过Socket实时发送到目标服务端
- Android摄像头採集的视频数据流怎样通过Socket实时发送到目标服务端
- Android端通过Usb建立Socket通讯(实时发送视频数据)
- Xcode中捕获iphone/ipad/ipod手机摄像头的实时视频数据
- 关于网络发送的数据缓冲例子(读数据和发送采用双线程)主要用于视频的实时传输
- 采集音频和摄像头视频并实时H264编码及AAC编码
- 关于servlet服务端接收客户端发送的List<?>数据的问题
- 基于Socket的Android手机视频实时传输
- 关于socket编程数据发送和接受的感受(wm和pc之间)
- 用udp传播实时采集的数据局域网能播放,不过由于是udp,数据顺序有些混乱,考虑加入rtp/rtcp
- 采集音频和摄像头视频并实时H264编码及AAC编码
- 采集音频和摄像头视频并实时H264编码及AAC编码
- 一个简单的socket服务端和客户端程序:客户端发送数据给服务端,服务端接收到数据后再给客户端发送数据
- 采集音频和摄像头视频并实时H264编码及AAC编码
- 基于Socket的Android手机视频实时传输
- socket客户端发送数据给服务端,服务端排序后返回
- socket客户端数据发送的数据服务端接收不到
- 采集音频和摄像头视频并实时H264编码及AAC编码
- 采集音频和摄像头视频并实时H264编码及AAC编码
- C# 实现 客户端 对实时数据的采集 上传至服务端;在服务端把保存到内存中;供WEB页面调用