您的位置:首页 > 其它

高并发量的数据处理方案

2015-07-14 09:54 405 查看
需求描述:

有5000台左右的设备,每台设备每隔一分钟就会向服务器端上报设备的信息(如设备所在位置经纬度等),现在需要服务端对这些上报请求传输的数据进行处理,并持久化到数据库中;

需求就这样简单,但服务端要处理的并发还是不小的,平均每秒钟都会有将近100个请求过来,遇到这样的情况,你会怎么做呢?

我的解决方案是,使用了缓存+批处理操作,代码如下:

package com.jimu.data.servlet;

import org.apache.log4j.Logger;
import com.jimu.data.conn.JimuDataBakConn;

import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.LinkedList;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

/**
 * Accepts GPS reports from ~5000 devices (about 100 requests/second),
 * buffers them in memory, and bulk-inserts them into term_gps_record
 * once a buffer fills up (double-buffer + JDBC batch strategy).
 *
 * Thread-safety: all access to the buffers and the active-buffer flag is
 * guarded by {@code BUFFER_LOCK}. The previous version synchronized only
 * the field getters, which did NOT protect the LinkedList add/clear
 * operations or the check-then-swap of the flag, so concurrent requests
 * could corrupt the lists or lose records.
 */
public class GpsRecordServlet extends HttpServlet {

    private static final long serialVersionUID = 1L;

    private static final Logger logger = Logger.getLogger(GpsRecordServlet.class);

    // Flush threshold: once the active buffer holds this many records it is persisted.
    private static int cacheSize = 2000;

    // Single lock guarding switchFlag AND both cache lists (check-add-swap must be atomic).
    private static final Object BUFFER_LOCK = new Object();

    // 1 -> cacheList1 is the active buffer, 2 -> cacheList2 is.
    private static int switchFlag = 1;

    // Double buffers: requests append to the active one; when full it is
    // detached (under the lock) and drained to the database (outside the lock).
    private static LinkedList<HashMap<String, Object>> cacheList1 = new LinkedList<HashMap<String, Object>>();
    private static LinkedList<HashMap<String, Object>> cacheList2 = new LinkedList<HashMap<String, Object>>();

    // Example request:
    // 127.0.0.1:8080/jimudata/recordgps.html?tsn=9303ZH201508080107&lng=23.175&lat=113.245&tim=1436756925668
    /**
     * Buffers one GPS report; when the active buffer reaches {@code cacheSize}
     * entries, swaps buffers and persists the full one.
     */
    public void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        response.setContentType("text/html;charset=UTF-8");
        request.setCharacterEncoding("UTF-8");

        String termSn = request.getParameter("tsn");    // device serial number
        String longitude = request.getParameter("lng"); // longitude
        String latitude = request.getParameter("lat");  // latitude
        String gpsTime = request.getParameter("tim");   // GPS timestamp

        LinkedList<HashMap<String, Object>> toFlush = null;
        synchronized (BUFFER_LOCK) {
            LinkedList<HashMap<String, Object>> active =
                    (switchFlag == 1) ? cacheList1 : cacheList2;
            int size = cacheRecord(termSn, longitude, latitude, gpsTime, active);
            if (size >= cacheSize) {
                // Flip the active buffer, then detach the full one so the
                // (slow) database write happens outside the lock.
                switchFlag = (switchFlag == 1) ? 2 : 1;
                toFlush = new LinkedList<HashMap<String, Object>>(active);
                active.clear();
            }
        }
        if (toFlush != null) {
            saveCacheList(toFlush);
        }
    }

    public void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        doGet(request, response);
    }

    /**
     * Appends one record to {@code cacheList} and returns its new size.
     * Callers sharing the list across threads must hold {@code BUFFER_LOCK}.
     */
    public int cacheRecord(String termSn, String longitude, String latitude,
            String gpsTime, LinkedList<HashMap<String, Object>> cacheList) {
        HashMap<String, Object> itemMap = new HashMap<String, Object>();
        // BUG FIX: the key was previously misspelled "ternSn" while
        // saveCacheList() reads "termSn", so term_sn was always stored empty.
        itemMap.put("termSn", termSn);
        itemMap.put("longitude", longitude);
        itemMap.put("latitude", latitude);
        itemMap.put("gpsTime", gpsTime);
        cacheList.add(itemMap);
        return cacheList.size();
    }

    /**
     * Persists every record in {@code cacheList} into term_gps_record using
     * JDBC batch inserts of up to 500 rows per round trip.
     */
    public void saveCacheList(LinkedList<HashMap<String, Object>> cacheList) {
        if (cacheList.isEmpty()) {
            return;
        }
        // Data source JimuBus / database jimubus.
        Connection conn = null;
        PreparedStatement ps = null;
        try {
            conn = JimuDataBakConn.getConn();
            String sql = " INSERT INTO term_gps_record (term_sn,longitude,latitude,gps_time,create_time) VALUES(?,?,?,?,now()) ";
            ps = conn.prepareStatement(sql);
            final int batchSize = 500;
            int count = 0;
            for (HashMap<String, Object> itemMap : cacheList) {
                ps.setString(1, asString(itemMap.get("termSn")));
                ps.setString(2, asString(itemMap.get("longitude")));
                ps.setString(3, asString(itemMap.get("latitude")));
                ps.setString(4, asString(itemMap.get("gpsTime")));
                ps.addBatch();
                if (++count % batchSize == 0) {
                    ps.executeBatch(); // flush a full batch
                    ps.clearBatch();
                }
            }
            ps.executeBatch(); // insert the remaining partial batch
        } catch (Exception e) {
            // Previously printStackTrace() + logger.info without the throwable,
            // which lost the stack trace from the application log.
            logger.error("GPS record batch insert failed", e);
        } finally {
            // Null-guarded close; the old code closed an always-null ResultSet,
            // throwing (and silently swallowing) an NPE on every call.
            try { if (ps != null) ps.close(); } catch (Exception ignored) { }
            try { if (conn != null) conn.close(); } catch (Exception ignored) { }
        }
    }

    // Null-safe conversion of a buffered value to the string bound to a SQL parameter.
    private static String asString(Object value) {
        return value == null ? "" : value.toString();
    }

    /** Flushes whatever is still buffered when the container shuts down. */
    public void destroy() {
        System.out.println("tomcat即将关闭,保存缓存中数据!");
        LinkedList<HashMap<String, Object>> remaining1;
        LinkedList<HashMap<String, Object>> remaining2;
        synchronized (BUFFER_LOCK) {
            remaining1 = new LinkedList<HashMap<String, Object>>(cacheList1);
            cacheList1.clear();
            remaining2 = new LinkedList<HashMap<String, Object>>(cacheList2);
            cacheList2.clear();
        }
        saveCacheList(remaining1);
        saveCacheList(remaining2);
    }

}
如果有不足之处,欢迎指正,如果您有更好的解决方案,可以和大家探讨分享,共同完善!
内容来自用户分享和网络整理,不保证内容的准确性,如有侵权内容,可联系管理员处理 点击这里给我发消息
标签:  数据 批处理 高并发