
JavaWeb Project: Integrating Lucene + Redis + SSM to Implement Search

2017-08-05 20:56
Overall approach: Redis serves as the cache. The caching strategy is: on the first query an article is fetched from the database and then stored in Redis; deletes and updates are applied in Redis and periodically synchronized back to the database. Lucene provides full-text search, using RAMDirectory and FSDirectory together: the index is built on disk beforehand, loaded into a RAMDirectory at startup, all reads and writes then go through the RAMDirectory, and the in-memory index is periodically flushed back to disk.
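
The code below assumes an Article entity roughly like the following. This is only a sketch inferred from the getters and setters used in this article (the real class is in the GitHub repository linked at the end); note that it must implement Serializable, since the Redis template is configured later with JdkSerializationRedisSerializer:

import java.io.Serializable;

// Minimal sketch of the assumed Article entity; field names are inferred from the code below.
public class Article implements Serializable {

    private static final long serialVersionUID = 1L;

    private Long id;
    private String title;
    private String details;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }
    public String getTitle() { return title; }
    public void setTitle(String title) { this.title = title; }
    public String getDetails() { return details; }
    public void setDetails(String details) { this.details = details; }
}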

Redis part:

Redis utility class

public class RedisManager {

    @Autowired
    private ArticleMapper articleMapper;
    @Autowired
    private RedisTemplate<String, Object> redisTemplate;

    // Read an article from the cache
    public Article getArticle(Long id) {
        if (id == null) {
            return null;
        }
        ValueOperations<String, Object> valueOper = redisTemplate.opsForValue();
        Object value = valueOper.get("article:" + id);
        if (value == null || !(value instanceof Article)) {
            return null;
        }
        return (Article) value;
    }

    // Write an article to the cache
    public void setArticle(Long id, Article value) {
        if (id == null || value == null) {
            return;
        }
        ValueOperations<String, Object> valueOper = redisTemplate.opsForValue();
        valueOper.set("article:" + id, value, 48, TimeUnit.HOURS); // expires after 48 hours
    }

    // Remove an article from the cache
    public void delArticle(Long id) {
        redisTemplate.delete("article:" + id);
    }

    // Queue an id for deletion; it is flushed to the database by the scheduled sync task
    public void addDelKey(Long id) {
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        listOper.rightPush("to_delete_keys", "article:" + id);
    }

    // Check whether the given key has already been marked for deletion
    public boolean isDeletedKey(Long id) {
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        List<Object> keys = listOper.range("to_delete_keys", 0, -1);
        return keys != null && keys.contains("article:" + id);
    }

    // Queue an id for update; it is flushed to the database by the scheduled sync task
    public void addUpdateKey(Long id) {
        ListOperations<String, Object> listOper = redisTemplate.opsForList();
        listOper.rightPush("to_update_keys", "article:" + id);
    }

    // Scheduled task: flush cached changes back to the database
    public void redisSync() {
        ValueOperations<String, Object> valueOper = redisTemplate.opsForValue();
        ListOperations<String, Object> listOper = redisTemplate.opsForList();

        // Flush pending updates
        List<Object> keys = listOper.range("to_update_keys", 0, -1);
        if (keys != null) {
            for (Object key : keys) {
                Article article = (Article) valueOper.get((String) key);
                if (article != null) { // the cached value may have expired in the meantime
                    articleMapper.updateArticle(article);
                }
            }
        }
        redisTemplate.delete("to_update_keys");

        // Flush pending deletes
        keys = listOper.range("to_delete_keys", 0, -1);
        if (keys != null) {
            for (Object key : keys) {
                articleMapper.deleteArticle(Long.parseLong(((String) key).split(":")[1]));
            }
        }
        redisTemplate.delete("to_delete_keys");
        System.out.println("----- Redis sync finished -----");
    }
}


Service-layer logic

Reading data:

@Override
public Article findArticleById(Long id) {
    if (redisManager.isDeletedKey(id)) {
        return null;
    }
    Article article = redisManager.getArticle(id);
    if (article == null) {
        article = articleMapper.findArticleById(id);
        redisManager.setArticle(id, article);
        System.out.println("------ loaded from the database!");
    }
    return article;
}


Deleting data:

@Override
public void deleteArticle(Long id) {
    try {
        redisManager.delArticle(id);      // remove from Redis
        luceneManager.deleteDocument(id); // delete from the index
        redisManager.addDelKey(id);       // add to the pending-delete queue
    } catch (Exception e) {
        e.printStackTrace();
    }
}


Updating data:

@Override
public void updateArticle(Article article) {
    try {
        redisManager.setArticle(article.getId(), article); // overwrite the cached value
        luceneManager.updateDocument(article);             // update the index
        redisManager.addUpdateKey(article.getId());        // add to the pending-update queue
    } catch (Exception e) {
        e.printStackTrace();
    }
}


Adding data bypasses the write-behind queue and is written straight to the database (the cache and index are still populated):

public void addArticle(Article article) {
    try {
        articleMapper.addArticle(article);                 // write directly to the database
        luceneManager.addDocument(article);                // add to the index
        redisManager.setArticle(article.getId(), article); // populate the cache
    } catch (Exception e) {
        e.printStackTrace();
    }
}
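
For reference, the MyBatis mapper used above (and by the Lucene index builder below) would look roughly like this. This is a sketch based only on the calls that appear in this article; the actual SQL lives in the corresponding mapper XML, which is not shown here:

import java.util.List;

// Sketch of the assumed ArticleMapper interface; method names are taken from the calls in this article.
public interface ArticleMapper {
    Article findArticleById(Long id);
    List<Article> findAll();
    void addArticle(Article article);
    void updateArticle(Article article);
    void deleteArticle(Long id);
}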


Lucene part

Lucene utility class


public class LuceneUtil {

    private static final String INDEXPATH = "D:\\lucene";
    private static RAMDirectory ramDirectory;
    private static IndexWriter ramWriter;

    @Autowired
    private ArticleMapper articleMapper;

    static {
        try {
            // Load the pre-built on-disk index into memory at startup
            FSDirectory fsDirectory = FSDirectory.open(Paths.get(INDEXPATH));
            ramDirectory = new RAMDirectory(fsDirectory, IOContext.READONCE);
            fsDirectory.close();

            IndexWriterConfig indexWriterConfig = new IndexWriterConfig(
                    new SmartChineseAnalyzer()); // Chinese analyzer
            indexWriterConfig.setIndexDeletionPolicy(
                    new SnapshotDeletionPolicy(new KeepOnlyLastCommitDeletionPolicy()));

            ramWriter = new IndexWriter(ramDirectory, indexWriterConfig);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // (Re)build the index on disk from the database
    public void reCreatIndex() {
        try {
            Path path = Paths.get(INDEXPATH);
            // Delete the old index files, if any
            File[] oldFiles = path.toFile().listFiles();
            if (oldFiles != null) {
                for (File file : oldFiles) {
                    file.delete();
                }
            }
            FSDirectory fsDirectory = FSDirectory.open(path);
            Analyzer analyzer = new SmartChineseAnalyzer(); // Chinese analyzer
            IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer);
            IndexWriter writer = new IndexWriter(fsDirectory, indexWriterConfig);
            List<Article> articles = articleMapper.findAll();
            for (Article article : articles) {
                writer.addDocument(toDocument(article));
            }
            writer.close();
            System.out.println("----- index created -----");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Convert an Article entity into a Lucene Document
    public Document toDocument(Article article) {
        Document doc = new Document();
        doc.add(new StringField("id", String.valueOf(article.getId()), Field.Store.YES));
        doc.add(new Field("title", article.getTitle(), TextField.TYPE_STORED));
        doc.add(new Field("details", article.getDetails(), TextField.TYPE_STORED));
        return doc;
    }

    // Add a document to the in-memory index
    public synchronized void addDocument(Article article) throws IOException {
        ramWriter.addDocument(toDocument(article));
        ramWriter.commit();
    }

    // Delete a document from the in-memory index
    public synchronized void deleteDocument(Long id) throws IOException {
        Term term = new Term("id", String.valueOf(id));
        ramWriter.deleteDocuments(term);
        ramWriter.commit();
    }

    // Search the in-memory index
    public List<Article> search(String keyword) throws IOException, ParseException, InvalidTokenOffsetsException {
        List<Article> list = new ArrayList<Article>();
        DirectoryReader reader = DirectoryReader.open(ramDirectory);
        IndexSearcher indexSearcher = new IndexSearcher(reader);
        String[] fields = {"title", "details"};
        Analyzer analyzer = new SmartChineseAnalyzer();
        QueryParser queryParser = new MultiFieldQueryParser(fields, analyzer);
        Query query = queryParser.parse(keyword);
        // BooleanClause.Occur[] clauses = {BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD};
        // Query query = MultiFieldQueryParser.parse(keyword, fields, clauses, analyzer);
        TopDocs hits = indexSearcher.search(query, 20);

        // Highlighting
        SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
        Highlighter highlighter = new Highlighter(simpleHTMLFormatter, new QueryScorer(query));

        for (ScoreDoc scoreDoc : hits.scoreDocs) {
            Article article = new Article();
            Document doc = indexSearcher.doc(scoreDoc.doc);
            article.setId(Long.valueOf(doc.get("id")));
            String title = doc.get("title");
            String details = doc.get("details");
            // getBestFragment() returns null when a field has no match, so fall back to the original text
            String highlightedTitle = highlighter.getBestFragment(analyzer.tokenStream("title", new StringReader(title)), title);
            String highlightedDetails = highlighter.getBestFragment(analyzer.tokenStream("details", new StringReader(details)), details);
            article.setTitle(highlightedTitle != null ? highlightedTitle : title);
            article.setDetails(highlightedDetails != null ? highlightedDetails : details);
            list.add(article);
        }
        reader.close();

        return list;
    }

    // Update a document in the in-memory index
    public void updateDocument(Article article) throws IOException {
        Term term = new Term("id", String.valueOf(article.getId()));
        ramWriter.updateDocument(term, toDocument(article));
        ramWriter.commit();
    }

    // Scheduled task: sync the in-memory index back to disk
    public void indexSync() {
        SnapshotDeletionPolicy snapshotDeletionPolicy = null;
        IndexCommit indexCommit = null;
        try {
            snapshotDeletionPolicy = (SnapshotDeletionPolicy) ramWriter.getConfig().getIndexDeletionPolicy();
            // Take a snapshot of the last commit so its files are not deleted while being copied
            indexCommit = snapshotDeletionPolicy.snapshot();
            Collection<String> fileNames = indexCommit.getFileNames();
            Path toPath = Paths.get(INDEXPATH);
            Directory toDir = FSDirectory.open(toPath);
            // Delete all existing index files on disk
            File[] oldFiles = toPath.toFile().listFiles();
            if (oldFiles != null) {
                for (File file : oldFiles) {
                    file.delete();
                }
            }
            // Copy the snapshot files from the RAMDirectory to disk
            for (String fileName : fileNames) {
                toDir.copyFrom(ramDirectory, fileName, fileName, IOContext.DEFAULT);
            }
            toDir.close();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Release the snapshot so Lucene can clean up the old files in the RAMDirectory
            if (snapshotDeletionPolicy != null && indexCommit != null) {
                try {
                    snapshotDeletionPolicy.release(indexCommit);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        System.out.println("----- index sync finished -----");
    }
}
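
To expose the search to the front end, a thin controller only needs to delegate to the utility class. The following is a sketch; the controller name, request mapping and JSON response handling are assumptions on my part and are not part of the original project:

import java.util.List;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

@Controller
public class SearchController {

    @Autowired
    private LuceneUtil luceneManager;

    // Hypothetical endpoint: runs the Lucene search and returns the highlighted results.
    @RequestMapping("/search")
    @ResponseBody
    public List<Article> search(@RequestParam("keyword") String keyword) throws Exception {
        return luceneManager.search(keyword);
    }
}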


Spring configuration file

<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:context="http://www.springframework.org/schema/context"
       xmlns:p="http://www.springframework.org/schema/p"
       xmlns:mvc="http://www.springframework.org/schema/mvc"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
                           http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context.xsd
                           http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop.xsd
                           http://www.springframework.org/schema/mvc http://www.springframework.org/schema/mvc/spring-mvc.xsd">

    <!-- Load the properties files -->
    <context:property-placeholder location="classpath*:properties/*.properties"/>

    <!-- Redis connection pool -->
    <bean id="poolConfig" class="redis.clients.jedis.JedisPoolConfig">
        <property name="maxIdle" value="${redis.maxIdle}"/>
        <property name="maxWaitMillis" value="${redis.maxWait}"/>
        <property name="testOnBorrow" value="${redis.testOnBorrow}"/>
    </bean>

    <!-- Redis connection factory -->
    <bean id="connectionFactory"
          class="org.springframework.data.redis.connection.jedis.JedisConnectionFactory">
        <property name="poolConfig" ref="poolConfig"/>
        <property name="hostName" value="${redis.host}"/>
        <property name="port" value="${redis.port}"/>
        <property name="password" value="${redis.auth}"/>
        <property name="timeout" value="${redis.timeout}"/>
    </bean>

    <!-- Redis template -->
    <bean id="redisTemplate" class="org.springframework.data.redis.core.RedisTemplate">
        <property name="connectionFactory" ref="connectionFactory"/>
        <property name="keySerializer">
            <bean class="org.springframework.data.redis.serializer.StringRedisSerializer"/>
        </property>
        <!-- JdkSerializationRedisSerializer handles serialization of the cached objects -->
        <property name="valueSerializer">
            <bean class="org.springframework.data.redis.serializer.JdkSerializationRedisSerializer"/>
        </property>
    </bean>

    <!-- Database connection pool -->
    <bean id="dataSource" class="com.alibaba.druid.pool.DruidDataSource">
        <property name="driverClassName" value="${mysql.driver}"/>
        <property name="url" value="${mysql.url}"/>
        <property name="username" value="${mysql.user}"/>
        <property name="password" value="${mysql.password}"/>
        <!-- Initial pool size, minimum idle connections, maximum active connections -->
        <property name="initialSize" value="${druid.initialSize}"/>
        <property name="minIdle" value="${druid.minIdle}"/>
        <property name="maxActive" value="${druid.maxActive}"/>
        <!-- Maximum wait time when acquiring a connection -->
        <property name="maxWait" value="${druid.maxWait}"/>
    </bean>

    <bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
        <!-- dataSource points to the connection pool defined above -->
        <property name="dataSource" ref="dataSource"/>
        <!-- configLocation points to the MyBatis core configuration file -->
        <property name="configLocation" value="classpath:config/mybatis-config.xml"/>
    </bean>

    <!-- Scan mapper interfaces to generate DAO components -->
    <bean class="org.mybatis.spring.mapper.MapperScannerConfigurer">
        <property name="basePackage" value="com.search.mapper"/>
        <property name="sqlSessionFactoryBeanName" value="sqlSessionFactory"/>
    </bean>

    <!-- Scan and register service components -->
    <context:component-scan base-package="com.search.service"/>

    <!-- Redis utility -->
    <bean id="redisManager" class="com.search.utils.RedisManager"/>
    <!-- Lucene utility -->
    <bean id="luceneManager" class="com.search.utils.LuceneUtil"/>

    <!-- Scheduled tasks -->
    <!-- Scheduled Redis sync job -->
    <bean id="redisSync" class="org.springframework.scheduling.quartz.MethodInvokingJobDetailFactoryBean">
        <property name="targetObject" ref="redisManager"/>
        <property name="targetMethod" value="redisSync"/>
    </bean>
    <!-- Trigger: fires on a cron schedule -->
    <bean id="redisTrigger"
          class="org.springframework.scheduling.quartz.CronTriggerFactoryBean">
        <property name="jobDetail" ref="redisSync"/>
        <property name="cronExpression" value="0 0 0 * * ?"/><!-- runs once a day at midnight -->
    </bean>
    <!-- Scheduled Lucene index sync job -->
    <bean id="indexSync" class="org.springframework.scheduling.quartz.MethodInvokingJobDetailFactoryBean">
        <property name="targetObject" ref="luceneManager"/>
        <property name="targetMethod" value="indexSync"/>
    </bean>
    <!-- Trigger: fires at a fixed interval -->
    <bean id="luceneTrigger" class="org.springframework.scheduling.quartz.SimpleTriggerFactoryBean">
        <property name="jobDetail" ref="indexSync"/>
        <property name="startDelay" value="60000"/><!-- start 60 s after the scheduler factory is initialized -->
        <property name="repeatInterval" value="7200000"/><!-- runs every 2 hours -->
    </bean>
    <!-- Scheduler factory -->
    <bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
        <property name="triggers">
            <list>
                <ref bean="redisTrigger"/>
                <ref bean="luceneTrigger"/>
            </list>
        </property>
    </bean>

</beans>
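
The ${...} placeholders above are resolved from the properties files loaded by context:property-placeholder. A sample layout might look like the following; every value here is only an illustrative assumption and must be adjusted to your own environment:

# properties/redis.properties (illustrative values)
redis.host=127.0.0.1
redis.port=6379
redis.auth=yourRedisPassword
redis.timeout=3000
redis.maxIdle=10
redis.maxWait=3000
redis.testOnBorrow=true

# properties/db.properties (illustrative values)
mysql.driver=com.mysql.jdbc.Driver
mysql.url=jdbc:mysql://127.0.0.1:3306/yourdb?useUnicode=true&characterEncoding=utf8
mysql.user=root
mysql.password=yourMysqlPassword
druid.initialSize=5
druid.minIdle=5
druid.maxActive=20
druid.maxWait=60000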


Full source code

See: https://github.com/crazylai1996/article-search