
Uploading Files to MongoDB GridFS with Java

There are two ways to upload: uploadFromStream and openUploadStream. The example here uses the second, openUploadStream (a sketch of the uploadFromStream variant follows the example below):

public ObjectId save() {
    GridFSBucket gfsbk = GridFSBuckets.create(db, "user_photo");
    byte[] data = "Data to upload into GridFS".getBytes(StandardCharsets.UTF_8);
    GridFSUploadStream uploadStream = gfsbk.openUploadStream("user_09734");
    uploadStream.write(data);
    ObjectId id = uploadStream.getFileId();
    uploadStream.close();
    return id;
}
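For comparison, here is a minimal sketch of the first approach, uploadFromStream. It assumes the same db, bucket name, and filename as the example above; the method name saveWithUploadFromStream is purely illustrative, and the snippet additionally needs java.io.ByteArrayInputStream, java.io.InputStream, and java.io.IOException:

public ObjectId saveWithUploadFromStream() {
    GridFSBucket gfsbk = GridFSBuckets.create(db, "user_photo");
    byte[] data = "Data to upload into GridFS".getBytes(StandardCharsets.UTF_8);
    // uploadFromStream consumes the InputStream and returns the new file's ObjectId directly
    try (InputStream source = new ByteArrayInputStream(data)) {
        return gfsbk.uploadFromStream("user_09734", source);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}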


Explanation of the openUploadStream example:

1. GridFSBucket gfsbk = GridFSBuckets.create(db, "user_photo");

Creates a GridFS bucket, passing a `MongoDatabase` instance `db` and the bucket name "user_photo" as parameters.


2. GridFSUploadStream uploadStream = gfsbk.openUploadStream("user_09734");

Opens an upload stream, passing a String parameter that becomes the GridFS file's filename.


3. ObjectId id = uploadStream.getFileId();

Returns the id of the file that was just uploaded (see the verification sketch after this list).
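As a quick check that the returned id refers to a stored file, here is a hedged sketch that looks up the corresponding document in the bucket's files collection. It reuses gfsbk and id from the example above and uses com.mongodb.client.model.Filters and com.mongodb.client.gridfs.model.GridFSFile:

// Assumes the same GridFSBucket (gfsbk) and ObjectId (id) as in the save() example above
GridFSFile stored = gfsbk.find(Filters.eq("_id", id)).first();
if (stored != null) {
    // getFilename()/getLength() come from the files-collection document GridFS maintains
    System.out.println(stored.getFilename() + " (" + stored.getLength() + " bytes)");
}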


Reference: the MongoDB Java driver's GridFS upload documentation.

Below is the upload helper class I wrote:

package com.huanle.utils.db;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.bson.types.ObjectId;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import com.huanle.utils.model.FileEntity;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.gridfs.GridFSBucket;
import com.mongodb.client.gridfs.GridFSBuckets;
import com.mongodb.client.gridfs.GridFSUploadStream;
import com.mongodb.client.gridfs.model.GridFSFile;

@Component
public class GirdFSHelper {

    private GridFSBucket gfsbk;
    private MongoDatabase db;

    @Autowired
    public GirdFSHelper(MongoDBHelper mongoManager) {
        this.db = mongoManager.getDB("huanle");
    }

    /**
     * Saves a file to GridFS via openUploadStream.
     * @param bucket bucket name
     * @param file   file entity holding the filename and content
     * @return the stored file's metadata as a FileEntity
     */
    public FileEntity save(String bucket, FileEntity file) {
        gfsbk = GridFSBuckets.create(db, bucket);
        GridFSUploadStream uploadStream = gfsbk.openUploadStream(file.getFileName());
        uploadStream.write(file.getContent());
        ObjectId id = uploadStream.getFileId();
        uploadStream.flush();
        uploadStream.close();
        FileEntity filter = new FileEntity();
        filter.setId(id);
        filter = find(bucket, filter);
        return filter;
    }

    /**
     * Downloads a file's content from GridFS.
     * @param bucket bucket name
     * @param filter filter describing the file to download
     * @return the FileEntity with its content populated, or null if not found
     */
    public FileEntity download(String bucket, FileEntity filter) throws IOException {
        filter = find(bucket, filter);
        if (filter == null) {
            return null;
        }
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        gfsbk = GridFSBuckets.create(db, bucket);
        gfsbk.downloadToStream(filter.getId(), out);
        filter.setContent(out.toByteArray());
        out.close();
        return filter;
    }

    /**
     * Looks up a file's metadata in the bucket's files collection.
     * @param bucket bucket name
     * @param filter filter describing the file to find
     * @return the FileEntity populated with metadata, or null if not found
     */
    public FileEntity find(String bucket, FileEntity filter) {
        gfsbk = GridFSBuckets.create(db, bucket);
        System.out.println(filter.toBson());
        GridFSFile result = gfsbk.find(filter.toBson()).first();
        if (result == null) {
            return null;
        }
        filter.setId(result.getObjectId());
        filter.setSize(result.getLength());
        filter.setFileName(result.getFilename());
        filter.setUploadTime(result.getUploadDate());
        return filter;
    }

}
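A minimal usage sketch of the helper, assuming it is injected as a Spring bean. The method name storeUserPhoto and the "user_photo" bucket are illustrative choices, not part of the original class:

@Autowired
private GirdFSHelper gridFSHelper;

public ObjectId storeUserPhoto(String name, byte[] bytes) {
    FileEntity file = new FileEntity();
    file.setFileName(name);
    file.setContent(bytes);
    // save() uploads the bytes to the "user_photo" bucket and returns the stored metadata
    FileEntity stored = gridFSHelper.save("user_photo", file);
    return stored.getId();
}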

package com.huanle.utils.model;

import java.util.Date;

import javax.validation.constraints.Past;

import org.bson.Document;
import org.bson.types.ObjectId;
import org.hibernate.validator.constraints.Range;

import com.huanle.utils.annotation.FileContent;
import com.huanle.utils.annotation.FileName;
import com.huanle.utils.annotation.PlainString;

/**
 * Helper entity describing a GridFS file.
 * @author luchu
 */
public class FileEntity {

    // Keys match the fields of the GridFS files collection, so toBson()
    // can be passed directly to GridFSBucket.find() as a filter.
    public static final String ID = "_id", FILE_NAME = "filename", SIZE = "length",
            UPLOAD_TIME = "uploadDate", CONTENT = "content";

    private ObjectId id;
    @PlainString
    @FileName
    private String fileName;
    @Range(max = 41943040)
    private long size;
    @Past
    private Date uploadTime;
    @FileContent
    private byte[] content;

    /** Builds a query document from the fields that have been set. */
    public Document toBson() {
        Document doc = new Document();
        if (this.id != null) {
            doc.append(ID, this.id);
        }
        if (this.fileName != null) {
            doc.append(FILE_NAME, this.fileName);
        }
        if (this.size != 0) {
            doc.append(SIZE, this.size);
        }
        if (this.uploadTime != null) {
            doc.append(UPLOAD_TIME, this.uploadTime);
        }
        return doc;
    }

    // Getters and setters used by GirdFSHelper
    public ObjectId getId() { return id; }
    public void setId(ObjectId id) { this.id = id; }
    public String getFileName() { return fileName; }
    public void setFileName(String fileName) { this.fileName = fileName; }
    public long getSize() { return size; }
    public void setSize(long size) { this.size = size; }
    public Date getUploadTime() { return uploadTime; }
    public void setUploadTime(Date uploadTime) { this.uploadTime = uploadTime; }
    public byte[] getContent() { return content; }
    public void setContent(byte[] content) { this.content = content; }

}
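As a usage example of a toBson()-based lookup, here is a hedged sketch that finds a file by name via the helper. The variable gridFSHelper is assumed to be an injected GirdFSHelper, and "user_09734" / "user_photo" reuse the names from the earlier examples:

FileEntity filter = new FileEntity();
filter.setFileName("user_09734");
// toBson() produces { "filename": "user_09734" }, which find() runs against the bucket's files collection
FileEntity found = gridFSHelper.find("user_photo", filter);
if (found != null) {
    System.out.println(found.getFileName() + " uploaded at " + found.getUploadTime());
}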