import java.io.IOException;
import java.io.OutputStream;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.log4j.Logger;

public class HadoopServlet extends HttpServlet {

    static Logger logger = Logger.getLogger(HadoopServlet.class);

    public void doGet(HttpServletRequest req, HttpServletResponse res)
            throws ServletException, IOException {
        res.setContentType("image/jpeg");
        // Let downstream caches (browsers, Squid) keep the image for 24 hours
        res.setDateHeader("Expires", System.currentTimeMillis() + 1000L * 60 * 60 * 24);

        // Path info arrives as "/dir/file.jpg"; strip the leading slash to get the HDFS path
        String path = req.getPathInfo().substring(1);

        HadoopFileUtil hUtil = new HadoopFileUtil();
        FSDataInputStream inputStream = hUtil.getInputStream(path);
        // Use the binary output stream only; mixing it with getWriter() on the
        // same response would throw an IllegalStateException
        OutputStream os = res.getOutputStream();
        try {
            // Stream the file from HDFS to the response in 4 KB chunks
            byte[] buffer = new byte[4096];
            int length;
            while ((length = inputStream.read(buffer)) > 0) {
                os.write(buffer, 0, length);
            }
            os.flush();
        } finally {
            inputStream.close();
        }
    }
}
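The HadoopFileUtil class used above is not shown in the post. As a rough illustration, its getInputStream method could be a thin wrapper over the standard Hadoop FileSystem API, along these lines; the NameNode address hdfs://namenode:9000 is a placeholder, not a value from the original setup.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HadoopFileUtil {
    // Assumed NameNode address; replace with the real cluster URI
    private static final String HDFS_URI = "hdfs://namenode:9000";

    public FSDataInputStream getInputStream(String path) throws IOException {
        Configuration conf = new Configuration();
        // FileSystem.get caches instances internally, so repeated calls are cheap
        FileSystem fs = FileSystem.get(URI.create(HDFS_URI), conf);
        // Open the file on HDFS for reading
        return fs.open(new Path(path));
    }
}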
Additionally, to avoid hitting Hadoop with frequent reads, a Squid proxy can be placed in front of the JSP server to cache the images.
This is the architecture of our image server.
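As an illustration only, a reverse-proxy (accelerator) Squid configuration along those lines could look like the sketch below; the hostname img.example.com and the ports are placeholders, not values from the original setup.

# squid.conf sketch: Squid as a reverse proxy in front of the servlet container
http_port 80 accel defaultsite=img.example.com
# Forward cache misses to the servlet container on the same host (assumed port 8080)
cache_peer 127.0.0.1 parent 8080 0 no-query originserver name=imgOrigin
acl img_sites dstdomain img.example.com
http_access allow img_sites
cache_peer_access imgOrigin allow img_sites
# Cache image responses aggressively: min 1 day, max 90 days
refresh_pattern -i \.(jpg|jpeg|png|gif)$ 1440 90% 129600

With this in place, repeated requests for the same image are served from Squid's cache, and the Expires header set by the servlet tells Squid how long each image stays fresh.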