dubbo | java: Dubbo service Excel export for very large data volumes, part 2 (bug fixes)

New problems

  • 1. If the delete call fails, there is no compensation mechanism, so cached lists pile up on the orderService side; free memory keeps shrinking and the service eventually runs out of memory.
  • 2. orderService is deployed as a cluster, but I had not configured a consistent-hash load-balancing strategy, so the batched fetches can land on a node that does not hold the cached list: the export gets incomplete data, and may get no data at all. This is a serious bug.
To fix the two bugs above, the code was modified; only the changed parts are posted below.
  • admin dubbo configuration file
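The original snippet is not reproduced here. As a minimal sketch of the consumer-side (admin) change, assuming XML-based Dubbo configuration and an interface package of com.example.order.api (neither is taken from the article), the essential part is loadbalance="consistenthash", with the hash computed on argument 0 (the export cache key) so that every batch call for one export is routed to the same orderService node:

<!-- Sketch only: id, interface package, and method-level settings are assumptions -->
<dubbo:reference id="orderExportService"
                 interface="com.example.order.api.OrderExportService"
                 loadbalance="consistenthash">
    <dubbo:method name="orderExportMethods" loadbalance="consistenthash">
        <!-- Hash on argument 0, the cache key; 0 is also Dubbo's default -->
        <dubbo:parameter key="hash.arguments" value="0"/>
    </dubbo:method>
</dubbo:reference>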

  • orderService dubbo configuration file
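This snippet is also missing from the article; a minimal sketch of the provider-side export, again with the interface and bean names assumed, setting the same load-balancing default on the service:

<!-- Sketch only: interface and ref names are assumptions -->
<dubbo:service interface="com.example.order.api.OrderExportService"
               ref="orderExportServiceImpl"
               loadbalance="consistenthash"/>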

  • controller-layer code
public void export(HttpServletRequest request, HttpServletResponse response) {
    // Build the export file name from the business type and the optional time range
    Map paramsMap = getMap(request);
    int typeModule = Integer.parseInt((String) paramsMap.get("typeModule"));
    String typeModuleName = "";
    if (typeModule == Constants.BUSINESS_TYPE_TAXI) {
        typeModuleName = "Taxi";
    } else if (typeModule == Constants.BUSINESS_TYPE_SPEC) {
        typeModuleName = "Special car";
    } else if (typeModule == Constants.BUSINESS_TYPE_EXPRESS) {
        typeModuleName = "Express";
    }
    String fileName = typeModuleName + " orders";
    String startTime = (String) paramsMap.get("startTime");
    String endTime = (String) paramsMap.get("endTime");
    // Only append the time range when both startTime and endTime are present
    if (StringUtils.isNotEmpty(startTime) && StringUtils.isNotEmpty(endTime)) {
        startTime = startTime.replaceAll("-", "");
        endTime = endTime.replaceAll("-", "");
        fileName += startTime + "-" + endTime;
    }
    ExcelUtil.setResponseHeader(response, fileName);
    OutputStream out = null;
    BufferedOutputStream bos = null;
    // The cache key carries a timestamp suffix so the provider can evict stale entries later
    String key = UUID.randomUUID().toString() + "-" + System.currentTimeMillis();
    AjaxList ajaxList;
    try {
        out = response.getOutputStream();
        bos = new BufferedOutputStream(out);
        // type 0: run the query on the provider, cache the full list there, return the total count
        ajaxList = orderExportService.orderExportMethods(key, paramsMap, 0, 0, 0);
        if (ajaxList.isSuccess()) {
            int count = (int) ajaxList.getData();
            String path = request.getSession().getServletContext().getRealPath("/");
            String excelPath = path + "excel/";
            File excelFile = new File(excelPath);
            if (!excelFile.exists()) {
                excelFile.mkdir();
            }
            // Zip file that will be streamed back to the browser
            File zip = new File(path + "excel/" + fileName + ".zip");
            // Create the Excel workbook
            Workbook book = new HSSFWorkbook();
            String file = path + "excel/" + fileName + ".xls";
            FileOutputStream fos = new FileOutputStream(file);
            Sheet sheet = null;
            // type 1: pull the cached list back in slices of 10,000 rows, one sheet per slice
            for (int i = 0; i <= count; i += 10000) {
                AjaxList ajaxList1 = orderExportService.orderExportMethods(key, null, i, i + 10000, 1);
                if (ajaxList1.isSuccess()) {
                    sheet = book.createSheet((i + 1) + "-" + (i + 10000));
                    List list = (List) ajaxList1.getData();
                    toExcel(list, book, sheet, fos);
                }
            }
            book.write(fos);
            fos.flush();
            fos.close();
            // type 2: delete the cached list on the provider; if this call fails,
            // the provider-side 30-minute compensation in OrderExportCacheUtil cleans it up
            orderExportService.orderExportMethods(key, null, 0, 0, 2);
            File[] srcfile = new File[1];
            srcfile[0] = new File(file);
            ExcelUtil.zipFiles(srcfile, zip);
            FileInputStream inStream = new FileInputStream(zip);
            BufferedInputStream bif = new BufferedInputStream(inStream);
            byte[] buf = new byte[10240];
            int readLength;
            while ((readLength = bif.read(buf)) != -1) {
                bos.write(buf, 0, readLength);
            }
            bif.close();
            List fileNames = new ArrayList<>();
            fileNames.add(file);
            ExcelUtil.deleteFile(fileNames, path + "excel/" + fileName + ".zip");
        }
    } catch (IOException e1) {
        e1.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        try {
            if (bos != null) {
                bos.flush();
                bos.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

  • orderService provider-side interface
public interface OrderExportService {

    /**
     * Admin - orders - generic order list query for export.
     *
     * @param key    cache key
     * @param params query parameters
     * @param start  slice start index (inclusive)
     * @param end    slice end index (exclusive)
     * @param type   operation type: 0 = query the total order count (and cache the list),
     *               1 = fetch a slice of the cached list, 2 = delete the cached list
     * @return AjaxList
     */
    AjaxList orderExportMethods(String key, Map params, int start, int end, int type);
}

  • orderService provider-side interface implementation
@Service
public class OrderExportServiceImpl implements OrderExportService {

    @Autowired
    private OrderMapper orderMapper;

    @Override
    public AjaxList orderExportMethods(String key, Map params, int start, int end, int type) {
        switch (type) {
            case 0:
                return listExtend(params, key);
            case 1:
                return getOrderListFromCache(key, start, end);
            case 2:
                return deleteOrderListFromCache(key);
            default:
                return AjaxList.createError("Unknown method type");
        }
    }

    /**
     * Query the full order list by the given conditions and put it into the local cache.
     */
    private AjaxList listExtend(Map params, String key) {
        List generalOrderBeanList = orderMapper.listExtend(params);
        if (CollectionUtils.isEmpty(generalOrderBeanList)) {
            return AjaxList.createError("Failed to get order data");
        }
        OrderExportCacheUtil.ordereXportCacheMap.put(key, generalOrderBeanList);
        return AjaxList.createSuccess("Order data fetched successfully", generalOrderBeanList.size());
    }

    /**
     * Fetch one slice of the locally cached order data.
     */
    private AjaxList getOrderListFromCache(String key, int start, int end) {
        List generalOrderBeanList = OrderExportCacheUtil.getGeneralOrderBeanList(key, start, end);
        if (CollectionUtils.isEmpty(generalOrderBeanList)) {
            return AjaxList.createError("Fetch failed");
        }
        return AjaxList.createSuccess("Fetch succeeded", generalOrderBeanList);
    }

    /**
     * Delete the locally cached data.
     */
    private AjaxList deleteOrderListFromCache(String key) {
        OrderExportCacheUtil.delectListByKey(key);
        return AjaxList.createSuccess("Deleted successfully");
    }
}

  • Cache utility class (the main change is in the delete method, which now has a compensation mechanism for failed deletes)
public class OrderExportCacheUtil {

    /**
     * Local cache for export data, keyed by "UUID-timestamp".
     */
    public static Map<String, List<GeneralOrderBean>> ordereXportCacheMap = new HashMap<>();

    /**
     * Get one slice of the cached generalOrderBeanList.
     *
     * @param key   cache key
     * @param start inclusive
     * @param end   exclusive
     */
    public static List<GeneralOrderBean> getGeneralOrderBeanList(String key, int start, int end) {
        List<GeneralOrderBean> generalOrderBeanList = null;
        if (ordereXportCacheMap.containsKey(key)) {
            generalOrderBeanList = ordereXportCacheMap.get(key);
            if (!CollectionUtils.isEmpty(generalOrderBeanList)
                    && generalOrderBeanList.size() > start && end > start) {
                if (generalOrderBeanList.size() >= end) {
                    return generalOrderBeanList.subList(start, end);
                } else if (generalOrderBeanList.size() < end) {
                    return generalOrderBeanList.subList(start, generalOrderBeanList.size());
                }
            }
        }
        return null;
    }

    public static void delectListByKey(String key) {
        if (ordereXportCacheMap.containsKey(key)) {
            ordereXportCacheMap.remove(key);
        }
        // Compensation for failed delete calls: evict any entry older than half an hour.
        // Iterate with an Iterator so entries can be removed safely during iteration.
        Iterator<Map.Entry<String, List<GeneralOrderBean>>> it = ordereXportCacheMap.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, List<GeneralOrderBean>> entry = it.next();
            System.out.println("Key = " + entry.getKey() + ", Value = " + entry.getValue());
            String entryKey = entry.getKey();
            // The key has the form "UUID-timestamp" and the UUID itself contains '-',
            // so take the part after the last '-' as the creation timestamp
            String entryKeyTime = entryKey.substring(entryKey.lastIndexOf('-') + 1);
            if (System.currentTimeMillis() - Long.parseLong(entryKeyTime) >= 30 * 60 * 1000) {
                it.remove();
            }
        }
    }
}

Summary
When writing this code I clearly did not think things through carefully enough. Bugs this serious call for some real reflection, and this time the lesson left a deep impression.
