Source code: https://gitee.com/antia11/excel-data-import-demo
Background: Every week the client uploads an Excel data file with more than 200,000 rows, which serves as the base data for other modules and reports.
`ExcelDataService` reads the uploaded file with EasyExcel, hands each row to a listener that inserts in batches, and removes duplicates once the whole file has been imported:

```java
package com.antia1.demo.service;

import com.alibaba.excel.EasyExcel;
import com.antia1.demo.entity.ExcelDataEntity;
import com.antia1.demo.listener.ExcelDataListener;
import com.antia1.demo.mapper.ExcelDataMapper;
import com.antia1.demo.util.RespBean;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.util.Map;

/**
 * Author: anti
 * Date: 2022/7/23 16:13
 */
@Service
@Slf4j
public class ExcelDataService {

    @Autowired
    private ExcelDataMapper excelDataMapper;

    public RespBean importData(MultipartFile file) throws IOException {
        // 0. Fetch the current maximum id from the database
        Map<String, Object> idMap = excelDataMapper.getMaxId();
        int maxId = Integer.parseInt(idMap.get("maxId") + "");

        // 1. Read the Excel file; the listener inserts rows in batches as it goes
        EasyExcel.read(file.getInputStream(), ExcelDataEntity.class,
                new ExcelDataListener(excelDataMapper, maxId)).sheet().doRead();

        // 2. Remove duplicate rows once the import has finished
        log.debug("Import finished, starting deduplication");
        int count = excelDataMapper.deleteDuplicates();
        log.debug("Removed {} duplicate rows", count);

        return RespBean.ok("Import completed");
    }
}
```
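The service is invoked from whatever controller receives the upload. The repository contains the full wiring; as a minimal, hypothetical sketch (the `ExcelDataController` class name, request path, and parameter name below are assumptions, not taken from the source), the entry point could look like this:

```java
package com.antia1.demo.controller;

import com.antia1.demo.service.ExcelDataService;
import com.antia1.demo.util.RespBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;

// Hypothetical upload endpoint; the actual controller in the repository may differ.
@RestController
public class ExcelDataController {

    @Autowired
    private ExcelDataService excelDataService;

    // Accepts the weekly Excel upload and delegates to the import service.
    @PostMapping("/excel/import")
    public RespBean importExcel(@RequestParam("file") MultipartFile file) throws IOException {
        return excelDataService.importData(file);
    }
}
```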
`ExcelDataListener` buffers the parsed rows and flushes them to the database every 1,000 rows, so memory usage stays flat even for 200,000+ rows:

```java
package com.antia1.demo.listener;

import com.alibaba.excel.context.AnalysisContext;
import com.alibaba.excel.event.AnalysisEventListener;
import com.antia1.demo.entity.ExcelDataEntity;
import com.antia1.demo.mapper.ExcelDataMapper;
import lombok.extern.slf4j.Slf4j;

import java.util.ArrayList;
import java.util.List;

/**
 * Author: anti
 * Date: 2022/7/23 16:10
 */
@Slf4j
public class ExcelDataListener extends AnalysisEventListener<ExcelDataEntity> {

    /** Flush to the database every 1000 rows. */
    private static final int BATCH_COUNT = 1000;

    private final List<ExcelDataEntity> list = new ArrayList<>();
    private final ExcelDataMapper excelDataMapper;
    private int primaryKey;
    private int totalCount;

    public ExcelDataListener(ExcelDataMapper excelDataMapper, int primaryKey) {
        this.excelDataMapper = excelDataMapper;
        this.primaryKey = primaryKey;
    }

    @Override
    public void invoke(ExcelDataEntity excelDataEntity, AnalysisContext analysisContext) {
        // Assign ids by continuing from the current maximum id in the table
        primaryKey++;
        excelDataEntity.setId(String.valueOf(primaryKey));
        list.add(excelDataEntity);
        if (list.size() >= BATCH_COUNT) {
            saveData();
            list.clear();
        }
    }

    @Override
    public void doAfterAllAnalysed(AnalysisContext analysisContext) {
        // Flush whatever is left in the buffer
        saveData();
        log.info("Import finished, total rows inserted: {}", totalCount);
    }

    public void saveData() {
        if (!list.isEmpty()) {
            int count = excelDataMapper.insertBatch(list);
            totalCount += count;
        }
    }
}
```
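The listener maps each row into `ExcelDataEntity`, which is not shown above. Based on the columns used in the mapper XML below, a minimal sketch could look like the following; the column-index mapping and the use of Lombok are assumptions:

```java
package com.antia1.demo.entity;

import com.alibaba.excel.annotation.ExcelIgnore;
import com.alibaba.excel.annotation.ExcelProperty;
import lombok.Data;

// Minimal sketch of the row entity; the field-to-column index mapping is an assumption.
@Data
public class ExcelDataEntity {

    // Filled in by the listener, not read from the Excel file.
    @ExcelIgnore
    private String id;

    @ExcelProperty(index = 0)
    private String code;

    @ExcelProperty(index = 1)
    private String desc;

    @ExcelProperty(index = 2)
    private String objectCode;

    @ExcelProperty(index = 3)
    private String projectCode;

    @ExcelProperty(index = 4)
    private String other;
}
```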
The MyBatis mapper XML provides the batch insert, the max-id query, and the deduplication statement:

```xml
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.antia1.demo.mapper.ExcelDataMapper">

    <!-- Batch insert -->
    <insert id="insertBatch" parameterType="java.util.List">
        INSERT INTO `demo`.`tb_exceldata` (`id`, `code`, `desc`, `objectCode`, `projectCode`, `other`)
        VALUES
        <foreach collection="list" item="item" separator=",">
            (#{item.id}, #{item.code}, #{item.desc}, #{item.objectCode}, #{item.projectCode}, #{item.other})
        </foreach>
    </insert>

    <!-- Fetch the current maximum id -->
    <select id="getMaxId" resultType="java.util.Map">
        SELECT IFNULL(MAX(CAST(id AS SIGNED)), 0) AS maxId FROM `demo`.`tb_exceldata`
    </select>

    <!-- Remove duplicates, keeping the row with the smallest id in each group -->
    <delete id="deleteDuplicates">
        DELETE FROM `tb_exceldata`
        WHERE id NOT IN (
            SELECT t.id FROM (
                SELECT MIN(id) AS id
                FROM `tb_exceldata`
                GROUP BY `code`, `desc`, `objectCode`, `projectCode`, `other`
            ) t
        )
    </delete>
</mapper>
```
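The XML is backed by the `ExcelDataMapper` interface. A sketch consistent with the statement ids and parameter types above would be the following; the exact annotations in the repository may differ:

```java
package com.antia1.demo.mapper;

import com.antia1.demo.entity.ExcelDataEntity;
import org.apache.ibatis.annotations.Mapper;

import java.util.List;
import java.util.Map;

// Sketch of the mapper interface matching the XML statement ids; details may differ from the repository.
@Mapper
public interface ExcelDataMapper {

    // Batch-inserts one chunk of rows; returns the number of affected rows.
    int insertBatch(List<ExcelDataEntity> list);

    // Returns a single-row map containing the current maximum id under the key "maxId".
    Map<String, Object> getMaxId();

    // Deletes duplicate rows, keeping one row per distinct combination of the business columns.
    int deleteDuplicates();
}
```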