代码如下,三种方式插入 5000 条数据的耗时分别为 3249ms、900ms、1305ms
// Approach 1: plain for-loop — worst performance, one DB round trip per insert.
public static void batch1() throws Exception {
    SqlSession session = getSqlSession();
    try {
        UserMapper mapper = session.getMapper(UserMapper.class);
        long start = System.currentTimeMillis();
        for (int i = 0; i < 5000; i++) {
            User u = new User();
            u.setSex("1");
            u.setId("1id" + i);
            u.setName("name" + i);
            mapper.insert(u);
        }
        session.commit();
        // Print AFTER commit so the measurement includes commit cost,
        // making the number comparable with batch2/batch3.
        System.out.println(System.currentTimeMillis() - start);
    } finally {
        // Always release the session, even if an insert throws.
        session.close();
    }
}
Oracle 批量插入 SQL 示例
<!-- Oracle multi-row insert using the INSERT ALL ... SELECT 1 FROM dual pattern:
     each element of the "list" parameter contributes one INTO ... VALUES clause,
     and the trailing SELECT satisfies Oracle's required subquery for INSERT ALL. -->
<insert id="batchInsert">
INSERT all
<foreach collection="list" item="item" separator=" ">
into IM_STD_MATERIAL_CODE (
gu_id,RELEASE_TIME
) VALUES (
#{item.guId,jdbcType=VARCHAR},
#{item.releaseTime,jdbcType=TIMESTAMP}
)
</foreach>
select 1 from dual
</insert>
// Approach 2: one multi-row INSERT built with <foreach> — fastest, but the
// generated SQL length is limited, so the result must be checked after each run.
// MySQL's default maximum accepted statement size is 1048576 bytes (1 MB);
// exceeding it throws an exception. Inspect the limits with:
//   show variables like '%packet%'; show variables like '%net_buffer%';
// Raise the max_allowed_packet option in my.ini to support longer statements.
public static void batch2() throws Exception {
    SqlSession session = getSqlSession();
    try {
        UserMapper mapper = session.getMapper(UserMapper.class);
        long start = System.currentTimeMillis();
        // Typed and presized (was a raw List) — avoids unchecked warnings
        // and intermediate array growth for the known 5000 elements.
        List<User> list = new ArrayList<>(5000);
        for (int i = 0; i < 5000; i++) {
            User u = new User();
            u.setSex("1");
            u.setId("2id" + i);
            u.setName("name2" + i);
            list.add(u);
        }
        mapper.insertBatch(list);
        session.commit();
        // Print after commit (not after close) so timing covers exactly the
        // DB work, consistent with batch1/batch3.
        System.out.println(System.currentTimeMillis() - start);
    } finally {
        // Always release the session, even on failure.
        session.close();
    }
}
// Approach 3: JDBC-style batching via ExecutorType.BATCH — statements are
// buffered and flushed as a JDBC batch when the session commits.
public static void batch3() throws Exception {
    // autoCommit=false: inserts accumulate in the batch until commit().
    SqlSession session = getSqlSessionFactory().openSession(ExecutorType.BATCH, false);
    try {
        UserMapper mapper = session.getMapper(UserMapper.class);
        long start = System.currentTimeMillis();
        // NOTE: the original allocated an unused raw List here — removed.
        for (int i = 0; i < 5000; i++) {
            User u = new User();
            u.setSex("1");
            u.setId("3id3" + i);
            u.setName("name3" + i);
            mapper.insert(u);
        }
        session.commit(); // flushes the buffered batch statements
        System.out.println(System.currentTimeMillis() - start);
    } finally {
        // Always release the session, even if an insert throws.
        session.close();
    }
}