• 多线程能否提高jdbc插入速度?


    1. package batchinsert;
    2. import com.alibaba.druid.pool.DruidDataSource;
    3. import lombok.SneakyThrows;
    4. import org.apache.commons.dbutils.QueryRunner;
    5. import java.sql.Connection;
    6. import java.sql.PreparedStatement;
    7. import java.util.Date;
    8. import java.util.Map;
    9. import java.util.UUID;
    10. import java.util.concurrent.CountDownLatch;
    11. import java.util.concurrent.LinkedBlockingQueue;
    12. import java.util.concurrent.ThreadPoolExecutor;
    13. import java.util.concurrent.TimeUnit;
    14. public class BatchInsertRunnable implements Runnable {
    15. CountDownLatch countDownLatch;
    16. int rowCount;
    17. public static DruidDataSource dataSource;
    18. public static String workId= UUID.randomUUID().toString();
    19. public static int batchSize=30;
    20. static{
    21. dataSource = new DruidDataSource();
    22. //dataSource.setDriverClassName(driverClassName);//如果不配置druid会根据url自动识别dbType,然后选择相应的driverClassName
    23. dataSource.setUrl("jdbc:mysql://10.8.4.214:3306/aops_asoe?useUnicode=true&characterEncoding=UTF-8&serverTimezone=UTC&useLocalSessionState=true&useSSL=false&rewriteBatchedStatements=true");
    24. dataSource.setUsername("root");
    25. dataSource.setPassword("root123456");
    26. dataSource.setValidationQuery("SELECT 1");//用来检测连接是否有效
    27. dataSource.setTestOnBorrow(false);//借用连接时执行validationQuery检测连接是否有效,做了这个配置会降低性能
    28. dataSource.setTestOnReturn(false);//归还连接时执行validationQuery检测连接是否有效,做了这个配置会降低性能
    29. //连接空闲时检测,如果连接空闲时间大于timeBetweenEvictionRunsMillis指定的毫秒,执行validationQuery指定的SQL来检测连接是否有效
    30. dataSource.setTestWhileIdle(true);//如果检测失败,则连接将被从池中去除
    31. dataSource.setTimeBetweenEvictionRunsMillis(60000);//1分钟
    32. dataSource.setMaxActive(20);
    33. dataSource.setInitialSize(20);
    34. }
    35. public BatchInsertRunnable(CountDownLatch countDownLatch, int rowCount) {
    36. this.countDownLatch = countDownLatch;
    37. this.rowCount = rowCount;
    38. }
    39. @SneakyThrows
    40. public void dorun() {
    41. Connection connection = dataSource.getConnection();
    42. connection.setAutoCommit(false);
    43. QueryRunner queryRunner=new QueryRunner();
    44. String sql = "insert into asoe_hi_actinst(id,activity_id,activity_name,activity_type,duration,end_time,exec_id,flow_index,proc_def_id,proc_inst_id,start_time) values (?,?,?,?,?,?,?,?,?,?,?)";
    45. PreparedStatement preparedStatement = connection.prepareStatement(sql);
    46. for(int i=1;i<=rowCount;++i){
    47. preparedStatement.setObject(1, UUID.randomUUID().toString());
    48. preparedStatement.setObject(2, UUID.randomUUID().toString());
    49. preparedStatement.setObject(3, UUID.randomUUID().toString());
    50. preparedStatement.setObject(4, UUID.randomUUID().toString());
    51. preparedStatement.setObject(5, 0);
    52. preparedStatement.setObject(6, new Date());
    53. preparedStatement.setObject(7, UUID.randomUUID().toString());
    54. preparedStatement.setObject(8, 0);
    55. preparedStatement.setObject(9,"batch");
    56. preparedStatement.setObject(10,"batch");
    57. preparedStatement.setObject(11, new Date());
    58. preparedStatement.addBatch();
    59. if(i%batchSize==0){
    60. preparedStatement.executeBatch();
    61. preparedStatement.clearBatch();
    62. }
    63. }
    64. if(rowCount%batchSize!=0){
    65. preparedStatement.executeBatch();
    66. preparedStatement.clearBatch();
    67. }
    68. connection.commit();
    69. preparedStatement.close();
    70. connection.close();
    71. countDownLatch.countDown();
    72. }
    73. public static void main(String[] args) throws InterruptedException {
    74. // int threadCount=4;
    75. // int perRowCount=100000;
    76. //191502
    77. //batch=1 271851
    78. // int threadCount=8;
    79. // int perRowCount=50000;
    80. //158645
    81. //batch=1 231818
    82. int threadCount=1;
    83. int perRowCount=400000;
    84. //167135
    85. //batch=1 782624
    86. CountDownLatch latch=new CountDownLatch(threadCount);
    87. ThreadPoolExecutor executor=new ThreadPoolExecutor(threadCount,threadCount ,1000 ,TimeUnit.SECONDS,new LinkedBlockingQueue<>());
    88. DruidDataSource dataSource = BatchInsertRunnable.dataSource;
    89. long start = System.currentTimeMillis();
    90. for(int i=0;i
    91. executor.submit(new BatchInsertRunnable(latch, perRowCount));
    92. }
    93. latch.await();
    94. System.out.println("timeuse:"+(System.currentTimeMillis()-start));
    95. }
    96. @Override
    97. public void run() {
    98. try {
    99. dorun();
    100. }catch (Exception e){
    101. e.printStackTrace();
    102. }
    103. }
    104. }
    105. //timeuse:17268
    106. //timeuse:18669
    107. //timeuse:12135

    如果不用批量插入(batchSize=1),插入400000行

    线程数 | 耗时ms
    1 | 782624
    8 | 231818

    如果用了批量插入

    线程数 | 耗时ms
    1 | 167135
    4 | 191502
    8 | 158645

    结论:不使用批量插入的话,多线程确实能提高插入速度,如果使用批量插入,单线程多线程都差不多(其实和kafka批量提交差不多)。

  • 相关阅读:
    前端菜鸟浅谈Web前端开发技术
    【Reverse】2022 强网杯 game
    Kotlin高仿微信-第58篇-开通VIP
    【教程】Derby数据库安装与使用
    Docker简单使用
    Mqtt是什么
    Mock使用场景
    计算机毕业设计Java健身房信息管理系统(源码+系统+mysql数据库+lw文档)
    安徽身份证网上办理最全攻略
    NoSQL之Redis配置与优化
  • 原文地址:https://blog.csdn.net/u011982711/article/details/127111266