表数据并发查询迁移:多表并发、大表并发

多表并发:

(1)使用list拆分;

(2)拆分后并发执行;

private void migraByList() {
   // Number of concurrent reader threads in the migration pool.
   final int readNum = 8;
   // Below this many tables there is no point in splitting the work.
   final int minThreadNum = 1;

   // Counts in-flight worker threads; each MigraDataThread decrements it on completion.
   final AtomicLong wThread = new AtomicLong(0L);
   final ExecutorService executorServiceRead = new ThreadPoolExecutor(
         readNum, readNum, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());

   // Split the table list into slices: one slice per reader thread when there is
   // more than one table, otherwise a single slice.
   int slices = this.list.size() > minThreadNum ? readNum : 1;
   List<List<ObjInfoDTO>> averageAssign = ResotreScriptUtils.averageAssign(this.list, slices);

   // Submit one migration task per slice.
   for (List<ObjInfoDTO> alist : averageAssign) {
      wThread.incrementAndGet();
      executorServiceRead.execute(new MigraDataThread(this, alist, wThread));
   }

   // Poll until every worker has signalled completion.
   while (wThread.get() != 0L) {
      try {
         Thread.sleep(2000L);
      } catch (InterruptedException e) {
         // Restore the interrupt flag instead of swallowing it, and stop waiting.
         Thread.currentThread().interrupt();
         break;
      }
   }

   try {
      this.logger.info("total migrate rows :" + this.totalRow.get());
      executorServiceRead.shutdown();
      // awaitTermination already blocks for up to 1s per iteration; no extra sleep needed.
      while (!executorServiceRead.awaitTermination(1L, TimeUnit.SECONDS)) {
         // keep waiting for outstanding tasks to finish
      }
   } catch (InterruptedException e) {
      Thread.currentThread().interrupt();
   }
}
 

大表并发:

(1)基本对象为sqlmodel(也即一条查询sql)

(2)对大表进行查询分片处理,生成多条查询sql;

(3)使用固定线程池,如果有空闲线程,填充一条sql进行查询;

   /**
    * Migrates table data with per-query (SQL-slice) parallelism.
    *
    * For each table: load its column metadata, then either slice it into several
    * range-SELECTs (big tables, &gt; 1000 rows) or build a single full-table SELECT,
    * and feed each resulting SqlModel to a fixed-size reader pool as threads free up.
    * Aborts early when {@code progress.getState() == -1} (user cancel).
    */
   private void migraBySi() {
      AtomicInteger count = new AtomicInteger(0);
      // Number of concurrent reader threads in the pool.
      final int readNum = 8;
      // How many range-SELECT slices a big table is split into.
      final int sqlSlicesNum = 4;

      // Counts in-flight ReadThread tasks; each one decrements it on completion.
      final AtomicLong wThread = new AtomicLong(0L);
      final ExecutorService executorServiceRead = new ThreadPoolExecutor(
            readNum, readNum, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());

      try {
         for (int si = 0; si < this.list.size() && this.progress.getState() != -1; ++si) {
            // Throttle: do not start analysing the next table while every
            // reader thread is still busy.
            while (wThread.get() >= (long) readNum && this.progress.getState() != -1) {
               Thread.sleep(200L);
            }

            ObjInfoDTO obj = (ObjInfoDTO) this.list.get(si);
            List<SqlModel> sqlList = new ArrayList<>();
            DruidPooledConnection connSource = null;
            try {
               // Work on a clone so the shared table metadata is not mutated.
               ObjInfoDTO<Table> table = new ObjInfoDTO<>();
               table.setData(((Table) obj.getData()).getCloneTable());
               table.setName(obj.getName());
               table.setSchema(obj.getSchema());
               table.setTargetSchema(obj.getTargetSchema());

               connSource = DruidHelper.getInstance().getSourceConnection();
               List<Column> columns = SourceDBUtil.getbase().getPrepareController()
                     .getColumns(connSource, (Table) table.getData());
               ((Table) table.getData()).setColumns(columns);

               // A table with no columns cannot be migrated: count it as
               // processed and move on to the next one.
               if (columns == null || columns.isEmpty()) {
                  if (this.progress != null) {
                     this.progress.getValue().incrementAndGet();
                  }
                  continue;
               }

               Table primKeyColumn = SourceDBUtil.getbase().getPrepareController()
                     .getPrimKeyColumn(connSource, (Table) table.getData());
               table.setData(primKeyColumn);

               int size = SourceDBUtil.getbase().getPrepareController()
                     .getTableSize(connSource, (Table) table.getData());
               // Tables above this row-count threshold are read through several
               // sliced queries so they can be consumed in parallel.
               boolean isBigTable = size > 1000;

               boolean includeBlob = SourceDBUtil.getbase().getPrepareController().isIncludeBlob(columns);
               String columnBulider = this.getCopyColumnBulider(columns);

               if (isBigTable) {
                  // One SqlModel per generated range-SELECT slice.
                  List<String> bsqlList = SourceDBUtil.getbase().getPrepareController()
                        .getSlicesSelectSQL(table, sqlSlicesNum);
                  for (String sql : bsqlList) {
                     sqlList.add(this.buildSqlModel(sql, table, columns, includeBlob, columnBulider));
                  }
               } else {
                  // Small table: a single full-table SELECT.
                  String schemaName = SourceDBUtil.getbase().getKeyWordService()
                        .convertorNameToSelf(table.getSchema());
                  String tableName = SourceDBUtil.getbase().getKeyWordService()
                        .convertorNameToSelf(table.getName());
                  String sql = "SELECT " + this.getSeleteColumnBuilder(columns)
                        + " FROM " + schemaName + "." + tableName;
                  sqlList.add(this.buildSqlModel(sql, table, columns, includeBlob, columnBulider));
               }

               // Feed each slice SQL to the pool; sleep (instead of busy-spinning,
               // which the original did) whenever all readers are occupied.
               int sqlmodei = 0;
               while (sqlmodei < sqlList.size() && this.progress.getState() != -1) {
                  if (wThread.get() < (long) readNum) {
                     wThread.incrementAndGet();
                     SqlModel sqlModel = sqlList.get(sqlmodei);
                     this.logger.info("executorServiceRead-----start :" + sqlModel.getTableName());
                     executorServiceRead.execute(new ReadThread(this, sqlModel, wThread, count));
                     ++sqlmodei;
                  } else {
                     Thread.sleep(200L);
                  }
               }

               this.progress.getValue().incrementAndGet();
            } catch (SQLException e) {
               // Record the failed table and keep migrating the rest.
               ProcedureInfoTO info = MigrateResultUtils.createProcedureInfoTO(
                     obj.getSchema() + "." + obj.getName(), 1, e.getMessage());
               this.tableCode.addFailTableList(info);
               this.logger.info("Error loadDataSlit:" + obj.getSchema() + "." + obj.getName() + e.getMessage());
            } finally {
               JdbcUtils.close(connSource);
            }
         }

         // Drain: wait for all outstanding read tasks unless cancelled.
         while (wThread.get() != 0L && this.progress.getState() != -1) {
            Thread.sleep(200L);
         }
      } catch (InterruptedException e) {
         // Restore the interrupt flag instead of swallowing it.
         Thread.currentThread().interrupt();
      } finally {
         try {
            this.logger.info("total migrate rows :" + this.totalRow.get());
            executorServiceRead.shutdown();
            while (!executorServiceRead.awaitTermination(1L, TimeUnit.SECONDS)) {
               // keep waiting for running readers to finish
            }
         } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
         }
      }
   }

   /**
    * Builds one {@link SqlModel} describing a single read query for {@code table}
    * and logs it. Shared by the big-table (sliced) and small-table branches.
    */
   private SqlModel buildSqlModel(String sql, ObjInfoDTO<Table> table, List<Column> columns,
         boolean includeBlob, String columnBulider) {
      SqlModel sqlModel = new SqlModel();
      sqlModel.setSql(sql);
      sqlModel.setSchemaName(table.getSchema());
      sqlModel.setTargetSchemaName(table.getTargetSchema());
      sqlModel.setTableName(table.getName());
      sqlModel.setIsInLob(includeBlob);
      sqlModel.setColumns(columns);
      sqlModel.setColumnBulider(columnBulider);
      this.logger.info("SqlList :" + sql);
      return sqlModel;
   }
 

注:

核心理念在于:预先对迁移任务做模糊分析(不追求精确完整,尽可能提高分析速度);利用多线程并发技术处理迁移对象,把整体任务拆解为一个个独立的迁移单位;执行迁移时让系统资源得到充分利用——在执行部分迁移对象的同时,并行地分析处理其他迁移对象。

系统性能分析:系统性能分析从入门到进阶

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值