Batch-generating sqoop tasks with Java

I wrote a Java class to batch-generate sqoop import commands as a shell script. Writing sqoop commands one line at a time is, frankly, exhausting; with this class you generate them all at once. Have a look:
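
The class reads a plain text file (the testtest.txt path in the code) containing one Greenplum partition child-table name per line. As a hedged illustration only, assuming Greenplum's usual parent_1_prt_<partition> child-table naming, the input might look like this:

adgrouphists_1_prt_p20150101
adgrouphists_1_prt_p20150201
adgrouphists_1_prt_p20150301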


package data.clear;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.List;

import org.apache.commons.io.FileUtils;

/**
 * Reads a list of Greenplum partition child-table names (one per line) and
 * prints a sqoop import command for each 2015 partition, redirecting the
 * output into a shell script so the commands do not have to be written by hand.
 */
public class GeneratePartitions {

    public static void main(String[] args) throws IOException {
        // One Greenplum table name per line.
        List<String> lines = FileUtils.readLines(new File("d:\\wilson.zhou\\Desktop\\testtest.txt"), "UTF-8");

        // %s placeholders: source table, parent table (target directory), partition (target subdirectory).
        String template = "sqoop import --connect jdbc:postgresql://10.1.2.130:5432/xmo_dw --table %s --fields-terminated-by '\\t' --null-string '\\\\N' --null-non-string '\\\\N' --target-dir /staging/db_backup/greenplum/%s/%s  --username xmo_summarizer --password YCt452uz";

        String gp_table = "";
        String table = "";
        String partition = "";

        // Redirect System.out into the generated shell script.
        PrintStream ps = new PrintStream(new FileOutputStream("d:\\wilson.zhou\\Desktop\\adgrouphists_2015.sh"));
        System.setOut(ps);

        for (String line : lines) {
            line = line.trim();
            gp_table = line;
            // Only 2015 partitions; skip anything from 2016.
            if (!line.contains("2016") && line.contains("2015")) {
                if (line.startsWith("rtb_summary_ext")) {
                    table = "rtb_summary_ext";
                    partition = line.substring(line.lastIndexOf("_") + 1).replace("p", "");

                    // Commented-out alternative: dump the partition to CSV with psql copy instead of sqoop.
                    // System.out.println("echo `date '+%m-%d %-H:%-M:%-S'` " + gp_table);
                    // System.out.println("mkdir -p /mapr/hkidc.hadoop.iclick/staging/db_backup/greenplum/" + table + "/" + partition);
                    // System.out.println("psql xmo_dw -c \"copy xmo_dw." + gp_table + " to '/mapr/hkidc.hadoop.iclick/staging/db_backup/greenplum/" + table + "/" + partition + "/" + partition + ".csv'\"");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " -m 1 --fetch-size 50000");
                } else if (line.startsWith("rtb_summary")) {
                    table = "rtb_summary";
                    partition = line.substring(line.lastIndexOf("_20") + 1).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " -m 1 --fetch-size 50000");
                } else if (line.startsWith("eventdatas_old")) {
                    table = "eventdatas_old";
                    partition = line.substring(line.lastIndexOf("_20") + 1).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " --split-by id --fetch-size 50000");
                } else if (line.startsWith("eventdatas_1")) {
                    table = "eventdatas";
                    partition = line.substring(line.lastIndexOf("_p") + 1).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " --split-by id --fetch-size 50000");
                } else if (line.startsWith("rtb_datas")) {
                    table = "rtb_datas";
                    partition = line.substring(line.lastIndexOf("_p") + 2).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " --split-by id --fetch-size 50000");
                } else if (line.startsWith("clickdatas")) {
                    table = "clickdatas";
                    partition = line.substring(line.lastIndexOf("_") + 1).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " --split-by id --fetch-size 50000");
                } else if (line.startsWith("imageviews")) {
                    table = "imageviews";
                    partition = line.substring(line.lastIndexOf("_p") + 2).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " --split-by id --fetch-size 50000");
                } else if (line.startsWith("adgrouphists")) {
                    table = "adgrouphists";
                    partition = line.substring(line.lastIndexOf("_") + 1).replace("p", "");

                    System.out.println("echo \"" + gp_table + "\t" + table + "\t" + partition + "\"");
                    System.out.println(String.format(template, gp_table, table, partition) + " --split-by id --fetch-size 50000");
                }
            }
        }
        ps.close();
    }
}
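
Running the class writes everything to adgrouphists_2015.sh. For the hypothetical input line adgrouphists_1_prt_p20150101 shown earlier, the generated pair of lines would look roughly like this (the echo fields are tab-separated), given the template in the code:

echo "adgrouphists_1_prt_p20150101	adgrouphists	20150101"
sqoop import --connect jdbc:postgresql://10.1.2.130:5432/xmo_dw --table adgrouphists_1_prt_p20150101 --fields-terminated-by '\t' --null-string '\\N' --null-non-string '\\N' --target-dir /staging/db_backup/greenplum/adgrouphists/20150101  --username xmo_summarizer --password YCt452uz --split-by id --fetch-size 50000

The script can then be run directly, for example with something like sh adgrouphists_2015.sh, and each echo line marks which partition is currently being imported.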

