1. 启动 jar 包,将 HBase 表数据导出到 HDFS
#!/bin/bash
# Export one HBase table to HDFS using the aaExport MapReduce job.
# Usage: aaa.sh <table>
# Output lands in /user/ads/hbase_tests/<table>.
source /etc/profile

if [ $# -ne 1 ]; then
  echo "aaa.sh table" >&2
  exit 1
fi

table=$1
out_dir="/user/ads/hbase_tests/${table}"

# Remove any stale output from a previous run so the MR job can create it.
# Failure here (e.g. path does not exist yet) is intentionally ignored.
hadoop fs -rm -r "${out_dir}"

if hadoop jar ./aasss.jar hbase.hfile.aa.aaExport \
    -Dmapreduce.job.queuename=regular "${table}" "${out_dir}"; then
  echo "export table success:${table}"
  exit 0
else
  echo "export table fail:${table}" >&2
  exit 1
fi
2. 用 distcp 在两个 HDFS 集群之间传递数据
#!/bin/bash
# Copy one table's exported hfiles between HDFS clusters with distcp:
#   1) precheck the source path, 2) skip if destination size already matches,
#   3) distcp, 4) sizecheck the result.
# Usage: distcp.sh <table>
source /etc/profile

hadoop_home="/home/hbase/bigdate/hadoop"
src_hdfs="hftp://11.1.114.111/user/aa/hbase_aatables/"
#dst_hdfs="hdfs://aaa/home/hbaaaaart/hfileaaaaaaaaaaaaaaaaaaaaaaaaas/"
dst_hdfs="as.as.sa01:/home/hbase/hfiles/"
# Number of distcp attempts; default 1 preserves the original single-try
# behaviour, override with DISTCP_ATTEMPTS=<n> to enable retries.
max_attempts=${DISTCP_ATTEMPTS:-1}

if [ $# -ne 1 ]; then
  echo "distcp.sh table" >&2
  exit 1
fi
table=$1

# Abort early if the source directory is missing or malformed.
if ! "${hadoop_home}/bin/hadoop" jar ew.jar hbase.ew.Checker precheck "${src_hdfs}${table}" debug; then
  echo "precheck src path error:${src_hdfs}${table}" >&2
  exit 1
fi

# If source and destination sizes already match, a previous copy succeeded.
if "${hadoop_home}/bin/hadoop" jar ew.jar hbase.ew.Checker sizecheck "${src_hdfs}${table}" "${dst_hdfs}${table}" debug; then
  echo "table has been copied:${table}"
  exit 0
else
  # NOTE(review): the removal path /home/hfiles differs from dst_hdfs
  # (/home/hbase/hfiles) — confirm which path the downstream importer reads.
  echo "remove /home/hfiles/${table}"
  "${hadoop_home}/bin/hadoop" fs -rm -r "/home/hfiles/${table}"
fi

for (( attempt = 1; attempt <= max_attempts; attempt++ )); do
  if ! "${hadoop_home}/bin/hadoop" distcp \
      -Dmapreduce.job.acl-view-job=dr.who \
      -Dmapreduce.job.queuename=offline.aaaa.normal \
      -pb -m 20 "${src_hdfs}${table}" "${dst_hdfs}"; then
    echo "distcp table fail:${table}" >&2
    # Clean up the partial copy before any retry.
    echo "remove /home/hfiles/${table}"
    "${hadoop_home}/bin/hadoop" fs -rm -r "/home/hfiles/${table}"
  else
    echo "distcp table success:${table}"
    echo "checksize /home/hfiles/${table}"
    # Copy only counts as done once the sizes match.
    if "${hadoop_home}/bin/hadoop" jar ew.jar hbase.ew.Checker sizecheck "${src_hdfs}${table}" "${dst_hdfs}${table}" debug; then
      echo "sizecheck table success:${table}"
      exit 0
    fi
  fi
done
exit 1
3. 将 HDFS 上的数据导入(bulk load)到 HBase
#!/bin/bash
# Bulk-load exported hfiles from HDFS into the matching HBase table.
# Usage: import.sh <table>
# Expects the hfiles under /home/hfiles/<table>.
source /etc/profile

if [ $# -ne 1 ]; then
  echo "import.sh table" >&2
  exit 1
fi

table=$1
# No trailing slash: avoids '//' in every composed path below.
hadoop_home="/home/hbase/bigdate/hadoop"
hfile_dir="/home/hfiles/${table}"

# Verify the hfile directory is present and well-formed before launching
# the (expensive) import job.
if ! "${hadoop_home}/bin/hadoop" jar hbase.importer.jar hbase.importer.Checker precheck "${hfile_dir}" debug; then
  echo "precheck rowfile path error:${hfile_dir}" >&2
  exit 1
fi

if "${hadoop_home}/bin/hadoop" jar hbase.importer.jar hbase.importer.Import \
    -Dhbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily=1024 \
    "${hfile_dir}" "${table}"; then
  echo "import table success:${table}"
  exit 0
else
  echo "import table fail:${table}" >&2
  exit 1
fi