1、列出目录及其子目录和文件
#!/bin/bash

# Recursively list every file under a directory, printing one absolute
# path per line. Directories themselves are not printed.
# Arguments: $1 - directory to scan (must exist)
# Outputs:   file paths on stdout
# Returns:   non-zero if the directory cannot be entered
scandir() {
  local workdir cur_dir entry
  workdir=$1
  cd "$workdir" || return 1
  if [ "$workdir" = "/" ]; then
    cur_dir=""
  else
    cur_dir=$(pwd)
  fi
  # Iterate with a glob instead of parsing `ls` so names containing
  # whitespace survive word-splitting.
  for entry in *; do
    [ -e "$entry" ] || continue   # empty directory: the glob stays literal
    if [ -d "$entry" ]; then
      cd "$entry" || continue
      scandir "$cur_dir/$entry"
      cd ..
    else
      echo "$cur_dir/$entry"
    fi
  done
}
# Entry point: $1 must be an existing directory; reject files and
# nonexistent paths with a diagnostic on stderr.
if [ -d "$1" ]; then
  scandir "$1"
elif [ -f "$1" ]; then
  echo "you entered a file, not a directory; please enter a directory and try again" >&2
  exit 1
else
  echo "the directory you entered does not exist; please enter another one" >&2
  exit 1
fi
2、上传本地文件到hdfs
#!/bin/bash

# Recursively walk a local directory tree and upload to HDFS every file
# whose name ends in a digit, recreating the file's parent-directory name
# under the HDFS prefix held in the global $distdir.
# Globals:   distdir (read) - HDFS target directory prefix set by the caller
# Arguments: $1 - local directory to scan
# Returns:   non-zero if the directory cannot be entered
scandir() {
  local workdir cur_dir entry file_path file_dir date_part hdfs_dir
  workdir=$1
  cd "$workdir" || return 1
  if [ "$workdir" = "/" ]; then
    cur_dir=""
  else
    cur_dir=$(pwd)
  fi
  # Glob instead of parsing `ls`; names containing whitespace survive.
  for entry in *; do
    [ -e "$entry" ] || continue
    if [ -d "$entry" ]; then
      cd "$entry" || continue
      scandir "$cur_dir/$entry"
      cd ..
    else
      file_path=$cur_dir/$entry
      # Upload only files whose name ends with a digit (date-stamped data
      # files) — same filter as the original `grep '[0-9]$'`.
      case $file_path in
        *[0-9])
          echo "正在上传文件:""$file_path"
          file_dir=$(dirname "$file_path")
          date_part=${file_dir##*/}   # last path component (the date directory)
          hdfs_dir=$distdir$date_part
          echo "创建文件上级目录:""$hdfs_dir"
          hadoop fs -mkdir "$hdfs_dir"
          echo "$?"
          if hadoop fs -put "$file_path" "$hdfs_dir"; then
            echo "已成功上传:""$file_path"
          fi
          ;;
      esac
    fi
  done
}
# Entry point: $1 = local directory to scan, $2 = HDFS target prefix
# (read by scandir via the global $distdir).
if [ -d "$1" ]; then
  distdir=$2
  scandir "$1"
elif [ -f "$1" ]; then
  echo "you entered a file, not a directory; please enter a directory and try again" >&2
  exit 1
else
  echo "the directory you entered does not exist; please enter another one" >&2
  exit 1
fi
3、mysql备份
[root@bx102 mysqlbackup]# cat backup.sh
#!/bin/sh
# Nightly mysqldump backup of the platform databases into a dated directory
# under /opt/mysqlbackup, pruning backups older than 7 days.
# NOTE(review): the password is visible in this file and on the command line
# (`ps`); prefer ~/.my.cnf or --defaults-extra-file for credentials.
backup_dir=/opt/mysqlbackup
date=$(date +%Y-%m-%d)
stamp=$(date +%Y-%m-%d_%H%M%S)
cd "$backup_dir" || exit 1
mkdir -p "$date"
for db in bigdata-tsa-platform ga_bigdata vrvbigdata-middleware vrvbigdata-platform vap1.0-106; do
  mysqldump -uroot -pvrv123456. "$db" > "$backup_dir/$date/$db$stamp.sql"
done
# Delete backup files older than 7 days. The original used `-mtime 7`,
# which matches only files exactly 7 days old and lets older backups
# accumulate forever; `-mtime +7` matches everything older.
find "$backup_dir" -name '*.sql' -mtime +7 -exec rm -f -- {} +
[root@bx102 mysqlbackup]#
[root@bx102 mysqlbackup]#
[root@bx102 mysqlbackup]# pwd
/opt/mysqlbackup
[root@bx102 mysqlbackup]# ll
total 2168
drwxr-xr-x. 2 root root 4096 Dec 20 21:00 2015-12-20
drwxr-xr-x. 2 root root 4096 Dec 21 21:00 2015-12-21
-rwxr-xr-x. 1 root root 730 Dec 22 11:09 backup.sh
-rw-r--r--. 1 root root 2202088 Dec 22 11:09 vap1.0-106.sql
[root@bx102 mysqlbackup]#
最后加上定时任务
[root@bx102 mysqlbackup]# crontab -l
00 21 * * * /opt/mysqlbackup/backup.sh
4、备份某个目录
[hadoop@bx106 vap_bak]$ cat backup.sh
#!/bin/sh
# Nightly backup of /home/hadoop/vapbackup: copy it aside, tar it with a
# date stamp, then prune archives older than 7 days.
date=$(date +%Y-%m-%d)
cp -r /home/hadoop/vapbackup /opt/vap_bak/vapbackup
cd /opt/vap_bak/ || exit 1
tar zcvf "vapbackup_$date.tar.gz" vapbackup
rm -rf vapbackup
# Delete archives older than 7 days. The original's `-mtime 7` matched only
# files exactly 7 days old, so older archives were never removed.
find . -name '*.tar.gz' -mtime +7 -exec rm -f -- {} +
[hadoop@bx106 vap_bak]$ pwd
/opt/vap_bak
[hadoop@bx106 vap_bak]$
设置定时任务
[hadoop@bx106 vap_bak]$ crontab -l
00 23 * * * /opt/vap_bak/backup.sh
[hadoop@bx106 vap_bak]$
5、替换遍历目录下的hive-site.xml文件
#!/bin/bash
for filepwd in `find . -name 'hive-site.xml'`
do
{
num=`expr length $filepwd`
echo ${filepwd:0:$[$num - 13]}
cp /opt/hive/conf/hive-site.xml ${filepwd:0:$[$num - 13]}
}
done
6、一键启动所有drill脚本
[hadoop@bx101 bin]$ ll
total 68
-rw-rw-r--. 1 hadoop hadoop 689 Oct 19 16:05 derby.log
-rwxrwxr-x. 1 hadoop hadoop 192 Dec 18 09:34 drillbit-all.sh
-rwxrwxr-x. 1 hadoop hadoop 6581 Jul 2 2015 drillbit.sh
-rwxrwxr-x. 1 hadoop hadoop 978 Jul 2 2015 drill-conf
-rwxrwxr-x. 1 hadoop hadoop 6342 Jul 2 2015 drill-config.sh
-rwxrwxr-x. 1 hadoop hadoop 964 Jul 2 2015 drill-embedded
-rwxrwxr-x. 1 hadoop hadoop 988 Jul 2 2015 drill-localhost
-rwxrwxr-x. 1 hadoop hadoop 1078 Jul 2 2015 dumpcat
-rw-rw-r--. 1 hadoop hadoop 820 Oct 14 10:03 export.hql
-rwxrwxr-x. 1 hadoop hadoop 104 Jul 2 2015 hadoop-excludes.txt
-rwxrwxr-x. 1 hadoop hadoop 1114 Jul 2 2015 runbit
-rwxrwxr-x. 1 hadoop hadoop 2288 Jul 2 2015 sqlline
-rwxrwxr-x. 1 hadoop hadoop 6006 Jul 2 2015 sqlline.bat
-rwxrwxr-x. 1 hadoop hadoop 1139 Jul 2 2015 submit_plan
[hadoop@bx101 bin]$ cat drillbit-all.sh
#!/usr/bin/env bash
# Run drillbit.sh with the given action ($1, e.g. start/stop) on every host
# listed in ../conf/slaves, skipping comment lines.
dir=$(dirname "$(readlink -f "$0")")
grep -v '^#' "$dir/../conf/slaves" | while read -r ip
do
  echo "$ip"
  # -n: do not read the loop's stdin (the slaves list) as ssh's stdin, and
  # pass the command directly instead of via a heredoc — this also avoids
  # the "Pseudo-terminal will not be allocated" warning.
  ssh -n hadoop@"$ip" /bin/bash "$dir/drillbit.sh" "$1"
done
[hadoop@bx101 bin]$ ./drillbit-all.sh start
bx101
Pseudo-terminal will not be allocated because stdin is not a terminal.
starting drillbit, logging to /opt/apache-drill-1.1.0/log/drillbit.out
bx102
Pseudo-terminal will not be allocated because stdin is not a terminal.
starting drillbit, logging to /opt/apache-drill-1.1.0/log/drillbit.out
bx103
Pseudo-terminal will not be allocated because stdin is not a terminal.
starting drillbit, logging to /opt/apache-drill-1.1.0/log/drillbit.out
bx104
Pseudo-terminal will not be allocated because stdin is not a terminal.
starting drillbit, logging to /opt/apache-drill-1.1.0/log/drillbit.out
bx105
Pseudo-terminal will not be allocated because stdin is not a terminal.
starting drillbit, logging to /opt/apache-drill-1.1.0/log/drillbit.out
bx106
Pseudo-terminal will not be allocated because stdin is not a terminal.
starting drillbit, logging to /opt/apache-drill-1.1.0/log/drillbit.out
[hadoop@bx101 bin]$ jps
6212 NameNode
8010 RunJar
5932 QuorumPeerMain
6678 DFSZKFailoverController
9167 Bootstrap
9606 Jps
7527 HMaster
6784 ResourceManager
7816 RunJar
5769 FsShell
9372 Drillbit
6472 JournalNode
[hadoop@bx101 bin]$