# Auto-resolve every conflicted (status "C") file, keeping the local working copy.
# xargs -r (GNU) skips running svn entirely when there are no conflicts,
# instead of invoking `svn resolve` with no targets.
1. svn st | awk '$1 == "C" {print $2}' | xargs -r svn resolve --accept working
# Print the basenames (last "/"-separated field) of the newest 60 HDFS entries
# under /home/ whose listing line contains "log", space-separated.
# Fixes: the awk program's closing single quote was missing, and `\$NF` inside
# single quotes passed a literal backslash to awk (a syntax error) — it must be $NF.
2. hadoop fs -ls /home/ | grep "log" | tail -60 | awk -F"/" '{if (NF >= 3) printf("%s ", $NF)}'
# Crontab entry: every day at 08:00, delete everything under /da1/ older than 5 days.
# -mindepth 1 excludes "." itself — without it GNU find tries to delete the
# starting directory and errors on every run (cron then mails the failure daily).
3. 0 8 * * * cd /da1/ && find ./ -mindepth 1 -mtime +5 -delete
# Count the lines in today's baike dump and alert if it looks truncated.
# Reading via stdin redirection makes wc print only the number, so the fragile
# `| cut -d' ' -f1` (which breaks when wc pads the count with leading spaces)
# is no longer needed.
4. records=$(wc -l < "baike.online.$day")
echo "$records"
if [ "$records" -lt 7000000 ]; then
  echo "data size too small"
  # NOTE(review): `mail -s` is missing a recipient address here (as in the
  # original) — confirm the intended alert address and append it.
  echo "Please Check data $records" | mail -s "Baike Data Error"
  # exit -1 is non-portable (exit takes 0-255); use 1.
  exit 1
else
  # Success path: just report OK. The original mailed a "Baike Data Error"
  # alert even when the data was fine, and then repeated this whole if-block
  # a second time verbatim — both removed as paste errors.
  echo "data size is ok"
fi
# Find the oldest (first-listed) baike directory under /home/ and remove it.
# $NF is the last whitespace-separated field of each `hadoop fs -ls` line, i.e.
# the path; -F ' ' was awk's default and is dropped.
5. old_dir=$(hadoop fs -ls /home/ | awk '{print $NF}' | grep baike | head -n 1)
# Unquoted $old_dir: word-splitting is intentional (one path per word; at most
# one here because of `head -n 1`).
for dir in $old_dir; do
  # `-rm -r` replaces the deprecated `-rmr`. The original ran this identical
  # loop twice; the second pass could only fail on already-deleted paths, so
  # the duplicate was removed.
  hadoop fs -rm -r "$dir"
done