Common Impala commands

Set the shell encoding: export LANG='zh_CN.utf8'
Enter the shell: impala-shell
List all databases: show databases;
List all tables: show tables;
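
As a quick illustration, a session combining these commands might look like the sketch below. The prompt shown is illustrative, and the hzresult001 table is an assumption for the example (the name is borrowed from the HDFS paths in the history further down):

export LANG='zh_CN.utf8'
impala-shell
[master235:21000] > show databases;
[master235:21000] > show tables;
[master235:21000] > select count(*) from hzresult001;
[master235:21000] > quit;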

The rest of this post is the raw output of the history command on this cluster. It covers day-to-day work with HDFS, HBase, Hive, Impala, and Spark; a few annotated sketches of the recurring patterns follow after the dump.

518 hadoop fs -put /home/scb/hxs.txt /wenzhou/table/hxs/
519 hadoop fs -put /home/scb/hxs.txt /wenzhou/table/hxs/hxs1.txt
520 hadoop fs -put /home/scb/001/.part-r-00000.crc /wenzhou/table/hzresult001/
521 hadoop fs -put /home/scb/002/part-r-00000 /wenzhou/table/hzresult002/
522 hadoop fs -put /home/scb/003/part-r-00000 /wenzhou/table/hzresult003/
523 hadoop fs -put /home/scb/004/part-r-00000 /wenzhou/table/hzresult004/
524 SELECT... query
525 ll
526 cd hbase
527 ll
528 cd conf
529 ll
530 cat hbase-site.xml
531 ll
532 cat /etc/hosts
533 jps
534 jps
535 su root
536 hive shell
537 service impala-state-store status
538 service impala-server status
539 service impala-server start
540 su root
541 cd /home/hrf/public/
542 ll
543 hbase shell < 11.txt
544 cd /home/hrf/public/
545 ll
546 hbase shell < 11.txt
547 cd /home/hrf/public/
548 ll
549 hbase shell < article.txt
550 hbase shell
551 hbase shell
552 jps
553 stop-hbase.sh
554 stop-all.sh
555 jps
556 start-all.sh
557 jps
558 start-hbase.sh
559 jps
560 hbase shell
561 ll
562 su hadoop
563 exit
564 cd /home/hrf/public/
565 ll
566 hbase shell < article.txt
567 cd /home/hrf/public/
568 ll
569 hbase shell < 11.txt
570 ll
571 hbase shell
572 ll
573 hbase shell < article.txt
574 hbase shell
575 ll
576 ll
577 su - hadoop
578 exit
579 ll
580 cd /home/hrf/public/
581 ll
582 hbase shell
583 ll
584 hbase shell < article.txt
585 cd /home/hrf/public/
586 hbase shell < article.txt
588 jps
589 exit
590 jps
591 ll
592 su hadoop
593 jps
594 hbase shell
595 ll
596 cd /home/hrf/public/
597 ll
598 cat article.txt
599 hbase shell
600 free
601 top
602 hbase shell
603 jps
604 exit
605 jps
606 vi /hadoop/hbase
607 /hadoop/hbase
608 cd /hadoop/hbase
609 ll
610 cd logs
611 ll
612 vi hbase-hadoop-master-master235.log
613 ll
614 tail -f hbase-hadoop-master-master235.log
615 hbase shell
616 list
617 free
619 l
620 jps
621 hbase shell
622 cd /home/hrf/public/
623 ll
624 hbase shell < article.txt
625 top
626 ll
627 l
628 exit
629 ll
630 cd /home/hrf/public/
631 ls
632 hbase shell < article.txt
633 cd /home/hrf/public/
634 hbase shell < article.txt
635 cat jps
636 jps
637 impala
638 jps
639 hbase -shell
640 hbase shell
641 jps
642 ps -ef|grep 14379
643 pwd
644 cd ..
645 ls
646 cd ..
647 ls
648 cd scala-2.10.3/
649 ls
650 cd bin/
651 ls
652 jps
653 ls
654 cd /hadoop/soft/
655 ls
656 tar zxvf spark-0.9.0-incubating-bin-hadoop2.tgz
657 ls
658 tar zxvf scala-2.10.3.tgz
659 ls
660 mv scala-2.10.3 spark-0.9.0-incubating-bin-hadoop2 ./..
661 ls
662 cd ..
663 ls
664 vi ~/.bashrc
665 ls'
666 '
667 ls
668 ln -s spark-0.9.0-incubating-bin-hadoop2/ spark
669 ls
670 cd spark
671 ls
672 cd examples/target/
673 ls
674 pwd
675 vi ~/.bashrc
676 ls
677 cd spark-examples_2.10-0.9.0-ic
678 cd ..
679 ls
680 cd ..
681 ls
682 cd /hadoop/
683 ls
684 cd spark
685 ls
686 cd conf/
687 ls
688 mv spark-env.sh.template spark-env.sh
689 ls
690 vi spark-env.sh
691 ls
692 source ~/.bashrc
693 scala -version
694 vi slaves
695 ls
696 cd ..
697 ls
698 cd ..
699 ls
700 scp -r spark-0.9.0-incubating-bin-hadoop2/ scala-2.10.3/ hadoop@slave234:/hadoop/
701 scp -r spark-0.9.0-incubating-bin-hadoop2/ scala-2.10.3/ hadoop@slave233:/hadoop/
702 jps
703 ls
704 pwd
705 cd spark/bin/
706 ls
707 ll
708 . /run-example org.apache.spark.examples.SparkPi spark://master:7077
709 ./run-example org.apache.spark.examples.SparkPi spark://master:7077
710 ./run-example org.apache.spark.examples.SparkPi spark://master:2357077
711 jps
712 su root
713 -shell
714 hbase shell
715 jps
716 ps -ef|grep 14379
717 pwd
718 cd ..
719 ls
720 cd ..
721 ls
722 cd scala-2.10.3/
723 ls
724 cd bin/
725 ls
726 hbase shell
727 hbase shell </home/llm/wp.txt
728 hbase shell </home/llm/ajwp.txt
729 hbase shell </home/llm/SJGJ.txt
730 hbase shell </home/llm/dpa.txt
731 hbase shell </home/llm/gma.txt
732 hbase shell </home/llm/hc.txtttttttttt
733 hbase shell </home/llm/jdc.txt
734 hbase shell </home/llm/jd.txt
735 hbase shell </home/llm/jg.txt
736 hbase shell </home/llm/jz.txt
737 hbase shell </home/llm/lg.txt
738 hbase shell </home/llm/ma.txt
739 hbase shell </home/llm/mhcg.txt
740 hbase shell </home/llm/mhjg.txt
741 hbase shell </home/llm/qza.txt
742 hbase shell </home/llm/sqa.txt
743 hbase shell </home/llm/vehicle_trail.txt
744 hbase shell </home/llm/wb.txt
745 hbase shell </home/llm/wyd.txt
746 hbase shell </home/llm/zj.txt
747 cd
748 hadoop fs -ls /
749 hadoop fs -ls /wenzhou
750 hadoop fs -ls /wenzhou/table
751 hadoop fs -ls /wenzhou/table/hzresult001
752 hadoop fs -get /wenzhou/table/hzresult001/part-r-00000 .
753 scp part-r-00000 patrick@192.168.24.181:/home/patrick/tmp
754 hadoop fs -ls /
755 hadoop fs -ls /wenzhou
756 hadoop fs -ls /wenzhou/table
757 hadoop fs -ls -R /wenzhou/table
758 cd
759 ls
760 cd /hadoop/
761 ls
762 cd
763 ls
764 mkdir hyl
765 ls
766 cd h
767 cd hyl/
768 ls
769 ls -la
770 ls -lar
771 ls -laR
772 hadoop fs -ls /wenzhou/table
773 hadoop fs -ls -R /wenzhou/table
774 hadoop fs -put case7/part-r-00000 /wenzhou/table/hzresult007/
775 hadoop fs -rm /wenzhou/table/hzresult001/part-r-00000
776 hadoop fs -rm /wenzhou/table/hzresult001/.part-r-00000.crc
777 hadoop fs -put case1/part-r-00000 /wenzhou/table/hzresult001/
778 hadoop fs -rm /wenzhou/table/hzresult003/part-r-00000
779 hadoop fs -put case3/part-r-00000 /wenzhou/table/hzresult003/
780 hadoop fs -rm /wenzhou/table/hzresult004/part-r-00000
781 hadoop fs -put case4/part-r-00000 /wenzhou/table/hzresult004
782 hadoop fs -rm /wenzhou/table/hzresult002/part-r-00000
783 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
784 hadoop fs -ls /
785 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
786 hadoop fs -ls -R /wenzhou/table
787 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
788 hadoop dfsadmin --report
789 hadoop dfsadmin -report
790 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
791 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
792 hadoop fs -rm /wenzhou/table/hzresult002/part-r-00000
793 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
794 hadoop fs -rm /wenzhou/table/hzresult002/part-r-00000
795 hadoop fs -ls -R /wenzhou/table/
796 hadoop fs -ls -R /wenzhou/table/hzresult002
797 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
798 cd
799 cd hyl/
800 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
801 free -m
802 jps
803 stop-yarn.sh
804 jps
805 hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
806 exit
807 hbase shell
808 cd /home
809 l
810 ls
811 cd hrf/
812 ls
813 cd public/
814 ls
815 hbase shell < article.txt
816 top
817 hbase shell < article.txt
818 jps
819 stop-all.sh
820 jps
821 stop-hbase.sh
822 jps
823 start-all.sh
824 jps
825 start-hbase.sh
826 jps
827 hbase shell
828 hadoop fsadmin -report
829 hadoop dfsadmin -report
830 hadoop dfsadmin -safemode ge
831 hadoop dfsadmin -safemode get
832 hbase shell
833 top
834 jps
835 stop-hbase.sh
836 hbase shell
837 jps
838 free
839 free -m
840 cd /hadoop/hbase
841 ll
842 cd ..
843 cd hbase-config/
844 ll
845 vi hbase-env.sh
846 ll
847 cat /etc/hosts
848 scp hbase-env.sh slave234:/hadoop/hbase-config/
849 scp hbase-env.sh slave233:/hadoop/hbase-config/
850 stop-hbase.sh
851 jps
852 start-hbase.sh
853 jps
854 hbase shell
855 cat /etc/redhat-
856 cat /etc/redhat-release
857 uname -a
858 impala-shell
859 ll
860 vi hbase-env.sh
861 ll
862 cat /etc/hosts
863 scp hbase-site.xml slave234:/hadoop/hbase-config/
864 scp hbase-site.xml slave233:/hadoop/hbase-config/
865 stop-hbase.sh
866 jps
867 start-hbase.sh
868 jps
869 hbase shell
870 jps
871 cd /home/hrf/public/
872 ll
873 hbase shell < article.txt
874 stop-hbase.sh
875 jps
876 cd /hadoop
877 ll
878 start-all.sh
879 jps
880 start-hbase.sh
881 jps
882 hive --service hiveserver &
883 jps
884 service impala-state-store start
885 exit
886 vi /etc/hosts
887 jps
888 cd /var/
889 ls
890 cd log/
891 ls
892 cd ../
893 lsx
894 ls
895 cd /var/log/impala/
896 ls
897 ll
898 vi impalad.master235.impala.log.ERROR.20140613-094218.2404
899 service iptables status
900 su root
901 jps
902 hive
903 service impala-catalog status
904 jps
905 hive
906 jps
907 su root
908 hive
909 ls
910 mysql
911 cd /hadoop/hive
912 ls
913 cd conf/
914 ls
915 vi hive-site.xml
916 mysql
917 mysql root -q hadoop
918 mysql root -q unimas
919 mysql root -q
920 ls
921 vi hive-site.xml
922 mysql hadoop -q
923 mysql hadoop -p
924 ps -ef|grep mysql
925 ls
926 mysql -u hadoop -p
927 hive
928 ls
929 vi hive-site.xml
930 service mysqld satus
931 which mysql
932 whereis mysql
933 ps -ef|grep mysql
934 service impala-state-store status
935 service impala-catlog status
936 service impala-catalog status
937 service impala-catalog start
938 su root
939 hadoop fs -ls /unimas/cs
940 hadoop fs -mkdir /unimas/cs/net_zj_pq/
941 hadoop fs -ls /unimas/cs
942 hadoop fs -ls /unimas/cs/net_zj/
943 hadoop fs -ls /unimas/cs/net_zj_pq
944 hadoop fs -ls /unimas/cs/net_zj_pq/
945 hadoop fs -ls /unimas/cs/
946 hadoop fs -ls /unimas/cs/net_zj_pq/
947 hadoop fs -mkdir /unimas/cs/net_zj_pq/ny=201301
948 hadoop fs -ls /unimas/cs/net_zj_pq/
949 hadoop fs -chmod 777 /unimas/cs/net_zj_pq/
950 clear
951 hadoop fs -ls /unimas/cs/net_zj_pq/
952 hadoop fs -rm /unimas/cs/net_zj_pq/.impala_insert_staging
953 hadoop fs -rm r /unimas/cs/net_zj_pq/.impala_insert_staging
954 hadoop fs -rmdir /unimas/cs/net_zj_pq/.impala_insert_staging
955 hadoop fs -ls /unimas/cs/net_zj_pq/
956 hadoop fs -lsR /unimas/cs/net_zj_pq/
957 hadoop fs -lsr /unimas/cs/net_zj_pq/
958 hadoop fs -rmdir /unimas/cs/net_zj_pq/ny=201301
959 hadoop fs -rmdir /unimas/cs/net_zj_pq/ny=201302
960 hadoop fs -rmdirr /unimas/cs/net_zj_pq/ny=201302
961 hadoop fs -rmdir f /unimas/cs/net_zj_pq/ny=201302
962 hadoop fs -rmr /unimas/cs/net_zj_pq/ny=201302
963 hadoop fs -rmr /unimas/cs/net_zj_pq/ny=2013*
964 hadoop fs -mkdir /unimas/cs/net_zj_pqt/
965 hadoop fs -chmod 777 /unimas/cs/net_zj_pqt/
966 clear
967 exit;
968 jps
969 hive
970 jps
971 ps -ef|grep metastore
972 jps
973 hive --service hiveserver -p 9083 &
974 jps
975 hive
976 jps
977 jps
978 hive
979 su root
980 clear
981 hadoop fs -lsr /unimas/cs/net_zj_pq/
982 hadoop fs -lsr /unimas/cs/
983 hive
984 hive -hiveconf hive.root.logger=DEBUG,console
985 cd /hadoop/
986 ls
987 cd hive
988 ls
989 cd conf/
990 ls
991 vi hive-site.xml
992 ps -ef |grep hive
993 jsp
994 jps
995 exit
996 hbase shell
997 cd /hadoop/
998 hive
999 hbase shell
1000 hive
1001 ls
1002 cd /home/dm/
1003 kls
1004 ls
1005 hadoop fs -ls /unimas/cs/qs_lgy_2011/
1006 hadoop fs -lsr /unimas/cs/qs_lgy_2011/
1007 hadoop fs -rm /unimas/cs/qs_lgy_2011/ny=201301/lg.txt
1008 hadoop fs -rm /unimas/cs/qs_lgy_2011/ny=201302/lg.txt
1009 hadoop fs -rm /unimas/cs/qs_lgy_2011/ny=201303/lg.txt
1010 hadoop fs -rm /unimas/cs/qs_lgy_2011/ny=201304/lg.txt
1011 hadoop fs -put lg.txt /unimas/cs/qs_lgy_2011/ny=201301/lg.txt
1012 hadoop fs -put lg.txt /unimas/cs/qs_lgy_2011/ny=201302/
1013 hadoop fs -put lg.txt /unimas/cs/qs_lgy_2011/ny=201303/
1014 hadoop fs -put lg.txt /unimas/cs/qs_lgy_2011/ny=201304/
1015 history
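
A few of the recurring patterns in the dump, written out as short sketches.

Restart routine. Entries 553-560, 819-827, 882-885, and 936-937 show the order used when HBase or Impala stops responding: take HBase down, bounce Hadoop, bring HBase back, then the Hive and Impala services. Roughly:

stop-hbase.sh
stop-all.sh
start-all.sh
start-hbase.sh
hive --service hiveserver &
service impala-state-store start
service impala-catalog start
service impala-server start
jps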
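
Loading HBase from script files. Entries 543-586 and 727-746 pipe text files into the shell (hbase shell < article.txt, hbase shell < /home/llm/lg.txt, and so on). The contents of those files are never shown in the post; a file in that style is simply a list of ordinary HBase shell commands, for example a hypothetical article.txt:

create 'article', 'info'
put 'article', 'row1', 'info:title', 'test title'
put 'article', 'row1', 'info:content', 'test content'
count 'article'
exit

Feeding the file through standard input runs each command exactly as if it were typed interactively, which is why the dump keeps re-running hbase shell < article.txt after every restart.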
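
Swapping the data behind a result table. Entries 775-806 repeatedly delete the old part-r-00000 under /wenzhou/table/hzresultNNN and upload a new one. If those directories back Impala or Hive tables (an assumption; the table name below is a guess based on the path), the full cycle would be:

hadoop fs -rm /wenzhou/table/hzresult002/part-r-00000
hadoop fs -put case2/part-r-00000 /wenzhou/table/hzresult002/
impala-shell -q "refresh hzresult002"

The refresh step is not in the dump, but without it Impala keeps serving the old file list from its cached metadata.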
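
Partition directories. Entries 940-965 create directories named like /unimas/cs/net_zj_pq/ny=201301, and entries 1005-1014 drop lg.txt files into /unimas/cs/qs_lgy_2011/ny=201301 through ny=201304. That ny=YYYYMM naming is the layout Impala and Hive expect for a table partitioned on a ny column. A sketch of the matching DDL, assuming a single-column text table; the table name, column definition, and delimiter are assumptions, only the partition key and locations come from the dump:

create external table qs_lgy_2011 (line string)
partitioned by (ny string)
row format delimited fields terminated by '\t'
location '/unimas/cs/qs_lgy_2011';

alter table qs_lgy_2011 add partition (ny='201301') location '/unimas/cs/qs_lgy_2011/ny=201301';
refresh qs_lgy_2011;

Replacing a file inside an existing partition (entries 1007-1014) only needs the refresh; a brand-new ny=... directory also needs the alter table ... add partition step.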
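
Spark setup. Entries 654-710 unpack Scala 2.10.3 and Spark 0.9.0 under /hadoop, symlink spark, edit spark-env.sh and slaves, copy both directories to slave233 and slave234, and then run SparkPi against spark://master:7077. The edits themselves are not shown; for a standalone Spark 0.9 cluster they would typically look like the following (paths and memory size are assumptions):

# spark-env.sh
export SCALA_HOME=/hadoop/scala-2.10.3
export SPARK_MASTER_IP=master
export SPARK_WORKER_MEMORY=1g

# slaves
slave233
slave234

The SparkPi invocation attempted is entry 709: ./run-example org.apache.spark.examples.SparkPi spark://master:7077.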

