Some commands for running Hadoop, Spark, Hive, and Kafka



sudo service iptables stop
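To confirm the firewall is actually down, and optionally keep it from coming back on reboot, the usual checks on a CentOS 6-style system are:

sudo service iptables status
sudo chkconfig iptables off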


sudo service mysqld start
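A quick way to verify MySQL (presumably backing the Hive metastore here) came up:

sudo service mysqld status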


jobs -l
kill -9 4500
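jobs -l lists this shell's background jobs with their PIDs; 4500 above is just an example PID taken from that output. If the process was not started from this shell, something like the following finds it instead:

ps -ef | grep hive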


hadoop fs -ls /
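Other hadoop fs subcommands follow the same pattern, e.g.:

hadoop fs -mkdir /tmp/test
hadoop fs -cat /path/to/file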



nohup ./hive --service metastore &
./hive
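If the metastore started cleanly it should be listening on its default port (9083); a quick check, assuming netstat is installed:

netstat -nltp | grep 9083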



//Hive
create table table_0406 (
id int,
name string,
age int)
row format delimited fields terminated by ','
stored as textfile;
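To confirm the table was created as expected:

show tables;
describe table_0406;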


load data local inpath '/home/hadoop/hadoop_home/apache-hive-1.2.1-bin/test.txt' into table table_0406;


insert into table table_0406 values (200, 'lisi', 20);
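A quick sanity check that both the loaded rows and the inserted row are visible:

select * from table_0406;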


hadoop fs -put test.log /hive/warehousedir/table_0406/
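Files copied straight into a table's warehouse directory become visible to queries without any reload; this assumes /hive/warehousedir is the hive.metastore.warehouse.dir configured for this install. To confirm the new rows were picked up:

select count(*) from table_0406;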


//drop table
drop table table_0406;
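table_0406 is a managed table, so dropping it also deletes its data from HDFS; this can be confirmed by listing the warehouse directory afterwards:

hadoop fs -ls /hive/warehousedir/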



//Spark
stop-all.sh
start-all.sh
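jps is a quick way to confirm the daemons restarted; on a standalone Spark cluster the Master and Worker processes should appear in its output:

jps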


cd hadoop-home/bin


nohup ./hive --service metastore &

./hive
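With the metastore up, the Hive tables above can also be queried from Spark. A minimal spark-shell session, assuming the Spark 1.6-era HiveContext API used elsewhere in these notes:

./spark-shell
scala> val hiveContext = new org.apache.spark.sql.hive.HiveContext(sc)
scala> hiveContext.sql("select * from table_0406").show()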



//Kafka (distributed message queue)

// similar in role to ActiveMQ
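The topic commands below assume ZooKeeper and the Kafka broker are already running; if not, a typical start sequence from the Kafka bin directory is:

./zookeeper-server-start.sh -daemon ../config/zookeeper.properties
./kafka-server-start.sh -daemon ../config/server.properties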


[hadoop@teradata1 bin]$ ./kafka-topics.sh --create --zookeeper teradata1:2181 --replication-factor 1 --partitions 1 --topic kafka_test


[hadoop@teradata1 bin]$ ./kafka-topics.sh --list --zookeeper teradata1:2181
kafka_test


[hadoop@teradata1 bin]$ ./kafka-topics.sh --describe --zookeeper teradata1:2181
Topic:kafka_test PartitionCount:1 ReplicationFactor:1 Configs:
Topic: kafka_test Partition: 0 Leader: 0 Replicas: 0 Isr: 0


[hadoop@teradata1 bin]$ ./kafka-console-producer.sh --broker-list teradata1:9092 --topic kafka_test


[hadoop@teradata1 bin]$ ./kafka-console-consumer.sh --zookeeper teradata1:2181 --topic kafka_test --from-beginning
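With the producer and consumer running in separate terminals, anything typed into the producer should appear in the consumer, which makes a quick end-to-end check of the broker; --from-beginning also replays any messages already in the topic.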




// on Centos2 (the second node)
cd /spark_home/spark-1.6.0...../


bin/run-example org.apache.spark.examples.streaming.KafkaWordCount teradata1 my-consumer-group kafka_test 1
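The four arguments to KafkaWordCount are, in order, the ZooKeeper quorum, the consumer group id, a comma-separated list of topics, and the number of consumer threads. To generate test input, the matching producer example bundled with Spark can be used (arguments: broker list, topic, messages per second, words per message), assuming it is present in this build:

bin/run-example org.apache.spark.examples.streaming.KafkaWordCountProducer teradata1:9092 kafka_test 10 5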










