No preamble, straight to the configuration.
1. vim /etc/profile
export JAVA_HOME=/usr/local/java/jdk1.8.0_271
export HADOOP_HOME=/usr/local/hadoop-3.3.0
export HBASE_HOME=/usr/local/hbase-2.3.3
export HIVE_HOME=/usr/local/hive-3.1.2
export SQOOP_HOME=/usr/local/sqoop-1.4.7
export SQOOP_SERVER_EXTRA_LIB=$SQOOP_HOME/extra
export HCAT_HOME=$SQOOP_HOME/testdata/hcatalog
export ACCUMULO_HOME=/home/src/java/org/apache/sqoop/accumul
export ZOOKEEPER_HOME=/usr/local/zookeeper-3.5.8
export SPARK_HOME=/usr/local/spark-3.0.1
export LD_LIBRARY_PATH=$HADOOP_HOME/lib/native/:$LD_LIBRARY_PATH
export PATH=$PATH:$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HBASE_HOME/bin:$SQOOP_HOME/bin:$SQOOP_SERVER_EXTRA_LIB:$HIVE_HOME/bin:$HCAT_HOME:$ACCUMULO_HOME:$ZOOKEEPER_HOME/bin:$SPARK_HOME/bin:$SPARK_HOME/sbin
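After saving /etc/profile, reload it in the current shell and spot-check a couple of the variables. A quick sanity check (my addition, assuming the install paths above match what is actually on disk):
source /etc/profile
echo $JAVA_HOME $HADOOP_HOME
java -version
hadoop version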
2. vim /usr/local/hadoop-3.3.0/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://master:9000</value>
<!-- HDFS URI: filesystem://namenode-host:port -->
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/usr/local/hadoop-3.3.0/tmp</value>
<!-- Local Hadoop temporary directory on the NameNode -->
</property>
<property>
<name>hadoop.proxyuser.root.hosts</name>
<value>*</value>
</property>
<property>
<name>hadoop.proxyuser.root.groups</name>
<value>*</value>
</property>
</configuration>
3. vim /usr/local/hadoop-3.3.0/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.replication</name>
<value>1</value>
<description>Number of replicas; the default is 3, and it should not exceed the number of DataNodes</description>
</property>
<property>
<name>dfs.permissions.enabled</name>
<value>false</value>
<description>Disable HDFS permission checking</description>
</property>
<property>
<name>dfs.datanode.use.datanode.hostname</name>
<value>true</value>
</property>
</configuration>
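To confirm that Hadoop actually picks up these values, hdfs getconf can be used; it only reads the configuration files, so the cluster does not need to be running yet (a verification step added here, not part of the original write-up):
hdfs getconf -confKey fs.defaultFS
hdfs getconf -confKey dfs.replication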
4. vim /usr/local/hadoop-3.3.0/etc/hadoop/yarn-site.xml
<configuration>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.resource.memory-mb</name>
<value>4096</value>
</property>
<property>
<name>yarn.nodemanager.resource.cpu-vcores</name>
<value>2</value>
</property>
<property>
<name>yarn.application.classpath</name>
<value>/usr/local/hadoop-3.3.0/etc/hadoop:/usr/local/hadoop-3.3.0/share/hadoop/common/lib/*:/usr/local/hadoop-3.3.0/share/hadoop/common/*:/usr/local/hadoop-3.3.0/share/hadoop/hdfs:/usr/local/hadoop-3.3.0/share/hadoop/hdfs/lib/*:/usr/local/hadoop-3.3.0/share/hadoop/hdfs/*:/usr/local/hadoop-3.3.0/share/hadoop/mapreduce/*:/usr/local/hadoop-3.3.0/share/hadoop/yarn:/usr/local/hadoop-3.3.0/share/hadoop/yarn/lib/*:/usr/local/hadoop-3.3.0/share/hadoop/yarn/*</value>
</property>
</configuration>
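With core-site.xml, hdfs-site.xml and yarn-site.xml in place, the usual next step is to format the NameNode once (only on a fresh install, since it wipes HDFS metadata) and bring up HDFS and YARN. A rough sketch, added here for completeness and assuming the hostname master resolves to this machine:
hdfs namenode -format
start-dfs.sh
start-yarn.sh
jps    # expect NameNode, DataNode, SecondaryNameNode, ResourceManager, NodeManager
If the daemons are started as root, Hadoop 3 also expects HDFS_NAMENODE_USER, HDFS_DATANODE_USER, HDFS_SECONDARYNAMENODE_USER, YARN_RESOURCEMANAGER_USER and YARN_NODEMANAGER_USER to be defined, for example in hadoop-env.sh.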
5. Create the ZooKeeper configuration file
cp /usr/local/zookeeper-3.5.8/conf/zoo_sample.cfg /usr/local/zookeeper-3.5.8/conf/zoo.cfg
6. vi /usr/local/zookeeper-3.5.8/conf/zoo.cfg
maxClientCnxns=300
dataDir=/home/zookeeper/data
server.1=master:2888:3888
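Before starting ZooKeeper, the dataDir should exist, and for the server.1 entry a matching myid file is expected (strictly required once more servers are added; harmless on a single node). A minimal sketch based on the paths in zoo.cfg:
mkdir -p /home/zookeeper/data
echo 1 > /home/zookeeper/data/myid
zkServer.sh start
zkServer.sh status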
7. vim /usr/local/hbase-2.3.3/conf/hbase-site.xml
<configuration>
<property>
<name>hbase.cluster.distributed</name>
<value>true</value>
</property>
<property>
<name>hbase.rootdir</name>
<value>hdfs://master:9000/hbase</value>
</property>
<property>
<name>hbase.zookeeper.property.clientPort</name>
<value>2181</value>
</property>
<property>
<name>hbase.zookeeper.quorum</name>
<value>master</value>
</property>
<property>
<name>hbase.zookeeper.property.dataDir</name>
<value>/home/zookeeper/data</value>
</property>
<property>
<name>zookeeper.znode.parent</name>
<value>/hbase/master</value>
</property>
<property>
<name>hbase.wal.provider</name>
<value>filesystem</value>
</property>
</configuration>
8. Copy the Hadoop configuration files into HBase's conf directory
cp /usr/local/hadoop-3.3.0/etc/hadoop/core-site.xml /usr/local/hbase-2.3.3/conf/
cp /usr/local/hadoop-3.3.0/etc/hadoop/hdfs-site.xml /usr/local/hbase-2.3.3/conf/
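Once HDFS and ZooKeeper are up, HBase can be started and checked. A brief verification sketch (my addition; it assumes the regionservers file lists master):
start-hbase.sh
jps    # expect HMaster and HRegionServer in addition to the Hadoop processes
echo "status" | hbase shell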
9. vim /usr/local/hive-3.1.2/conf/hive-env.sh
export HADOOP_HOME=/usr/local/hadoop-3.3.0
export HIVE_CONF_DIR=/usr/local/hive-3.1.2/conf
export HIVE_AUX_JARS_PATH=/usr/local/hive-3.1.2/lib
10. Copy the MySQL driver JAR into Hive
cp mysql-connector-java-8.0.20.jar /usr/local/hive-3.1.2/lib/
11. vim /usr/local/hive-3.1.2/conf/hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value>jdbc:mysql://[ip]:3306/hive?createDatabaseIfNotExist=true&amp;useUnicode=true&amp;characterEncoding=UTF-8&amp;useSSL=false&amp;allowPublicKeyRetrieval=true</value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.cj.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>root</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>Pass@word2020</value>
</property>
<property>
<name>hive.metastore.schema.verification</name>
<value>false</value>
</property>
<property>
<name>datanucleus.readOnlyDatastore</name>
<value>false</value>
</property>
<property>
<name>datanucleus.fixedDatastore</name>
<value>false</value>
</property>
<property>
<name>datanucleus.autoCreateSchema</name>
<value>true</value>
</property>
<property>
<name>datanucleus.schema.autoCreateAll</name>
<value>true</value>
</property>
<property>
<name>datanucleus.autoCreateTables</name>
<value>true</value>
</property>
<property>
<name>datanucleus.autoCreateColumns</name>
<value>true</value>
</property>
<property>
<name>hive.cli.print.header</name>
<value>true</value>
</property>
<property>
<name>hive.cli.print.current.db</name>
<value>true</value>
</property>
<property>
<name>hive.metastore.uris</name>
<value>thrift://master:9083</value>
</property>
</configuration>
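With the driver JAR in place and hive-site.xml filled in, the metastore schema can be initialized and the metastore service started. A minimal sketch (my addition; it assumes the MySQL server at [ip] is reachable with the credentials above, and the log path is arbitrary):
schematool -dbType mysql -initSchema
nohup hive --service metastore > /usr/local/hive-3.1.2/metastore.log 2>&1 &
hive -e "show databases;"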
12. vi /usr/local/sqoop-1.4.7/conf/sqoop-env.sh
#Set path to where bin/hadoop is available
export HADOOP_COMMON_HOME=/usr/local/hadoop-3.3.0
#Set path to where hadoop-*-core.jar is available
export HADOOP_MAPRED_HOME=/usr/local/hadoop-3.3.0
#set the path to where bin/hbase is available
export HBASE_HOME=/usr/local/hbase-2.3.3
#Set the path to where bin/hive is available
export HIVE_HOME=/usr/local/hive-3.1.2
#Set the path for where zookeeper config dir is
#export ZOOCFGDIR=
13. Copy the Hive configuration file into Sqoop (optional)
cp /usr/local/hive-3.1.2/conf/hive-site.xml /usr/local/sqoop-1.4.7/conf/
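A quick way to confirm the Sqoop wiring is to print its version and list databases on the same MySQL instance used by the metastore. This is only a sanity check added here; it reuses the [ip] placeholder and root credentials from hive-site.xml, and it assumes the MySQL connector JAR has also been copied into /usr/local/sqoop-1.4.7/lib/:
sqoop version
sqoop list-databases --connect jdbc:mysql://[ip]:3306/ --username root -P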
14. vim /usr/local/spark-3.0.1/conf/spark-env.sh
export JAVA_HOME=/usr/local/java/jdk1.8.0_271
export SPARK_MASTER_HOST=master
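To verify the Spark side, the standalone master and a worker can be started and a bundled example submitted. A rough sketch (my addition); full script paths are used because both Hadoop and Spark put an sbin/start-all.sh on the PATH:
/usr/local/spark-3.0.1/sbin/start-master.sh
/usr/local/spark-3.0.1/sbin/start-slave.sh spark://master:7077
run-example --master spark://master:7077 SparkPi 10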