spark-1.4配置

42 篇文章 0 订阅
spark-defaults.conf:

 
 
spark.master                    spark://hadoop:7077
spark.eventLog.enabled          true
spark.eventLog.dir              hdfs://hadoop:9000/spark
spark.serializer                org.apache.spark.serializer.KryoSerializer
spark.driver.cores              2
spark.driver.memory             512m
spark.executor.memory           1g
spark.shuffle.consolidateFiles  true
spark.sql.shuffle.partitions    100
spark.driver.userClassPathFirst true
spark.streaming.blockInterval   100
spark.cleaner.ttl               90000
spark.yarn.historyServer.address        hadoop:18088
spark.history.fs.logDirectory   hdfs://hadoop:9000/spark/log
spark.driver.extraLibraryPath   $HADOOP_HOME/lib/native
spark.executor.extraLibraryPath $HADOOP_HOME/lib/native

spark.tachyonStore.url          tachyon://hadoop:19998
spark.tachyonStore.baseDir      /home/hadoop/data/tachyon


spark-env.sh:

# Toolchain locations used by the Spark launch scripts.
export SCALA_HOME=/home/hadoop/scala
# Hostname/IP the standalone master binds to (matches spark://hadoop:7077 in spark-defaults.conf).
export SPARK_MASTER_IP=hadoop
# Total memory each worker daemon may hand out to executors on its node.
export SPARK_WORKER_MEMORY=3G
export JAVA_HOME=/home/hadoop/jdk
# NOTE(review): SPARK_EXECUTOR_INSTANCES/SPARK_EXECUTOR_CORES apply to YARN mode,
# while the SPARK_WORKER_* settings apply to standalone mode — presumably both
# deployment modes are used from this install; confirm which one is intended.
export SPARK_EXECUTOR_INSTANCES=6
# Cores each worker daemon may allocate (3 workers x 3 cores per node below).
export SPARK_WORKER_CORES=3
export SPARK_EXECUTOR_CORES=1
# Run 3 worker daemons per node in standalone mode.
export SPARK_WORKER_INSTANCES=3
export SPARK_DRIVER_MEMORY=1G
# export SPARK_CLASSPATH=$TACHYON_HOME/client/target/tachyon-client-0.6.4-jar-with-dependencies.jar:$SPARK_CLASSPATH

# Prepend jblas to the classpath. NOTE(review): SPARK_CLASSPATH is deprecated in
# Spark 1.x in favor of spark.{driver,executor}.extraClassPath — works in 1.4 but
# emits a warning; consider migrating.
export SPARK_CLASSPATH=/home/hadoop/spark/lib/jblas-1.2.4.jar:$SPARK_CLASSPATH


hive-site.xml:

<configuration>
        <property>
           <name>javax.jdo.option.ConnectionURL</name>
           <value>jdbc:mysql://127.0.0.1:3306/hive?createDatabaseIfNotExist=true</value>
           <description>JDBC connect string for a JDBC metastore</description>
        </property>
        <property>
           <name>javax.jdo.option.ConnectionDriverName</name>
           <value>com.mysql.jdbc.Driver</value>
           <description>Driver class name for a JDBC metastore</description>
        </property>
        <property>
           <name>javax.jdo.option.ConnectionUserName</name>
           <value>root</value>
           <description>Username to use against metastore database</description>
        </property>
        <property>
           <name>javax.jdo.option.ConnectionPassword</name>
           <value>root</value>
           <description>Password to use against metastore database</description>
        </property>
        <property>
          <name>hive.metastore.uris</name>
          <value>thrift://127.0.0.1:9083</value>
          <description>Thrift URI for the remote metastore. Used by metastore client to connect to remote metastore.</description>
        </property>
        <property>
          <name>hive.server2.thrift.min.worker.threads</name>
          <value>3</value>
        </property>
        <property>
          <name>hive.server2.thrift.max.worker.threads</name>
          <value>20</value>
        </property>
        <property>
          <name>hive.server2.thrift.port</name>
          <value>10000</value>
        </property>
        <property>
          <name>hive.server2.thrift.bind.host</name>
          <value>hadoop</value>
        </property>
</configuration>

core-site.xml:

<configuration>
        <property>
           <name>fs.tachyon.impl</name>
           <value>tachyon.hadoop.TFS</value>
        </property>
</configuration>


mvn -Pyarn -Phadoop-2.6 -Dhadoop.version=2.6.0 -Phive -Phive-thriftserver -Dscala-2.11 -Pspark-ganglia-lgpl -DskipTests clean package



more:

http://spark.apache.org/docs/latest/configuration.html

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值