在将Spark应用提交到集群时,如果需要加载本地(驱动节点)的配置文件,例如:
...
spark-submit \
--class ... \
--jars ... \
yourApp.jar \
file:///yourLocalFilePath/conf.properties
在解析该配置文件时,通常我们将其处理为HashMap形式,通过key-value方式实现便捷的配置参数读取。具体实现方式如下:
package com.gendlee1991.utils
import java.io.{
FileInputStream, FileNotFoundException, InputStreamReader}
import java.util.Properties
import org.apache.commons.logging.LogFactory
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{
FSDataInputStream, Path}
import scala.collection.mutable.HashMap
class BigDataConf(propertiesPath: String) extends Serializable
{
val log = LogFactory.getLog(classOf[BigDataConf])
private val settings = new HashMap[String, String]()
load(propertiesPath)
/** Loads configuration key-value pairs from the properties file at the given path. */
def load(propertiesPath: String): Unit =
  loadProPertiesFile(propertiesPath)
def loadProPertiesFile(propertiesPath: String)
{
var in: FSDataInputStream = null
var inr: InputStreamReader = null
try
{
val conf = new Configuration()
val path = new Path(propertiesPath