目录
1.通过当前类加载器
代码实现(加载数据库连接信息):
public class ConnectionDemo {
@Test
public void test5() throws Exception{
//1. 读取配置文件的基本信息
InputStream is = ConnectionDemo.class.getClassLoader().getResourceAsStream("jdbc.properties");
Properties properties = new Properties();
properties.load(is);
String url = properties.getProperty("url");
String user = properties.getProperty("user");
String passwork = properties.getProperty("password");
String driverName = properties.getProperty("driverName");
//2. 加载驱动
Class.forName(driverName);
//3. 获取连接
Connection con = DriverManager.getConnection(url,user,password);
System.out.println(con);
}
}
配置文件:
jdbc.properties:
url=jdbc:mysql://localhost:3306/test?rewriteBatchedStatements=true
driverName=com.mysql.jdbc.Driver
user=root
password=12345
2.通过系统类加载器
代码实现(加载数据库连接信息):
/**
 * Opens a database connection using settings read from
 * {@code jdbc.properties} through the system class loader.
 *
 * NOTE(review): the property key "passwork" (sic) deliberately matches the
 * key spelled that way in the accompanying jdbc.properties — rename both
 * together or neither.
 *
 * @return an open JDBC {@link Connection}
 * @throws Exception if the configuration cannot be read, the driver class
 *                   is not found, or the connection attempt fails
 */
public static Connection getConnection() throws Exception {
    // Read configuration; try-with-resources closes the stream
    // (the original never closed it).
    try (InputStream is = ClassLoader.getSystemClassLoader()
            .getResourceAsStream("jdbc.properties")) {
        Properties pro = new Properties();
        pro.load(is);
        String url = pro.getProperty("url");
        String user = pro.getProperty("user");
        String password = pro.getProperty("passwork"); // key kept as in the config file
        String driverName = pro.getProperty("driverName");
        // Register the driver; Class.forName triggers the driver's static
        // initializer, which self-registers with DriverManager.
        Class.forName(driverName);
        // Open and return the connection.
        return DriverManager.getConnection(url, user, password);
    }
}
配置文件:
jdbc.properties:
url=jdbc:mysql://localhost:3306/test?rewriteBatchedStatements=true
driverName=com.mysql.jdbc.Driver
user=root
passwork=12345
3.通过当前线程的上下文类加载器
代码实现(加载kafka配置信息):
// Create the configuration object and load consumer.properties from the
// classpath via the current thread's context class loader.
// NOTE(review): the InputStream returned by getResourceAsStream is never
// closed — consider try-with-resources.
Properties pro = new Properties();
pro.load(Thread.currentThread().getContextClassLoader().getResourceAsStream("consumer.properties"));
// Build the Kafka consumer that reads the data collected by Flume.
KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(pro);
配置文件:
consumer.properties:
bootstrap.servers=hadoop15:9092,hadoop15:9092,hadoop15:9092
key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
value.deserializer=org.apache.kafka.common.serialization.StringDeserializer
group.id=real
enable.auto.commit=true
auto.commit.interval.ms=1000
4.通过流的方式
代码实现(获取druid数据库连接):
public class ConnUtils {
    /**
     * Druid connection pool, initialized once from {@code src//druid.properties}.
     * NOTE(review): if initialization fails the exception is only printed and
     * {@code ds} stays null, so getConnection() will then throw an NPE.
     */
    private static DataSource ds;

    static {
        Properties pro = new Properties();
        // 1. Read the pool configuration; try-with-resources closes the
        //    stream (the original leaked the FileInputStream).
        try (FileInputStream fis = new FileInputStream(new File("src//druid.properties"))) {
            pro.load(fis);
            // 2. Create the Druid connection pool from the loaded properties.
            ds = DruidDataSourceFactory.createDataSource(pro);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Borrows a connection from the Druid pool.
     *
     * @return an open pooled {@link Connection}
     * @throws Exception if the pool cannot supply a connection
     */
    public static Connection getConnection() throws Exception {
        // 3. Obtain a connection from the pool.
        return ds.getConnection();
    }
}
配置信息:
druid.properties:
url=jdbc:mysql:///test
username=root
password=12345
driverClassName=com.mysql.jdbc.Driver
initialSize=10
maxActive=10
5.通过ResourceBundle
代码实现:
package ct.common.constant;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.ResourceBundle;
public class ConfigConstant {
    /** All key/value pairs copied from the "ct" resource bundle at class load. */
    private static final Map<String, String> map = new HashMap<>();

    static {
        // Load the "ct" bundle (ct.properties; locale-aware classpath lookup).
        ResourceBundle ct = ResourceBundle.getBundle("ct");
        Enumeration<String> keys = ct.getKeys();
        while (keys.hasMoreElements()) {
            String key = keys.nextElement();
            map.put(key, ct.getString(key));
        }
    }

    /**
     * Returns the configuration value for {@code key}, or {@code null}
     * if the bundle has no such key.
     */
    public static String getVal(String key) {
        return map.get(key);
    }

    /** Quick manual check: prints the value bound to "cf.info". */
    public static void main(String[] args) {
        System.out.println(ConfigConstant.getVal("cf.info"));
    }
}
配置文件:
ct.properties:
namespace=ct
table=ct:calllog
topic=ct
cf.caller=caller
cf.info=info