package com.qjzh.bigdata.api.hadoop.hdfs.dao.imp;
import java.io.IOException;
import java.net.URI;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Logger;
import com.google.gson.Gson;
import com.qjzh.bigdata.api.hadoop.hdfs.dao.HdfsDaoI;
import com.qjzh.bigdata.api.utils.BigDataUtils;
import com.qjzh.bigdata.api.utils.HdfsFile;
import com.qjzh.bigdata.api.utils.PropertiesUtils;
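/**
 * HDFS DAO implementation: writes JSON payloads into date-partitioned
 * directories under {@code mainDir} and lists/reads them back. Connection
 * settings (URL, user, root directory, and an optional HA namenode pair)
 * are loaded once from the application properties via PropertiesUtils.
 */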
public class HdfsDaoImp implements HdfsDaoI {

    private static final Logger log = Logger.getLogger(HdfsDaoImp.class);

    private static Configuration conf = new Configuration();
    private static String hdfs_url = null;
    private static String hdfs_user = null;
    private static String mainDir = null;
    private static String haNn1;
    private static String haNn2;

    static {
        try {
            hdfs_url = PropertiesUtils.getProValue("hdfs_url");
            hdfs_user = PropertiesUtils.getProValue("hdfs_user");
            mainDir = PropertiesUtils.getProValue("mainDir");
            haNn1 = PropertiesUtils.getProValue("hdfs_dfs.ha.nn1");
            haNn2 = PropertiesUtils.getProValue("hdfs_dfs.ha.nn2");
        } catch (Exception e) {
            log.error("Failed to load HDFS connection properties", e);
        }
    }
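    /**
     * Opens a FileSystem handle for the configured cluster. If an HA
     * namenode pair is configured (hdfs_dfs.ha.nn1/nn2), the client is
     * wired up with the ns1 nameservice and the standard
     * ConfiguredFailoverProxyProvider so it can fail over between the
     * two namenodes transparently.
     */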
    private FileSystem getHdfsSystem() throws Exception {
        log.info("HdfsDaoImp.getHdfsSystem start");
        if (haNn1 != null && !"".equals(haNn1)) {
            conf.set("fs.defaultFS", hdfs_url);
            conf.set("dfs.nameservices", "ns1");
            conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
            conf.set("dfs.namenode.rpc-address.ns1.nn1", haNn1);
            conf.set("dfs.namenode.rpc-address.ns1.nn2", haNn2);
            conf.set("dfs.client.failover.proxy.provider.ns1",
                    "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
        }
        FileSystem fs = FileSystem.get(new URI(hdfs_url), conf, hdfs_user);
        log.info("HdfsDaoImp.getHdfsSystem end");
        return fs;
    }
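    /**
     * Ad-hoc test harness: builds a small list of maps, serializes it to
     * JSON with Gson, and writes it under the 20121212 date partition.
     * The commented-out calls appear to be earlier manual experiments,
     * kept here for reference.
     */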
    public static void main(String[] args) throws Exception {
        HdfsDaoImp ser = new HdfsDaoImp();
        // String fileType1 = "user.root.log";
        // StringBuffer fileContent1 = new StringBuffer("1990 21");
        // ser.saveFile(fileType1, fileContent1);
        // String fileType2 = "hu.input";
        // StringBuffer fileContent2 = new StringBuffer("1991 18");
        // ser.saveFile(fileType2, fileContent2);
        //
        // String fileType3 = "hu.input";
        // StringBuffer fileContent3 = new StringBuffer("1992 30");
        // ser.saveFile(fileType3, fileContent3);
        //
        String fileType4 = "test.test4";
        List<Map<String, Object>> sourResultMapList = new ArrayList<Map<String, Object>>();
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("saveType", "123");
        map.put("addCounts", 1);
        map.put("interCode", "coup");
        map.put("tag", null);
        map.put("2tag", "2");
        sourResultMapList.add(map);
        Map<String, Object> map1 = new HashMap<String, Object>();
        map1.put("saveType", "234");
        map1.put("addCounts", 5);
        map1.put("interCode", "coup");
        map1.put("2tag", null);
        map1.put("tag", "1");
        sourResultMapList.add(map1);
        // Map<String,Object> totalParam = GsonTools.getHdfsByEntityList(sourResultMapList);
        StringBuffer fileContent4 = new StringBuffer(new Gson().toJson(sourResultMapList));
        // System.out.println(fileContent4);
        ser.saveFileByDay("20121212", fileType4, fileContent4);
        // FileSystem fs = FileSystem.get(new URI(hdfs_url), conf, hdfs_user);
        // Path dirPath = new Path(mainDir);
        // fs.delete(dirPath);
        // List<HdfsFile> list2 = ser.getAllFiles();
        // System.out.println(list2);
        // ser.getContentByPath("20121212/test/test3/201601291553.json");
        System.out.println(getDirectoryName("20121212", fileType4));
    }
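    /**
     * Writes {@code content} to a new timestamped .json file under the
     * directory derived from the data date and the dotted file type
     * (e.g. "test.test4" maps to the test/test4 subdirectory). Creates
     * the directory on first use and hflushes before closing so the
     * bytes are visible to readers immediately.
     */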
    public boolean saveFileByDay(String yyyyMMdd, String fileType, StringBuffer content)
            throws Exception {
        log.info("HdfsDaoImp.saveFileByDay start");
        FileSystem fs = this.getHdfsSystem();
        String filename = getFilename();
        String dirname = getDirectoryName(yyyyMMdd, fileType);
        FSDataOutputStream out = null;
        try {
            Path dirPath = new Path(dirname);
            if (!fs.exists(dirPath)) {
                fs.mkdirs(dirPath);
            }
            Path filepath = new Path(dirname + "/" + filename);
            out = fs.create(filepath);
            // Encode as UTF-8 explicitly rather than relying on the platform default charset.
            out.write(content.toString().getBytes("UTF-8"));
            out.hflush();
            System.out.println(filepath.toUri().getPath());
        } catch (Exception e) {
            log.error("HDFS save failed", e);
            throw new Exception("Failed to persist to HDFS: " + e.toString());
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (Exception e) {
                    log.error("Failed to close output stream", e);
                }
            }
        }
        log.info("HdfsDaoImp.saveFileByDay end");
        return true;
    }
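    /**
     * Recursively lists the files stored under a given date partition and
     * dotted file type, returning their names and full URIs as HdfsFile
     * entries. Returns null if the listing fails (e.g. the directory does
     * not exist).
     */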
    public List<HdfsFile> getTypeFiles(String yyyyMMdd, String fileType) throws Exception {
        log.info("HdfsDaoImp.getTypeFiles start");
        List<HdfsFile> listFiles = null;
        FileSystem fs = this.getHdfsSystem();
        try {
            // Dotted file types map onto nested subdirectories, e.g. "a.b" -> "a/b".
            if (fileType.indexOf(".") != -1) {
                fileType = fileType.replace('.', '/');
            }
            Path dirPath = new Path(mainDir + yyyyMMdd + "/" + fileType + "/");
            RemoteIterator<LocatedFileStatus> filesIt = fs.listFiles(dirPath, true);
            listFiles = new ArrayList<HdfsFile>();
            while (filesIt.hasNext()) {
                LocatedFileStatus fileStatus = filesIt.next();
                Path filePath = fileStatus.getPath();
                String fileName = filePath.getName();
                HdfsFile file = new HdfsFile();
                file.setFileName(fileName);
                file.setFilePath(filePath.toUri().toString());
                listFiles.add(file);
            }
        } catch (Exception e) {
            log.error("HdfsDaoImp.getTypeFiles failed", e);
        }
        log.info("HdfsDaoImp.getTypeFiles end");
        return listFiles;
    }
    // Deletes every file under the root directory. Use with extreme caution.
    // Note: deleteOnExit only removes the marked paths when the FileSystem
    // is closed (or the JVM exits), not immediately.
    private void delete7gAllFile() {
        try {
            FileSystem fs = this.getHdfsSystem();
            Path dirPath = new Path(mainDir);
            RemoteIterator<LocatedFileStatus> filesIt = fs.listFiles(dirPath, true);
            while (filesIt.hasNext()) {
                LocatedFileStatus fileStatus = filesIt.next();
                Path filePath = fileStatus.getPath();
                fs.deleteOnExit(filePath);
                System.out.println(filePath);
            }
        } catch (Exception e) {
            log.error("HdfsDaoImp.delete7gAllFile failed", e);
        }
    }
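    /**
     * Streams the content of a file (path given relative to mainDir)
     * straight to stdout.
     */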
    public void getContentByPath(String filePath) throws Exception {
        FileSystem fs = this.getHdfsSystem();
        FSDataInputStream in = null;
        try {
            Path filepath = new Path(mainDir + filePath);
            in = fs.open(filepath);
            // copyBytes with close=true closes both streams when done; the
            // finally block below is only a safety net for failures in open/copy.
            IOUtils.copyBytes(in, System.out, 4096, true);
        } catch (IOException e) {
            log.error("HdfsDaoImp.getContentByPath failed", e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    log.error("Failed to close input stream", e);
                }
            }
        }
    }
    // Directory name, keyed by the data's own date. The data date trails the
    // current time, because each day's data is collected the following day.
    private static String getDirectoryName(String yyyyMMdd, String fileType) {
        // Dotted file types map onto nested subdirectories, e.g. "a.b" -> "a/b".
        if (fileType.indexOf(".") != -1) {
            fileType = fileType.replace('.', '/');
        }
        try {
            yyyyMMdd = BigDataUtils.formatHdfsDirDate(yyyyMMdd);
        } catch (Exception e) {
            log.error("Failed to format directory date " + yyyyMMdd, e);
        }
        return mainDir + yyyyMMdd + "/" + fileType;
    }
    // File name, based on the actual system time at the moment of writing.
    private static String getFilename() {
        SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmm");
        return format.format(new Date()) + ".json";
    }
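    /**
     * Recursively lists every file under the root directory, regardless of
     * date partition or file type. Returns null if the listing fails.
     */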
    public List<HdfsFile> getAllFiles() throws Exception {
        List<HdfsFile> listFiles = null;
        FileSystem fs = this.getHdfsSystem();
        try {
            Path dirPath = new Path(mainDir);
            RemoteIterator<LocatedFileStatus> filesIt = fs.listFiles(dirPath, true);
            listFiles = new ArrayList<HdfsFile>();
            while (filesIt.hasNext()) {
                LocatedFileStatus fileStatus = filesIt.next();
                Path filePath = fileStatus.getPath();
                String fileName = filePath.getName();
                HdfsFile file = new HdfsFile();
                file.setFileName(fileName);
                file.setFilePath(filePath.toUri().toString());
                listFiles.add(file);
            }
        } catch (Exception e) {
            log.error("HdfsDaoImp.getAllFiles failed", e);
        }
        return listFiles;
    }
}