package org.apache.hadoop.hdfs.namenode;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.HdfsAuditLogger;
import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringInterner;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.xml.sax.SAXException;
/**
 * A trie keyed on path characters, used to decide whether a given
 * (path, command) pair is registered for auditing.
 *
 * <p>Each node holds one character; children are kept in an array sorted in
 * ascending character order. A node whose {@code leaf} flag is set marks the
 * end of a registered path and carries the list of commands audited for that
 * path. A registered path covers itself and every descendant path — matches
 * end only at '/'-component boundaries, so "/ab" covers "/ab/x" but not "/abc".
 *
 * <p>Reads ({@code isAuth}) are lock-free; writes ({@code add}) are
 * synchronized. Callers mutating a tree shared with readers should build a
 * fresh tree and swap it in, as CharacterTreeAuditLogger does.
 */
class CharacterTree {
  /** Maximum size the child array may grow to (mirrors ArrayList's limit). */
  private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8;

  /** The character this node matches ('/' for the synthetic root). */
  private final char c;
  /** Children, sorted ascending by character; only the first subTreeSize slots are used. */
  private CharacterTree[] subTree = new CharacterTree[4];
  private int subTreeSize = 0;
  /**
   * True when a registered path ends at this node. Mutable (was final) so a
   * shorter path can be registered after a longer one already created this
   * node as an interior node — previously such registrations were silently lost.
   */
  private boolean leaf;
  /** Commands audited for the path ending here; non-null whenever leaf is true. */
  private String[] commands;

  /** Creates an interior (non-leaf) node for {@code c}. */
  CharacterTree(char c) {
    this(c, false, null);
  }

  /**
   * @param c the character this node matches.
   * @param leaf whether a registered path ends here.
   * @param commands commands audited for that path; required when leaf is true.
   * @throws IllegalArgumentException if leaf is true and commands is null.
   */
  CharacterTree(char c, boolean leaf, String[] commands) {
    this.c = c;
    this.leaf = leaf;
    if (leaf) {
      if (commands == null) {
        throw new IllegalArgumentException("auth commands must not be null. ");
      }
      this.commands = commands;
    }
  }

  /** @return true if {@code command} appears in this leaf's command list. */
  private boolean commandMatches(String command) {
    for (int j = 0; j < commands.length; j++) {
      if (command.equals(commands[j])) {
        return true;
      }
    }
    return false;
  }

  /** @return true if a match ending at {@code index} stops at a path-component boundary. */
  private static boolean atBoundary(String path, int index) {
    return index == path.length() - 1 || path.charAt(index + 1) == '/';
  }

  /**
   * Recursive equivalent of {@link #isAuth(String, String)}, matching
   * {@code path.charAt(index)} against this node's children.
   *
   * @param path absolute path starting with '/'; must not be null.
   * @param index the position in {@code path} to match at this level.
   * @param command the command being audited.
   * @return true if some registered prefix of {@code path}, ending at a
   *         component boundary, lists {@code command}.
   */
  public boolean isAuth(String path, int index, String command) {
    char want = path.charAt(index);
    for (int i = 0; i < subTreeSize; i++) {
      CharacterTree child = subTree[i];
      if (child.c < want) {
        continue; // children sorted ascending; not there yet
      }
      if (child.c > want) {
        break; // passed the slot where 'want' would live: no match
      }
      // Matched this character; a registered path ending here at a component
      // boundary may authorize the command.
      if (child.leaf && atBoundary(path, index) && child.commandMatches(command)) {
        return true;
      }
      if (index + 1 < path.length()) {
        return child.isAuth(path, index + 1, command);
      }
      return false;
    }
    return false;
  }

  /**
   * Checks whether {@code path} or one of its ancestor directories has been
   * registered with {@code command}.
   *
   * @param path absolute path starting with '/'; must not be null.
   * @param command the command being audited.
   * @return true if some registered prefix of {@code path}, ending at a
   *         component boundary, lists {@code command}.
   */
  public boolean isAuth(String path, String command) {
    if (path.length() <= 1) {
      return false; // the bare root "/" is never registered
    }
    CharacterTree current = this;
    outer:
    for (int index = 1; index < path.length(); index++) {
      char want = path.charAt(index);
      for (int i = 0; i < current.subTreeSize; i++) {
        CharacterTree child = current.subTree[i];
        if (child.c < want) {
          continue; // children sorted ascending; keep scanning
        }
        if (child.c > want) {
          break; // passed the slot where 'want' would live
        }
        current = child;
        if (current.leaf && atBoundary(path, index) && current.commandMatches(command)) {
          return true;
        }
        // No match at this registered prefix (or not at a boundary): keep
        // descending — a longer registered path may still match. Previously a
        // command mismatch at a shorter registered path aborted the whole
        // lookup, hiding deeper registrations; the recursive overload already
        // descended, so the two overloads now agree.
        continue outer;
      }
      return false; // no child matches this character
    }
    return false;
  }

  /**
   * Registers {@code path} so that the given commands on it (and anything
   * below it) are audited.
   *
   * @param path absolute path starting with '/', without a trailing '/'.
   * @param commands the commands to audit for this path; must not be null.
   */
  public synchronized void add(String path, String[] commands) {
    add(path, 1, commands);
  }

  /**
   * Inserts the suffix of {@code path} starting at {@code index} below this
   * node, marking the final node as a leaf carrying {@code commands}.
   *
   * @param path absolute path starting with '/'.
   * @param index position of the character to place at this level (>= 1).
   * @param commands the commands to audit; stored on the last node only.
   */
  public synchronized void add(String path, int index, String[] commands) {
    char want = path.charAt(index);
    boolean last = (index == path.length() - 1);

    // Find the sorted position of 'want' among the children. (Previously the
    // comparison was inverted relative to the ascending insert order, so any
    // character greater than the smallest child was never found and a
    // duplicate, unreachable node was inserted.)
    int pos = 0;
    while (pos < subTreeSize && subTree[pos].c < want) {
      pos++;
    }

    CharacterTree node;
    if (pos < subTreeSize && subTree[pos].c == want) {
      node = subTree[pos];
      if (last) {
        // Re-registration, or a shorter path added after a longer one that
        // created this node as interior: promote it to a leaf. (Previously
        // the registration was silently dropped.)
        if (commands == null) {
          throw new IllegalArgumentException("auth commands must not be null. ");
        }
        node.leaf = true;
        node.commands = commands;
      }
    } else {
      node = new CharacterTree(want, last, last ? commands : null);
      // Grow before shifting. (Previously the check was 'size > capacity'
      // instead of '>=', overflowing the array at the 5th distinct child.)
      growIfFull();
      // Shift larger children right and insert at the sorted position.
      System.arraycopy(subTree, pos, subTree, pos + 1, subTreeSize - pos);
      subTree[pos] = node;
      subTreeSize++;
    }
    if (!last) {
      node.add(path, index + 1, commands);
    }
  }

  /** Doubles the child array when it is full, bounded by MAX_ARRAY_SIZE. */
  private void growIfFull() {
    if (subTreeSize < subTree.length) {
      return;
    }
    int newCapacity = subTree.length * 2;
    // overflow-conscious: doubling may wrap negative near Integer.MAX_VALUE
    if (newCapacity < 0 || newCapacity > MAX_ARRAY_SIZE) {
      throw new RuntimeException("array can not be expanded.");
    }
    subTree = Arrays.copyOf(subTree, newCapacity);
  }
}
/**
 * An {@link HdfsAuditLogger} that emits audit records only for paths that are
 * explicitly registered, with their commands, in an XML "audit file"
 * ({@code dfs.namenode.audit.file}). The file is re-read periodically; each
 * reload builds a fresh {@link CharacterTree} and swaps it in atomically.
 *
 * <p>Expected file shape: a root {@code <audit-config>} element containing
 * {@code <audit-directory>} children, each with a {@code <path>} (absolute
 * URI starting with fs.defaultFS) and a comma-separated {@code <commands>}.
 */
public class CharacterTreeAuditLogger extends HdfsAuditLogger {

  public static final Log auditLog = LogFactory.getLog(FSNamesystem.class.getName() + ".audit");

  /** Location of the XML file listing the audited paths and their commands. */
  String auditFile;
  /** Interval (ms) between checks of the audit file's modification time. */
  private long auditFileCheckInterval;
  /**
   * Root of the path trie. A reload builds a complete new tree and installs it
   * here; volatile so logging threads promptly see the replacement.
   */
  private transient volatile CharacterTree root = new CharacterTree('/');
  /** Daemon timer driving the periodic reload task. */
  private Timer timer = new Timer(true);
  /** Value of fs.defaultFS with any trailing '/' stripped; prefixes logged paths. */
  private String defaultFS;
  /** Per-thread builder so concurrent audit calls do not share a buffer. */
  private static final ThreadLocal<StringBuilder> auditBuffer =
      new ThreadLocal<StringBuilder>() {
        @Override
        protected StringBuilder initialValue() {
          return new StringBuilder();
        }
      };

  /**
   * Reads settings from the supplied configuration and schedules the periodic
   * audit-file reload.
   *
   * <p>Bug fix: the passed-in {@code conf} was previously overwritten with a
   * fresh {@code new Configuration()}, silently ignoring every caller-supplied
   * setting.
   *
   * @param conf the NameNode configuration.
   */
  @Override
  public void initialize(Configuration conf) {
    logAuditMessage("CharacterTreeAuditLogger is initializing");
    auditFile = conf.get("dfs.namenode.audit.file", "/usr/local/hadoop/etc/hadoop/auditfile.xml");
    auditFileCheckInterval = conf.getLong("dfs.namenode.audit.file.check.interval", 10000L);
    defaultFS = conf.get("fs.defaultFS", ""); // default avoids an NPE when the key is unset
    if (defaultFS.endsWith("/")) {
      defaultFS = defaultFS.substring(0, defaultFS.length() - 1);
    }
    timer.scheduleAtFixedRate(new ReloadAuditFileTask(auditFile), 10, auditFileCheckInterval);
  }

  /**
   * Logs one audit record, but only when {@code src} (or, for rename, either
   * {@code src} or {@code dst}) matches a registered path/command pair. The
   * line format mirrors the default HDFS audit log.
   */
  @Override
  public void logAuditEvent(boolean succeeded, String userName, InetAddress addr, String cmd, String src, String dst,
      FileStatus status, UserGroupInformation ugi, DelegationTokenSecretManager dtSecretManager) {
    if (auditLog.isInfoEnabled() && src != null && cmd != null) {
      // Read the volatile root once so one consistent tree is used throughout.
      CharacterTree tempRoot = root;
      if ("rename".equals(cmd)) {
        // rename matches when either endpoint is registered; dst may be null
        // (previously a null dst caused an NPE inside isAuth).
        boolean dstAuth = dst != null && tempRoot.isAuth(dst, cmd);
        if (!dstAuth && !tempRoot.isAuth(src, cmd)) {
          return;
        }
      } else {
        if (!tempRoot.isAuth(src, cmd)) {
          return;
        }
      }
      final StringBuilder sb = auditBuffer.get();
      sb.setLength(0);
      sb.append("allowed=").append(succeeded).append("\t");
      sb.append("ugi=").append(userName).append("\t");
      sb.append("ip=").append(addr).append("\t");
      sb.append("cmd=").append(cmd).append("\t");
      sb.append("src=").append(defaultFS).append(src).append("\t");
      // When dst is null this prints "dst=null" (matching the default HDFS
      // audit format); otherwise the path is prefixed with defaultFS.
      sb.append("dst=").append(dst == null ? "" : defaultFS).append(dst).append("\t");
      if (null == status) {
        sb.append("perm=null");
      } else {
        sb.append("perm=");
        sb.append(status.getOwner()).append(":");
        sb.append(status.getGroup()).append(":");
        sb.append(status.getPermission());
      }
      sb.append("\t").append("proto=");
      sb.append(NamenodeWebHdfsMethods.isWebHdfsInvocation() ? "webhdfs" : "rpc");
      logAuditMessage(sb.toString());
    }
  }

  /** Writes one formatted line to the audit log. */
  public void logAuditMessage(String message) {
    auditLog.info(message);
  }

  /** Manual smoke test / micro-benchmark; run with -ea to enable the asserts. */
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    CharacterTreeAuditLogger logger = new CharacterTreeAuditLogger();
    logger.initialize(conf);
    CharacterTree root = new CharacterTree('/');
    root.add("/ab",
        "rename,open,delete,listStatus,create,setPermission,getfileinfo,mkdirs".split(","));
    root.add("/abc/e",
        "rename,open,delete,listStatus,create,setPermission,getfileinfo,mkdirs".split(","));
    assert (root.isAuth("/abd", "open") == false);
    assert (root.isAuth("/ab/a", "open") == true);
    assert (root.isAuth("/ab", "open") == true);
    assert (root.isAuth("/abc/e", "open") == true);
    root.add("/data/scloud/a",
        "rename,open,delete,listStatus,create,setPermission,getfileinfo,mkdirs".split(","));
    System.out.println("begin:" + System.currentTimeMillis());
    String[] paths = new String[] {"/data/scloud/a/b/b", "/data/scloud/a/b/b", "/tmp/abc"};
    for (long i = 0; i < 10000 * 10000; i++) {
      assert (root.isAuth(paths[0], "open") == true);
      assert (root.isAuth(paths[1], "aaa") == false);
      assert (root.isAuth(paths[2], "open") == false);
    }
    System.out.println("end :" + System.currentTimeMillis());
    List<GarbageCollectorMXBean> gcList = ManagementFactory.getGarbageCollectorMXBeans();
    for (GarbageCollectorMXBean gc : gcList) {
      System.out.println(gc.getName() + ", gc.getCollectionCount():" + gc.getCollectionCount()
          + ", gc.getCollectionTime():" + gc.getCollectionTime()
          + ", MemoryPoolNames:" + StringUtils.join(gc.getMemoryPoolNames(), ","));
    }
    try {
      TimeUnit.SECONDS.sleep(5); // give the reload timer a chance to fire once
    } catch (InterruptedException e) {
      Thread.currentThread().interrupt(); // restore the interrupt flag
    }
    logger.logAuditEvent(true, "houzhizhen", null, "open", "/user/hefuhua", null, null);
  }

  /**
   * Periodically re-reads the audit file when its modification time advances
   * and atomically replaces {@link #root} with a freshly built tree.
   */
  class ReloadAuditFileTask extends TimerTask {
    /** Modification time of the file at the last attempted reload. */
    private long fileModified = 0;
    private String auditFile;

    ReloadAuditFileTask(String auditFile) {
      this.auditFile = auditFile;
    }

    @Override
    public void run() {
      File file = new File(auditFile);
      long lastModified = file.lastModified(); // 0 when the file does not exist
      if (fileModified >= lastModified) {
        return; // unchanged since the last attempt
      }
      // Record the timestamp up front so a bad file is not re-parsed every tick.
      fileModified = lastModified;
      CharacterTree tempRoot = new CharacterTree('/');
      try {
        for (PathAndCommands pathAndCommands : parse(file)) {
          String path = pathAndCommands.path;
          String commands = pathAndCommands.commands;
          if (!path.startsWith(defaultFS)) {
            continue; // entry belongs to a different filesystem
          }
          path = path.substring(defaultFS.length());
          if (path.endsWith("/")) {
            path = path.substring(0, path.length() - 1);
          }
          if (path.length() < 2) {
            // "" or "/": CharacterTree cannot represent the bare root, and an
            // uncaught RuntimeException here would cancel the Timer for good.
            continue;
          }
          String[] commandArray = commands.split(",");
          for (int i = 0; i < commandArray.length; i++) {
            commandArray[i] = commandArray[i].trim();
          }
          tempRoot.add(path, 1, commandArray);
        }
        // Install the new tree only after a fully successful parse. Previously
        // a parse error still replaced root with the half-built tree,
        // discarding the rules that were in effect.
        root = tempRoot;
      } catch (IOException | SAXException | ParserConfigurationException e) {
        auditLog.error("Failed to reload audit file " + auditFile, e);
      }
    }

    /**
     * Parses the audit file.
     *
     * @return one entry per well-formed {@code <audit-directory>} element;
     *         empty when the root element is not {@code <audit-config>}.
     */
    private List<PathAndCommands> parse(File file)
        throws IOException, SAXException, ParserConfigurationException {
      List<PathAndCommands> results = new LinkedList<PathAndCommands>();
      DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
      docBuilderFactory.setIgnoringComments(true);
      // Harden against XXE: the audit file never needs DOCTYPEs or entities.
      docBuilderFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
      docBuilderFactory.setXIncludeAware(false);
      docBuilderFactory.setExpandEntityReferences(false);
      DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
      Document doc;
      // try-with-resources: the stream was previously never closed, leaking a
      // file descriptor on every reload.
      try (BufferedInputStream in = new BufferedInputStream(new FileInputStream(file))) {
        doc = builder.parse(in);
      }
      Element root = doc.getDocumentElement();
      if (!"audit-config".equals(root.getTagName())) {
        return results;
      }
      NodeList nodeList = root.getChildNodes();
      for (int i = 0; i < nodeList.getLength(); i++) {
        Node propNode = nodeList.item(i);
        if (!(propNode instanceof Element)) {
          continue;
        }
        Element auditDirectory = (Element) propNode;
        if (!"audit-directory".equals(auditDirectory.getTagName())) {
          continue;
        }
        PathAndCommands pathAndCommands = parsePathAndCommands(auditDirectory);
        if (pathAndCommands != null) {
          results.add(pathAndCommands);
        }
      }
      return results;
    }

    /**
     * Extracts the {@code <path>}/{@code <commands>} pair from one
     * {@code <audit-directory>} element.
     *
     * @return the pair, or null when either field is missing.
     */
    private PathAndCommands parsePathAndCommands(Node directory) {
      NodeList fields = directory.getChildNodes();
      String path = null;
      String commands = null;
      for (int j = 0; j < fields.getLength(); j++) {
        Node fieldNode = fields.item(j);
        if (!(fieldNode instanceof Element)) {
          continue;
        }
        Element field = (Element) fieldNode;
        if ("path".equals(field.getTagName()) && field.hasChildNodes()) {
          path = StringInterner.weakIntern(((Text) field.getFirstChild()).getData().trim());
        }
        if ("commands".equals(field.getTagName()) && field.hasChildNodes()) {
          commands = StringInterner.weakIntern(((Text) field.getFirstChild()).getData());
        }
      }
      if (path != null && commands != null) {
        return new PathAndCommands(path, commands);
      }
      return null;
    }
  }

  /** Immutable (path, commands) pair read from the audit file. */
  private static class PathAndCommands {
    final String path;
    final String commands;

    PathAndCommands(String path, String commands) {
      this.path = path;
      this.commands = commands;
    }
  }
}
// Source article: "Hadoop CharacterTreeAuditLogger" (originally published 2021-06-03 17:32:16).