import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TransWritable {
private static Logger logger = LoggerFactory.getLogger(TransWritable.class);
private static Set<Writable> transColSet = new HashSet<Writable>();
/**
* Map<String,String>集合转为MapWritable的方法
*/
public static MapWritable transferMapWritable(Map<String,String> map) {
MapWritable mw = new MapWritable();
if (!map.isEmpty()) {
try {
for (String o : map.keySet()) {
if(map.get(o)!=null){
mw.put(new Text(o), new Text(map.get(o)));
}else{
mw.put(new Text(o),NullWritable.get());
}
}
return mw;
} catch (Exception e) {
logger.error("发生异常", e);
}
}
return null;
}
/**
* List集合转为ListWritable的方法
*/
public static ListWritable cleanListWritable(List cleanRuleList) {
List<Writable> li =new ArrayList<Writable>();
ListWritable cleanl ;
if (!cleanRuleList.isEmpty()) {
try {
for (int i = 0; i < cleanRuleList.size(); i++) {
Map map = (Map) cleanRuleList.get(i);
MapWritable cleanlist = transferMapWritable(map);
li.add(cleanlist);
}
cleanl = new ListWritable(li);
return cleanl;
} catch (Exception e) {
logger.error("发生异常", e);
}
}
return null;
}
/**
* Set<String>类型 转为SetWritable的方法
*/
public static SetWritable TransSetWritable(Set<String> set) {
Set<Writable> transColSet = new HashSet<Writable>();
SetWritable transset;
if (!set.isEmpty()) {
try {
for (String str : set) {
transColSet.add(new Text(str));
}
transset = new SetWritable(transColSet);
return transset;
} catch (Exception e) {
logger.error("发生异常", e);
}
<span style="display: none; width: 0px; height: 0px;" id="transmark"></span> }
return null;
}
}
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;
import java.util.Set;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
/**
 * A Hadoop {@link Writable} wrapper around a {@code Set<Writable>}.
 *
 * <p>Serialization format: set class name (UTF), element class name (UTF),
 * element count (int), then each element via its own {@code write}.
 * All elements are assumed to share the runtime class of the first one.
 *
 * <p>NOTE(review): serializing an <em>empty</em> set is unsupported because
 * no element class can be recorded; {@link #write(DataOutput)} will fail.
 */
public class SetWritable implements Writable {
    /** Runtime class of the elements; needed to re-create them on read. */
    private Class<? extends Writable> valueClass;
    /** Concrete Set implementation, restored on deserialization. */
    @SuppressWarnings("rawtypes")
    private Class<? extends Set> setClass;
    private Set<Writable> values;

    /** No-arg constructor required by the Writable contract. */
    public SetWritable() {
    }

    /**
     * Wraps the given set. The element class is taken from the first element,
     * if any; an empty set leaves it unset (see class note).
     */
    public SetWritable(Set<Writable> values) {
        setClass = values.getClass();
        Iterator<Writable> iterator = values.iterator();
        if (iterator.hasNext()) {
            valueClass = iterator.next().getClass();
        }
        this.values = values;
    }

    public Class<? extends Writable> getValueClass() {
        return valueClass;
    }

    /** Returns the concrete Set class. (Name kept for source compatibility.) */
    @SuppressWarnings("rawtypes")
    public Class<? extends Set> getListClass() {
        return setClass;
    }

    public void set(Set<Writable> values) {
        this.values = values;
    }

    public Set<Writable> get() {
        return values;
    }

    /**
     * Deserializes set class, element class, count, and elements.
     *
     * @throws IOException if a recorded class cannot be loaded or the set
     *         cannot be instantiated (fail fast instead of a later NPE)
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void readFields(DataInput in) throws IOException {
        try {
            this.setClass = (Class<? extends Set>) Class.forName(in.readUTF());
            this.valueClass = (Class<? extends Writable>) Class.forName(in.readUTF());
        } catch (ClassNotFoundException e) {
            throw new IOException("Unknown class in SetWritable stream", e);
        }
        int size = in.readInt(); // construct values
        try {
            values = this.setClass.newInstance();
        } catch (InstantiationException e) {
            throw new IOException("Cannot instantiate set class " + this.setClass, e);
        } catch (IllegalAccessException e) {
            throw new IOException("Cannot access set class " + this.setClass, e);
        }
        for (int i = 0; i < size; i++) {
            Writable value = WritableFactories.newInstance(this.valueClass);
            value.readFields(in); // read a value
            values.add(value); // store it in values
        }
    }

    /** Serializes the set; requires a non-empty set (see class note). */
    public void write(DataOutput out) throws IOException {
        out.writeUTF(setClass.getName());
        out.writeUTF(valueClass.getName());
        out.writeInt(values.size()); // write values
        for (Writable value : values) {
            value.write(out);
        }
    }

    public int size() {
        return values.size();
    }

    /** True when no elements are held (fixes: previously only tested null). */
    public boolean isEmpty() {
        return values == null || values.isEmpty();
    }
}
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
/**
 * A Hadoop {@link Writable} wrapper around a {@code List<Writable>}.
 *
 * <p>Serialization format: list class name (UTF), element class name (UTF),
 * element count (int), then each element via its own {@code write}.
 * All elements are assumed to share the runtime class of the first one.
 *
 * <p>NOTE(review): serializing an <em>empty</em> list is unsupported because
 * no element class can be recorded; {@link #write(DataOutput)} will fail.
 */
public class ListWritable implements Writable {
    /** Runtime class of the elements; needed to re-create them on read. */
    private Class<? extends Writable> valueClass;
    /** Concrete List implementation, restored on deserialization. */
    @SuppressWarnings("rawtypes")
    private Class<? extends List> listClass;
    private List<Writable> values;

    /** No-arg constructor required by the Writable contract. */
    public ListWritable() {
    }

    /**
     * Wraps the given list. The element class is taken from the first element,
     * if any; an empty list no longer throws (previously get(0) threw
     * IndexOutOfBoundsException) but leaves it unset (see class note).
     */
    public ListWritable(List<Writable> values) {
        listClass = values.getClass();
        if (!values.isEmpty()) {
            valueClass = values.get(0).getClass();
        }
        this.values = values;
    }

    public Class<? extends Writable> getValueClass() {
        return valueClass;
    }

    @SuppressWarnings("rawtypes")
    public Class<? extends List> getListClass() {
        return listClass;
    }

    public void set(List<Writable> values) {
        this.values = values;
    }

    public List<Writable> get() {
        return values;
    }

    /**
     * Deserializes list class, element class, count, and elements.
     *
     * @throws IOException if a recorded class cannot be loaded or the list
     *         cannot be instantiated (fail fast instead of a later NPE)
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public void readFields(DataInput in) throws IOException {
        try {
            this.listClass = (Class<? extends List>) Class.forName(in.readUTF());
            this.valueClass = (Class<? extends Writable>) Class.forName(in.readUTF());
        } catch (ClassNotFoundException e) {
            throw new IOException("Unknown class in ListWritable stream", e);
        }
        int size = in.readInt(); // construct values
        try {
            values = this.listClass.newInstance();
        } catch (InstantiationException e) {
            throw new IOException("Cannot instantiate list class " + this.listClass, e);
        } catch (IllegalAccessException e) {
            throw new IOException("Cannot access list class " + this.listClass, e);
        }
        for (int i = 0; i < size; i++) {
            Writable value = WritableFactories.newInstance(this.valueClass);
            value.readFields(in); // read a value
            values.add(value); // store it in values
        }
    }

    /** Serializes the list; requires a non-empty list (see class note). */
    public void write(DataOutput out) throws IOException {
        out.writeUTF(listClass.getName());
        out.writeUTF(valueClass.getName());
        out.writeInt(values.size()); // write values
        for (Writable value : values) {
            value.write(out);
        }
    }

    public int size() {
        return values.size();
    }

    /** True when no elements are held (fixes: previously only tested null). */
    public boolean isEmpty() {
        return values == null || values.isEmpty();
    }
}