// Deserializes this user record from the stream. The read order here
// MUST exactly mirror the write order in write() below, or every
// subsequent field is read misaligned from the wrong bytes.
1. @Override 
  2. public void readFields(DataInput in) throws IOException { 
  3.     uid = in.readLong(); 
  4.     fansNum = in.readInt(); 
  5.     followNum = in.readInt(); 
  6.     feedNum = in.readInt(); 
  7.     depth = in.readInt(); 
// fans / follow / feedList are themselves Writable objects; delegate
// their deserialization to their own readFields().
  8.     fans.readFields(in); 
  9.     follow.readFields(in); 
  10.     feedList.readFields(in); 
// nick was originally a Text (commented line below) but was switched to
// a plain String read via DataInput.readUTF(), because the Text version
// produced garbled characters between map and reduce on Hadoop 0.20.2.
// NOTE(review): likely cause is Hadoop's reuse of Writable instances —
// a retained reference to a reused Text gets overwritten by the next
// record, whereas readUTF() allocates a fresh String — TODO confirm.
  11.     //nick.readFields(in); 
  12.     nick = in.readUTF(); 
  // Serializes this user record. Keep the field order byte-for-byte in
  // sync with readFields() above — the stream carries no field tags.
  13. @Override 
  14. public void write(DataOutput out) throws IOException { 
  15.     out.writeLong(uid); 
  16.     out.writeInt(fansNum); 
  17.     out.writeInt(followNum); 
  18.     out.writeInt(feedNum); 
  19.     out.writeInt(depth); 
// Nested Writables serialize themselves, symmetric with readFields().
  20.     fans.write(out); 
  21.     follow.write(out); 
  22.     feedList.write(out); 
// writeUTF uses modified UTF-8 with a 2-byte length prefix, so nick is
// limited to 65535 encoded bytes; fine for a nickname. Paired with the
// readUTF() call in readFields(); the Text-based call stays commented
// out (it triggered the garbled-character bug described below).
  23.     out.writeUTF(nick); 
  24.     //nick.write(out); 

从 map 传递到 reduce 时，发现 nick 里出现了乱码。把出现乱码的文字拿出来单独测试却正常，难以索解。改成 String 类型后 bug 消失。（推测原因：Hadoop 在迭代过程中会复用 Writable/Text 对象，若直接保留其引用而未深拷贝，内容会被下一条记录覆盖，从而表现为"乱码"；readUTF() 每次返回新的 String 对象，因此问题消失。待确认。）

Hadoop版本0.20.2