// Reference implementation of a Writable for feeding image data into MapReduce.
package hequn.hadoop.image;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

/**
 * Hadoop {@link Writable} carrying a raw image for MapReduce jobs.
 *
 * <p>Wire format: three {@code int}s — width, height, channel count — followed by
 * exactly {@code width * height * channels} bytes of pixel data. {@link #readFields}
 * and {@link #write} are symmetric with respect to this format.
 *
 * <p>NOTE(review): not thread-safe; fields are mutable to satisfy the Writable
 * reuse pattern (Hadoop recycles Writable instances between records).
 */
public class ImageWritable implements Writable {

    private int m_width;     // image width in pixels
    private int m_height;    // image height in pixels
    private int m_channels;  // bytes per pixel (presumably 1 = gray, 3 = RGB — not enforced here)
    private byte[] m_data;   // raw pixel bytes; expected length = width * height * channels

    public int getM_width() {
        return m_width;
    }

    public void setM_width(int m_width) {
        this.m_width = m_width;
    }

    public int getM_height() {
        return m_height;
    }

    public void setM_height(int m_height) {
        this.m_height = m_height;
    }

    public int getM_channels() {
        return m_channels;
    }

    public void setM_channels(int m_channels) {
        this.m_channels = m_channels;
    }

    public byte[] getM_data() {
        return m_data;
    }

    public void setM_data(byte[] m_data) {
        this.m_data = m_data;
    }

    /**
     * Deserializes this image from the stream: width, height, channels, then the
     * pixel bytes. All fields are assigned only after the full record has been
     * read, so a mid-record {@link IOException} leaves the previous state intact.
     *
     * @param in the stream to read from
     * @throws IOException if the underlying stream fails or ends prematurely
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        int width = in.readInt();
        int height = in.readInt();
        int channels = in.readInt();
        // NOTE(review): width * height * channels can overflow int for very large
        // images; callers are assumed to keep images under 2 GiB of pixel data.
        int length = width * height * channels;
        byte[] data = new byte[length];
        // readFully is wire-equivalent to a per-byte readByte() loop but reads in bulk.
        in.readFully(data);
        m_width = width;
        m_height = height;
        m_channels = channels;
        m_data = data;
    }

    /**
     * Serializes this image to the stream in the format consumed by
     * {@link #readFields}: width, height, channels, then the pixel bytes.
     *
     * @param out the stream to write to
     * @throws IOException if the underlying stream fails
     */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeInt(m_width);
        out.writeInt(m_height);
        out.writeInt(m_channels);
        // write(byte[]) emits the same bytes as a per-element writeByte() loop.
        out.write(m_data);
    }
}