Serialization in Hadoop

1 Introduction

Serialization is the conversion of structured objects into a byte stream, and deserialization is the reverse. They are mainly used for inter-process communication and for persistent storage.

Requirements for a communication format
Hadoop uses RPC for internal communication between nodes. The RPC protocol serializes a message into a binary byte stream and sends it to the remote node, which deserializes the stream back into the original message. RPC serialization should satisfy four properties:
1. Compact: the serialized form should be small, so it consumes as little network bandwidth as possible.
2. Fast: inter-process communication forms the backbone of a distributed system, so serialization and deserialization must be fast enough not to become a bottleneck.
3. Extensible: when a server adds a new parameter for new clients, old clients should keep working.
4. Interoperable: clients written in different languages should be supported.

Requirements for a storage format
At first glance a serialization framework might seem to need different properties for persistent storage, but in fact the same four apply:
1. Compact: takes up less storage space.
2. Fast: can be read and written quickly.
3. Extensible: can read old data written in an old format.
4. Interoperable: can be read and written from multiple languages.

Hadoop's serialization format
Hadoop's own serialization format is the set of classes that implement the Writable interface. It delivers only the first two properties, compactness and speed; it is not easy to extend and does not work across languages.
Let's first look at the Writable interface. It defines two methods:

1. write the object's fields to a binary stream

2. read the object's fields from a binary stream

package org.apache.hadoop.io;

public interface Writable {
    /** Serialize this object's fields to out. */
    void write(java.io.DataOutput out) throws java.io.IOException;
    /** Deserialize this object's fields from in. */
    void readFields(java.io.DataInput in) throws java.io.IOException;
}
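To make the two methods concrete, below is a minimal sketch of a Writable that wraps a single int. It is written along the lines of IntWritable; the real class additionally implements WritableComparable and registers an optimized raw comparator, and the class name here is made up for illustration.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

// A minimal Writable wrapping one int, sketched after IntWritable.
public class SimpleIntWritable implements Writable {
	private int value;

	public void set(int value) { this.value = value; }
	public int get() { return value; }

	@Override
	public void write(DataOutput out) throws IOException {
		out.writeInt(value); // four bytes, big-endian, per the DataOutput contract
	}

	@Override
	public void readFields(DataInput in) throws IOException {
		value = in.readInt(); // reads the same four bytes back
	}
}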

2 Example

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.StringUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestWritable {
	byte[] bytes = null;
	@Before
	public void init() throws IOException {
		IntWritable writable = new IntWritable(163);
		bytes = serialize(writable);
	}

	@Test
	public void testSerialize() throws IOException {
		// the serialized form is four bytes long
		Assert.assertEquals(4, bytes.length);
		// the bytes are laid out in big-endian order
		Assert.assertEquals("000000a3", StringUtils.byteToHexString(bytes));
	}

	@Test
	public void testDeserialize() throws IOException {
		// readFields() reads the data in bytes back into the object
		IntWritable newWritable = new IntWritable();
		deserialize(newWritable, bytes);
		// get() returns the original value, 163
		Assert.assertEquals(163, newWritable.get());
	}

	/**
	 * Serialize
	 * 
	 * @param writable the object to serialize
	 */
	public static byte[] serialize(Writable writable) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		DataOutputStream dataOut = new DataOutputStream(out);
		writable.write(dataOut);

		dataOut.close();
		return out.toByteArray();
	}

	/**
	 * Deserialize
	 * 
	 * @param writable the object that receives the deserialized data
	 * @param bytes the serialized bytes to read from
	 */
	public static byte[] deserialize(Writable writable, byte[] bytes)
			throws IOException {
		ByteArrayInputStream in = new ByteArrayInputStream(bytes);
		DataInputStream dataIn = new DataInputStream(in);
		writable.readFields(dataIn);

		dataIn.close();
		return bytes;
	}
}
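As a variation on the helpers above, Hadoop also ships reusable in-memory buffers of its own, org.apache.hadoop.io.DataOutputBuffer and DataInputBuffer, which avoid allocating new byte-array streams for each round trip. A minimal sketch of the same round trip with 163 (the class name is mine, for illustration):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.IntWritable;

public class BufferRoundTrip {
	public static void main(String[] args) throws IOException {
		IntWritable writable = new IntWritable(163);

		// serialize into Hadoop's reusable output buffer
		DataOutputBuffer out = new DataOutputBuffer();
		writable.write(out);

		// wrap the serialized bytes and deserialize
		DataInputBuffer in = new DataInputBuffer();
		in.reset(out.getData(), out.getLength());
		IntWritable copy = new IntWritable();
		copy.readFields(in);

		System.out.println(copy.get()); // prints 163
	}
}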

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestComparator {
	
	// comparator used to order keys by their value
	RawComparator<IntWritable> comparator;
	IntWritable w1;
	IntWritable w2;

	/**
	 * Get the IntWritable comparator and initialize two IntWritables
	 */
	@SuppressWarnings("unchecked")
	@Before
	public void init() {
		comparator = WritableComparator.get(IntWritable.class);
		w1 = new IntWritable(163);
		w2 = new IntWritable(76);
	}

	/**
	 * Compare the two objects
	 */
	@Test
	public void testComparator() {
		Assert.assertTrue(comparator.compare(w1, w2) > 0);
	}

	/**
	 * Compare the serialized bytes directly
	 */
	@Test
	public void testCompare() throws IOException {
		byte[] b1 = serialize(w1);
		byte[] b2 = serialize(w2);
		Assert.assertTrue(comparator
				.compare(b1, 0, b1.length, b2, 0, b2.length) > 0);
	}

	/**
	 * Serialize an object implementing Writable into a byte array
	 */
	public static byte[] serialize(Writable writable) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		DataOutputStream dataOut = new DataOutputStream(out);
		writable.write(dataOut);

		dataOut.close();
		return out.toByteArray();
	}
}
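The point of RawComparator is that keys can be ordered straight from their serialized bytes, with no deserialization at all. As a sketch of how such a comparator could be written by hand, the class below extends WritableComparator and decodes the four big-endian bytes in place. This is illustrative only: Hadoop already registers an optimized raw comparator for IntWritable, which is what WritableComparator.get() returned above, and the class name is made up.

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.WritableComparator;

// Orders serialized IntWritables without deserializing them.
public class IntRawComparator extends WritableComparator {

	public IntRawComparator() {
		super(IntWritable.class);
	}

	@Override
	public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
		// readInt decodes four big-endian bytes straight from the buffer
		int v1 = readInt(b1, s1);
		int v2 = readInt(b2, s2);
		return Integer.compare(v1, v2);
	}
}

A custom comparator like this is normally registered once with WritableComparator.define(SomeWritable.class, new SomeComparator()) so that subsequent WritableComparator.get() calls return it.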

3 Custom Writable

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.WritableComparable;

public class InfoBean implements WritableComparable<InfoBean> {

	private String account;
	private double income;
	private double expenses;
	private double surplus;

	public void set(String account, double income, double expenses) {
		this.account = account;
		this.income = income;
		this.expenses = expenses;
		this.surplus = income - expenses;
	}

	@Override
	public void write(DataOutput out) throws IOException {
		out.writeUTF(account);
		out.writeDouble(income);
		out.writeDouble(expenses);
		out.writeDouble(surplus);

	}

	@Override
	public void readFields(DataInput in) throws IOException {
		this.account = in.readUTF();
		this.income = in.readDouble();
		this.expenses = in.readDouble();
		this.surplus = in.readDouble();
	}

	@Override
	public int compareTo(InfoBean o) {
		if (this.income == o.getIncome()) {
			// on an income tie, order by expenses; Double.compare returns 0
			// when both fields are equal, honoring the compareTo contract
			return Double.compare(this.expenses, o.getExpenses());
		}
		return this.income > o.getIncome() ? 1 : -1;
	}

	@Override
	public String toString() {
		return income + "\t" + expenses + "\t" + surplus;
	}

	public String getAccount() {
		return account;
	}

	public void setAccount(String account) {
		this.account = account;
	}

	public double getIncome() {
		return income;
	}

	public void setIncome(double income) {
		this.income = income;
	}

	public double getExpenses() {
		return expenses;
	}

	public void setExpenses(double expenses) {
		this.expenses = expenses;
	}

	public double getSurplus() {
		return surplus;
	}

	public void setSurplus(double surplus) {
		this.surplus = surplus;
	}

}

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparator;

public class TestInfoBean {

	public static void main(String[] args) throws IOException {

		// serialize
		InfoBean infoBean = new InfoBean();
		infoBean.set("abc", 100, 10);
		byte[] bytes = serialize(infoBean);
		System.out.println(bytes.length);

		// deserialize
		InfoBean infoBeanRes = new InfoBean();
		deserialize(infoBeanRes, bytes);
		System.out.println(infoBeanRes);

		// compare
		@SuppressWarnings("unchecked")
		RawComparator<InfoBean> comparator = WritableComparator
				.get(InfoBean.class);
		InfoBean infoBean1 = new InfoBean();
		infoBean1.set("abc", 110, 10);
		InfoBean infoBean2 = new InfoBean();
		infoBean2.set("abc", 100, 10);
		System.out.println(comparator.compare(infoBean1, infoBean2));
	}

	/**
	 * Serialize
	 * 
	 * @param writable the object to serialize
	 */
	public static byte[] serialize(Writable writable) throws IOException {
		ByteArrayOutputStream out = new ByteArrayOutputStream();
		DataOutputStream dataOut = new DataOutputStream(out);
		writable.write(dataOut);

		dataOut.close();
		return out.toByteArray();
	}

	/**
	 * Deserialize
	 * 
	 * @param writable the object that receives the deserialized data
	 * @param bytes the serialized bytes to read from
	 */
	public static byte[] deserialize(Writable writable, byte[] bytes)
			throws IOException {
		ByteArrayInputStream in = new ByteArrayInputStream(bytes);
		DataInputStream dataIn = new DataInputStream(in);
		writable.readFields(dataIn);

		dataIn.close();
		return bytes;
	}
}
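One caveat: InfoBean registers no raw comparator, so in the Hadoop versions this post targets, WritableComparator.get(InfoBean.class) falls back to a generic comparator that deserializes both buffers and then delegates to compareTo(). Byte-level comparison therefore still works, just without the zero-copy speedup of an optimized raw comparator. A short sketch reusing the classes above (the class name is mine):

import java.io.IOException;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;

public class TestInfoBeanRawCompare {

	public static void main(String[] args) throws IOException {
		InfoBean infoBean1 = new InfoBean();
		infoBean1.set("abc", 110, 10);
		InfoBean infoBean2 = new InfoBean();
		infoBean2.set("abc", 100, 10);

		@SuppressWarnings("unchecked")
		RawComparator<InfoBean> comparator = WritableComparator
				.get(InfoBean.class);

		byte[] b1 = TestInfoBean.serialize(infoBean1);
		byte[] b2 = TestInfoBean.serialize(infoBean2);
		// the fallback comparator deserializes both buffers internally,
		// then calls InfoBean.compareTo(); prints 1 since 110 > 100
		System.out.println(comparator.compare(b1, 0, b1.length, b2, 0, b2.length));
	}
}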

Original post: http://blog.csdn.net/lastsweetop/article/details/9193907

