In a previous post (http://blog.csdn.net/tragicjun/article/details/8958154) I summarized the RPC message serialization format at a high level; this post walks through it at the code level.
Client-side
RPC Header
org.apache.hadoop.ipc.Client.call() --> getConnection() --> Connection.setupIOstreams() --> writeRpcHeader()
out.write(Server.HEADER.array());   // magic bytes identifying a Hadoop RPC connection
out.write(Server.CURRENT_VERSION);  // RPC protocol version
authMethod.write(out);              // one byte identifying the authentication method
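Concretely, the preamble is only a few raw bytes on the stream. Below is a minimal standalone sketch of that layout using plain java.io; the constant values (the "hrpc" magic, version 4, and auth code 80 for SIMPLE) are assumptions based on Hadoop 1.x, not taken from the snippet above.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

// Sketch of the client connection preamble, assuming Hadoop 1.x constants.
public class RpcPreambleSketch {
  static final byte[] HEADER = "hrpc".getBytes();  // assumed magic, mirrors Server.HEADER
  static final byte CURRENT_VERSION = 4;           // assumed RPC version
  static final byte AUTH_SIMPLE = 80;              // assumed code written by AuthMethod.write()

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.write(HEADER);           // magic bytes
    out.write(CURRENT_VERSION);  // protocol version
    out.write(AUTH_SIMPLE);      // authentication method
    out.flush();
    System.out.println(Arrays.toString(bytes.toByteArray()));
  }
}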
Protocol Header
org.apache.hadoop.ipc.Client.call() --> getConnection() --> Connection.setupIOstreams() --> writeHeader()
header.write(buf);                    // serialize the ConnectionHeader into an in-memory buffer first
// Write out the payload length
int bufLen = buf.getLength();
out.writeInt(bufLen);                 // 4-byte length prefix
out.write(buf.getData(), 0, bufLen);  // followed by the serialized header bytes
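The pattern here is "serialize into a buffer, then write length + bytes", so the receiver knows how many bytes belong to the header before parsing it. A minimal sketch of that framing with plain java.io follows; ByteArrayOutputStream and writeUTF stand in for Hadoop's DataOutputBuffer and the real header encoding.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Minimal sketch of the length-prefixed framing used by writeHeader().
public class LengthPrefixedFrame {
  static void writeFrame(DataOutputStream out, byte[] payload) throws IOException {
    out.writeInt(payload.length);  // 4-byte big-endian length prefix
    out.write(payload);            // the serialized header itself
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    new DataOutputStream(buf).writeUTF("example header payload");  // stand-in for header.write(buf)
    ByteArrayOutputStream wire = new ByteArrayOutputStream();
    writeFrame(new DataOutputStream(wire), buf.toByteArray());
    System.out.println("frame bytes: " + wire.size());
  }
}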
Connection Header
org.apache.hadoop.ipc.Client.call() --> getConnection() --> Connection.setupIOstreams() --> writeHeader() --> ConnectionHeader.write()
Text.writeString(out, (protocol == null) ? "" : protocol);
if (ugi != null) {
  if (authMethod == AuthMethod.KERBEROS) {
    // Send effective user for Kerberos auth
    out.writeBoolean(true);
    out.writeUTF(ugi.getUserName());
    out.writeBoolean(false);
  } ...
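To make the field order visible, here is a sketch of what a receiver would see for the Kerberos branch above: protocol name, a flag for the presence of a user, the effective user, and a flag for a real user. DataOutputStream.writeUTF is only a stand-in for Hadoop's Text.writeString (whose real encoding is a vint length plus UTF-8 bytes), and the protocol and user values are example placeholders.
import java.io.*;

// Field-order sketch of the ConnectionHeader fields for the Kerberos branch.
public class ConnectionHeaderSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeUTF("org.apache.hadoop.hdfs.protocol.ClientProtocol");  // protocol name
    out.writeBoolean(true);             // a UserGroupInformation is present
    out.writeUTF("alice@EXAMPLE.COM");  // effective user for Kerberos auth
    out.writeBoolean(false);            // no separate real user is sent

    DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
    System.out.println("protocol   = " + in.readUTF());
    System.out.println("has user   = " + in.readBoolean());
    System.out.println("user       = " + in.readUTF());
    System.out.println("real user? = " + in.readBoolean());
  }
}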
RPC Parameter
org.apache.hadoop.ipc.Client.call() --> Connection.sendParam()
d = new DataOutputBuffer();
d.writeInt(call.id);             // call id, echoed back in the response
call.param.write(d);             // the serialized Invocation
byte[] data = d.getData();
int dataLength = d.getLength();
out.writeInt(dataLength);        // first put the data length
out.write(data, 0, dataLength);  // write the data
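So each call goes onto the wire as [4-byte total length][4-byte call id][Invocation bytes]. The sketch below reproduces that frame with plain java.io; the call id and payload are made-up example values.
import java.io.*;

// Sketch of the per-call frame written by Connection.sendParam().
public class CallFrameSketch {
  static byte[] frame(int callId, byte[] invocationBytes) throws IOException {
    ByteArrayOutputStream body = new ByteArrayOutputStream();
    DataOutputStream d = new DataOutputStream(body);
    d.writeInt(callId);         // call id, used to match the response later
    d.write(invocationBytes);   // stand-in for call.param.write(d)
    byte[] data = body.toByteArray();

    ByteArrayOutputStream wire = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(wire);
    out.writeInt(data.length);  // first the data length
    out.write(data);            // then the data
    return wire.toByteArray();
  }

  public static void main(String[] args) throws IOException {
    byte[] f = frame(42, "fake invocation".getBytes());
    System.out.println("frame length = " + f.length);
  }
}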
Invocation
org.apache.hadoop.ipc.Client.call() --> Connection.sendParam() --> Invocation.write()
UTF8.writeString(out, methodName);      // method name
out.writeInt(parameterClasses.length);  // number of parameters
for (int i = 0; i < parameterClasses.length; i++) {
  ObjectWritable.writeObject(out, parameters[i], parameterClasses[i], conf);
}
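For a concrete two-argument call such as getProtocolVersion(String, long), the payload is roughly: method name, parameter count, then each parameter. The sketch below only mimics that outer structure; ObjectWritable's real encoding is richer (it records the declared class and handles primitives, nulls, arrays, and so on), writeUTF is a stand-in, and the argument values are examples.
import java.io.*;

// Structural sketch of what Invocation.write() produces for a two-argument call.
public class InvocationSketch {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);

    out.writeUTF("getProtocolVersion");  // method name
    out.writeInt(2);                     // number of parameters

    out.writeUTF("java.lang.String");    // declared class of parameter 0
    out.writeUTF("org.apache.hadoop.hdfs.protocol.ClientProtocol");  // example value

    out.writeUTF("long");                // declared class of parameter 1
    out.writeLong(61L);                  // example client version value

    System.out.println("invocation bytes: " + bytes.size());
  }
}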
Server-side
RPC Response
org.apache.hadoop.ipc.Server.Handler.run() --> org.apache.hadoop.ipc.Server.setupResponse()
out.writeInt(call.id);       // write call id
out.writeInt(status.state);  // write status
if (status == Status.SUCCESS) {
  rv.write(out);             // the return value is itself a Writable
} else {
  WritableUtils.writeString(out, errorClass);
  WritableUtils.writeString(out, error);
}
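On the client side the response is consumed in the same order: call id, status, then either the return value or an error class/message pair. The sketch below assumes 0 is the SUCCESS status code and uses readUTF/writeUTF as stand-ins for the real Writable and WritableUtils encodings.
import java.io.*;

// Sketch of reading back the response written by setupResponse().
public class RpcResponseReaderSketch {
  static final int SUCCESS = 0;  // assumed value of Status.SUCCESS.state

  static void readResponse(DataInputStream in) throws IOException {
    int callId = in.readInt();      // match against the pending call
    int status = in.readInt();      // SUCCESS / ERROR / FATAL
    if (status == SUCCESS) {
      String value = in.readUTF();  // stand-in for rv.readFields(in)
      System.out.println("call " + callId + " -> " + value);
    } else {
      String errorClass = in.readUTF();
      String error = in.readUTF();
      System.out.println("call " + callId + " failed: " + errorClass + ": " + error);
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bytes);
    out.writeInt(42);        // call id
    out.writeInt(SUCCESS);   // status
    out.writeUTF("ok");      // fake return value
    readResponse(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
  }
}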