/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.util;

import java.io.DataInput;
import java.io.IOException;

import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RPCTraceInfoProto;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcKindProto;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcRequestHeaderProto;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.rpcauth.RpcAuthMethod;
import org.apache.htrace.Span;
import org.apache.htrace.Trace;

import com.google.protobuf.ByteString;

/**
 * Static helpers used by the Hadoop RPC layer to build and decode the
 * protobuf messages exchanged during connection setup and per-call
 * framing: the IPC connection context, the RPC request header, and the
 * raw varint length prefix that precedes protobuf payloads on the wire.
 */
public abstract class ProtoUtil {

  /**
   * Read a variable length integer in the same format that ProtoBufs encodes.
   * @param in the input stream to read from
   * @return the integer
   * @throws IOException if it is malformed or EOF.
   */
  public static int readRawVarint32(DataInput in) throws IOException {
    byte tmp = in.readByte();
    if (tmp >= 0) {
      // High bit clear: single-byte varint, value is the byte itself.
      return tmp;
    }
    int result = tmp & 0x7f;
    if ((tmp = in.readByte()) >= 0) {
      result |= tmp << 7;
    } else {
      result |= (tmp & 0x7f) << 7;
      if ((tmp = in.readByte()) >= 0) {
        result |= tmp << 14;
      } else {
        result |= (tmp & 0x7f) << 14;
        if ((tmp = in.readByte()) >= 0) {
          result |= tmp << 21;
        } else {
          result |= (tmp & 0x7f) << 21;
          result |= (tmp = in.readByte()) << 28;
          if (tmp < 0) {
            // Discard upper 32 bits. A well-formed varint terminates (byte
            // with high bit clear) within at most 5 more bytes; otherwise
            // the stream is corrupt.
            for (int i = 0; i < 5; i++) {
              if (in.readByte() >= 0) {
                return result;
              }
            }
            throw new IOException("Malformed varint");
          }
        }
      }
    }
    return result;
  }


  /**
   * This method creates the connection context using exactly the same logic
   * as the old connection context as was done for writable where
   * the effective and real users are set based on the auth method.
   *
   * @param protocol RPC protocol name to record in the context; skipped if null
   * @param ugi caller identity, or null to send an empty user-info block
   * @param authMethod auth method whose {@code writeUGI} decides which user
   *        fields (effective/real) are sent; must be non-null when ugi is
   *        non-null
   * @return the populated connection context message
   */
  public static IpcConnectionContextProto makeIpcConnectionContext(
      final String protocol,
      final UserGroupInformation ugi, final RpcAuthMethod authMethod) {
    IpcConnectionContextProto.Builder result =
        IpcConnectionContextProto.newBuilder();
    if (protocol != null) {
      result.setProtocol(protocol);
    }
    UserInformationProto.Builder ugiProto = UserInformationProto.newBuilder();
    if (ugi != null) {
      /*
       * In the connection context we send only additional user info that
       * is not derived from the authentication done during connection setup.
       */
      authMethod.writeUGI(ugi, ugiProto);
    }
    result.setUserInfo(ugiProto);
    return result.build();
  }

  /**
   * Extract the caller's UGI from a connection context.
   *
   * @param context connection context received from the client
   * @return the reconstructed UGI, or null if the context carries no user info
   */
  public static UserGroupInformation getUgi(IpcConnectionContextProto context) {
    if (context.hasUserInfo()) {
      UserInformationProto userInfo = context.getUserInfo();
      return getUgi(userInfo);
    } else {
      return null;
    }
  }

  /**
   * Reconstruct a UGI from the user-info message: a proxy user when both
   * effective and real users are present, a plain remote user when only the
   * effective user is present.
   *
   * @param userInfo user information from the connection context
   * @return the UGI, or null if no effective user was supplied
   */
  public static UserGroupInformation getUgi(UserInformationProto userInfo) {
    UserGroupInformation ugi = null;
    String effectiveUser = userInfo.hasEffectiveUser() ? userInfo
        .getEffectiveUser() : null;
    String realUser = userInfo.hasRealUser() ? userInfo.getRealUser() : null;
    if (effectiveUser != null) {
      if (realUser != null) {
        // Effective user acts on behalf of (proxies) the real user.
        UserGroupInformation realUserUgi = UserGroupInformation
            .createRemoteUser(realUser);
        ugi = UserGroupInformation
            .createProxyUser(effectiveUser, realUserUgi);
      } else {
        ugi = UserGroupInformation.createRemoteUser(effectiveUser);
      }
    }
    return ugi;
  }

  /**
   * Convert an {@link RPC.RpcKind} to its wire (protobuf) representation.
   * Returns null for an unrecognized kind.
   */
  static RpcKindProto convert(RPC.RpcKind kind) {
    switch (kind) {
    case RPC_BUILTIN: return RpcKindProto.RPC_BUILTIN;
    case RPC_WRITABLE: return RpcKindProto.RPC_WRITABLE;
    case RPC_PROTOCOL_BUFFER: return RpcKindProto.RPC_PROTOCOL_BUFFER;
    }
    return null;
  }

  /**
   * Convert a wire (protobuf) RPC kind to {@link RPC.RpcKind}.
   * Returns null for an unrecognized kind.
   */
  public static RPC.RpcKind convert(RpcKindProto kind) {
    switch (kind) {
    case RPC_BUILTIN: return RPC.RpcKind.RPC_BUILTIN;
    case RPC_WRITABLE: return RPC.RpcKind.RPC_WRITABLE;
    case RPC_PROTOCOL_BUFFER: return RPC.RpcKind.RPC_PROTOCOL_BUFFER;
    }
    return null;
  }

  /**
   * Build the header that prefixes every RPC request.
   *
   * @param rpcKind serialization engine of the call payload
   * @param operation operation being performed (e.g. final call vs. cleanup)
   * @param callId client-assigned id correlating request and response
   * @param retryCount number of times this call has been retried
   * @param uuid client id bytes; must be non-null
   * @return the populated request header
   */
  public static RpcRequestHeaderProto makeRpcRequestHeader(RPC.RpcKind rpcKind,
      RpcRequestHeaderProto.OperationProto operation, int callId,
      int retryCount, byte[] uuid) {
    RpcRequestHeaderProto.Builder result = RpcRequestHeaderProto.newBuilder();
    result.setRpcKind(convert(rpcKind)).setRpcOp(operation).setCallId(callId)
        .setRetryCount(retryCount).setClientId(ByteString.copyFrom(uuid));

    // Add tracing info if we are currently tracing.
    if (Trace.isTracing()) {
      Span s = Trace.currentSpan();
      result.setTraceInfo(RPCTraceInfoProto.newBuilder()
          .setParentId(s.getSpanId())
          .setTraceId(s.getTraceId()).build());
    }

    return result.build();
  }
}