/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.util;

import java.io.DataInput;
import java.io.IOException;

import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto;
import org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.*;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.rpcauth.RpcAuthMethod;

import com.google.protobuf.ByteString;

/**
 * Helpers for translating between Hadoop RPC types and their
 * protobuf wire representations.
 */
public abstract class ProtoUtil {

  /**
   * Reads a variable-length integer encoded in the protobuf varint format.
   *
   * @param in the input to read from
   * @return the decoded integer
   * @throws IOException if the varint is malformed or the stream ends early
   */
  public static int readRawVarint32(DataInput in) throws IOException {
    byte b = in.readByte();
    if (b >= 0) {
      // Single-byte value: continuation bit clear.
      return b;
    }
    int value = b & 0x7f;
    if ((b = in.readByte()) >= 0) {
      value |= b << 7;
    } else {
      value |= (b & 0x7f) << 7;
      if ((b = in.readByte()) >= 0) {
        value |= b << 14;
      } else {
        value |= (b & 0x7f) << 14;
        if ((b = in.readByte()) >= 0) {
          value |= b << 21;
        } else {
          value |= (b & 0x7f) << 21;
          value |= (b = in.readByte()) << 28;
          if (b < 0) {
            // The upper 32 bits of a 64-bit varint are discarded; skip
            // at most five continuation bytes before declaring the
            // encoding malformed.
            for (int i = 0; i < 5; i++) {
              if (in.readByte() >= 0) {
                return value;
              }
            }
            throw new IOException("Malformed varint");
          }
        }
      }
    }
    return value;
  }


  /**
   * Builds the IPC connection context message. Mirrors the legacy
   * writable-based connection context, where the effective and real
   * users are chosen according to the authentication method.
   *
   * @param protocol the protocol name, or null to omit it
   * @param ugi the caller's user information, or null to omit it
   * @param authMethod the authentication method negotiated for the connection
   * @return the assembled connection context
   */
  public static IpcConnectionContextProto makeIpcConnectionContext(
      final String protocol,
      final UserGroupInformation ugi, final RpcAuthMethod authMethod) {
    IpcConnectionContextProto.Builder context =
        IpcConnectionContextProto.newBuilder();
    if (protocol != null) {
      context.setProtocol(protocol);
    }
    UserInformationProto.Builder userInfo = UserInformationProto.newBuilder();
    if (ugi != null) {
      // Send only the user info that is not already implied by the
      // authentication performed during connection setup.
      authMethod.writeUGI(ugi, userInfo);
    }
    context.setUserInfo(userInfo);
    return context.build();
  }

  /**
   * Extracts the caller's {@link UserGroupInformation} from a connection
   * context, or returns null when the context carries no user info.
   */
  public static UserGroupInformation getUgi(IpcConnectionContextProto context) {
    return context.hasUserInfo() ? getUgi(context.getUserInfo()) : null;
  }

  /**
   * Converts a {@link UserInformationProto} into a
   * {@link UserGroupInformation}. When a real user is present, the
   * effective user is created as a proxy user on top of it.
   *
   * @return the reconstructed UGI, or null if no effective user is set
   */
  public static UserGroupInformation getUgi(UserInformationProto userInfo) {
    String effectiveUser =
        userInfo.hasEffectiveUser() ? userInfo.getEffectiveUser() : null;
    if (effectiveUser == null) {
      return null;
    }
    String realUser = userInfo.hasRealUser() ? userInfo.getRealUser() : null;
    if (realUser == null) {
      return UserGroupInformation.createRemoteUser(effectiveUser);
    }
    UserGroupInformation realUserUgi =
        UserGroupInformation.createRemoteUser(realUser);
    return UserGroupInformation.createProxyUser(effectiveUser, realUserUgi);
  }

  /** Maps an {@link RPC.RpcKind} to its wire enum; null if unknown. */
  static RpcKindProto convert(RPC.RpcKind kind) {
    switch (kind) {
      case RPC_BUILTIN:
        return RpcKindProto.RPC_BUILTIN;
      case RPC_WRITABLE:
        return RpcKindProto.RPC_WRITABLE;
      case RPC_PROTOCOL_BUFFER:
        return RpcKindProto.RPC_PROTOCOL_BUFFER;
      default:
        return null;
    }
  }


  /** Maps a wire {@link RpcKindProto} to {@link RPC.RpcKind}; null if unknown. */
  public static RPC.RpcKind convert( RpcKindProto kind) {
    switch (kind) {
      case RPC_BUILTIN:
        return RPC.RpcKind.RPC_BUILTIN;
      case RPC_WRITABLE:
        return RPC.RpcKind.RPC_WRITABLE;
      case RPC_PROTOCOL_BUFFER:
        return RPC.RpcKind.RPC_PROTOCOL_BUFFER;
      default:
        return null;
    }
  }

  /**
   * Assembles an RPC request header.
   *
   * @param rpcKind the kind of RPC engine in use
   * @param operation the operation being performed
   * @param callId the call id of this request
   * @param retryCount how many times this call has been retried
   * @param uuid the client's unique identifier
   * @return the populated request header
   */
  public static RpcRequestHeaderProto makeRpcRequestHeader(RPC.RpcKind rpcKind,
      RpcRequestHeaderProto.OperationProto operation, int callId,
      int retryCount, byte[] uuid) {
    return RpcRequestHeaderProto.newBuilder()
        .setRpcKind(convert(rpcKind))
        .setRpcOp(operation)
        .setCallId(callId)
        .setRetryCount(retryCount)
        .setClientId(ByteString.copyFrom(uuid))
        .build();
  }
}