In my previous post I shared how to use the Hadoop client Java API; in this one we will try out Hadoop's built-in RPC framework. First, a caveat: this framework was designed to give Hadoop's own internal components RPC access to one another. It does not aim to be a general-purpose open-source RPC stack in the style of Dubbo, Spring Cloud or HSF, so it is rarely something you would pick for an ordinary web project. For a back-end project, however, it can be used entirely on its own, and you do not need to set up a Hadoop cluster to do so.
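A note on dependencies: the RPC framework ships with the ordinary Hadoop client libraries, so a plain Maven project only needs the hadoop-common artifact (and its transitive dependencies) on the classpath. A minimal declaration might look like the sketch below; the version number is only an example, so align it with whatever Hadoop release you already build against:

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <!-- example version only; use the Hadoop release you target -->
    <version>2.7.7</version>
</dependency>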
Let's walk through a usage example.
As covered in earlier posts, Hadoop uses RPC between the NameNode and the DataNodes, and between the ResourceManager and the NodeManagers. The following simulates that kind of interaction:
Define the remote call interfaces
package com.empire.hadoop.hadoop_rpc.protocol;

/**
 * ClientNamenodeProtocol.java: interface simulating part of the namenode protocol
 *
 * @author arron 2018-11-14 23:09:55
 */
public interface ClientNamenodeProtocol {

    /**
     * Client protocol version number. The RPC framework reads this constant,
     * but it may differ from the version the client passes in; no check is performed.
     */
    public static final long versionID = 1L;

    public String getMetaData(String path);
}
package com.empire.hadoop.hadoop_rpc.protocol;

/**
 * IUserLoginService.java: user login service interface
 *
 * @author arron 2018-11-14 23:30:45
 */
public interface IUserLoginService {

    /**
     * Client protocol version number read by the RPC framework.
     */
    public static final long versionID = 100L;

    public String login(String name, String passwd);
}
Implement the interfaces
package com.empire.hadoop.hadoop_rpc.service;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

/**
 * MyNameNode.java: implementation of the simulated namenode protocol interface
 *
 * @author arron 2018-11-14 23:31:37
 */
public class MyNameNode implements ClientNamenodeProtocol {

    // Simulates one of the namenode's business methods: querying file metadata
    public String getMetaData(String path) {
        return path + ": 3 - {BLK_1,BLK_2} ....";
    }
}
package com.empire.hadoop.hadoop_rpc.service;

import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * UserLoginServiceImpl.java: user login service implementation
 *
 * @author arron 2018-11-14 23:32:16
 */
public class UserLoginServiceImpl implements IUserLoginService {

    public String login(String name, String passwd) {
        return name + " logged in successfully...";
    }
}
Publish the remote services
package com.empire.hadoop.hadoop_rpc.service;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Builder;
import org.apache.hadoop.ipc.RPC.Server;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;
import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * PublishServiceUtil.java: utility class that publishes the RPC services
 *
 * @author arron 2018-11-14 23:32:01
 */
public class PublishServiceUtil {

    public static void main(String[] args) throws Exception {
        // Publish the simulated namenode protocol on port 8888
        Builder builder = new RPC.Builder(new Configuration());
        builder.setBindAddress("localhost").setPort(8888).setProtocol(ClientNamenodeProtocol.class)
                .setInstance(new MyNameNode());
        Server server = builder.build();
        server.start();

        // Publish the user login service on port 9999
        Builder builder2 = new RPC.Builder(new Configuration());
        builder2.setBindAddress("localhost").setPort(9999).setProtocol(IUserLoginService.class)
                .setInstance(new UserLoginServiceImpl());
        Server server2 = builder2.build();
        server2.start();
    }
}
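The builder also exposes a few tuning options worth knowing about. The class below is only a sketch, not part of the original example: the class name TunedPublishSketch and the handler count are made up for illustration, and it reuses MyNameNode from above. It shows setNumHandlers and setVerbose, and uses Server.join() to keep the publishing thread blocked until the server is stopped.

package com.empire.hadoop.hadoop_rpc.service;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Server;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

// Sketch only (hypothetical class name): extra RPC.Builder options and join()
public class TunedPublishSketch {

    public static void main(String[] args) throws Exception {
        Server server = new RPC.Builder(new Configuration())
                .setBindAddress("localhost").setPort(8888)
                .setProtocol(ClientNamenodeProtocol.class)
                .setInstance(new MyNameNode())
                .setNumHandlers(5)   // handler threads that process incoming calls
                .setVerbose(true)    // log every call the server serves
                .build();
        server.start();
        server.join();               // block until the server is stopped
    }
}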
Call from the client
package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

/**
 * MyHdfsClient.java: remote RPC client for the simulated namenode protocol
 *
 * @author arron 2018-11-14 23:30:04
 */
public class MyHdfsClient {

    public static void main(String[] args) throws Exception {
        // Obtain a proxy for the remote protocol; 1L is the client-side protocol version
        ClientNamenodeProtocol namenode = RPC.getProxy(ClientNamenodeProtocol.class, 1L,
                new InetSocketAddress("localhost", 8888), new Configuration());
        String metaData = namenode.getMetaData("/angela.mygirl");
        System.out.println(metaData);
    }
}
package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * UserLoginAction.java: client that calls the user login service
 *
 * @author arron 2018-11-14 23:30:24
 */
public class UserLoginAction {

    public static void main(String[] args) throws Exception {
        IUserLoginService userLoginService = RPC.getProxy(IUserLoginService.class, 100L,
                new InetSocketAddress("localhost", 9999), new Configuration());
        String login = userLoginService.login("angelababy", "1314520");
        System.out.println(login);
    }
}
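One small cleanup detail: the two clients above simply exit, which appears to be why the server log further down reports an IOException about the remote host forcibly closing an existing connection. That is harmless in this demo, but the proxy can also be released explicitly with RPC.stopProxy. A minimal sketch, assuming the same protocol and port as MyHdfsClient (the class name here is hypothetical):

package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

// Sketch only: same call as MyHdfsClient, but the proxy is closed explicitly
public class MyHdfsClientWithCleanup {

    public static void main(String[] args) throws Exception {
        ClientNamenodeProtocol namenode = RPC.getProxy(ClientNamenodeProtocol.class, 1L,
                new InetSocketAddress("localhost", 8888), new Configuration());
        try {
            System.out.println(namenode.getMetaData("/angela.mygirl"));
        } finally {
            RPC.stopProxy(namenode);   // closes the underlying IPC connection
        }
    }
}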
Output from a sample run (note: this particular run had the servers bound to ports 8887 and 9997 rather than the 8888/9999 shown in the code; the DEBUG logs below are excerpted, with repetitive metrics-registration output trimmed):
Server side
[main] DEBUG org.apache.hadoop.ipc.Server - rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=org.apache.hadoop.ipc.WritableRpcEngine$Server$WritableRpcInvoker@27c6d458
[main] INFO org.apache.hadoop.ipc.CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 100 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler
[main] DEBUG org.apache.hadoop.ipc.Server - Server accepts auth methods:[SIMPLE]
[Socket Reader #1 for port 8887] INFO org.apache.hadoop.ipc.Server - Starting Socket Reader #1 for port 8887
... (repetitive DEBUG output from metrics registration, Kerberos/group lookup and native-library probing omitted) ...
[main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_WRITABLE Protocol Name = com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol version=1 ProtocolImpl=com.empire.hadoop.hadoop_rpc.service.MyNameNode protocolClass=com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol
[IPC Server Responder] INFO org.apache.hadoop.ipc.Server - IPC Server Responder: starting
[IPC Server listener on 8887] INFO org.apache.hadoop.ipc.Server - IPC Server listener on 8887: starting
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: starting
[Socket Reader #1 for port 9997] INFO org.apache.hadoop.ipc.Server - Starting Socket Reader #1 for port 9997
[main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_WRITABLE Protocol Name = com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService version=100 ProtocolImpl=com.empire.hadoop.hadoop_rpc.service.UserLoginServiceImpl protocolClass=com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService
[IPC Server listener on 9997] INFO org.apache.hadoop.ipc.Server - IPC Server listener on 9997: starting
[IPC Server handler 0 on 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 9997: starting
[IPC Server listener on 8887] DEBUG org.apache.hadoop.ipc.Server - Server connection from 127.0.0.1:58401; # active connections: 1; # queued calls: 0
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - Successfully authorized userInfo { effectiveUser: "aaron"}protocol: "com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol"
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401 for RpcKind RPC_WRITABLE
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - Served: getMetaData queueTime= 4 procesingTime= 0
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: responding to Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401 Wrote 89 bytes.
[Socket Reader #1 for port 8887] INFO org.apache.hadoop.ipc.Server - Socket Reader #1 for port 8887: readAndProcess from client 127.0.0.1:58401 threw exception [java.io.IOException: 远程主机强迫关闭了一个现有的连接。] (i.e. the remote host forcibly closed an existing connection)
java.io.IOException: 远程主机强迫关闭了一个现有的连接。
	at sun.nio.ch.SocketDispatcher.read0(Native Method)
	...
	at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:1071)
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - Socket Reader #1 for port 8887: disconnecting client 127.0.0.1:58401. Number of active connections: 0
... (periodic "IPC Server idle connection scanner" DEBUG output for ports 8887 and 9997 omitted) ...
Client side
... (repetitive DEBUG output from metrics registration, Kerberos/group lookup and native-library probing omitted) ...
[main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[main] DEBUG org.apache.hadoop.security.UserGroupInformation - UGI loginUser:aaron (auth:SIMPLE)
[main] DEBUG org.apache.hadoop.ipc.Server - rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=org.apache.hadoop.ipc.WritableRpcEngine$Server$WritableRpcInvoker@271aec5c
[main] DEBUG org.apache.hadoop.ipc.Client - getting client out of cache: org.apache.hadoop.ipc.Client@5c00c832
[main] DEBUG org.apache.hadoop.ipc.Client - The ping interval is 60000 ms.
[main] DEBUG org.apache.hadoop.ipc.Client - Connecting to localhost/127.0.0.1:8887
[IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron: starting, having connections 1
[IPC Parameter Sending Thread #0] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron sending #0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484
[IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron got value #0
[main] DEBUG org.apache.hadoop.ipc.RPC - Call: getMetaData 207
/angela.mygirl: 3 - {BLK_1,BLK_2} ....
To wrap up: Hadoop's built-in RPC makes it very easy to open a port and expose a remote service, and the services Hadoop's own components provide to one another are exposed through exactly this framework and its protocol interfaces. If you are interested, take a look at the various protocol interfaces and implementations that ship with Hadoop, for example ClientProtocol, the interface the NameNode exposes to HDFS clients.
Finally, that is all for this post. If you found it useful, please give it a like; and if you are interested in my other posts on server-side and big-data topics, or in the author, feel free to follow the blog and get in touch any time.