In the previous post I walked through the Hadoop client Java API; this one takes a look at Hadoop's built-in RPC framework. Keep in mind that this framework was designed to provide RPC between Hadoop's own internal components, not to compete with general-purpose open-source RPC products such as Dubbo, Spring Cloud, or HSF, so it is rarely something you would pick for an ordinary web project. For a back-end project, however, it can be used entirely on its own, and no Hadoop cluster needs to be set up.
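A note on dependencies, based on my own setup rather than anything specific to the demo: all of the classes used below (Configuration, RPC, RPC.Builder) come from the hadoop-common artifact, so putting org.apache.hadoop:hadoop-common (plus its transitive dependencies) on the classpath should be enough; no HDFS or YARN installation is required.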
Let's look at a usage example.
As covered in earlier chapters, Hadoop uses RPC between the NameNode and DataNodes, and between the ResourceManager and NodeManagers. The following example simulates that kind of usage.
Define the remote call interfaces
package com.empire.hadoop.hadoop_rpc.protocol;

/**
 * ClientNamenodeProtocol.java: a mock of the NameNode protocol interface
 *
 * @author arron Nov 14, 2018 11:09:55 PM
 */
public interface ClientNamenodeProtocol {

    /**
     * Client protocol version. The server reads this version number, but it is
     * not validated, so it may differ from the one the client passes in.
     */
    public static final long versionID = 1L;

    public String getMetaData(String path);
}
package com.empire.hadoop.hadoop_rpc.protocol;

/**
 * IUserLoginService.java: user login service interface
 *
 * @author arron Nov 14, 2018 11:30:45 PM
 */
public interface IUserLoginService {

    /**
     * Client protocol version number.
     */
    public static final long versionID = 100L;

    public String login(String name, String passwd);
}
Implement the interfaces
package com.empire.hadoop.hadoop_rpc.service;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

/**
 * MyNameNode.java: implementation of the mock NameNode protocol interface
 *
 * @author arron Nov 14, 2018 11:31:37 PM
 */
public class MyNameNode implements ClientNamenodeProtocol {

    // Simulates one of the NameNode's duties: looking up file metadata
    public String getMetaData(String path) {
        return path + ": 3 - {BLK_1,BLK_2} ....";
    }
}
package com.empire.hadoop.hadoop_rpc.service;

import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * UserLoginServiceImpl.java: user login service implementation
 *
 * @author arron Nov 14, 2018 11:32:16 PM
 */
public class UserLoginServiceImpl implements IUserLoginService {

    public String login(String name, String passwd) {
        return name + " logged in successfully...";
    }
}
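One caveat worth keeping in mind: these services go through the default WritableRpcEngine (the run logs below show RpcKind RPC_WRITABLE), which serializes arguments and return values with Hadoop's ObjectWritable. As far as I know, that limits method parameters and return types to primitives, String, Writable implementations and arrays of those; an arbitrary POJO would not serialize.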
Publish the remote services
package com.empire.hadoop.hadoop_rpc.service;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RPC.Builder;
import org.apache.hadoop.ipc.RPC.Server;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;
import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * PublishServiceUtil.java: utility that publishes the RPC services
 *
 * @author arron Nov 14, 2018 11:32:01 PM
 */
public class PublishServiceUtil {

    public static void main(String[] args) throws Exception {
        // Publish the mock NameNode protocol on port 8888
        Builder builder = new RPC.Builder(new Configuration());
        builder.setBindAddress("localhost").setPort(8888)
                .setProtocol(ClientNamenodeProtocol.class)
                .setInstance(new MyNameNode());
        Server server = builder.build();
        server.start();

        // Publish the user login service on port 9999
        Builder builder2 = new RPC.Builder(new Configuration());
        builder2.setBindAddress("localhost").setPort(9999)
                .setProtocol(IUserLoginService.class)
                .setInstance(new UserLoginServiceImpl());
        Server server2 = builder2.build();
        server2.start();
    }
}
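Optionally, and this is my own addition rather than something the demo requires, you can make main block explicitly until the servers are shut down instead of relying on the server threads to keep the JVM alive. A minimal sketch, assuming it is appended at the end of PublishServiceUtil.main after the two start() calls:

// Optional: block the publishing process until the servers are stopped.
// Server#join() returns only once the corresponding server has been stopped.
server.join();
server2.join();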
Invoke the services from the client
package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol;

/**
 * MyHdfsClient.java: RPC client for the mock NameNode protocol
 *
 * @author arron Nov 14, 2018 11:30:04 PM
 */
public class MyHdfsClient {

    public static void main(String[] args) throws Exception {
        // 1L is the versionID declared on ClientNamenodeProtocol
        ClientNamenodeProtocol namenode = RPC.getProxy(ClientNamenodeProtocol.class, 1L,
                new InetSocketAddress("localhost", 8888), new Configuration());
        String metaData = namenode.getMetaData("/angela.mygirl");
        System.out.println(metaData);
    }
}
package com.empire.hadoop.hadoop_rpc.client;

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

import com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService;

/**
 * UserLoginAction.java: client for the user login service
 *
 * @author arron Nov 14, 2018 11:30:24 PM
 */
public class UserLoginAction {

    public static void main(String[] args) throws Exception {
        IUserLoginService userLoginService = RPC.getProxy(IUserLoginService.class, 100L,
                new InetSocketAddress("localhost", 9999), new Configuration());
        String login = userLoginService.login("angelababy", "1314520");
        System.out.println(login);
    }
}
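One more habit worth adopting (again my own addition, not part of the minimal demo): an RPC proxy keeps an IPC connection open, so it is cleaner to release it with RPC.stopProxy when you are done. Doing so should also avoid the harmless-looking IOException in the server log below, which is simply the server noticing that the client JVM exited without closing its connection. A sketch of the client body with that change:

// Variant of MyHdfsClient.main that releases the proxy when finished.
ClientNamenodeProtocol namenode = RPC.getProxy(ClientNamenodeProtocol.class, 1L,
        new InetSocketAddress("localhost", 8888), new Configuration());
try {
    System.out.println(namenode.getMetaData("/angela.mygirl"));
} finally {
    RPC.stopProxy(namenode); // closes the underlying IPC client connection
}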
Run output (note that this particular run bound the two services to ports 8887 and 9997 instead of the 8888 and 9999 used in the code above; the flow is otherwise identical):
Server side
[main] DEBUG org.apache.hadoop.ipc.Server - rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=org.apache.hadoop.ipc.WritableRpcEngine$Server$WritableRpcInvoker@27c6d458 [main] INFO org.apache.hadoop.ipc.CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 100 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler [main] DEBUG org.apache.hadoop.security.SecurityUtil - Setting hadoop.security.token.service.use_ip to true [main] DEBUG org.apache.hadoop.ipc.Server - Server accepts auth methods:[SIMPLE] [Socket Reader #1 for port 8887] INFO org.apache.hadoop.ipc.Server - Starting Socket Reader #1 for port 8887 [main] DEBUG org.apache.hadoop.ipc.metrics.RpcMetrics - Initialized MetricsRegistry{info=MetricsInfoImpl{name=rpc, description=rpc}, tags=[MetricsTag{info=MetricsInfoImpl{name=port, description=RPC port}, value=8887}], metrics=[]} [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.receivedBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of received bytes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.sentBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of sent bytes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcQueueTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Queue time], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Processing time], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.deferredRpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Deferred Processing time], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication failures], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication successes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationFailures 
with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization failures], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization successes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcClientBackoff with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of client backoff requests], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcSlowCalls with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of Slow RPC calls], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public java.lang.String org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnectionsPerUser() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections per user], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.callQueueLength() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Length of the call queue], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public long org.apache.hadoop.ipc.metrics.RpcMetrics.numDroppedConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of dropped connections], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcActivityForPort8887, Aggregate RPC metrics [main] DEBUG org.apache.hadoop.ipc.metrics.RpcDetailedMetrics - MetricsInfoImpl{name=rpcdetailed, description=rpcdetailed} [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.rates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.deferredRpcRates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcDetailedActivityForPort8887, Per method RPC metrics [main] DEBUG 
org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of successful kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Rate of failed kerberos logins and latency (milliseconds)], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[GetGroups], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeLong org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailuresTotal with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Renewal failures since startup], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Renewal failures since last successful login], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - UgiMetrics, User and group related metrics [main] DEBUG org.apache.hadoop.security.authentication.util.KerberosName - Kerberos krb5 configuration not found, setting default realm to empty [main] DEBUG org.apache.hadoop.security.Groups - Creating new Groups object [main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Trying to load the custom-built native-hadoop library... 
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path [main] DEBUG org.apache.hadoop.util.NativeCodeLoader - java.library.path=E:\Program Files\Java\jdk1.7.0_80\bin;C:\WINDOWS\Sun\Java\bin;C:\WINDOWS\system32;C:\WINDOWS;E:\Program Files\Java\jdk1.7.0_80\jre\bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin/server;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/lib/amd64;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\iCLS\;C:\Program Files\Intel\Intel(R) Management Engine Components\iCLS\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\WiFi\bin\;C:\Program Files\Common Files\Intel\WirelessCommon\;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;D:\Program Files (x86)\VanDyke Software\Clients\;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;E:\Program Files\Java\jdk1.8.0_131\bin;E:\Program Files\Java\jdk1.8.0_131\jre\bin;E:\Program Files\apache-maven-3.5.4\bin;%HADOOP_HOME%\bin;C:\Users\aaron\AppData\Local\Microsoft\WindowsApps;D:\Program Files (x86)\VanDyke Software\Clients\;;E:\sts-bundle\sts-3.9.5.RELEASE;;. [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable [main] DEBUG org.apache.hadoop.util.PerformanceAdvisory - Falling back to shell based [main] DEBUG org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping [main] DEBUG org.apache.hadoop.security.Groups - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000 [main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.ipc.ProtocolMetaInfoPB version=1 ProtocolImpl=org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos$ProtocolInfoService$2 protocolClass=org.apache.hadoop.ipc.ProtocolMetaInfoPB [main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_WRITABLE Protocol Name = com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol version=1 ProtocolImpl=com.empire.hadoop.hadoop_rpc.service.MyNameNode protocolClass=com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol [IPC Server Responder] INFO org.apache.hadoop.ipc.Server - IPC Server Responder: starting [IPC Server listener on 8887] INFO org.apache.hadoop.ipc.Server - IPC Server listener on 8887: starting [IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: starting [main] INFO org.apache.hadoop.ipc.CallQueueManager - Using callQueue: class java.util.concurrent.LinkedBlockingQueue queueCapacity: 100 scheduler: class org.apache.hadoop.ipc.DefaultRpcScheduler [main] DEBUG org.apache.hadoop.ipc.Server - Server accepts auth methods:[SIMPLE] [Socket Reader #1 for port 9997] INFO org.apache.hadoop.ipc.Server - Starting Socket Reader #1 for port 9997 [main] DEBUG org.apache.hadoop.ipc.metrics.RpcMetrics - Initialized 
MetricsRegistry{info=MetricsInfoImpl{name=rpc, description=rpc}, tags=[MetricsTag{info=MetricsInfoImpl{name=port, description=RPC port}, value=9997}], metrics=[]} [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.receivedBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of received bytes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.sentBytes with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of sent bytes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcQueueTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Queue time], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.rpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Processing time], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.ipc.metrics.RpcMetrics.deferredRpcProcessingTime with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Deferred Processing time], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication failures], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthenticationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authentication successes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization failures], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcAuthorizationSuccesses with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of authorization successes], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcClientBackoff with annotation 
@org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of client backoff requests], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableCounterLong org.apache.hadoop.ipc.metrics.RpcMetrics.rpcSlowCalls with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of Slow RPC calls], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public java.lang.String org.apache.hadoop.ipc.metrics.RpcMetrics.numOpenConnectionsPerUser() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of open connections per user], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public int org.apache.hadoop.ipc.metrics.RpcMetrics.callQueueLength() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Length of the call queue], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - method public long org.apache.hadoop.ipc.metrics.RpcMetrics.numDroppedConnections() with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[Number of dropped connections], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcActivityForPort9997, Aggregate RPC metrics [main] DEBUG org.apache.hadoop.ipc.metrics.RpcDetailedMetrics - MetricsInfoImpl{name=rpcdetailed, description=rpcdetailed} [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.rates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation org.apache.hadoop.ipc.metrics.RpcDetailedMetrics.deferredRpcRates with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, value=[], about=, always=false, type=DEFAULT, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - RpcDetailedActivityForPort9997, Per method RPC metrics [main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_PROTOCOL_BUFFER Protocol Name = org.apache.hadoop.ipc.ProtocolMetaInfoPB version=1 ProtocolImpl=org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos$ProtocolInfoService$2 protocolClass=org.apache.hadoop.ipc.ProtocolMetaInfoPB [main] DEBUG org.apache.hadoop.ipc.Server - RpcKind = RPC_WRITABLE Protocol Name = com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService version=100 ProtocolImpl=com.empire.hadoop.hadoop_rpc.service.UserLoginServiceImpl protocolClass=com.empire.hadoop.hadoop_rpc.protocol.IUserLoginService [IPC Server listener on 9997] INFO org.apache.hadoop.ipc.Server - IPC Server listener on 9997: starting [IPC Server Responder] INFO org.apache.hadoop.ipc.Server - IPC Server 
Responder: starting
[IPC Server handler 0 on 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 9997: starting
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server listener on 8887] DEBUG org.apache.hadoop.ipc.Server - Server connection from 127.0.0.1:58401; # active connections: 1; # queued calls: 0
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - got #-3
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - Successfully authorized userInfo { effectiveUser: "aaron" } protocol: "com.empire.hadoop.hadoop_rpc.protocol.ClientNamenodeProtocol"
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - got #0
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401 for RpcKind RPC_WRITABLE
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.security.UserGroupInformation - PrivilegedAction as:aaron (auth:SIMPLE) from:org.apache.hadoop.ipc.Server$Handler.run(Server.java:2606)
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation - getMetaData
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.metrics2.lib.MutableRatesWithAggregation - getMetaData
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - Served: getMetaData queueTime= 4 procesingTime= 0
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: responding to Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401
[IPC Server handler 0 on 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server handler 0 on 8887: responding to Call#0 Retry#0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484 from 127.0.0.1:58401 Wrote 89 bytes.
[Socket Reader #1 for port 8887] INFO org.apache.hadoop.ipc.Server - Socket Reader #1 for port 8887: readAndProcess from client 127.0.0.1:58401 threw exception [java.io.IOException: An existing connection was forcibly closed by the remote host.]
java.io.IOException: An existing connection was forcibly closed by the remote host.
    at sun.nio.ch.SocketDispatcher.read0(Native Method)
    at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43)
    at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
    at sun.nio.ch.IOUtil.read(IOUtil.java:197)
    at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:384)
    at org.apache.hadoop.ipc.Server.channelRead(Server.java:3192)
    at org.apache.hadoop.ipc.Server.access$2600(Server.java:136)
    at org.apache.hadoop.ipc.Server$Connection.readAndProcess(Server.java:1984)
    at org.apache.hadoop.ipc.Server$Listener.doRead(Server.java:1244)
    at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:1100)
    at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:1071)
[Socket Reader #1 for port 8887] DEBUG org.apache.hadoop.ipc.Server - Socket Reader #1 for port 8887: disconnecting client 127.0.0.1:58401. Number of active connections: 0
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
[IPC Server idle connection scanner for port 8887] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 8887: task running
[IPC Server idle connection scanner for port 9997] DEBUG org.apache.hadoop.ipc.Server - IPC Server idle connection scanner for port 9997: task running
Client side
[main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginSuccess with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of successful kerberos logins and latency (milliseconds)], type=DEFAULT, always=false, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.loginFailure with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Rate of failed kerberos logins and latency (milliseconds)], type=DEFAULT, always=false, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field org.apache.hadoop.metrics2.lib.MutableRate org.apache.hadoop.security.UserGroupInformation$UgiMetrics.getGroups with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[GetGroups], type=DEFAULT, always=false, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeLong org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailuresTotal with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Renewal failures since startup], type=DEFAULT, always=false, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.lib.MutableMetricsFactory - field private org.apache.hadoop.metrics2.lib.MutableGaugeInt org.apache.hadoop.security.UserGroupInformation$UgiMetrics.renewalFailures with annotation @org.apache.hadoop.metrics2.annotation.Metric(valueName=Time, about=, value=[Renewal failures since last successful login], type=DEFAULT, always=false, sampleName=Ops) [main] DEBUG org.apache.hadoop.metrics2.impl.MetricsSystemImpl - UgiMetrics, User and group related metrics [main] DEBUG org.apache.hadoop.security.SecurityUtil - Setting hadoop.security.token.service.use_ip to true [main] DEBUG org.apache.hadoop.security.authentication.util.KerberosName - Kerberos krb5 configuration not found, setting default realm to empty [main] DEBUG org.apache.hadoop.security.Groups - Creating new Groups object [main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Trying to load the custom-built native-hadoop library... 
[main] DEBUG org.apache.hadoop.util.NativeCodeLoader - Failed to load native-hadoop with error: java.lang.UnsatisfiedLinkError: no hadoop in java.library.path [main] DEBUG org.apache.hadoop.util.NativeCodeLoader - java.library.path=E:\Program Files\Java\jdk1.7.0_80\bin;C:\WINDOWS\Sun\Java\bin;C:\WINDOWS\system32;C:\WINDOWS;E:\Program Files\Java\jdk1.7.0_80\jre\bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin/server;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/bin;E:/Program Files/Java/jdk1.8.0_131/bin/../jre/lib/amd64;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\iCLS\;C:\Program Files\Intel\Intel(R) Management Engine Components\iCLS\;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem;C:\Windows\System32\WindowsPowerShell\v1.0\;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files\Intel\Intel(R) Management Engine Components\DAL;C:\Program Files (x86)\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\Intel(R) Management Engine Components\IPT;C:\Program Files\Intel\WiFi\bin\;C:\Program Files\Common Files\Intel\WirelessCommon\;C:\Program Files (x86)\NVIDIA Corporation\PhysX\Common;D:\Program Files (x86)\VanDyke Software\Clients\;C:\WINDOWS\system32;C:\WINDOWS;C:\WINDOWS\System32\Wbem;C:\WINDOWS\System32\WindowsPowerShell\v1.0\;C:\WINDOWS\System32\OpenSSH\;E:\Program Files\Java\jdk1.8.0_131\bin;E:\Program Files\Java\jdk1.8.0_131\jre\bin;E:\Program Files\apache-maven-3.5.4\bin;%HADOOP_HOME%\bin;C:\Users\aaron\AppData\Local\Microsoft\WindowsApps;D:\Program Files (x86)\VanDyke Software\Clients\;;E:\sts-bundle\sts-3.9.5.RELEASE;;. [main] WARN org.apache.hadoop.util.NativeCodeLoader - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable [main] DEBUG org.apache.hadoop.util.PerformanceAdvisory - Falling back to shell based [main] DEBUG org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback - Group mapping impl=org.apache.hadoop.security.ShellBasedUnixGroupsMapping [main] DEBUG org.apache.hadoop.security.Groups - Group mapping impl=org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback; cacheTimeout=300000; warningDeltaMs=5000 [main] DEBUG org.apache.hadoop.security.UserGroupInformation - hadoop login [main] DEBUG org.apache.hadoop.security.UserGroupInformation - hadoop login commit [main] DEBUG org.apache.hadoop.security.UserGroupInformation - using local user:NTUserPrincipal: aaron [main] DEBUG org.apache.hadoop.security.UserGroupInformation - Using user: "NTUserPrincipal: aaron" with name aaron [main] DEBUG org.apache.hadoop.security.UserGroupInformation - User entry: "aaron" [main] DEBUG org.apache.hadoop.security.UserGroupInformation - Assuming keytab is managed externally since logged in from subject. [main] DEBUG org.apache.hadoop.security.UserGroupInformation - UGI loginUser:aaron (auth:SIMPLE) [main] DEBUG org.apache.hadoop.ipc.Server - rpcKind=RPC_WRITABLE, rpcRequestWrapperClass=class org.apache.hadoop.ipc.WritableRpcEngine$Invocation, rpcInvoker=org.apache.hadoop.ipc.WritableRpcEngine$Server$WritableRpcInvoker@271aec5c [main] DEBUG org.apache.hadoop.ipc.Client - getting client out of cache: org.apache.hadoop.ipc.Client@5c00c832 [main] DEBUG org.apache.hadoop.ipc.Client - The ping interval is 60000 ms. 
[main] DEBUG org.apache.hadoop.ipc.Client - Connecting to localhost/127.0.0.1:8887
[IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron: starting, having connections 1
[IPC Parameter Sending Thread #0] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron sending #0 getMetaData(/angela.mygirl), rpc version=2, client version=1, methodsFingerPrint=1770564484
[IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron] DEBUG org.apache.hadoop.ipc.Client - IPC Client (1893360267) connection to localhost/127.0.0.1:8887 from aaron got value #0
[main] DEBUG org.apache.hadoop.ipc.RPC - Call: getMetaData 207
/angela.mygirl: 3 - {BLK_1,BLK_2} ....
To wrap up: Hadoop's built-in RPC makes it very easy to open a port and expose a remote service, and the services Hadoop provides internally are exposed through this same RPC framework. If you are interested, take a look at the various service protocol implementations Hadoop ships, for example org.apache.hadoop.hdfs.protocol.ClientProtocol, which the HDFS client uses to talk to the NameNode.
That is all for this post. If you found it worthwhile, please give it a like; if you are interested in my other posts on server-side and big data topics, please follow this blog, and feel free to reach out to me at any time to exchange ideas.