
Renewing the connection to Apache Phoenix (with Kerberos) fails after exactly 10 hours

I have a Java application that issues SQL SELECT statements against Apache Phoenix. For this, I create the connection using a principal and a keytab. This is the supporting connection class:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

import org.apache.phoenix.jdbc.PhoenixDriver;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class PhoenixDriverConnect {

    private static Connection conn;
    private static long connTime;
    private static final Logger logger = LoggerFactory.getLogger(PhoenixDriverConnect.class);

    private PhoenixDriverConnect(String DB_URL) {
        // GetProperties is our own helper that reads the JDBC settings (not shown)
        GetProperties getProperties = new GetProperties();
        try {
            Class.forName(getProperties.get("jdbc.driver"));
        } catch (ClassNotFoundException e) {
            logger.error(e.getMessage());
        }
        try {
            DriverManager.deregisterDriver(PhoenixDriver.INSTANCE);
            conn = DriverManager.getConnection(DB_URL, getProperties.getInfo());
            connTime = new DateTime().getMillis();
        } catch (SQLException e) {
            logger.error(e.getMessage());
        }
    }

    public static synchronized Connection getConnection(String DB_URL) {
        // For the first call, conn == null, so the connection is created once
        if (conn == null) {
            logger.info("create new connection....");
            new PhoenixDriverConnect(DB_URL);
            logger.info("create new connection done.");
        }
        return conn;
    }
}

Here is the driver code that creates the connection:

public synchronized Connection connect(final String url, final Properties info) throws SQLException {

    String principal = info == null ? null : (String) info.get("DelegationDriver.principal");
    String kt = info == null ? null : (String) info.get("DelegationDriver.keytab.file");
    String hadoopConfFile = info == null ? null : (String) info.get("hbase_site");
    String hbaseConfFile = info == null ? null : (String) info.get("core-site");

    Configuration conf = HBaseConfiguration.create();

    if (hadoopConfFile != null) {
        logger.info("Adding conf1: " + hadoopConfFile);
        conf.addResource(new Path(hadoopConfFile));
    } else {
        logger.info("Hadoop core configuration is not provided");
    }
    if (hbaseConfFile != null) {
        logger.info("Adding conf2: " + hbaseConfFile);
        conf.addResource(new Path(hbaseConfFile));
    } else {
        logger.info("HBase configuration is not provided");
    }

    conf.set("hadoop.security.authentication", "kerberos");
    conf.set("hbase.security.authentication", "kerberos");
    conf.set("hbase.security.authorization", "true");

    logger.info("DelegationDriver - connect - principal : " + principal);
    logger.info("DelegationDriver - connect - keytab file : " + kt);
    logger.info("DelegationDriver - connect - hadoop configuration file : " + hadoopConfFile);
    logger.info("DelegationDriver - connect - hbase configuration file : " + hbaseConfFile);

    UserGroupInformation.setConfiguration(conf);

    try {
        if (principal != null) {
            logger.info("Trying to login with the principal found in the properties (" + principal + ", keytab=" + kt + ")");
            if (kt == null) {
                throw new IllegalArgumentException("keytab is required, no property found");
            }
            if ((kt = kt.trim()).isEmpty()) {
                throw new IllegalArgumentException("keytab is required, found empty property");
            }
            this.ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, kt);

            //this.ugi.getLoginUser().reloginFromKeytab();
            logger.info("Logged by Kerberos with the principal/keytab found in the properties, ugi=" + (Object) this.ugi + ", ticket=" + (Object) this.ugi.getRealAuthenticationMethod());
        } else {
            logger.info("No principal found in the properties (DelegationDriver.principal and DelegationDriver.keytab.file), trying the current user if any");
            this.ugi = UserGroupInformation.getCurrentUser();
        }
    } catch (IOException e) {
        logger.warning(e.getMessage());
        throw new RuntimeException("Can't login, principal found was " + principal + ", keytab=" + kt + '\n' + e.getLocalizedMessage());
    }

    logger.info("Going to connect to Phoenix. UGI = " + (Object) this.ugi);
    Connection conn = (Connection) this.runWithSQLException(new PrivilegedSQLExceptionAction<Connection>() {

        @Override
        public Connection run() throws SQLException {
            return DelegationDriver.this.driver.connect(url, info);
        }
    });
    logger.info("Connection to phoenix done");
    return conn;
}

This works perfectly. Note: I started the application at 5 PM, but at 3 AM, exactly 10 hours later, I get this error:

org.apache.zookeeper.KeeperException$SessionExpiredException: KeeperErrorCode = Session expired 
    at org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.connectionEvent(ZooKeeperWatcher.java:606) [hbase-client-1.1.1.jar!/:1.1.1] 
    at org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher.process(ZooKeeperWatcher.java:517) [hbase-client-1.1.1.jar!/:1.1.1] 
    at org.apache.zookeeper.ClientCnxn$EventThread.processEvent(ClientCnxn.java:522) [zookeeper-3.4.6.jar!/:3.4.6-1569965] 
    at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:498) [zookeeper-3.4.6.jar!/:3.4.6-1569965] 

When I then try to run a SELECT, I get this error:

2016-11-29 09:48:07.491 ERROR 6352 --- [ared--pool2-t18] o.a.hadoop.hbase.ipc.AbstractRpcClient : SASL authentication failed. The most likely cause is missing or invalid credentials. Consider 'kinit'. 
javax.security.sasl.SaslException: GSS initiate failed 
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:211) ~[na:1.8.0_112] 
at org.apache.hadoop.hbase.security.HBaseSaslRpcClient.saslConnect(HBaseSaslRpcClient.java:179) ~[hbase-client-1.1.1.jar!/:1.1.0] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupSaslConnection(RpcClientImpl.java:609) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.access$600(RpcClientImpl.java:154) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection$2.run(RpcClientImpl.java:735) ~[hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection$2.run(RpcClientImpl.java:732) ~[hbase-client-1.1.1.jar!/:1.1.1] 
at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112] 
at javax.security.auth.Subject.doAs(Subject.java:422) ~[na:1.8.0_112] 
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657) ~[hadoop-common-2.7.1.jar!/:na] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:732) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:885) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:854) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1180) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:213) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:287) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.execService(ClientProtos.java:32675) [hbase-protocol-1.1.0.jar!/:1.1.0] 
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.execService(ProtobufUtil.java:1615) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:92) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel$1.call(RegionCoprocessorRpcChannel.java:89) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel.callExecService(RegionCoprocessorRpcChannel.java:95) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel.callMethod(CoprocessorRpcChannel.java:56) [hbase-client-1.1.1.jar!/:1.1.1] 
at org.apache.phoenix.coprocessor.generated.MetaDataProtos$MetaDataService$Stub.getTable(MetaDataProtos.java:10665) [phoenix-core-4.4.0-HBase-1.1.jar!/:4.4.0-HBase-1.1] 
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1290) [phoenix-core-4.4.0-HBase-1.1.jar!/:4.4.0-HBase-1.1] 
at org.apache.phoenix.query.ConnectionQueryServicesImpl$7.call(ConnectionQueryServicesImpl.java:1277) [phoenix-core-4.4.0-HBase-1.1.jar!/:4.4.0-HBase-1.1] 
at org.apache.hadoop.hbase.client.HTable$16.call(HTable.java:1741) [hbase-client-1.1.1.jar!/:1.1.1] 
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_112] 
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_112] 
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_112] 
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_112] 
Caused by: org.ietf.jgss.GSSException: No valid credentials provided (Mechanism level: Attempt to obtain new INITIATE credentials failed! (null)) 
at sun.security.jgss.krb5.Krb5InitCredential.getTgt(Krb5InitCredential.java:343) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5InitCredential.getInstance(Krb5InitCredential.java:145) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:122) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5MechFactory.getMechanismContext(Krb5MechFactory.java:187) ~[na:1.8.0_112] 
at sun.security.jgss.GSSManagerImpl.getMechanismContext(GSSManagerImpl.java:224) ~[na:1.8.0_112] 
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:212) ~[na:1.8.0_112] 
at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179) ~[na:1.8.0_112] 
at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:192) ~[na:1.8.0_112] ... 29 common frames omitted 
Caused by: javax.security.auth.login.LoginException: Cannot read from System.in 
at com.sun.security.auth.module.Krb5LoginModule.promptForName(Krb5LoginModule.java:865) ~[na:1.8.0_112] 
at com.sun.security.auth.module.Krb5LoginModule.attemptAuthentication(Krb5LoginModule.java:704) ~[na:1.8.0_112] 
at com.sun.security.auth.module.Krb5LoginModule.login(Krb5LoginModule.java:617) ~[na:1.8.0_112] 
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_112] 
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_112] 
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_112] 
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_112] 
at javax.security.auth.login.LoginContext.invoke(LoginContext.java:755) ~[na:1.8.0_112] 
at javax.security.auth.login.LoginContext.access$000(LoginContext.java:195) ~[na:1.8.0_112] 
at javax.security.auth.login.LoginContext$4.run(LoginContext.java:682) ~[na:1.8.0_112] 
at javax.security.auth.login.LoginContext$4.run(LoginContext.java:680) ~[na:1.8.0_112] 
at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112] 
at javax.security.auth.login.LoginContext.invokePriv(LoginContext.java:680) ~[na:1.8.0_112] 
at javax.security.auth.login.LoginContext.login(LoginContext.java:587) ~[na:1.8.0_112] 
at sun.security.jgss.GSSUtil.login(GSSUtil.java:258) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5Util.getTicket(Krb5Util.java:158) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5InitCredential$1.run(Krb5InitCredential.java:335) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5InitCredential$1.run(Krb5InitCredential.java:331) ~[na:1.8.0_112] 
at java.security.AccessController.doPrivileged(Native Method) ~[na:1.8.0_112] 
at sun.security.jgss.krb5.Krb5InitCredential.getTgt(Krb5InitCredential.java:330) ~[na:1.8.0_112] 
... 36 common frames omitted 

Answers

Answer 1:

You will need a try-catch around your execute statement that re-initializes the Kerberos principal and the connection.
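In code, the idea might look roughly like the following sketch. It assumes the PhoenixDriverConnect class from the question, plus a hypothetical resetConnection() helper that clears the cached connection so the next getConnection() call logs in again; the class and method names here are illustrative, not part of the original answer.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class RetryingPhoenixQuery {

    // Runs the query once; if it fails (e.g. because the Kerberos ticket and
    // the ZooKeeper session have expired), re-creates the connection and retries once.
    public static ResultSet executeWithRelogin(String dbUrl, String sql) throws SQLException {
        try {
            return runQuery(dbUrl, sql);
        } catch (SQLException e) {
            // Drop the stale connection so that getConnection() re-initializes
            // the Kerberos principal and opens a fresh connection.
            PhoenixDriverConnect.resetConnection();  // hypothetical helper: sets the cached conn back to null
            return runQuery(dbUrl, sql);
        }
    }

    private static ResultSet runQuery(String dbUrl, String sql) throws SQLException {
        Connection conn = PhoenixDriverConnect.getConnection(dbUrl);
        Statement stmt = conn.createStatement();
        return stmt.executeQuery(sql);
    }
}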

Answer 2:

First of all, Java's support for Kerberos is far from perfect. To quote Hadoop and Kerberos, The Madness beyond the Gate: "...the public APIs vary across JDK versions and are too simplistic for [Hadoop's] authentication system... brittle".

One of the limitations is that Java cannot create renewable Kerberos tickets, and cannot renew an existing renewable ticket (e.g. one created with kinit). As a consequence, the ticket created by your loginUserFromKeytabAndReturnUGI() call will expire after 10 hours (the typical setting for the ticket lifetime).
For the record, the Hadoop authentication library automatically spawns a background thread that tries to renew its UGI ticket, but to no avail, since the ticket is not renewable.

Even if the ticket were renewable, it would eventually reach its renewable lifetime after 7 days (again, the typical setting), and you would have to re-create it at some point anyway.

The standard solution is to spawn a background thread that calls checkTGTAndReloginFromKeytab() periodically; see that post for a very thorough explanation by a Hortonworks guru (a colleague of the guy who wrote the GitBook about Hadoop & Kerberos).
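A minimal sketch of that approach, assuming the UGI was obtained with loginUserFromKeytabAndReturnUGI() as in the driver code above; the one-hour interval and the thread name are arbitrary illustrative choices.

import java.io.IOException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.security.UserGroupInformation;

public class KerberosRelogin {

    // Periodically asks Hadoop to re-login from the keytab. The call is a no-op
    // while the current TGT is still fresh, so running it every hour is cheap.
    public static void startReloginThread(final UserGroupInformation ugi) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(r -> {
            Thread t = new Thread(r, "kerberos-relogin");
            t.setDaemon(true);
            return t;
        });
        scheduler.scheduleAtFixedRate(() -> {
            try {
                ugi.checkTGTAndReloginFromKeytab();
            } catch (IOException e) {
                // Log and keep trying on the next tick; queries will fail anyway
                // if the ticket really cannot be refreshed.
                e.printStackTrace();
            }
        }, 1, 1, TimeUnit.HOURS);
    }
}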

See also this post and that post for more context on Kerberos and UGI.