18/01/18 16:18:49 ERROR spark.SparkContext: Error initializing SparkContext.
org.apache.hadoop.security.AccessControlException: hzyaoqin tries to renew a token with renewer hive
at org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.renewToken(AbstractDelegationTokenSecretManager.java:481)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renewDelegationToken(FSNamesystem.java:6697)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.renewDelegationToken(NameNodeRpcServer.java:571)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.renewDelegationToken(ClientNamenodeProtocolServerSideTranslatorPB.java:1005)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
at org.apache.hadoop.hdfs.DFSClient$Renewer.renew(DFSClient.java:1133)
at org.apache.hadoop.security.token.Token.renew(Token.java:377)
at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1$$anonfun$apply$3.apply(HDFSCredentialProvider.scala:84)
at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1$$anonfun$apply$3.apply(HDFSCredentialProvider.scala:83)
at scala.Option.map(Option.scala:146)
at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1.apply(HDFSCredentialProvider.scala:83)
at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1.apply(HDFSCredentialProvider.scala:75)
at scala.Option.flatMap(Option.scala:171)
at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider.getTokenRenewalInterval(HDFSCredentialProvider.scala:75)
at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider.obtainCredentials(HDFSCredentialProvider.scala:55)
at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:82)
at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:80)
at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)
at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager.obtainCredentials(ConfigurableCredentialManager.scala:80)
at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:403)
at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:882)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:171)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:156)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:509)
at yaooqinn.kyuubi.session.KyuubiSession$$anon$1.run(KyuubiSession.scala:121)
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): hzyaoqin tries to renew a token with renewer hive
at org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.renewToken(AbstractDelegationTokenSecretManager.java:481)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renewDelegationToken(FSNamesystem.java:6697)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.renewDelegationToken(NameNodeRpcServer.java:571)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.renewDelegationToken(ClientNamenodeProtocolServerSideTranslatorPB.java:1005)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)
at org.apache.hadoop.ipc.Client.call(Client.java:1475)
at org.apache.hadoop.ipc.Client.call(Client.java:1412)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
at com.sun.proxy.$Proxy12.renewDelegationToken(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.renewDelegationToken(ClientNamenodeProtocolTranslatorPB.java:948)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy13.renewDelegationToken(Unknown Source)
at org.apache.hadoop.hdfs.DFSClient$Renewer.renew(DFSClient.java:1131)
... 26 more
2. Fix a bug where closing an operation fails while deleting an already-removed operation log directory:
java.io.FileNotFoundException: File does not exist: /home/hadoop/data/apache-spark/spark-2.1.2-bin-2.1.2/operation_logs/hzyaoqin/7c8789b1-5dfc-4eb2-bc9e-243e1ad6446e/f8ef9615-5a13-471d-a091-8ea83f32cd30
at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2275)
at org.apache.hadoop.hive.ql.session.OperationLog$LogFile.remove(OperationLog.java:163)
at org.apache.hadoop.hive.ql.session.OperationLog.close(OperationLog.java:121)
at yaooqinn.kyuubi.operation.KyuubiOperation.cleanupOperationLog(KyuubiOperation.scala:190)
at yaooqinn.kyuubi.operation.KyuubiOperation.close(KyuubiOperation.scala:199)
at yaooqinn.kyuubi.operation.OperationManager.closeOperation(OperationManager.scala:131)
at yaooqinn.kyuubi.session.KyuubiSession$$anonfun$close$1.apply(KyuubiSession.scala:319)
at yaooqinn.kyuubi.session.KyuubiSession$$anonfun$close$1.apply(KyuubiSession.scala:318)
at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
at yaooqinn.kyuubi.session.KyuubiSession.close(KyuubiSession.scala:318)
at yaooqinn.kyuubi.session.SessionManager.closeSession(SessionManager.scala:264)
at yaooqinn.kyuubi.server.BackendService.closeSession(BackendService.scala:78)
at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1$$anonfun$1.apply$mcV$sp(FrontendService.scala:96)
at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1$$anonfun$1.apply(FrontendService.scala:96)
at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1$$anonfun$1.apply(FrontendService.scala:96)
at scala.util.Try$.apply(Try.scala:192)
at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1.apply(FrontendService.scala:96)
at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1.apply(FrontendService.scala:93)
at scala.Option.foreach(Option.scala:257)
at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler.deleteContext(FrontendService.scala:93)
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:300)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)