Commit Graph

4346 Commits

Author SHA1 Message Date
Kent Yao
125f7f5b84 fixes #3 handle hive control exception 2018-03-15 21:48:38 +08:00
Kent Yao
1d9dc0d642 fixes #3 format err log 2018-03-15 20:49:48 +08:00
Kent Yao
2bc0a2f4ed fixes #3 handle parse exception 2018-03-15 20:25:13 +08:00
Kent Yao
b44faa86fa fixes #3 handle parse exception 2018-03-15 20:10:38 +08:00
Kent Yao
8d3f1c7803 fixes #3 handle analysis exception 2018-03-15 17:55:06 +08:00
Kent Yao
abe99da76b fixes #3 initial db switch fails with privileges check 2018-03-15 16:40:25 +08:00
Kent Yao
2a2cb51a40 add docs/issue_template.md 2018-03-14 11:06:28 +08:00
Kent Yao
4580feecc2 minor 2018-03-14 11:03:43 +08:00
Kent Yao
72f81de679 mv kyuubi authentication factory to scala source 2018-03-13 20:28:33 +08:00
Kent Yao
63983ca265 add kyuubi auth factory 2018-03-13 17:20:54 +08:00
Kent Yao
cec8286a60 travis 2018-03-13 11:01:14 +08:00
Kent Yao
dcbdd128de doc style 2018-03-07 16:09:45 +08:00
Kent Yao
c0b1d7fb44
Create CODE_OF_CONDUCT.md 2018-03-07 15:54:22 +08:00
Kent Yao
82839fcb5f
Create CONTRIBUTING.md 2018-03-07 15:51:46 +08:00
Kent Yao
f7c9f66fe5 change build script 2018-03-07 15:40:05 +08:00
Kent Yao
a85fc2c599 add jekyll conf 2018-03-07 15:24:45 +08:00
Kent Yao
f85e63385e add jekyll conf 2018-03-07 15:24:17 +08:00
Kent Yao
45442e3fe1 add hive configuration doc 2018-03-07 15:08:59 +08:00
Kent Yao
3c74836463 readme en / configuration doc kyuubi part 2018-03-06 23:52:29 +08:00
Kent Yao
d5bf707015 add scala test plugin 2018-03-06 14:54:37 +08:00
Kent Yao
4b0cec37f0 build status 2018-03-06 11:59:57 +08:00
Kent Yao
d9545ee34f add .travis.yml 2018-03-06 11:35:38 +08:00
Kent Yao
bc938e45b4 readme doc 2018-03-06 11:20:57 +08:00
Kent Yao
b78301ca00 readme cn 2018-01-25 10:10:01 +08:00
Kent Yao
1f7d7993dc mv mvn to build 2018-01-20 00:01:36 +08:00
Kent Yao
58a9a3f998 add stop script 2018-01-19 23:43:24 +08:00
Kent Yao
6fb2cb27e1 use spark-daemon.sh instead of spark-submit 2018-01-19 23:27:08 +08:00
Kent Yao
531136b9de add spark patches 2018-01-19 17:19:49 +08:00
Kent Yao
145bf9e79f add mvn version 2018-01-19 16:30:40 +08:00
Kent Yao
aa90968973 rename start script 2018-01-19 16:08:01 +08:00
Kent Yao
78b44f5d27 1. add build script
2. add start stop script
2018-01-19 15:55:08 +08:00
Kent Yao
004a93792a typo: comment 2018-01-18 17:33:58 +08:00
Kent Yao
b222c58cf4 perform relogin when necessary 2018-01-18 17:19:42 +08:00
Kent Yao
7fa0e05d6a proxy user hzyaoqin tries to renew a token with renewer hive
18/01/18 16:18:49 ERROR spark.SparkContext: Error initializing SparkContext.
org.apache.hadoop.security.AccessControlException: hzyaoqin tries to renew a token with renewer hive
	at org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.renewToken(AbstractDelegationTokenSecretManager.java:481)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renewDelegationToken(FSNamesystem.java:6697)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.renewDelegationToken(NameNodeRpcServer.java:571)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.renewDelegationToken(ClientNamenodeProtocolServerSideTranslatorPB.java:1005)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
	at org.apache.hadoop.hdfs.DFSClient$Renewer.renew(DFSClient.java:1133)
	at org.apache.hadoop.security.token.Token.renew(Token.java:377)
	at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1$$anonfun$apply$3.apply(HDFSCredentialProvider.scala:84)
	at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1$$anonfun$apply$3.apply(HDFSCredentialProvider.scala:83)
	at scala.Option.map(Option.scala:146)
	at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1.apply(HDFSCredentialProvider.scala:83)
	at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider$$anonfun$getTokenRenewalInterval$1.apply(HDFSCredentialProvider.scala:75)
	at scala.Option.flatMap(Option.scala:171)
	at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider.getTokenRenewalInterval(HDFSCredentialProvider.scala:75)
	at org.apache.spark.deploy.yarn.security.HDFSCredentialProvider.obtainCredentials(HDFSCredentialProvider.scala:55)
	at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:82)
	at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager$$anonfun$obtainCredentials$2.apply(ConfigurableCredentialManager.scala:80)
	at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
	at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
	at scala.collection.Iterator$class.foreach(Iterator.scala:893)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
	at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)
	at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
	at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
	at org.apache.spark.deploy.yarn.security.ConfigurableCredentialManager.obtainCredentials(ConfigurableCredentialManager.scala:80)
	at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:403)
	at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:882)
	at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:171)
	at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
	at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:156)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:509)
	at yaooqinn.kyuubi.session.KyuubiSession$$anon$1.run(KyuubiSession.scala:121)
Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): hzyaoqin tries to renew a token with renewer hive
	at org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.renewToken(AbstractDelegationTokenSecretManager.java:481)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.renewDelegationToken(FSNamesystem.java:6697)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.renewDelegationToken(NameNodeRpcServer.java:571)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.renewDelegationToken(ClientNamenodeProtocolServerSideTranslatorPB.java:1005)
	at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
	at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
	at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2043)

	at org.apache.hadoop.ipc.Client.call(Client.java:1475)
	at org.apache.hadoop.ipc.Client.call(Client.java:1412)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
	at com.sun.proxy.$Proxy12.renewDelegationToken(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.renewDelegationToken(ClientNamenodeProtocolTranslatorPB.java:948)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
	at com.sun.proxy.$Proxy13.renewDelegationToken(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient$Renewer.renew(DFSClient.java:1131)
	... 26 more
2018-01-18 16:34:27 +08:00
Kent Yao
864542118d 1. set operation log to rootlogdir/operation_logs/username/sessionid
2. fix delete log dir bug:

java.io.FileNotFoundException: File does not exist: /home/hadoop/data/apache-spark/spark-2.1.2-bin-2.1.2/operation_logs/hzyaoqin/7c8789b1-5dfc-4eb2-bc9e-243e1ad6446e/f8ef9615-5a13-471d-a091-8ea83f32cd30
	at org.apache.commons.io.FileUtils.forceDelete(FileUtils.java:2275)
	at org.apache.hadoop.hive.ql.session.OperationLog$LogFile.remove(OperationLog.java:163)
	at org.apache.hadoop.hive.ql.session.OperationLog.close(OperationLog.java:121)
	at yaooqinn.kyuubi.operation.KyuubiOperation.cleanupOperationLog(KyuubiOperation.scala:190)
	at yaooqinn.kyuubi.operation.KyuubiOperation.close(KyuubiOperation.scala:199)
	at yaooqinn.kyuubi.operation.OperationManager.closeOperation(OperationManager.scala:131)
	at yaooqinn.kyuubi.session.KyuubiSession$$anonfun$close$1.apply(KyuubiSession.scala:319)
	at yaooqinn.kyuubi.session.KyuubiSession$$anonfun$close$1.apply(KyuubiSession.scala:318)
	at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
	at yaooqinn.kyuubi.session.KyuubiSession.close(KyuubiSession.scala:318)
	at yaooqinn.kyuubi.session.SessionManager.closeSession(SessionManager.scala:264)
	at yaooqinn.kyuubi.server.BackendService.closeSession(BackendService.scala:78)
	at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1$$anonfun$1.apply$mcV$sp(FrontendService.scala:96)
	at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1$$anonfun$1.apply(FrontendService.scala:96)
	at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1$$anonfun$1.apply(FrontendService.scala:96)
	at scala.util.Try$.apply(Try.scala:192)
	at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1.apply(FrontendService.scala:96)
	at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler$$anonfun$deleteContext$1.apply(FrontendService.scala:93)
	at scala.Option.foreach(Option.scala:257)
	at yaooqinn.kyuubi.server.FrontendService$FeTServerEventHandler.deleteContext(FrontendService.scala:93)
	at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:300)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
	at java.lang.Thread.run(Thread.java:745)
2018-01-18 15:43:56 +08:00
Kent Yao
1e40804c9a formatted kyuubi configurations 2018-01-18 11:55:37 +08:00
Kent Yao
7f52205752 1. create sc in a new thread; 2. kill yarn app by app name when sc init timeout 2018-01-17 17:15:35 +08:00
Kent Yao
66f86172a6 rename hive related class names in case of class conflicts 2018-01-15 18:58:24 +08:00
Kent Yao
3b9ce670b9 register kyuubi conf to spark conf 2018-01-11 16:41:40 +08:00
Kent Yao
5b75e65bc4 import KyuubiConf._ 2018-01-11 14:38:49 +08:00
Kent Yao
d50825a659 delete sbin 2018-01-11 11:15:29 +08:00
Kent Yao
fe524b952e add comments 2018-01-11 11:04:23 +08:00
Kent Yao
6910b57f79 naming for kyuubi server 2018-01-10 17:59:08 +08:00
Kent Yao
2024477419 set has row set to be true to avoid null row set 2018-01-08 15:15:10 +08:00
Kent Yao
79010ea2ce init commit for kyuubi 2018-01-05 19:38:54 +08:00
Kent Yao
216c2b4fe3
Initial commit 2017-12-18 17:05:10 +08:00