This commit is contained in:
Kent Yao 2020-09-21 18:44:46 +08:00
parent bae3ac071c
commit cab07c3001
4 changed files with 7 additions and 44 deletions

View File

@ -32,9 +32,8 @@ install:
jobs:
include:
- stage: spark3.0.0
language: scala
script:
- mvn --no-transfer-progress clean install -pl :kyuubi-common,:kyuubi-ha,:kyuubi-main,:kyuubi-spark-sql-engine,:kyuubi-codecov,:kyuubi-download,:kyuubi-assembly -Dmaven.javadoc.skip=true -B -V
- build/mvn --no-transfer-progress clean install -pl :kyuubi-common,:kyuubi-ha,:kyuubi-main,:kyuubi-spark-sql-engine,:kyuubi-codecov,:kyuubi-download,:kyuubi-assembly -Dmaven.javadoc.skip=true -B -V
after_success:
- bash <(curl -s https://codecov.io/bash)

View File

@ -17,15 +17,11 @@
package org.apache.kyuubi.engine
import java.io.File
import java.lang.ProcessBuilder.Redirect
import java.nio.file.{Files, Path, Paths}
import java.nio.file.{Path, Paths}
import java.util.UUID
import scala.collection.JavaConverters._
import org.apache.kyuubi.Utils
trait ProcessBuilderLike {
protected def executable: String

View File

@ -17,9 +17,7 @@
package org.apache.kyuubi.engine.spark
import java.io.{BufferedReader, InputStreamReader}
import java.nio.file.{Files, Path, Paths}
import java.util.concurrent.TimeUnit
import scala.collection.mutable.ArrayBuffer
@ -40,12 +38,15 @@ class SparkProcessBuilder(
val path = env.get("SPARK_HOME").map { sparkHome =>
Paths.get(sparkHome, "bin", "spark-submit").toAbsolutePath
} getOrElse {
val sparkVer = SPARK_COMPILE_VERSION
val hadoopVer = HADOOP_COMPILE_VERSION.take(3)
val hiveVer = if (HIVE_COMPILE_VERSION.take(3).toDouble < 2.3) "-hive1.2" else ""
Paths.get(
"..",
"externals",
"kyuubi-download",
"target",
s"spark-$SPARK_COMPILE_VERSION-bin-hadoop2.7",
s"spark-$sparkVer-bin-hadoop$hadoopVer$hiveVer",
"bin", "spark-submit")
}
path.toAbsolutePath.toFile.getCanonicalPath
@ -76,7 +77,7 @@ class SparkProcessBuilder(
env.get("KYUUBI_WORK_DIR_ROOT").map { root =>
Utils.createTempDir(root, proxyUser)
}.getOrElse {
Utils.createTempDir(proxyUser)
Utils.createTempDir(namePrefix = proxyUser)
}
}
@ -101,41 +102,8 @@ class SparkProcessBuilder(
}
/**
 * Companion object for [[SparkProcessBuilder]]: spark-submit CLI option constants
 * plus a manual smoke-test entry point.
 *
 * May need to download a Spark release package first:
 *
 * (build/)mvn clean package -pl :kyuubi-download -DskipTests
 */
object SparkProcessBuilder {
  private final val CONF = "--conf"
  private final val CLASS = "--class"
  private final val PROXY_USER = "--proxy-user"

  /**
   * Reads the given stream line by line and prints each line to stdout.
   * The reader is always closed, even if reading fails.
   *
   * @param stream the process stream (stdout or stderr) to drain
   */
  private def dumpStream(stream: java.io.InputStream): Unit = {
    val reader = new BufferedReader(new InputStreamReader(stream))
    try {
      var line = reader.readLine()
      while (line != null) {
        println(line)
        line = reader.readLine()
      }
    } finally {
      reader.close()
    }
  }

  /**
   * Manual smoke test: launches spark-submit via [[SparkProcessBuilder]] and
   * prints the process output. If the process does not finish within one
   * minute, its stderr is dumped followed by a "not started" notice.
   */
  def main(args: Array[String]): Unit = {
    val conf = Map("spark.abc" -> "1", "spark.xyz" -> "2", "spark.master" -> "hello")
    val sparkProcessBuilder = new SparkProcessBuilder("kent", conf)
    print(sparkProcessBuilder.toString)
    val start = sparkProcessBuilder.start
    // scalastyle:off
    if (start.waitFor(1, TimeUnit.MINUTES)) {
      dumpStream(start.getInputStream)
    } else {
      dumpStream(start.getErrorStream)
      println("\nnot started")
    }
  }
}