diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/events/SparkOperationEvent.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/events/SparkOperationEvent.scala index 143ba61f8..caf49fb05 100644 --- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/events/SparkOperationEvent.scala +++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/events/SparkOperationEvent.scala @@ -18,13 +18,14 @@ package org.apache.kyuubi.engine.spark.events import com.fasterxml.jackson.annotation.JsonIgnore +import com.fasterxml.jackson.databind.annotation.{JsonDeserialize, JsonSerialize} import org.apache.spark.scheduler.SparkListenerEvent import org.apache.spark.util.kvstore.KVIndex import org.apache.kyuubi.Utils import org.apache.kyuubi.engine.spark.KyuubiSparkUtil.KVIndexParam import org.apache.kyuubi.engine.spark.operation.SparkOperation -import org.apache.kyuubi.events.KyuubiEvent +import org.apache.kyuubi.events.{ExceptionDeserializer, ExceptionSerializer, KyuubiEvent} /** * A [[SparkOperationEvent]] used to tracker the lifecycle of an operation at Spark SQL Engine side. 
@@ -60,6 +61,8 @@ case class SparkOperationEvent( createTime: Long, startTime: Long, completeTime: Long, + @JsonSerialize(contentUsing = classOf[ExceptionSerializer]) + @JsonDeserialize(contentUsing = classOf[ExceptionDeserializer]) exception: Option[Throwable], sessionId: String, sessionUser: String, diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/KyuubiSparkEventSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/KyuubiSparkEventSuite.scala new file mode 100644 index 000000000..2a0a05d9c --- /dev/null +++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/spark/kyuubi/KyuubiSparkEventSuite.scala @@ -0,0 +1,56 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.spark.kyuubi + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.DefaultScalaModule +import org.apache.spark.SparkException + +import org.apache.kyuubi.KyuubiFunSuite +import org.apache.kyuubi.engine.spark.events.SparkOperationEvent + +class KyuubiSparkEventSuite extends KyuubiFunSuite { + + test("test exception serializer and deserializer of SparkOperationEvent") { + val exception = new SparkException("message", new Exception("cause")) + val event = new SparkOperationEvent( + "statementId", + "statement", + shouldRunAsync = true, + "state", + 0L, + 0L, + 0L, + 0L, + Some(exception), + "sessionId", + "sessionUser", + None, + None, + None) + val mapper: ObjectMapper = new ObjectMapper().registerModule(DefaultScalaModule) + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + val json = mapper.writeValueAsString(event) + assert(json.contains("\"exception\":{\"Message\":\"message\",\"Stack Trace\":" + + "[{\"Declaring Class\":\"org.apache.spark.kyuubi.KyuubiSparkEventSuite\",")) + val deserializeEvent = mapper.readValue(json, classOf[SparkOperationEvent]) + assert(deserializeEvent.exception.isDefined) + assert(deserializeEvent.exception.get.getMessage === "message") + assert(deserializeEvent.exception.get.getStackTrace.length > 0) + } + +} diff --git a/kyuubi-events/src/main/scala/org/apache/kyuubi/events/JsonProtocol.scala b/kyuubi-events/src/main/scala/org/apache/kyuubi/events/JsonProtocol.scala index 32aef4f51..77e76b938 100644 --- a/kyuubi-events/src/main/scala/org/apache/kyuubi/events/JsonProtocol.scala +++ b/kyuubi-events/src/main/scala/org/apache/kyuubi/events/JsonProtocol.scala @@ -17,7 +17,10 @@ package org.apache.kyuubi.events -import com.fasterxml.jackson.databind.ObjectMapper +import scala.collection.JavaConverters._ + +import com.fasterxml.jackson.core.{JsonGenerator, JsonParser} +import com.fasterxml.jackson.databind.{DeserializationContext, 
JsonDeserializer, JsonNode, JsonSerializer, ObjectMapper, SerializerProvider} import com.fasterxml.jackson.module.scala.DefaultScalaModule object JsonProtocol { @@ -30,3 +33,85 @@ object JsonProtocol { mapper.readValue(jsonValue, cls) } } + +// Exception serializer and deserializer, copied from org.apache.spark.util.JsonProtocol +class ExceptionSerializer extends JsonSerializer[Exception] { + + override def serialize( + value: Exception, + gen: JsonGenerator, + serializers: SerializerProvider): Unit = { + exceptionToJson(value, gen) + } + + private def exceptionToJson(exception: Exception, g: JsonGenerator): Unit = { + g.writeStartObject() + g.writeStringField("Message", exception.getMessage) + g.writeFieldName("Stack Trace") + stackTraceToJson(exception.getStackTrace, g) + g.writeEndObject() + } + + private def stackTraceToJson(stackTrace: Array[StackTraceElement], g: JsonGenerator): Unit = { + g.writeStartArray() + stackTrace.foreach { line => + g.writeStartObject() + g.writeStringField("Declaring Class", line.getClassName) + g.writeStringField("Method Name", line.getMethodName) + g.writeStringField("File Name", line.getFileName) + g.writeNumberField("Line Number", line.getLineNumber) + g.writeEndObject() + } + g.writeEndArray() + } +} + +class ExceptionDeserializer extends JsonDeserializer[Exception] { + + override def deserialize(jsonParser: JsonParser, ctxt: DeserializationContext): Exception = { + val jsonNode = jsonParser.readValueAsTree[JsonNode]() + exceptionFromJson(jsonNode) + } + + private def exceptionFromJson(json: JsonNode): Exception = { + val message = jsonOption(json.get("Message")).map(_.extractString).orNull + val e = new Exception(message) + e.setStackTrace(stackTraceFromJson(json.get("Stack Trace"))) + e + } + + private def stackTraceFromJson(json: JsonNode): Array[StackTraceElement] = { + jsonOption(json).map(_.extractElements.map { line => + val declaringClass = line.get("Declaring Class").extractString + val methodName = line.get("Method
Name").extractString + val fileName = jsonOption(line.get("File Name")).map(_.extractString).orNull + val lineNumber = line.get("Line Number").extractInt + new StackTraceElement(declaringClass, methodName, fileName, lineNumber) + }.toArray).getOrElse(Array[StackTraceElement]()) + } + + private def jsonOption(json: JsonNode): Option[JsonNode] = { + if (json == null || json.isNull) { + None + } else { + Some(json) + } + } + + implicit private class JsonNodeImplicits(json: JsonNode) { + def extractElements: Iterator[JsonNode] = { + require(json.isContainerNode, s"Expected container, got ${json.getNodeType}") + json.elements.asScala + } + + def extractInt: Int = { + require(json.isNumber, s"Expected number, got ${json.getNodeType}") + json.intValue + } + + def extractString: String = { + require(json.isTextual || json.isNull, s"Expected string or NULL, got ${json.getNodeType}") + json.textValue + } + } +}