[KYUUBI #5899] Refine the python/scala interpreter error prompt

# 🔍 Description
## Issue References 🔗

Refine the interpreter error message format for python/scala.
This pull request fixes #5899

## Describe Your Solution 🔧

Extract the ad-hoc Jackson helpers from `ExecutePython` into a shared `JsonUtils` utility, and render python/scala interpreter errors as pretty-printed JSON containing both the submitted code and the interpreter response, instead of concatenating raw strings.

## Types of changes 🔖

- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to change)

## Test Plan 🧪

#### Behavior Without This Pull Request ⚰️

#### Behavior With This Pull Request 🎉

#### Related Unit Tests

---

# Checklists
## 📝 Author Self Checklist

- [ ] My code follows the [style guidelines](https://kyuubi.readthedocs.io/en/master/contributing/code/style.html) of this project
- [ ] I have performed a self-review
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] This patch was not authored or co-authored using [Generative Tooling](https://www.apache.org/legal/generative-tooling.html)

## 📝 Committer Pre-Merge Checklist

- [ ] Pull request title is okay.
- [ ] No license issues.
- [ ] Milestone correctly set?
- [ ] Test coverage is ok
- [ ] Assignees are selected.
- [ ] Minimum number of approvals
- [ ] No changes are requested

**Be nice. Be informative.**

Closes #5899 from turboFei/json_utils.

Closes #5899

8460f18cc [Fei Wang] refine

Authored-by: Fei Wang <fwang12@ebay.com>
Signed-off-by: Fei Wang <fwang12@ebay.com>
This commit is contained in:
Fei Wang 2023-12-21 19:14:23 -08:00
parent 0640a00e3c
commit 42b08ba840
3 changed files with 58 additions and 22 deletions

View File

@ -28,8 +28,6 @@ import javax.ws.rs.core.UriBuilder
import scala.collection.JavaConverters._
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import org.apache.commons.lang3.StringUtils
import org.apache.spark.SparkFiles
import org.apache.spark.api.python.KyuubiPythonGatewayServer
@ -40,6 +38,7 @@ import org.apache.kyuubi.{KyuubiSQLException, Logging, Utils}
import org.apache.kyuubi.config.KyuubiConf.{ENGINE_SPARK_PYTHON_ENV_ARCHIVE, ENGINE_SPARK_PYTHON_ENV_ARCHIVE_EXEC_PATH, ENGINE_SPARK_PYTHON_HOME_ARCHIVE, ENGINE_SPARK_PYTHON_MAGIC_ENABLED}
import org.apache.kyuubi.config.KyuubiReservedKeys.{KYUUBI_SESSION_USER_KEY, KYUUBI_STATEMENT_ID_KEY}
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil._
import org.apache.kyuubi.engine.spark.util.JsonUtils
import org.apache.kyuubi.operation.{ArrayFetchIterator, OperationHandle, OperationState}
import org.apache.kyuubi.operation.log.OperationLog
import org.apache.kyuubi.session.Session
@ -95,7 +94,8 @@ class ExecutePython(
new ArrayFetchIterator[Row](Array(Row(output, status, ename, evalue, traceback)))
setState(OperationState.FINISHED)
} else {
throw KyuubiSQLException(s"Interpret error:\n$statement\n $response")
throw KyuubiSQLException(s"Interpret error:\n" +
s"${JsonUtils.toPrettyJson(Map("code" -> statement, "response" -> response.orNull))}")
}
}
} catch {
@ -200,12 +200,12 @@ case class SessionPythonWorker(
throw KyuubiSQLException("Python worker process has been exited, please check the error log" +
" and re-create the session to run python code.")
}
val input = ExecutePython.toJson(Map("code" -> code, "cmd" -> "run_code"))
val input = JsonUtils.toJson(Map("code" -> code, "cmd" -> "run_code"))
// scalastyle:off println
stdin.println(input)
// scalastyle:on
stdin.flush()
val pythonResponse = Option(stdout.readLine()).map(ExecutePython.fromJson[PythonResponse](_))
val pythonResponse = Option(stdout.readLine()).map(JsonUtils.fromJson[PythonResponse](_))
// throw exception if internal python code fail
if (internal && !pythonResponse.map(_.content.status).contains(PythonResponse.OK_STATUS)) {
throw KyuubiSQLException(s"Internal python code $code failure: $pythonResponse")
@ -214,7 +214,7 @@ case class SessionPythonWorker(
}
def close(): Unit = {
val exitCmd = ExecutePython.toJson(Map("cmd" -> "exit_worker"))
val exitCmd = JsonUtils.toJson(Map("cmd" -> "exit_worker"))
// scalastyle:off println
stdin.println(exitCmd)
// scalastyle:on
@ -387,19 +387,6 @@ object ExecutePython extends Logging {
sink.close()
file
}
val mapper: ObjectMapper = new ObjectMapper().registerModule(DefaultScalaModule)
def toJson[T](obj: T): String = {
mapper.writeValueAsString(obj)
}
def fromJson[T](json: String, clz: Class[T]): T = {
mapper.readValue(json, clz)
}
def fromJson[T](json: String)(implicit m: Manifest[T]): T = {
mapper.readValue(json, m.runtimeClass).asInstanceOf[T]
}
}
case class PythonResponse(
@ -424,10 +411,10 @@ case class PythonResponseContent(
if (data.filterNot(_._1 == "text/plain").isEmpty) {
data.get("text/plain").map {
case str: String => str
case obj => ExecutePython.toJson(obj)
case obj => JsonUtils.toJson(obj)
}.getOrElse("")
} else {
ExecutePython.toJson(data)
JsonUtils.toJson(data)
}
}
def getEname(): String = {

View File

@ -31,6 +31,7 @@ import org.apache.spark.sql.types.StructType
import org.apache.kyuubi.KyuubiSQLException
import org.apache.kyuubi.engine.spark.KyuubiSparkUtil._
import org.apache.kyuubi.engine.spark.repl.KyuubiSparkILoop
import org.apache.kyuubi.engine.spark.util.JsonUtils
import org.apache.kyuubi.operation.{ArrayFetchIterator, OperationHandle, OperationState}
import org.apache.kyuubi.operation.log.OperationLog
import org.apache.kyuubi.session.Session
@ -119,7 +120,8 @@ class ExecuteScala(
}
}
case Error =>
throw KyuubiSQLException(s"Interpret error:\n$statement\n ${repl.getOutput}")
throw KyuubiSQLException(s"Interpret error:\n" +
s"${JsonUtils.toPrettyJson(Map("code" -> statement, "response" -> repl.getOutput))}")
case Incomplete =>
throw KyuubiSQLException(s"Incomplete code:\n$statement")
}

View File

@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kyuubi.engine.spark.util
import com.fasterxml.jackson.databind.{DeserializationFeature, JsonNode, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule
/**
 * Shared JSON (de)serialization helpers backed by a single Jackson
 * [[ObjectMapper]] configured for Scala types.
 *
 * The mapper tolerates unknown JSON properties so that payloads carrying
 * extra fields can still be deserialized into the expected case classes.
 */
object JsonUtils {
  // Single shared, thread-safe mapper instance used by all helpers below.
  val mapper: ObjectMapper = {
    val m = new ObjectMapper()
    m.registerModule(DefaultScalaModule)
    m.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)
    m
  }

  /** Serializes `obj` to a compact, single-line JSON string. */
  def toJson[T](obj: T): String = mapper.writeValueAsString(obj)

  /** Serializes `obj` to an indented, human-readable JSON string. */
  def toPrettyJson[T](obj: T): String =
    mapper.writerWithDefaultPrettyPrinter().writeValueAsString(obj)

  /** Deserializes `json` into an instance of the given class. */
  def fromJson[T](json: String, clz: Class[T]): T = mapper.readValue(json, clz)

  /** Deserializes `json` into `T` using an implicit `Manifest` to recover the runtime class. */
  def fromJson[T](json: String)(implicit m: Manifest[T]): T =
    mapper.readValue(json, m.runtimeClass).asInstanceOf[T]

  /** Parses `content` into a Jackson tree ([[JsonNode]]) without binding to a class. */
  def readTree(content: String): JsonNode = mapper.readTree(content)
}