diff --git a/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py b/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py
index 6729092f75d..f33c10c4007 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py
+++ b/externals/kyuubi-spark-sql-engine/src/main/resources/python/execute_python.py
@@ -497,7 +497,21 @@ def main():
                     "content": {
                         "status": "error",
                         "ename": "ValueError",
-                        "evalue": "cannot json-ify %s" % response,
+                        "evalue": "cannot json-ify %s" % result,
+                        "traceback": [],
+                    },
+                }
+            )
+        except Exception:
+            exc_type, exc_value, tb = sys.exc_info()
+            result = json.dumps(
+                {
+                    "msg_type": "inspect_reply",
+                    "content": {
+                        "status": "error",
+                        "ename": str(exc_type.__name__),
+                        "evalue": "cannot json-ify %s: %s"
+                        % (result, str(exc_value)),
                         "traceback": [],
                     },
                 }
diff --git a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
index c723dcf4aa8..43587287620 100644
--- a/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
+++ b/kyuubi-server/src/test/scala/org/apache/kyuubi/engine/spark/PySparkTests.scala
@@ -187,6 +187,26 @@ class PySparkTests extends WithKyuubiServer with HiveJDBCTestHelper {
     }
   }
 
+  test("catch all exception when dump the result to json") {
+    checkPythonRuntimeAndVersion()
+    withSessionConf()(Map(KyuubiConf.ENGINE_SPARK_PYTHON_MAGIC_ENABLED.key -> "true"))() {
+      withMultipleConnectionJdbcStatement()({ stmt =>
+        val statement = stmt.asInstanceOf[KyuubiStatement]
+        statement.executePython("l = [('Alice', 1)]")
+        statement.executePython("df = spark.createDataFrame(l)")
+        val errorMsg = intercept[KyuubiSQLException] {
+          statement.executePython("%json df")
+        }.getMessage
+        assert(errorMsg.contains("Object of type DataFrame is not JSON serializable"))
+
+        statement.executePython("df = spark.createDataFrame(l).collect()")
+        val result = statement.executePython("%json df")
+        assert(result.next())
+        assert(result.getString("output") == "{\"application/json\":[[\"Alice\",1]]}")
+      })
+    }
+  }
+
   private def runPySparkTest(
       pyCode: String,
       output: String): Unit = {
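
For context, the Python-side change is a serialization fallback: json.dumps on the handler result can raise not only ValueError (for example circular references or out-of-range floats with allow_nan disabled) but also TypeError for unsupported objects such as a Spark DataFrame, which previously escaped the existing "except ValueError" clause. The sketch below is a minimal, standalone illustration of that pattern; the helper name dump_reply is invented for the example and does not exist in execute_python.py, where the real code assigns to result inside main()'s read loop rather than returning from a helper.

import json
import sys


def dump_reply(result):
    # Illustrative helper (not a function in execute_python.py): serialize the
    # handler result, falling back to an error reply when serialization fails.
    try:
        return json.dumps(result)
    except ValueError:
        # ValueError covers cases like circular references or out-of-range
        # floats when allow_nan is disabled.
        return json.dumps(
            {
                "msg_type": "inspect_reply",
                "content": {
                    "status": "error",
                    "ename": "ValueError",
                    "evalue": "cannot json-ify %s" % result,
                    "traceback": [],
                },
            }
        )
    except Exception:
        # Catch-all branch: e.g. TypeError("Object of type DataFrame is not
        # JSON serializable") raised for unsupported objects.
        exc_type, exc_value, tb = sys.exc_info()
        return json.dumps(
            {
                "msg_type": "inspect_reply",
                "content": {
                    "status": "error",
                    "ename": str(exc_type.__name__),
                    "evalue": "cannot json-ify %s: %s" % (result, str(exc_value)),
                    # Traceback intentionally left empty, as in the patch.
                    "traceback": [],
                },
            }
        )


if __name__ == "__main__":

    class NotSerializable:
        pass

    # A list of basic types serializes on the happy path; a custom object hits
    # the catch-all branch and is reported under its real exception name.
    print(dump_reply([["Alice", 1]]))
    print(dump_reply(NotSerializable()))

This mirrors the new test: "%json df" on a DataFrame now surfaces the underlying TypeError message ("Object of type DataFrame is not JSON serializable") instead of an unrelated failure, while the collected rows of plain values serialize normally.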