Skip to content

Commit

Permalink
catch all exceptions
Browse files Browse the repository at this point in the history
  • Loading branch information
turboFei committed Feb 2, 2024
1 parent 4bd259a commit b675299
Show file tree
Hide file tree
Showing 2 changed files with 35 additions and 1 deletion.
Original file line number Diff line number Diff line change
Expand Up @@ -497,7 +497,21 @@ def main():
"content": {
"status": "error",
"ename": "ValueError",
"evalue": "cannot json-ify %s" % response,
"evalue": "cannot json-ify %s" % result,
"traceback": [],
},
}
)
except Exception:
exc_type, exc_value, tb = sys.exc_info()
result = json.dumps(
{
"msg_type": "inspect_reply",
"content": {
"status": "error",
"ename": str(exc_type.__name__),
"evalue": "cannot json-ify %s: %s"
% (result, str(exc_value)),
"traceback": [],
},
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,6 +187,26 @@ class PySparkTests extends WithKyuubiServer with HiveJDBCTestHelper {
}
}

// Regression test for the %json magic added in this commit: json-ifying a
// non-serializable object must surface an error to the client instead of
// failing silently, while a serializable result still round-trips as JSON.
test("catch all exception when dump the result to json") {
checkPythonRuntimeAndVersion()
// The %json magic only exists when the Python-magic feature flag is on.
withSessionConf()(Map(KyuubiConf.ENGINE_SPARK_PYTHON_MAGIC_ENABLED.key -> "true"))() {
withMultipleConnectionJdbcStatement()({ stmt =>
val statement = stmt.asInstanceOf[KyuubiStatement]
statement.executePython("l = [('Alice', 1)]")
statement.executePython("df = spark.createDataFrame(l)")
// A DataFrame itself is not JSON-serializable: the engine-side handler
// is expected to catch the serialization failure and report the Python
// TypeError message through a KyuubiSQLException.
val errorMsg = intercept[KyuubiSQLException] {
statement.executePython("%json df")
}.getMessage
assert(errorMsg.contains("Object of type DataFrame is not JSON serializable"))

// After collect() the value is a plain list of rows, which IS
// serializable, so %json must succeed and emit the JSON payload.
statement.executePython("df = spark.createDataFrame(l).collect()")
val result = statement.executePython("%json df")
assert(result.next())
assert(result.getString("output") == "{\"application/json\":[[\"Alice\",1]]}")
})
}
}

private def runPySparkTest(
pyCode: String,
output: String): Unit = {
Expand Down

0 comments on commit b675299

Please sign in to comment.