diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/ExecuteStatement.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/ExecuteStatement.scala
index 8b47e2075a0..3b72132dd44 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/ExecuteStatement.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/operation/ExecuteStatement.scala
@@ -172,7 +172,7 @@ class ExecuteStatement(
         })
       } else {
         val resultSaveEnabled = getSessionConf(OPERATION_RESULT_SAVE_TO_FILE, spark)
-        lazy val resultSaveThreshold = getSessionConf(OPERATION_RESULT_SAVE_TO_FILE_MINSIZE, spark)
+        val resultSaveThreshold = getSessionConf(OPERATION_RESULT_SAVE_TO_FILE_MINSIZE, spark)
         if (hasResultSet && resultSaveEnabled && shouldSaveResultToFs(
             resultMaxRows,
             resultSaveThreshold,
diff --git a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSQLSessionManager.scala b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSQLSessionManager.scala
index aab2d51068f..cadd33ff001 100644
--- a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSQLSessionManager.scala
+++ b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/session/SparkSQLSessionManager.scala
@@ -181,17 +181,24 @@ class SparkSQLSessionManager private (name: String, spark: SparkSession)
     } catch {
       case e: KyuubiSQLException =>
         warn(s"Error closing session ${sessionHandle}", e)
+    } finally {
+      var resultPath: Path = null
+      try {
+        resultPath = new Path(s"${conf.get(OPERATION_RESULT_SAVE_TO_FILE_DIR)}/" +
+          s"$engineId/${sessionHandle.identifier}")
+        val fs = resultPath.getFileSystem(spark.sparkContext.hadoopConfiguration)
+        if (fs.exists(resultPath)) {
+          fs.delete(resultPath, true)
+          info(s"Deleted session result path: $resultPath")
+        }
+      } catch {
+        case e: Throwable => error(s"Error cleaning session result path: $resultPath", e)
+      }
     }
     if (shareLevel == ShareLevel.CONNECTION) {
       info("Session stopped due to shared level is Connection.")
       stopSession()
     }
-    if (conf.get(OPERATION_RESULT_SAVE_TO_FILE)) {
-      val path = new Path(s"${conf.get(OPERATION_RESULT_SAVE_TO_FILE_DIR)}/" +
-        s"$engineId/${sessionHandle.identifier}")
-      path.getFileSystem(spark.sparkContext.hadoopConfiguration).delete(path, true)
-      info(s"Delete session result file $path")
-    }
   }
 
   private def stopSession(): Unit = {